Java code to read the contents of a specific file on HDFS

Since MapReduce experiments always require checking output/part-r-00000,

here is a small program for that.

package Utils;


import java.net.URI;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;


/**
 * Reads the contents of a given file on HDFS.
 * @company 源辰信息
 * @author navy
 */
public class FindHDFSText {
    private static Logger log = Logger.getLogger(FindHDFSText.class); // create the logger


    public static void main(String[] args) {
        FileSystem fs = null;
        try {
            Configuration conf = new Configuration(); // load the configuration
            conf.set("dfs.client.use.datanode.hostname", "true");
            URI uri = new URI("hdfs://**:9000/"); // HDFS resource location (host masked)

            fs = FileSystem.get(uri, conf, "hadoop"); // create the FileSystem instance as user "hadoop"

            Path p = new Path("output/part-r-00000"); // a relative path is resolved under /user/navy/ by default


            System.out.println("要查看的文件路径为:"+fs.getFileStatus(p).getPath());

            FSDataInputStream fsin = fs.open(fs.getFileStatus(p).getPath()); // open an input stream on the file
            byte[] bs = new byte[1024 * 1024]; // 1 MB read buffer
            int len = 0;
            while ((len = fsin.read(bs)) != -1) { // read until EOF and print each chunk
                System.out.print(new String(bs, 0, len));
            }

            System.out.println();
            fsin.close();
        } catch (Exception e) {
            log.error("HDFS operation failed!", e);
        } finally {
            try {
                if (fs != null) fs.close(); // release the FileSystem handle
            } catch (Exception e) {
                log.error("Failed to close the FileSystem!", e);
            }
        }
    }
}
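
The manual read loop can also be replaced with Hadoop's IOUtils helper, which streams the file straight to stdout. Below is a minimal sketch along those lines; the class name CatHDFSFile, the namenode-host address, and the "hadoop" username are placeholders to adjust for your own cluster.

package Utils;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * A shorter variant: stream an HDFS file to stdout with IOUtils.
 */
public class CatHDFSFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.client.use.datanode.hostname", "true");
        // Placeholder NameNode address and user; fill in your own values.
        FileSystem fs = FileSystem.get(new URI("hdfs://namenode-host:9000/"), conf, "hadoop");
        try (FSDataInputStream in = fs.open(new Path("output/part-r-00000"))) {
            IOUtils.copyBytes(in, System.out, 4096, false); // copy the stream to stdout, keep stdout open
        } finally {
            fs.close();
        }
    }
}

For a quick check without any Java code, the same file can be printed from the command line with: hdfs dfs -cat output/part-r-00000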