Trying out input/output streams on the cloud

package hadoopTest;

import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Merge {
    // Source file on HDFS and destination file on the local disk
    Path inputPath = new Path("/test4/text3");
    Path outputPath = new Path("d:/test1/test2.txt");

    public void doMerge() throws IOException {
        Configuration config = new Configuration();
        // fs.default.name is deprecated; fs.defaultFS is the current key
        config.set("fs.defaultFS", "hdfs://192.168.20.128:9000");
        FileSystem inputFs = FileSystem.get(config);
        // Read from HDFS with Hadoop's input stream...
        FSDataInputStream in = inputFs.open(inputPath);
        // ...but write to the local disk with the plain java.io stream
        FileOutputStream out = new FileOutputStream(outputPath.toString());

        // Copy in 1 KB chunks until the HDFS stream is exhausted
        byte[] data = new byte[1024];
        int read;
        while ((read = in.read(data)) != -1) {
            out.write(data, 0, read);
        }
        out.close();
        in.close();
    }
}
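The original post does not show how the class is invoked; here is a minimal driver sketch (the class name MergeDriver is made up for illustration):

package hadoopTest;

import java.io.IOException;

public class MergeDriver {
    public static void main(String[] args) throws IOException {
        // Pull /test4/text3 down from HDFS to the local disk
        new Merge().doMerge();
    }
}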

When the destination is the local filesystem, you cannot use Hadoop's FSDataOutputStream, because that class writes to the cloud (HDFS) side; you have to fall back to the ordinary java.io FileOutputStream.
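The same rule works in reverse: to push a local file up to HDFS, a plain java.io FileInputStream reads the local side while Hadoop's FSDataOutputStream (obtained from FileSystem.create) writes the cloud side. A minimal sketch, assuming the same NameNode address as above; the class name Upload and the target path /test4/text3_copy are illustrative, not from the original:

package hadoopTest;

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Upload {
    public void doUpload() throws IOException {
        Configuration config = new Configuration();
        config.set("fs.defaultFS", "hdfs://192.168.20.128:9000");
        FileSystem fs = FileSystem.get(config);

        // Plain java.io stream reads the local file...
        FileInputStream in = new FileInputStream("d:/test1/test2.txt");
        // ...and Hadoop's FSDataOutputStream writes to HDFS
        // (the target path here is just an example)
        FSDataOutputStream out = fs.create(new Path("/test4/text3_copy"));

        byte[] data = new byte[1024];
        int read;
        while ((read = in.read(data)) != -1) {
            out.write(data, 0, read);
        }
        out.close();
        in.close();
    }
}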

Original article: https://www.cnblogs.com/vhyc/p/6594547.html