Problems encountered in the HDFS extreme programming exercise

Connecting to HDFS remotely from Eclipse:

The example is as follows:

package qq;


import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;


public class Test1 {
    
    // Create a file on HDFS and write a line of text into it.
    public static void caozuo() throws Exception {
        // Configuration object; connect to the NameNode as user "hadoop"
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.108.11:9000/"), conf, "hadoop");
        Path path = new Path("/sunbinghai/hdfstest1.txt");
        FSDataOutputStream out = fs.create(path);
        // writeBytes() drops the high byte of each char and corrupts Chinese text,
        // so write UTF-8 encoded bytes instead
        out.write("信1605-220163469孙丙海课堂测试".getBytes(StandardCharsets.UTF_8));
        out.flush();
        out.close();
        fs.close();
    }
    
    
    
    // Rename (move) a file on HDFS.
    public void mv() throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.108.11:9000/"), conf, "hadoop");
        Path src = new Path("/sunbinghai/hdfstest1.txt");
        Path dst = new Path("/sunbinghai/hdfstest2.txt");
        fs.rename(src, dst);
        fs.close();
    }
    

    // Read a text file from HDFS line by line and print it.
    public void readText() throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.108.11:9000/"), conf, "hadoop");
        Path path = new Path("/sunbinghai/hdfstest2.txt");
        FSDataInputStream in = fs.open(path);
        // Decode the stream as UTF-8 to match what was written
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        String line = null;
        while ((line = bufferedReader.readLine()) != null) {
            System.out.println("Read line: " + line);
        }
        bufferedReader.close();
        fs.close();
    }
    public static void main(String[] args) throws Exception {
        caozuo();
    }
}
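
As a minimal sketch (not part of the original post), the main method could be extended to exercise all three operations in order, assuming the same cluster address and user as above:

public static void main(String[] args) throws Exception {
    caozuo();              // write /sunbinghai/hdfstest1.txt
    Test1 t = new Test1();
    t.mv();                // rename it to /sunbinghai/hdfstest2.txt
    t.readText();          // read the renamed file back and print it
}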

Original source: https://www.cnblogs.com/xiaohaigege666/p/9733595.html