Operating HDFS with the Java API

2017-10-14  _Kantin
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;

/**
 * HDFS client operations through the Java API
 */
public class test {
    public static final String HDFS_PATH = "hdfs://10.23.12.188:8020";

    FileSystem fileSystem = null;
    Configuration configuration = null;
    @Before
    public void setUp() throws  Exception{
        System.out.println("HDFS_PATH setup");
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH),configuration,"zikang");
    }

    // Create a directory on HDFS
    @Test
    public void getInstance()  throws  Exception {
        fileSystem.mkdirs(new Path("/javaApi"));
    }
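
    // Hedged addition, not in the original post: FileSystem.exists() can be used to
    // confirm that the directory created above is actually present.
    @Test
    public void exists() throws Exception {
        System.out.println(fileSystem.exists(new Path("/javaApi")));
    }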

    // Create a file on HDFS and write a string to it
    @Test
    public void create()  throws  Exception{
        FSDataOutputStream output  = fileSystem.create(new Path("/javaApi/test.txt"));
        output.write("hello world".getBytes());
        output.flush();
        output.close();
    }
    // Read the contents of a file on HDFS
    @Test
    public void cat() throws Exception{
        FSDataInputStream in = fileSystem.open(new Path("/javaApi/test.txt"));
        IOUtils.copyBytes(in, System.out, 1024);
        in.close();
    }

    // Rename a file on HDFS
    @Test
    public void rename() throws Exception{
        Path oldPath = new Path("/javaApi/test.txt");
        Path newPath = new Path("/javaApi/change.txt");
        fileSystem.rename(oldPath,newPath);
    }

    // Copy a local file to HDFS
    @Test
    public void copyFromLocalFile()  throws Exception{
        Path localPath = new Path("");
        Path hdfsPath = new Path("");
        fileSystem.copyFromLocalFile(localPath,hdfsPath);
    }
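
    // Hedged note, not in the original post: copyFromLocalFile also has overloads that control
    // whether the source is deleted and whether an existing target is overwritten, e.g.
    //   fileSystem.copyFromLocalFile(false, true, localPath, hdfsPath); // keep source, overwrite target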

    // Print ".." as a progress indicator while uploading a large file to HDFS
    @Test
    public  void copyFromLocalFileWithProgress()  throws Exception{
        InputStream in = new BufferedInputStream(
                  new FileInputStream(
                           new File("")));

        FSDataOutputStream output = fileSystem.create(new Path(""),
                new Progressable() {
                    public void progress() {
                        System.out.println("..");
                    }
                });
        // Stream the local file into HDFS; without this copy the progress callback never fires.
        IOUtils.copyBytes(in, output, 4096);
        in.close();
        output.close();
    }

    // Download a file from HDFS to the local filesystem
    @Test
    public void copyToLocalFile() throws Exception {
        Path hdfsPath = new Path("/javaApi/test.txt");
        // Note: the destination is a path on the local filesystem, not on HDFS.
        Path localPath = new Path("/javaApi/change.txt");
        fileSystem.copyToLocalFile(hdfsPath, localPath);
    }

    /**
     * List all files under a directory.
     * Note: although the files below show a replication factor of 3, a file put onto HDFS
     * through the shell would get the factor configured on the cluster (1), whereas files
     * written through the Java API use the client-side default of 3, because this
     * Configuration does not load the cluster's hdfs-site.xml.
     */
    @Test
    public void listFiles() throws Exception{
        // Sample output: file 3 11 hdfs://10.23.12.188:8020/javaApi/change.txt
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/javaApi"));
        for(FileStatus fileStatus : fileStatuses){
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            short replication = fileStatus.getReplication();
            long len = fileStatus.getLen();
            String path = fileStatus.getPath().toString();
            System.out.println(isDir+" "+ replication+ " "+len+" "+path);
        }
    }
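
    // Hedged sketch relating to the replication note above (not in the original post): the
    // client default of 3 can be overridden by setting dfs.replication on the Configuration
    // in setUp(), before FileSystem.get(), so that files written through the Java API match
    // the factor configured on the cluster:
    //   configuration.set("dfs.replication", "1");
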
    // Delete a file or directory on HDFS (true = recursive)
    @Test
    public  void  delete()  throws Exception{
        fileSystem.delete(new Path(""),true);
    }

    @After
    public void tearDown() throws Exception {
        System.out.println("HDFS_PATH tearDown");
        configuration = null;
        fileSystem = null;
    }
}