HDFS File Operations and Hive Database Operations (Java API)

2017-02-14  StoneHeart

1. HDFS file operations

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsDemo {

    public static void main(String[] args) throws IOException {
        System.out.print("Initializing the HDFS file system\n");
        Configuration conf = new Configuration();
        conf.set("hadoop.job.user", "cdh5");  // user the operations run as
        String hdfsUri = "hdfs://172.16.1.77:8020";
        FileSystem fs = FileSystem.get(URI.create(hdfsUri), conf);

        // Read a file line by line
        System.out.print("Reading a file\n");
        String url = "/user/chenjie/helloworld.txt";
        FSDataInputStream in = fs.open(new Path(url));
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        StringBuilder strBuffer = new StringBuilder();
        String sTempOneLine;
        while ((sTempOneLine = reader.readLine()) != null) {
            strBuffer.append(sTempOneLine).append(" ");
        }
        reader.close();
        System.out.println("result is : " + strBuffer.toString());
        System.out.println("File read successfully\n");

        // Create a new file and write a line into it
        System.out.print("Creating a new file\n");
        final String content = "hi, I am bigdata. It is successful if you can see me.\n";
        FSDataOutputStream outputStream = fs.create(new Path("/user/chenjie/hello.txt"));
        outputStream.write(content.getBytes());
        outputStream.close();
        System.out.println("File created successfully!");

        // Delete the file just created; recursive = false because it is a single file
        System.out.print("Deleting a file\n");
        boolean isok = fs.delete(new Path("/user/chenjie/hello.txt"), false);
        if (isok) {
            System.out.println("File deleted successfully\n");
        } else {
            System.out.println("Failed to delete the file\n");
        }

        // Create a directory (the path is free again because the file was deleted above)
        System.out.print("Creating a directory\n");
        boolean mkisok = fs.mkdirs(new Path("/user/chenjie/hello.txt"));
        if (mkisok) {
            System.out.print("Directory created successfully\n");
        } else {
            System.out.print("Failed to create the directory\n");
        }

        // Rename (move) a path
        System.out.print("Renaming a file\n");
        boolean reisok = fs.rename(new Path("/user/chenjie/hello.txt"),
                new Path("/user/chenjie/hello_world.txt"));
        if (reisok) {
            System.out.print("File renamed successfully\n");
        } else {
            System.out.print("Failed to rename the file\n");
        }

        // Upload a local file to HDFS; delSrc = false keeps the local copy
        System.out.print("Uploading a file\n");
        fs.copyFromLocalFile(false, new Path("G:\\Tip.txt"), new Path("/user/chenjie/"));
        fs.close();
    }
}
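
Besides the operations above, it is often useful to list what is under a directory. The fragment below is a minimal sketch, not part of the original post: it reuses the open fs instance from the code above, assumes the directory /user/chenjie/ exists, and additionally needs org.apache.hadoop.fs.FileStatus imported.

// List the children of a directory and print type, name and size
FileStatus[] entries = fs.listStatus(new Path("/user/chenjie/"));
for (FileStatus entry : entries) {
    System.out.println((entry.isDirectory() ? "dir  " : "file ")
            + entry.getPath().getName() + "  " + entry.getLen() + " bytes");
}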

2. Hive database connection

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class Hive {
    private static final String HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
    private static String user = "cdh5";      // Hive user name
    private static String password = "cdh5";  // Hive user password

    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        String url = "jdbc:hive2://172.16.1.77:11000";
        Class.forName(HIVE_DRIVER);  // load the HiveServer2 JDBC driver
        Connection connection = DriverManager.getConnection(url, user, password);
        Statement stmt = connection.createStatement();
        String sql = "select * from chenjie.tw_usrcall_stick_day";
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println("Result: key:" + res.getString(1)
                    + "  ->  value:" + res.getString(2) + res.getString(3));
        }
        res.close();
        stmt.close();
        connection.close();
        System.out.println("success");
    }
}
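
If the query or the driver throws, plain close() calls are skipped, so in practice try-with-resources is the safer pattern. Below is a minimal sketch, not from the original post: it reuses the same URL, user and password, and lists the tables of the chenjie database instead of dumping one table; it is meant as a fragment inside the same main method.

// Let try-with-resources close the JDBC objects even when an exception is thrown
try (Connection conn = DriverManager.getConnection("jdbc:hive2://172.16.1.77:11000", user, password);
     Statement stmt = conn.createStatement();
     ResultSet rs = stmt.executeQuery("show tables in chenjie")) {
    while (rs.next()) {
        System.out.println("table: " + rs.getString(1));
    }
}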
