
Operating HDFS from C

2018-04-26  zlcook

Requirements

Write data into a file on HDFS from a C program.

Goal

Use the libhdfs C API that ships with Hadoop to write "Hello, World!" into /liang/hello.txt on HDFS, then verify the result.

Environment

CentOS, JDK 1.8.0_151, GCC 4.8.5, Hadoop 2.7.6.

Approach

Write a small C program against hdfs.h, compile it with gcc while linking against libhdfs.so (and, through it, libjvm.so), export the Hadoop CLASSPATH, and run the binary.

Installation and setup

After installing the JDK, GCC, and Hadoop, export the following environment variables (for example in /etc/profile or ~/.bashrc):

JAVA_HOME=/usr/java/jdk1.8.0_151
GCC_HOME=/usr/local/gcc-4.8.5
HADOOP_HOME=/usr/hadoop-2.7.6
PATH=$HADOOP_HOME/bin:$GCC_HOME/bin:$JAVA_HOME/bin:$PATH
export HADOOP_HOME GCC_HOME JAVA_HOME PATH
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/amd64/server

Create a file on HDFS

# Create the /liang directory
hdfs dfs -mkdir /liang
# Create hello.txt locally
touch hello.txt
# Upload hello.txt into the /liang directory
hdfs dfs -put hello.txt /liang
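
Before writing the full program, a quick way to check from C that the connection and the path are reachable is hdfsExists. The snippet below is a minimal sketch, not part of the original article; the file name existcheck.c is made up, and it assumes the same environment and build/run steps as writepro.

// existcheck.c - hypothetical sanity check: connect to HDFS and test for /liang/hello.txt
#include "hdfs.h"
#include <stdio.h>

int main(void) {
    // Connect to the HDFS instance named in the Hadoop configuration
    hdfsFS fs = hdfsConnect("default", 0);
    if (!fs) {
        fprintf(stderr, "Failed to connect to HDFS\n");
        return 1;
    }
    // hdfsExists returns 0 when the path exists
    if (hdfsExists(fs, "/liang/hello.txt") == 0) {
        printf("/liang/hello.txt exists\n");
    } else {
        printf("/liang/hello.txt does not exist\n");
    }
    hdfsDisconnect(fs);
    return 0;
}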

Write the writepro.c program

//Header for the libhdfs C API
#include "hdfs.h"
#include <stdio.h>

int main(int argc, char **argv) {
    // Connect to the HDFS instance named in the Hadoop configuration
    hdfsFS fs = hdfsConnect("default", 0);
    // Path of the file to write
    const char* writePath = "/liang/hello.txt";
    // Open the file for writing, creating it if needed (the three 0s mean default buffer size, replication and block size)
    hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY | O_CREAT, 0, 0, 0);
    if (!writeFile) {
        fprintf(stderr, "Failed to open %s for writing!\n", writePath);
        exit(-1);
    }
    // Content to write
    char* buffer = "Hello, World!";
    // Write the buffer (including its trailing '\0') to the file
    tSize num_written_bytes = hdfsWrite(fs, writeFile, (void*)buffer, strlen(buffer)+1);
    // Flush the written data to HDFS
    if (hdfsFlush(fs, writeFile)) {
        fprintf(stderr, "Failed to 'flush' %s\n", writePath);
        exit(-1);
    }
    // Close the file and disconnect from HDFS
    hdfsCloseFile(fs, writeFile);
    hdfsDisconnect(fs);
    return 0;
}
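
One thing the sample does not do is check how many bytes hdfsWrite actually wrote. A minimal check, sketched here rather than taken from the original program, could be added right after the hdfsWrite call:

    // Sketch (not in the original program): verify that the whole buffer was written
    if (num_written_bytes != (tSize)(strlen(buffer) + 1)) {
        fprintf(stderr, "Short write to %s: only %d bytes written\n", writePath, (int)num_written_bytes);
        exit(-1);
    }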

Compile writepro with gcc

Load the shared libraries into the linker cache

# The command below prints nothing when it succeeds
ldconfig
ldconfig -p
# Output format: library name => actual path of that library

Compile into the writepro binary

[root@CentOS usr]# gcc writepro.c -I$HADOOP_HOME/include -L$HADOOP_HOME/lib/native -lhdfs -o writepro
writepro.c: In function 'main':
writepro.c:14:11: warning: incompatible implicit declaration of built-in function 'exit' [enabled by default]
           exit(-1);
           ^
writepro.c:19:71: warning: incompatible implicit declaration of built-in function 'strlen' [enabled by default]
     tSize num_written_bytes = hdfsWrite(fs, writeFile, (void*)buffer, strlen(buffer)+1);
                                                                       ^
writepro.c:23:11: warning: incompatible implicit declaration of built-in function 'exit' [enabled by default]
           exit(-1);

These warnings appear because the program calls exit and strlen without including <stdlib.h> and <string.h>; they do not affect the resulting binary, and adding those two headers makes them go away.

Run writepro

Reference documentation: CLASSPATH. libhdfs calls the Hadoop Java classes through JNI, so all Hadoop jars must be on the CLASSPATH when the program runs.

# The hadoop classpath --glob command generates everything the CLASSPATH needs
[root@CentOS /]# export CLASSPATH=`hadoop classpath --glob`
[root@CentOS /]# ./usr/writepro
18/04/26 23:59:47 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable

Check whether the content was written

[root@CentOS /]# hadoop fs -get /liang/hello.txt helloresult.txt
[root@CentOS /]# cat helloresult.txt
Hello, World!
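
The same check can also be done from C with the read side of the libhdfs API. The program below is a minimal sketch, not part of the original article; the name readpro.c is made up, and it is compiled and run exactly like writepro (same gcc flags, same CLASSPATH).

//readpro.c - read /liang/hello.txt back through the libhdfs C API
#include "hdfs.h"
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char **argv) {
    hdfsFS fs = hdfsConnect("default", 0);
    const char* readPath = "/liang/hello.txt";
    // Open the file read-only
    hdfsFile readFile = hdfsOpenFile(fs, readPath, O_RDONLY, 0, 0, 0);
    if (!readFile) {
        fprintf(stderr, "Failed to open %s for reading!\n", readPath);
        exit(-1);
    }
    char buffer[256];
    // Read up to sizeof(buffer)-1 bytes from the start of the file
    tSize num_read_bytes = hdfsRead(fs, readFile, (void*)buffer, sizeof(buffer) - 1);
    if (num_read_bytes < 0) {
        fprintf(stderr, "Failed to read %s\n", readPath);
        exit(-1);
    }
    buffer[num_read_bytes] = '\0';
    printf("Read %d bytes: %s\n", (int)num_read_bytes, buffer);
    hdfsCloseFile(fs, readFile);
    hdfsDisconnect(fs);
    return 0;
}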

Summary

The libjvm.so shared library cannot be found

1. At compile time, the linker reports that libjvm.so cannot be found

[root@CentOS usr]# gcc writepro.c -I$HADOOP_HOME/include -L$HADOOP_HOME/lib/native -lhdfs -o writepro

/usr/bin/ld: warning: libjvm.so, needed by /usr/hadoop-2.7.6/lib/native/libhdfs.so, not found (try using -rpath or -rpath-link)
/usr/hadoop-2.7.6/lib/native/libhdfs.so: undefined reference to `JNI_CreateJavaVM@SUNWprivate_1.1'
/usr/hadoop-2.7.6/lib/native/libhdfs.so: undefined reference to `JNI_GetCreatedJavaVMs@SUNWprivate_1.1'
collect2: error: ld returned 1 exit status

2. Analysis

libhdfs.so itself depends on libjvm.so (the ldd output below confirms this), but the JDK directory that contains it, $JAVA_HOME/jre/lib/amd64/server, is not on the default search path of either the linker or the dynamic loader, so both the link step and the program at run time fail to find it.

Solutions

# Method 1: point both the link step and the runtime loader at the libjvm directory
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/amd64/server
gcc writepro.c -I$HADOOP_HOME/include -L$HADOOP_HOME/lib/native -lhdfs -o writepro
# Method 2: embed the libjvm search path into the binary, which also covers both linking and running
gcc writepro.c -I$HADOOP_HOME/include -L$HADOOP_HOME/lib/native -lhdfs -Wl,-rpath=${JAVA_HOME}/jre/lib/amd64/server -o writepro
# Method 3: -ljvm -L${JAVA_HOME}/jre/lib/amd64/server only gets the link step through;
# to make the program run as well, add the libjvm path to ld.so.conf and run ldconfig afterwards
gcc writepro.c -I$HADOOP_HOME/include -L$HADOOP_HOME/lib/native -lhdfs -ljvm -L${JAVA_HOME}/jre/lib/amd64/server -o writepro
[root@CentOS usr]# vim /etc/ld.so.conf
include ld.so.conf.d/*.conf
/usr/hadoop-2.7.6/lib/native
/usr/java/jdk1.8.0_151/jre/lib/amd64/server

Check which shared libraries writepro depends on

# library => resolved path
[root@CentOS usr]# ldd writepro
    linux-vdso.so.1 =>  (0x00007ffe329d6000)
    libhdfs.so.0.0.0 => /usr/hadoop-2.7.6/lib/native/libhdfs.so.0.0.0 (0x00007f32dbfdb000)
    libc.so.6 => /lib64/libc.so.6 (0x000000310f800000)
    libjvm.so => /usr/java/jdk1.8.0_151/jre/lib/amd64/server/libjvm.so (0x00007f32dafe3000)
    libdl.so.2 => /lib64/libdl.so.2 (0x0000003110000000)
    libpthread.so.0 => /lib64/libpthread.so.0 (0x000000310fc00000)
    /lib64/ld-linux-x86-64.so.2 (0x000000310f400000)
    libm.so.6 => /lib64/libm.so.6 (0x0000003110800000)

[root@CentOS usr]# ldd /usr/hadoop-2.7.6/lib/native/libhdfs.so
    linux-vdso.so.1 =>  (0x00007fff8b31c000)
    libjvm.so => /usr/java/jdk1.8.0_151/jre/lib/amd64/server/libjvm.so (0x00007fb42669f000)
    libdl.so.2 => /lib64/libdl.so.2 (0x00007fb42649a000)
    libpthread.so.0 => /lib64/libpthread.so.0 (0x00007fb42627d000)
    libc.so.6 => /lib64/libc.so.6 (0x00007fb425ee9000)
    /lib64/ld-linux-x86-64.so.2 (0x000000310f400000)
    libm.so.6 => /lib64/libm.so.6 (0x00007fb425c64000)

Error when running writepro

[root@CentOS usr]# ./writepro 
./writepro: error while loading shared libraries: libjvm.so: cannot open shared object file: No such file or directory
[root@CentOS usr]# ldd writepro
    linux-vdso.so.1 =>  (0x00007ffdf21c6000)
    libhdfs.so.0.0.0 => /usr/hadoop-2.7.6/lib/native/libhdfs.so.0.0.0 (0x00007f50526b9000)
    libjvm.so => not found
    libc.so.6 => /lib64/libc.so.6 (0x000000310f800000)
    libjvm.so => not found
    libdl.so.2 => /lib64/libdl.so.2 (0x0000003110000000)
    libpthread.so.0 => /lib64/libpthread.so.0 (0x000000310fc00000)
    /lib64/ld-linux-x86-64.so.2 (0x000000310f400000)

References
