Commonly Used Commands

2019-01-10  0_oHuanyu

Start Hive with debug logging to the console (use this in place of the plain hive command at startup):
hive --hiveconf hive.root.logger=DEBUG,console

Log in to MySQL on the .87 machine (the password follows -p with no space):
mysql -h 10.0.90.87 -P 3306 -u username -ppwd

Connect adb to the MEmu (逍遥) emulator:
adb connect 127.0.0.1:21503

Switch to root, then SSH into hadoop101 on port 32022:
sudo su
ssh -p32022 hadoop101

Linux: check memory usage
free -m
Linux: list all processes
ps aux | less

Start a Flume agent (flume-ng on the PATH, same conf.txt as below):
flume-ng agent --conf-file /home/webadmin/liuhuanyu/flume-files/conf.txt --name a1

JDBC URL for the Hive metastore in hive-site.xml:
jdbc:mysql://realtime-3:3306/hive?characterEncoding=UTF-8
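A sketch of where this URL goes in hive-site.xml (the driver, username, and password values below are placeholders, not taken from the original):
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://realtime-3:3306/hive?characterEncoding=UTF-8</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>******</value>
</property>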

Flume log-ingest command on the .200 machine:
/home/webadmin/liuhuanyu/apache-flume-1.6.0-cdh5.13.0-bin/bin/flume-ng agent --conf-file /home/webadmin/liuhuanyu/flume-files/conf.txt --name a1
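The conf.txt referenced above is not shown; a minimal sketch of an agent named a1 that tails a log file into the Spark sink (the source command, log path, hostname and port are assumptions):
a1.sources = r1
a1.channels = c1
a1.sinks = k1
# exec source tailing an application log (path is a placeholder)
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /path/to/app.log
a1.sources.r1.channels = c1
# in-memory channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 10000
# Spark sink from spark-streaming-flume-sink (see the spark-submit --jars below)
a1.sinks.k1.type = org.apache.spark.streaming.flume.sink.SparkSink
a1.sinks.k1.hostname = 0.0.0.0
a1.sinks.k1.port = 41414
a1.sinks.k1.channel = c1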

Insert a row into a partition of a Hive table:
insert into table log_db_200 partition(log_date="2013-03-08") (guid,create_time,userid,type_flag,type,recharge_z,recharge,conver,sys,give_z,free_recharge,free_give,rela_userid,rela_recharge,rela_give,sync_flag,mentor,energy,integral,income,vip_level,flow_23,flow_24,flow_25,log_flag) values(791936071859961901,'2018-02-28 00:00:00',19506395,1,64,27,0,0,0,3,368300,16101,25697072,54,6,0,0,0,30,30,3,4,50,0,5);
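To verify the row landed in the partition, a query along these lines (table and partition column taken from the insert above):
select * from log_db_200 where log_date='2013-03-08' limit 10;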

Submit the Flume-based Spark Streaming job:
/opt/ydbsoftware/spark-2.2.0/bin/spark-submit --jars /home/webadmin/liuhuanyu/flume-files/spark-streaming-flume_2.11-2.2.0.jar,/home/webadmin/liuhuanyu/flume-files/spark-streaming-flume-sink_2.11-2.2.0.jar,/home/webadmin/liuhuanyu/flume-files/flume-ng-configuration-1.6.0.jar,/home/webadmin/liuhuanyu/flume-files/flume-ng-core-1.6.0.jar,/home/webadmin/liuhuanyu/flume-files/flume-ng-sdk-1.6.0.jar /home/webadmin/liuhuanyu/flume-files/sparkTest.jar

Submit the Kafka-based Spark job:
/opt/ydbsoftware/spark-2.2.0/bin/spark-submit \
--jars /opt/ydbsoftware/spark-2.2.0/hadoop-lzo-0.4.15-cdh5.13.1.jar \
--class com.esky.offline.LiveAction \
/home/webadmin/liuhuanyu/test/live.jar

Hive metadata location: the metastore lives in the MySQL database configured above; to inspect it through HiveServer2, connect with beeline:
!connect jdbc:hive2://realtime-3:10000

Clear checkpoint files on HDFS:
hdfs dfs -rm -R hdfs://zhugeio/user/webadmin/liuhuanyu/logdata/checkpoint/*

Common spark-submit tuning options:
--num-executors 100 --executor-memory 2G --executor-cores 4 --driver-memory 1G --conf spark.default.parallelism=1000 --conf spark.storage.memoryFraction=0.5 --conf spark.shuffle.memoryFraction=0.3 --conf spark.sql.hive.filesourcePartitionFileCacheSize=5368709100 --driver-class-path
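A sketch of splicing these options into one of the submit commands on this page (jar and paths reused from the regular-job example below; --driver-class-path is left out here because its value is not given above):
/opt/ydbsoftware/spark-2.2.0/bin/spark-submit \
--num-executors 100 --executor-memory 2G --executor-cores 4 --driver-memory 1G \
--conf spark.default.parallelism=1000 \
--conf spark.storage.memoryFraction=0.5 \
--conf spark.shuffle.memoryFraction=0.3 \
--conf spark.sql.hive.filesourcePartitionFileCacheSize=5368709100 \
--jars /opt/ydbsoftware/spark-2.2.0/hadoop-lzo-0.4.15-cdh5.13.1.jar,/home/webadmin/liuhuanyu/flume-files/mysql-connector-java-5.1.43.jar \
/home/webadmin/liuhuanyu/test/casual_play.jar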

Submit a regular Spark job (test jar, then the online jar):
/opt/ydbsoftware/spark-2.2.0/bin/spark-submit --jars /opt/ydbsoftware/spark-2.2.0/hadoop-lzo-0.4.15-cdh5.13.1.jar,/home/webadmin/liuhuanyu/flume-files/mysql-connector-java-5.1.43.jar /home/webadmin/liuhuanyu/test/casual_play.jar

/opt/ydbsoftware/spark-2.2.0/bin/spark-submit --jars /opt/ydbsoftware/spark-2.2.0/hadoop-lzo-0.4.15-cdh5.13.1.jar,/home/webadmin/liuhuanyu/flume-files/mysql-connector-java-5.1.43.jar /home/webadmin/liuhuanyu/online/casual_play.jar

Start Hue:
nohup /home/webadmin/liuhuanyu/online/hue-4.0.0/build/env/bin/supervisor &

Connect to HiveServer2 from beeline:
!connect jdbc:hive2://realtime-3:10000;

Show Hive's runtime configuration:
set -v;

Run a jar with java, pointing at a directory of dependency jars:
java -Djava.ext.dirs=/home/webadmin/liuhuanyu/impala-files -cp base_util.jar jdbc.util.ImpalaUtil

Run Hue's test/dev server:
nohup /home/webadmin/liuhuanyu/online/hue-4.0.0/build/env/bin/hue runserver 10.0.3.201:8000 &

Enable Hive authorization and set grant defaults (run in the Hive session):
set hive.security.authorization.task.factory=org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
set hive.security.authorization.createtable.owner.grants=ALL ;
set hive.security.authorization.enabled=true ;
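To make these settings permanent rather than per-session, the same properties can go into hive-site.xml (a sketch using exactly the names from the set commands above):
<property>
  <name>hive.security.authorization.enabled</name>
  <value>true</value>
</property>
<property>
  <name>hive.security.authorization.createtable.owner.grants</name>
  <value>ALL</value>
</property>
<property>
  <name>hive.security.authorization.task.factory</name>
  <value>org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl</value>
</property>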

Show the roles granted to a Hive user:
show role grant user ipaychat;

Grant a Hive role to a user:
grant role pm to user maojunchi;

Kafka console consumer (old ZooKeeper-based consumer):
bin/kafka-console-consumer.sh --zookeeper 10.0.0.175:9093 --from-beginning --topic topic_data_stat2
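On newer Kafka versions the console consumer connects to a broker with --bootstrap-server instead of ZooKeeper; roughly the equivalent would be (broker address and port below are an assumption):
bin/kafka-console-consumer.sh --bootstrap-server 10.0.0.175:9092 --from-beginning --topic topic_data_stat2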

Regex find/replace for print debugging (turns print(x) into print("x = " + str(x))):
search:  print\((.+)\)
replace: print("\1 = " + str(\1))   (use $1 instead of \1 depending on the editor)

Import data with Sqoop ({sPartition} is a placeholder filled in by the calling script):
/opt/sqoop/sqoop-1.4.6-cdh5.13.0/bin/sqoop import \
--connect jdbc:mysql://192.168.90.231:3306/yt_fl_video_chat_log \
--username ytflhdcaiy --password-file file:/root/.sqoop_mysql.password \
--target-dir '/user/hive/warehouse/fl_video.db/t_videopair_log/log_date='{sPartition}'' \
--delete-target-dir \
--table t_videopair_log_{sPartition} \
--fields-terminated-by '|' \
--null-string '\N' \
--null-non-string '\N'
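--password-file expects a file that contains only the password; a sketch of creating it (the password string below is a placeholder):
echo -n 'mysql_password_here' > /root/.sqoop_mysql.password
chmod 400 /root/.sqoop_mysql.password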

Start Apache Sentry (on the .103 machine):
sentry --command service --conffile ${SENTRY_HOME}/conf/sentry-site.xml

Python: start an HTTP server with CGI support on port 10086:
python3 -m http.server --cgi 10086

Access it over HTTP with curl:
curl 0.0.0.0:10086/cgi-bin/test.sh
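test.sh itself is not shown; a minimal CGI script sketch that would answer this request (it has to sit under cgi-bin/ and be executable):
#!/bin/bash
# a CGI response is headers, a blank line, then the body
echo "Content-Type: text/plain"
echo ""
echo "hello from test.sh"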

Check the ZooKeeper version (stat four-letter command):
echo stat|nc 127.0.0.1 2181

Start ZooKeeper from the Kafka distribution (Windows):
bin\windows\zookeeper-server-start.bat config\zookeeper.properties

Start a Kafka broker from the Kafka distribution (Windows):
bin\windows\kafka-server-start.bat config\server.properties
