
Viewing Hive Table Partitions

2021-09-18  堂哥000

For Hive managed (internal) tables only: the scripts below inspect the table/partition directories under the default warehouse path (/user/hive/warehouse) to check when a table or partition was created and how much space it takes. The first script reports directory sizes with hadoop fs -du -h:

#!/bin/bash
. /etc/profile
. ~/.bash_profile
. ~/.bashrc

# Arguments: "tmp" "tmp_20200808" "201909" -> size of the matching partitions
if [[ $# -eq 3 ]]; then
    database=$1
    table_name=$2
    hadoop fs -du -h /user/hive/warehouse/${database}.db/${table_name} | grep "$3"
# Arguments: "tmp.tmp_20200808" "201909" -> size of the matching partitions
elif [[ $# -eq 2 ]] && [[ $1 =~ '.' ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'.' '{print $1}')
    table_name=$(echo ${arg} | awk -F'.' '{print $2}')
    hadoop fs -du -h /user/hive/warehouse/${database}.db/${table_name} | grep "$2"
# Arguments: "tmp" "tmp_20200808" -> size of every partition/file of the table
elif [[ $# -eq 2 ]]; then
    database=$1
    table_name=$2
    hadoop fs -du -h /user/hive/warehouse/${database}.db/${table_name}
# Argument: "tmp.db/tmp_20200808" (path-style database/table name)
elif [[ $# -eq 1 ]] && [[ $1 =~ '.db' ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'/' '{print $1}')
    table_name=$(echo ${arg} | awk -F'/' '{print $2}')
    hadoop fs -du -h /user/hive/warehouse/${database}/${table_name}
# Argument: "tmp.tmp_20200808" (or just "tmp" for the whole database)
elif [[ $# -eq 1 ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'.' '{print $1}')
    table_name=$(echo ${arg} | awk -F'.' '{print $2}')
    hadoop fs -du -h /user/hive/warehouse/${database}.db/${table_name}
else
    echo "Check the number and format of the arguments"
fi
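
The usage section below only wires up the second script; this first one can be called directly in the same way. A quick sketch, assuming you saved it as hdfsdu.sh (a placeholder name, not one from the original post):

# size of the 201909 partitions of tmp.tmp_20200808 (hdfsdu.sh is a hypothetical file name)
bash hdfsdu.sh tmp tmp_20200808 201909
# size of every partition/file of the table
bash hdfsdu.sh tmp.tmp_20200808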
The second script, hdfsls.sh, is identical except that it lists the directories with hadoop fs -ls, which shows the modification time of each table or partition directory:
#!/bin/bash
. /etc/profile
. ~/.bash_profile

# Arguments: "tmp" "tmp_20200808" "201909" -> list the matching partitions
if [[ $# -eq 3 ]]; then
    database=$1
    table_name=$2
    hadoop fs -ls /user/hive/warehouse/${database}.db/${table_name} | grep "$3"
# Arguments: "tmp.tmp_20200808" "201909" -> list the matching partitions
elif [[ $# -eq 2 ]] && [[ $1 =~ '.' ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'.' '{print $1}')
    table_name=$(echo ${arg} | awk -F'.' '{print $2}')
    hadoop fs -ls /user/hive/warehouse/${database}.db/${table_name} | grep "$2"
# Arguments: "tmp" "tmp_20200808" -> list every partition/file of the table
elif [[ $# -eq 2 ]]; then
    database=$1
    table_name=$2
    hadoop fs -ls /user/hive/warehouse/${database}.db/${table_name}
# Argument: "tmp.db/tmp_20200808" (path-style database/table name)
elif [[ $# -eq 1 ]] && [[ $1 =~ '.db' ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'/' '{print $1}')
    table_name=$(echo ${arg} | awk -F'/' '{print $2}')
    hadoop fs -ls /user/hive/warehouse/${database}/${table_name}
# Argument: "tmp.tmp_20200808" (or just "tmp" for the whole database)
elif [[ $# -eq 1 ]]; then
    arg=$1
    database=$(echo ${arg} | awk -F'.' '{print $1}')
    table_name=$(echo ${arg} | awk -F'.' '{print $2}')
    hadoop fs -ls /user/hive/warehouse/${database}.db/${table_name}
else
    echo "Check the number and format of the arguments"
fi

Usage

Script name: hdfsls.sh
Add the following line to ~/.bashrc, where workdir is the directory containing the script:

alias hdfsls="bash ${workdir}/hdfsls.sh"

Save the file and reload it with source ~/.bashrc.
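
For reference, a minimal sketch of the full ~/.bashrc addition, assuming the script was copied to ~/bin (replace with your actual script directory):

# hypothetical script directory; adjust to where hdfsls.sh actually lives
workdir=~/bin
alias hdfsls="bash ${workdir}/hdfsls.sh"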

# List the whole database
hdfsls tmp
# List all first-level partitions of a table, or the files of a non-partitioned table
hdfsls tmp.table_name
# Path-style database/table names are also supported
hdfsls tmp.db/table_name
# Filter for a specific partition
hdfsls tmp.table_name 20210919
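
Both scripts assume the default warehouse root /user/hive/warehouse and the <database>.db/<table_name> directory layout used by managed tables. If your cluster sets a different hive.metastore.warehouse.dir, a minimal sketch for looking up the real paths first (assuming the hive CLI is available on the node):

# print the configured warehouse root
hive -e "set hive.metastore.warehouse.dir;"
# print the actual HDFS location of one table, e.g. tmp.tmp_20200808
hive -e "describe formatted tmp.tmp_20200808;" | grep -i "location"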