我爱编程

Hadoop 安装配置

2016-12-20  本文已影响0人  StonyBlue

1.创建用户组和用户

# Create a dedicated group and user that will own and run the Hadoop daemons.
groupadd hadoops
useradd -g hadoops hadoop
# Set an interactive login password for the new account.
passwd hadoop

2.解压jdk和hadoop

# Unpack the JDK and Hadoop tarballs and move them to their final locations.
tar -zxvf jdk1.7.0_80.tar.gz -C /usr/local
mv /usr/local/jdk1.7.0_80 /usr/local/java

tar -zxvf hadoop-2.7.3.tar.gz -C /u01
mv /u01/hadoop-2.7.3 /u01/hadoop

# Hand the whole installation tree to the hadoop user. The previous
# 'chown -R dir/**' form was redundant (-R already recurses), missed
# dotfiles, and left /u01/hadoop itself (and lib/, share/, etc.) root-owned.
chown -R hadoop:hadoop /u01/hadoop

3.配置环境变量

vi /etc/profile

# Locations of the JDK and Hadoop installs; prepend both bin/ dirs to PATH
# (Java first, then Hadoop — same final ordering as two separate prepends).
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/u01/hadoop
export PATH="$JAVA_HOME/bin:$HADOOP_HOME/bin:$PATH"

source /etc/profile

4.配置hosts

vi /etc/hosts

# Map the cluster hostnames to fixed IPs so every node can reach the
# others by name; the same entries must exist on all three machines.
192.168.0.9 hadmaster
192.168.0.27 hadslave1
192.168.0.28 hadslave2

5.创建目录

# Create the Hadoop data directory ('mkdri' was a typo for 'mkdir';
# -p makes the command idempotent) and give it to the hadoop user.
mkdir -p /u01/hadoopData
chown hadoop:hadoop /u01/hadoopData

vi /u01/hadoop/etc/hadoop/slaves

# Worker node hostnames (DataNode/NodeManager), one per line;
# lines starting with '#' are stripped by the hadoop start scripts.
hadslave1
hadslave2

vi /u01/hadoop/etc/hadoop/masters
# Host on which the SecondaryNameNode is started.
hadmaster

6.配置hadoop

7.ssh免密码登录

# Set up passwordless SSH from the master to itself and both slaves.
su - hadoop
# Generate a passphrase-less RSA key pair for the hadoop user.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
# The key files live in ~/.ssh — cd there BEFORE copying (the original
# ran 'cp id_rsa.pub authorized_keys' from /home/hadoop, where neither
# file exists). Append rather than overwrite any existing keys.
cd /home/hadoop/.ssh
cat id_rsa.pub >> authorized_keys
# sshd refuses keys with loose permissions.
chmod 700 /home/hadoop/.ssh
chmod 600 authorized_keys
scp authorized_keys hadslave1:/home/hadoop/.ssh
scp authorized_keys hadslave2:/home/hadoop/.ssh
# Log in to each node once to verify key auth and record host keys.
ssh hadmaster
exit
ssh hadslave1
exit
ssh hadslave2
exit
# Push the Hadoop install and its configuration to both slaves.
rsync -avxP /u01/hadoop/ hadoop@hadslave1:/u01/hadoop/
rsync -avxP /u01/hadoop/ hadoop@hadslave2:/u01/hadoop/
rsync -avxP /u01/hadoop/etc/hadoop/ hadoop@hadslave1:/u01/hadoop/etc/hadoop/
rsync -avxP /u01/hadoop/etc/hadoop/ hadoop@hadslave2:/u01/hadoop/etc/hadoop/

8.hive元数据

-- Create the Hive metastore database and a 'hive' account allowed to
-- connect from any host. The original host spec '@'5'' was garbled;
-- '%' is the MySQL wildcard host.
create database hive_meta;
create user 'hive' identified by 'hive';
grant all privileges on hive_meta.* to 'hive'@'%' identified by 'hive' with grant option;
flush privileges;

9.hive环境变量

# Unpack Hive and move it to its final location.
tar -zxvf apache-hive-2.1.0-bin.tar.gz -C /u01
mv /u01/apache-hive-2.1.0-bin /u01/hive
# -R so the whole extracted tree — not just the top directory — is
# owned by hadoop (without it, bin/, conf/, lib/ stay root-owned).
chown -R hadoop:hadoop /u01/hive
vi /etc/profile
export HIVE_HOME=/u01/hive
export PATH=$PATH:$HIVE_HOME/bin
source /etc/profile

10.hive配置

11.hdfs目录

# Create the HDFS directories Hive requires (scratch space and the
# default warehouse) and make them group-writable for Hive users.
hdfs dfs -mkdir /tmp
hdfs dfs -mkdir -p /user/hive/warehouse
hdfs dfs -chmod g+w /tmp
hdfs dfs -chmod g+w /user/hive/warehouse

12.初始化hive元数据

# Initialize the Hive metastore schema in MySQL — run once, before
# starting Hive for the first time.
cd /u01/hive/bin
./schematool -dbType mysql -initSchema

13.测试hive

# Start the Hive CLI and verify the metastore connection works.
hive
# HiveQL statements must be terminated with a semicolon, otherwise the
# CLI just waits for more input.
show tables;

14.连接hive

# Start the metastore service and HiveServer2 in the background.
hive --service metastore >/dev/null 2>&1 &
# HiveServer2 does not take the port as a positional argument; set it
# explicitly via hive.server2.thrift.port (10000 is also the default).
hive --service hiveserver2 --hiveconf hive.server2.thrift.port=10000 >/dev/null 2>&1 &
连接Hive JDBC URL:jdbc:hive2://192.168.0.9:10000/default     (Hive默认端口:10000  默认数据库名:default;主机即上文的 hadmaster 192.168.0.9)

# Connect to HiveServer2 with Beeline over JDBC (username/password as
# configured above: hadoop/hadoop).
beeline
!connect jdbc:hive2://localhost:10000 hadoop hadoop

15.hiveServer2WebUI

<!-- Bind the HiveServer2 web UI to the master node's address
     (hadmaster, 192.168.0.9 — see /etc/hosts above). -->
<property>
    <name>hive.server2.webui.host</name>
    <value>192.168.0.9</value>
</property>

<!-- Web UI port; 10002 is the HiveServer2 default. -->
<property>
    <name>hive.server2.webui.port</name>
    <value>10002</value>
</property>

git仓库地址

上一篇 下一篇

猜你喜欢

热点阅读