
Spark -- Environment Setup -- Hive 0.13


Perform all of the following steps on the spark1 node.

1. Install Hive

$ cd /usr/local/

$ tar -zxvf apache-hive-0.13.1-bin.tar.gz

$ mv apache-hive-0.13.1-bin hive

$ cd

$ vi .bashrc

export HIVE_HOME=/usr/local/hive/
export PATH=$PATH:$HIVE_HOME/bin

$ source .bashrc
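A quick sanity check (optional) before moving on, to confirm the environment variables took effect in the current shell:

$ echo $HIVE_HOME    # should print /usr/local/hive/
$ which hive         # should resolve to /usr/local/hive/bin/hive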


2. Install MySQL

$ yum install -y mysql-server

$ service mysqld start

$ chkconfig mysqld on
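To verify that MySQL is running and registered to start on boot:

$ service mysqld status
$ chkconfig --list mysqld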

$ yum install -y mysql-connector-java

$ cp /usr/share/java/mysql-connector-java-5.1.17.jar /usr/local/hive/lib/
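Note: the connector version pulled in by yum may differ from 5.1.17, so check /usr/share/java/ for the actual jar name before copying. To confirm the driver landed on Hive's classpath:

$ ls /usr/local/hive/lib/ | grep mysql-connector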

$ mysql

create database if not exists hive_metadata;
grant all privileges on hive_metadata.* to 'hive'@'%' identified by 'hive';
grant all privileges on hive_metadata.* to 'hive'@'localhost' identified by 'hive';
grant all privileges on hive_metadata.* to 'hive'@'spark1' identified by 'hive';
flush privileges;
use hive_metadata;
exit;
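To confirm the hive account can actually reach the metastore database (the -p value here is the password set in the grants above):

$ mysql -u hive -phive -h spark1 -e "show databases;"

You should see hive_metadata in the output.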

$ cd /usr/local/hive/conf

$ mv hive-default.xml.template hive-site.xml

# Edit the following property values in hive-site.xml
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://spark1:3306/hive_metadata?createDatabaseIfNotExist=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
  <description>username to use against metastore database</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>hive</value>
  <description>password to use against metastore database</description>
</property>

<property>
  <name>hive.metastore.warehouse.dir</name>
  <value>/user/hive/warehouse</value>
  <description>location of default database for the warehouse</description>
</property>
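Note that hive.metastore.warehouse.dir is an HDFS path, not a local filesystem path. If it does not exist yet, create it before running Hive (a standard preparatory step, assuming HDFS is already up on this cluster):

$ hdfs dfs -mkdir -p /user/hive/warehouse
$ hdfs dfs -chmod -R g+w /user/hive/warehouse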

$ mv hive-env.sh.template hive-env.sh

$ cd ../bin

$ vi hive-config.sh

export JAVA_HOME=/usr/java/latest/
export HIVE_HOME=/usr/local/hive/
export HADOOP_HOME=/usr/local/hadoop
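With the configuration in place, a minimal smoke test verifies that Hive can talk to the MySQL metastore (test_tbl is just a throwaway name for this check):

$ hive
hive> create table test_tbl(id int, name string);
hive> show tables;
hive> drop table test_tbl;
hive> exit;

If create table succeeds, Hive has initialized its metadata tables in the hive_metadata database on spark1.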
