
2022.3.29 Work Log: importing person-in-charge codes for research metrics; person-in-charge validation

Configure the hostname, network, and name resolution on the three hosts

#master
hostnamectl set-hostname master
bash

#slave1
hostnamectl set-hostname slave1
bash

#slave2
hostnamectl set-hostname slave2
bash
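
A quick check on each host confirms the new name took effect (a minimal check; the expected value differs per host):

hostnamectl status | grep 'Static hostname'	#expect the host's own name
hostname	#expect master, slave1 or slave2 respectively
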
#master
nmcli con modify <connection-name> ipv4.addresses 10.10.10.128/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>

#slave1
nmcli con modify <connection-name> ipv4.addresses 10.10.10.129/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>

#slave2
nmcli con modify <connection-name> ipv4.addresses 10.10.10.130/24 ipv4.gateway 10.10.10.2 ipv4.dns 8.8.8.8 ipv4.method manual
nmcli con reload
nmcli con up <connection-name>
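
To confirm the static addressing was applied, each host can be checked as follows (a minimal sketch; addresses per the plan above):

ip -4 addr show	#expect 10.10.10.128/24, .129/24 or .130/24 respectively
ip route	#expect: default via 10.10.10.2
cat /etc/resolv.conf	#expect: nameserver 8.8.8.8
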
#master, slave1 and slave2 (every node needs all three mappings, otherwise the later ssh/scp-by-name steps fail)
vi /etc/hosts
10.10.10.128 	master
10.10.10.129 	slave1
10.10.10.130 	slave2
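
With identical mappings on every node, each host should now reach the others by name; for example, from master:

ping -c 2 slave1	#expect replies from 10.10.10.129
ping -c 2 slave2	#expect replies from 10.10.10.130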

Disable the firewall and SELinux

#master
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0

#slave1
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0

#slave2
systemctl disable --now firewalld
vi /etc/selinux/config
SELINUX=disabled
setenforce 0
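
Note that SELINUX=disabled in /etc/selinux/config only takes full effect after a reboot; setenforce 0 covers the current session. Both settings can be verified on each host:

systemctl is-enabled firewalld	#expect: disabled
systemctl is-active firewalld	#expect: inactive
getenforce	#expect: Permissive now, Disabled after a reboot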

Install Hadoop

First, upload the installation packages with Xftp.

#master
tar -zxvf /opt/software/jdk-8u152-linux-x64.tar.gz -C /usr/local/src
tar -zxvf /opt/software/hadoop-2.7.1.tar.gz -C /usr/local/src/
cd /usr/local/src/
mv jdk1.8.0_152 jdk
mv hadoop-2.7.1 hadoop
vi /etc/profile.d/hadoop.sh
export JAVA_HOME=/usr/local/src/jdk
export HADOOP_HOME=/usr/local/src/hadoop
export PATH=${JAVA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:$PATH
source /etc/profile.d/hadoop.sh
echo $PATH
vi /usr/local/src/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/local/src/jdk
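
If the variables are wired up correctly, both toolchains now resolve from the new PATH:

java -version	#expect: java version "1.8.0_152"
hadoop version	#expect: Hadoop 2.7.1
which hadoop	#expect: /usr/local/src/hadoop/bin/hadoop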

Configure the parameter files

#master
vi /usr/local/src/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
	<property>
		<name>dfs.namenode.name.dir</name>
		<value>file:/usr/local/src/hadoop/dfs/name</value>
	</property>
	<property>
		<name>dfs.datanode.data.dir</name>
		<value>file:/usr/local/src/hadoop/dfs/data</value>
	</property>
	<property>
		<name>dfs.replication</name>
		<value>2</value>
	</property>
</configuration>

mkdir -p /usr/local/src/hadoop/dfs/{name,data}

vi /usr/local/src/hadoop/etc/hadoop/core-site.xml
<configuration>
	<property>
		<name>fs.defaultFS</name>
		<value>hdfs://master:9000</value>
	</property>
	<property>
		<name>io.file.buffer.size</name>
		<value>131072</value>
	</property>
	<property>
		<name>hadoop.tmp.dir</name>
		<value>file:/usr/local/src/hadoop/tmp</value>
	</property>
</configuration>

mkdir -p /usr/local/src/hadoop/tmp

cd /usr/local/src/hadoop/etc/hadoop
cp mapred-site.xml.template mapred-site.xml
vi /usr/local/src/hadoop/etc/hadoop/mapred-site.xml
<configuration>
	<property>
		<name>mapreduce.framework.name</name>
		<value>yarn</value>
	</property>
	<property>
		<name>mapreduce.jobhistory.address</name>
		<value>master:10020</value>
	</property>
	<property>
		<name>mapreduce.jobhistory.webapp.address</name>
		<value>master:19888</value>
	</property>
</configuration>

vi /usr/local/src/hadoop/etc/hadoop/yarn-site.xml
<configuration>
	<property>
		<name>yarn.resourcemanager.address</name>
		<value>master:8032</value>
	</property>
	<property>
		<name>yarn.resourcemanager.scheduler.address</name>
		<value>master:8030</value>
	</property>
	<property>
		<name>yarn.resourcemanager.webapp.address</name>
		<value>master:8088</value>
	</property>
	<property>
		<name>yarn.resourcemanager.resource-tracker.address</name>
		<value>master:8031</value>
	</property>
	<property>
		<name>yarn.resourcemanager.admin.address</name>
		<value>master:8033</value>
	</property>
	<property>
		<name>yarn.nodemanager.aux-services</name>
		<value>mapreduce_shuffle</value>
	</property>
	<property>
		<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
		<value>org.apache.hadoop.mapred.ShuffleHandler</value>
	</property>
</configuration>
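
Before distributing the configuration, the four XML files can be checked for well-formedness (a quick sketch, assuming xmllint from libxml2 is installed):

cd /usr/local/src/hadoop/etc/hadoop
xmllint --noout core-site.xml hdfs-site.xml mapred-site.xml yarn-site.xml	#no output means all four parse cleanly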

Other Hadoop configuration

#master
vi /usr/local/src/hadoop/etc/hadoop/masters
10.10.10.128

vi /usr/local/src/hadoop/etc/hadoop/slaves
10.10.10.129
10.10.10.130

useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
ssh-keygen -t rsa
ssh-copy-id root@slave1
ssh-copy-id root@slave2
scp -r /usr/local/src/* root@slave1:/usr/local/src/
scp -r /usr/local/src/* root@slave2:/usr/local/src/
scp /etc/profile.d/hadoop.sh root@slave1:/etc/profile.d/
scp /etc/profile.d/hadoop.sh root@slave2:/etc/profile.d/
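
Passwordless SSH and the copied tree can be confirmed from master; none of these should prompt for a password:

ssh root@slave1 hostname	#expect: slave1
ssh root@slave2 hostname	#expect: slave2
ssh root@slave1 ls /usr/local/src	#expect: hadoop  jdk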

#slave1
useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
source /etc/profile.d/hadoop.sh
echo $PATH

#slave2
useradd hadoop
echo 'hadoop' | passwd --stdin hadoop
chown -R hadoop:hadoop /usr/local/src
ll /usr/local/src/
source /etc/profile.d/hadoop.sh
echo $PATH
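
As a final check, both slaves should now report the same Hadoop build and ownership as master (a minimal sketch, run on each slave):

hadoop version	#expect: Hadoop 2.7.1
ls -ld /usr/local/src/hadoop	#expect owner hadoop:hadoop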