
Hadoop read/write operations

Create a new project:

Import the libs: the jar libraries from the extracted Hadoop directory.

Hadoop 2.7.2:

https://download.csdn.net/download/ssllkkyyaa/10758406

 

File API test:

package com.example.demo;

import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

public class MyTest {
	@Test
	public void readFile() throws Exception {
		// Register the URL stream handler factory so java.net.URL understands hdfs:// URLs.
		// This may only be called once per JVM.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

		URL url = new URL("hdfs://192.168.77.200:8020/user/centos/hadoop/index.html");
		URLConnection conn = url.openConnection();
		InputStream is = conn.getInputStream();
		// available() plus a single read() is enough for this small file;
		// the copy loop in readFileByAPI below is more robust for large files.
		byte[] buf = new byte[is.available()];
		is.read(buf);
		is.close();
		String str = new String(buf);
		System.out.println(str);
	}
	/**
	 * Read a file through the Hadoop FileSystem API.
	 */
	@Test
	public void readFileByAPI() throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		Path p = new Path("/user/centos/hadoop/index.html");
		FSDataInputStream fis = fs.open(p);
		byte[] buf = new byte[1024];
		int len = -1;

		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		while ((len = fis.read(buf)) != -1) {
			baos.write(buf, 0, len);
		}
		fis.close();
		baos.close();
		System.out.println(new String(baos.toByteArray()));
	}
	/**
	 * Read a file through the Hadoop FileSystem API, copying with IOUtils.
	 */
	@Test
	public void readFileByAPI2() throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		Path p = new Path("/user/centos/hadoop/index.html");
		FSDataInputStream fis = fs.open(p);
		// The final argument closes both streams once the copy finishes.
		IOUtils.copyBytes(fis, baos, 1024, true);
		System.out.println(new String(baos.toByteArray()));
	}

	/**
	 * Create a directory with mkdirs. Note the permissions of the user running
	 * the test (see the run-as-user sketch after this class).
	 */
	@Test
	public void mkdir() throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		fs.mkdirs(new Path("/user/centos/myhadoop"));
	}
	/**
	 * putFile: write a file.
	 */
	@Test
	public void putFile() throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		FSDataOutputStream out = fs.create(new Path("/user/centos/myhadoop/a.txt"));
		out.write("helloworld".getBytes());
		out.close();
	}
	/**
	 * removeFile: delete a path recursively.
	 */
	@Test
	public void removeFile() throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		Path p = new Path("/user/centos/myhadoop");
		fs.delete(p, true);
	}
}
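
If mkdir or putFile fails with an AccessControlException, the test is connecting to HDFS as the local OS user rather than as the owner of /user/centos. A minimal run-as-user sketch, assuming the HDFS user is centos (as in the paths above); the class name, test name, and b.txt path are only for illustration:

package com.example.demo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import java.net.URI;

public class MyUserTest {
	/**
	 * Write a file while connected as an explicit HDFS user.
	 */
	@Test
	public void putFileAsUser() throws Exception {
		Configuration conf = new Configuration();
		// The three-argument FileSystem.get runs all operations as the given user ("centos" assumed here).
		FileSystem fs = FileSystem.get(new URI("hdfs://192.168.77.200:8020/"), conf, "centos");
		FSDataOutputStream out = fs.create(new Path("/user/centos/myhadoop/b.txt"));
		out.write("helloworld".getBytes());
		out.close();
	}
}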

Download a file:

Change the host name in the download URL to the DataNode's IP address, or map that host name in your hosts file:

http://192.168.77.202:50075/webhdfs/v1/user/centos/myhadoop/a.txt?op=OPEN&namenoderpcaddress=s200:8020&offset=0
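
The same file can also be fetched programmatically with a plain HTTP GET against that WebHDFS URL. A minimal sketch, assuming the DataNode address and a.txt path shown above (the class name is only for illustration):

package com.example.demo;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class WebHdfsDownload {
	public static void main(String[] args) throws Exception {
		// WebHDFS OPEN request, served directly by the DataNode (same URL as above).
		String url = "http://192.168.77.202:50075/webhdfs/v1/user/centos/myhadoop/a.txt"
				+ "?op=OPEN&namenoderpcaddress=s200:8020&offset=0";
		HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		try (InputStream in = conn.getInputStream()) {
			byte[] buf = new byte[1024];
			int len;
			while ((len = in.read(buf)) != -1) {
				baos.write(buf, 0, len);
			}
		}
		conn.disconnect();
		System.out.println(new String(baos.toByteArray()));
	}
}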