Hadoop---Creating, Deleting, Uploading, and Downloading Files on HDFS in Java
Posted by 阿新 · Published 2018-12-29
This post walks through a small Java client that creates, deletes, uploads, and downloads files on HDFS using the Hadoop FileSystem API.
package com.kevin.hdfs;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;

/**
 * @author kevin
 * @version 1.0
 * @description Create, delete, upload, and download files on HDFS
 * @createDate 2018/12/18
 */
public class HDFSDemo {

    private FileSystem fs = null;

    public static final String HDFS_PATH = "hdfs://Master:9000"; // HDFS URI

    // Initialization: connect to HDFS at HDFS_PATH as user "root" before each test
    @Before
    public void init() throws IOException, URISyntaxException, InterruptedException {
        fs = FileSystem.get(new URI(HDFS_PATH), new Configuration(), "root");
    }

    // Delete a file (or directory) on HDFS; "true" makes the delete recursive
    @Test
    public void testDel() throws IllegalArgumentException, IOException {
        boolean flag = fs.delete(new Path("/iamkevin"), true);
        System.out.println(flag);
    }

    // Create a directory on HDFS
    @Test
    public void testMkdir() throws IllegalArgumentException, IOException {
        boolean flag = fs.mkdirs(new Path("/iamkevin"));
        System.out.println(flag);
    }

    // Upload: copy a local file to HDFS
    @Test
    public void testUpload() throws IllegalArgumentException, IOException {
        Path in = new Path("C:/Users/kevin/Desktop/README");
        Path out = new Path(HDFS_PATH + "/test/");
        fs.copyFromLocalFile(false, in, out); // "false" keeps the local source file
        fs.close();
        System.out.println("File uploaded successfully...");
    }

    // Download: copy a file from HDFS to the local file system
    @Test
    public void downloadData() throws IOException {
        InputStream in = fs.open(new Path("/test/word.txt")); // open the HDFS source file
        FileOutputStream out = new FileOutputStream(new File("C:/Users/kevin/Desktop/hello.txt"));
        IOUtils.copyBytes(in, out, 1024, true); // write to the local file; "true" closes both streams
        fs.close();
        System.out.println("Download finished");
    }
}
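For reference, here is a minimal sketch of the same operations driven from a plain main method instead of JUnit, using FileSystem#copyToLocalFile for the download rather than streaming the bytes by hand. The class name HDFSClientSketch, the cluster URI, the user name, and the local/HDFS paths simply mirror the example above and are assumptions; adjust them to your own environment.

package com.kevin.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSClientSketch {

    public static void main(String[] args) throws Exception {
        // Same cluster URI and user as in HDFSDemo; placeholders, adjust as needed.
        FileSystem fs = FileSystem.get(new URI("hdfs://Master:9000"), new Configuration(), "root");
        try {
            // Create a working directory.
            fs.mkdirs(new Path("/test"));

            // Upload: keep the local source (delSrc = false), overwrite an existing target.
            fs.copyFromLocalFile(false, true,
                    new Path("C:/Users/kevin/Desktop/README"), new Path("/test/"));

            // Download: copyToLocalFile handles the stream copying internally.
            fs.copyToLocalFile(new Path("/test/word.txt"),
                    new Path("C:/Users/kevin/Desktop/hello.txt"));

            // Recursive delete of the working directory.
            boolean deleted = fs.delete(new Path("/test"), true);
            System.out.println("deleted: " + deleted);
        } finally {
            fs.close();
        }
    }
}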