
Upload a text file to HDFS; if the target file already exists in HDFS, let the user decide whether to append to the end of the existing file or to overwrite it.

The program first checks whether the remote file exists with fs.exists(). If it does not, the local file is uploaded directly; if it does, the user types c to overwrite it via copyFromLocalFile() or a to append to it via fs.append().

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Scanner;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class A_UpdatedFile {

    // Append the contents of a local file to the end of an existing HDFS file.
    public static void appendToFile(FileSystem fs, String localFilePath,
            String remoteFilePath) {
        Path remotePath = new Path(remoteFilePath);
        try {
            FileInputStream in = new FileInputStream(localFilePath);
            FSDataOutputStream out = fs.append(remotePath);
            byte[] data = new byte[1024];
            int read = -1;
            while ((read = in.read(data)) > 0) {
                out.write(data, 0, read);
            }
            out.close();
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        Var_init var = new Var_init();
        try {
            // Check whether the target file already exists on HDFS.
            boolean fileExists = var.fs.exists(var.p_remoteFilePath);
            if (fileExists) {
                System.out.println(var.s_remoteFilePath + " already exists.");
            } else {
                System.out.println(var.s_remoteFilePath + " does not exist.");
            }
            if (!fileExists) {
                // No conflict: upload the local file directly.
                var.fs.copyFromLocalFile(false, true, var.p_localFilePath, var.p_remoteFilePath);
                System.out.println(var.s_localFilePath + " has been uploaded to "
                        + var.s_remoteFilePath);
            } else {
                // The file exists: let the user choose between overwrite and append.
                Scanner choose = new Scanner(System.in);
                System.out.println("Enter c to overwrite, or a to append:");
                String str = choose.next();
                if (str.equals("c")) {
                    var.fs.copyFromLocalFile(false, true, var.p_localFilePath, var.p_remoteFilePath);
                    System.out.println("Overwritten successfully");
                } else if (str.equals("a")) {
                    A_UpdatedFile.appendToFile(var.fs, var.s_localFilePath, var.s_remoteFilePath);
                    System.out.println("Appended successfully");
                } else {
                    System.out.println("Please enter a valid option (c or a)");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
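As a side note, the same append step can be written more compactly with Hadoop's IOUtils.copyBytes helper instead of a hand-rolled read/write loop. Below is a minimal sketch, assuming the same FileSystem handle and paths as above; the class name AppendSketch is made up for illustration and is not part of the original program.

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class AppendSketch {
    // Append the contents of a local file to an existing HDFS file,
    // letting IOUtils.copyBytes do the buffered copy.
    public static void append(FileSystem fs, String localFilePath,
            String remoteFilePath) throws IOException {
        try (FileInputStream in = new FileInputStream(localFilePath);
             FSDataOutputStream out = fs.append(new Path(remoteFilePath))) {
            // false: the streams are closed by try-with-resources, not by copyBytes.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }
}

The Var_init helper class referenced by main() above is defined as follows.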
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Var_init {
    public Configuration conf;
    public String s_localFilePath;
    public String s_remoteFilePath;
    public Path p_remoteFilePath;
    public Path p_localFilePath;
    public FileSystem fs;
    public FSDataInputStream in;
    public FSDataOutputStream out;
    public FileStatus fileinfo;

    Var_init() {
        try {
            s_localFilePath = "/home/hadoop/Desktop/QWQ.txt";
            p_localFilePath = new Path(s_localFilePath);
            s_remoteFilePath = "/user/tinyy/QWQ.txt";
            p_remoteFilePath = new Path(s_remoteFilePath);
            conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            fs = FileSystem.get(conf);
        } catch (Exception E) {
            E.printStackTrace();
        }
    }
}
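One line in Var_init deserves a comment: setting dfs.client.block.write.replace-datanode-on-failure.policy to NEVER is a common workaround when calling fs.append() against a pseudo-distributed cluster with a single DataNode, where the default policy would try, and fail, to find a replacement DataNode for the write pipeline. Below is a minimal sketch of an append-friendly client configuration under that assumption; the extra ...enable key and the helper name AppendFriendlyClient are not from the original post, and on a multi-node cluster the default settings are usually the safer choice.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class AppendFriendlyClient {
    // Build a FileSystem handle whose write pipeline tolerates a single-DataNode
    // setup, which is what fs.append() typically needs on a local pseudo-distributed
    // cluster. Assumption: the NameNode URI matches the one used in this post.
    public static FileSystem open(String nameNodeUri) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", nameNodeUri);
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        return FileSystem.get(conf);
    }
}

Usage would mirror Var_init: FileSystem fs = AppendFriendlyClient.open("hdfs://localhost:9000");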
