// HDFS file: start reading at a random offset and copy an arbitrary number of bytes.
// Source: blog post published 2018-12-24.
package com.ghgj.cn.zy;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class InputRandom {
    /**
     * Reads an HDFS file starting at a random offset and copies a random number
     * of bytes to a new HDFS file.
     *
     * <p>Opens {@code /tt/aa/ff.txt} on {@code hdfs://hadoop01:9000} as user
     * {@code hadoop}, seeks to a random position in [0, 100), then copies a
     * random count of bytes in [0, 100) to {@code /t}.
     *
     * @param args unused
     * @throws IOException        if an HDFS operation fails
     * @throws InterruptedException if obtaining the FileSystem is interrupted
     * @throws URISyntaxException never — the URI literal is valid; declared to
     *                            match the {@code URI} constructor contract
     */
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        // Fixed seed kept from the original code, so offsets/lengths are reproducible.
        Random rand = new Random(10);
        Configuration conf = new Configuration();
        // try-with-resources closes the streams and the FileSystem even if
        // copyBytes throws part-way through (the original leaked them on error).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoop01:9000"), conf, "hadoop");
             FSDataInputStream in = fs.open(new Path("/tt/aa/ff.txt"));
             FSDataOutputStream out = fs.create(new Path("/t"))) {
            // Start reading from a random position within the first 100 bytes.
            in.seek(rand.nextInt(100));
            // Copy a random number of bytes; plain long instead of the
            // deprecated boxing constructor new Long(i).
            long count = rand.nextInt(100);
            // close=false: the try-with-resources block owns stream lifetime.
            IOUtils.copyBytes(in, out, count, false);
        }
    }
}