hadoop 文件操作
存储文件:
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class AddFile {
public static void main(String[] args) throws Exception {
String localSrc = "E:\\test\\spring3_MVC.docx";
String dst = "hdfs://localhost:9000/home/hdfs/s3.docx";
InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
OutputStream out = fs.create(new Path(dst), true);
? ? ? ? ? ? ? ? long l1 = System.currentTimeMillis();
IOUtils.copyBytes(in, out, 4096, true);
System.out.println("ms = " + (System.currentTimeMillis() - l1));
}
}
下载文件:
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class readFile{
public static void main(String[] args) throws Exception {
? ? ? ? String uri = "hdfs://localhost:9000/home/hdfs/s3.docx";
? ? ? ? Configuration conf = new Configuration();
? ? ? ? FileSystem fs = FileSystem.get(URI.create(uri), conf);
? ? ? ? InputStream in = null;
? ? ? ? OutputStream out = null;
? ? ? ? try {
? ? ? ? ? ? in = fs.open(new Path(uri));
? ? ? ? ? ? out = new FileOutputStream("D:/s3.docx");
? ? ? ? ? ? IOUtils.copyBytes(in, out, 4096, false);
? ? ? ? } finally {
? ? ? ? ? ? IOUtils.closeStream(in);
? ? ? ? ? ? IOUtils.closeStream(out);
? ? ? ? }
?
? ? }
}