// HDFS read/write in Java — writing (appending) data to HDFS with the Java API

package hadoop.write;

import java.io.BufferedInputStream;

import java.io.FileInputStream;

import java.io.IOException;

import java.io.InputStream;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FSDataOutputStream;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.IOUtils;

/**
 * Appends the contents of a local file to an existing file on HDFS.
 *
 * <p>Usage: {@code FileCopyWithProgress [localSrc] [hdfsDst]} — both arguments are
 * optional and default to the original hard-coded paths, so existing invocations
 * keep working.
 *
 * <p>NOTE(review): despite the class name, no {@link org.apache.hadoop.util.Progressable}
 * callback is wired in; the copy reports no progress. Also, {@code fs.append} requires
 * the destination file to already exist on HDFS — confirm that is the intent (use
 * {@code fs.create} to write a new file).
 */
public class FileCopyWithProgress {

    public static void main(String[] args) throws IOException {
        // Source/destination can be overridden on the command line; fall back to
        // the original defaults for backward compatibility.
        String localSrc = args.length > 0 ? args[0] : "D:/eclipse/test.txt";
        String dst = args.length > 1 ? args[1] : "hdfs://lzvm:9000/CopyWithProgress/eclipse.ini";

        Configuration conf = new Configuration();
        // On a small (e.g. pseudo-distributed) cluster the append pipeline cannot
        // recruit a replacement datanode on failure; NEVER avoids spurious
        // "failed to replace a bad datanode" errors during append.
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");

        FileSystem fs = FileSystem.get(URI.create(dst), conf);

        // try-with-resources guarantees both streams are closed even when the copy
        // throws (the original leaked them on an IOException from copyBytes).
        try (InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
             FSDataOutputStream out = fs.append(new Path(dst))) {
            // close=false: closing is handled by try-with-resources above.
            IOUtils.copyBytes(in, out, conf, false);
            // hflush AFTER writing so the appended bytes are visible to new readers.
            // (The original called hflush before any bytes were written — a no-op.)
            out.hflush();
        }
    }
}


// Copyright notice: original article by weixin_30895121, licensed under CC 4.0 BY-SA;
// reproduction must include a link to the original source and this notice.