1:创建一个可以上传到hdfs文件系统的文件(wc.input)
2:编写Java代码调用API完成文件的上传,相当于执行-put命令
3:查看执行结果
全部代码
package com.lizh.hadoop.hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class HdfsApp {

    /**
     * Creates a FileSystem handle from the default Hadoop configuration
     * (core-site.xml / hdfs-site.xml found on the classpath).
     *
     * @return the configured HDFS FileSystem
     * @throws IOException if the file system cannot be obtained
     */
    public static FileSystem getFileSystem() throws IOException {
        Configuration conf = new Configuration();
        return FileSystem.get(conf);
    }

    /**
     * Reads the file at the given HDFS path and copies its bytes to stdout,
     * equivalent to {@code hdfs dfs -cat <path>}.
     *
     * @param path1 HDFS path of the file to read
     */
    public static void readFile(String path1) {
        FSDataInputStream in = null;
        try {
            FileSystem fs = getFileSystem();
            in = fs.open(new Path(path1));
            // 4000-byte copy buffer; 'false' so System.out is not closed
            IOUtils.copyBytes(in, System.out, 4000, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: the original version never closed the input stream (leak)
            IOUtils.closeStream(in);
        }
    }

    /**
     * Uploads the local file /opt/modules/hadoop-2.5.0/wc.input into HDFS
     * as /user/beifeng/put-wc.input — equivalent to running
     * {@code hdfs dfs -put}.
     *
     * @param args unused
     * @throws Exception never in practice; all I/O errors are caught and printed
     */
    public static void main(String[] args) throws Exception {
        // Example of reading a file back:
        // String path = "/user/beifeng/mapreduce/wordcount/input/wc.input";
        // readFile(path);
        FileInputStream fisin = null;
        FSDataOutputStream fsoutstream = null;
        try {
            String filename = "/user/beifeng/put-wc.input"; // destination path on HDFS
            Path inputpath = new Path(filename);
            FileSystem fs = getFileSystem();
            fsoutstream = fs.create(inputpath);
            // source file on the local file system
            fisin = new FileInputStream(new File("/opt/modules/hadoop-2.5.0/wc.input"));
            IOUtils.copyBytes(fisin, fsoutstream, 4000, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(fsoutstream);
            IOUtils.closeStream(fisin);
        }
    }
}
写文件流程图