理解HDFS Java API编程原理;
掌握HDFS的命令;
掌握Eclipse远程调试Hadoop程序的方法;
掌握HDFS基本的API调用方法
通过调用Java API实现对HDFS的文件系统的操作
将fin所指向的本地文件内容追加写入fout对应的HDFS文件中
package hdfsapi;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;

import org.apache.commons.io.CopyUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * HDFS Java API example: appends the contents of a local file
 * ({@code E:\tmp\workspace\test2.txt}) to an existing file on HDFS
 * ({@code /user/root/hello1.txt}) via {@link FileSystem#append(Path)}.
 */
public class E7_WriteDataAPI {

    /**
     * Connects to HDFS at {@code hdfs://master:8020} as user {@code root} and
     * appends the local file's bytes to the HDFS file.
     *
     * @param args unused
     * @throws IOException          if the local file or HDFS is unreachable, or the copy fails
     * @throws InterruptedException if the FileSystem login is interrupted
     */
    public static void main(String[] args)
            throws IllegalArgumentException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:8020");
        // Append must be explicitly enabled; the replace-datanode policy is relaxed
        // so append() works on small clusters with few DataNodes.
        conf.setBoolean("dfs.support.append", true);
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");

        // try-with-resources closes each resource exactly once, in reverse order,
        // even when the copy throws. The original passed close=true to copyBytes
        // and then closed fin/fout again manually (double close), and leaked
        // fs/fin/fout entirely if copyBytes threw.
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://master:8020"), conf, "root");
             FileInputStream fin = new FileInputStream(new File("E:\\tmp\\workspace\\test2.txt"));
             FSDataOutputStream fout = fs.append(new Path("/user/root/hello1.txt"))) {
            // close=false: the try-with-resources block owns stream lifetime.
            IOUtils.copyBytes(fin, fout, 4096, false);
        }
        System.out.println("done!");
    }

}