代码
package zhouls.bigdata.myWholeHadoop.HDFS.hdfs1;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

/**
 * JUnit-driven demo of common HDFS client operations (upload, download,
 * listing, mkdir, delete) against the cluster at hdfs://HadoopMaster:9000/.
 *
 * <p>Fixed from the original: the duplicated {@code org.junit} imports that
 * appeared before the {@code package} declaration (a compile error) were
 * removed, and every stream opened in {@link #upload()} and {@link #main}
 * is now closed via try-with-resources (the original leaked them all).
 */
public class HdfsUtil {

    // Shared HDFS client handle, (re)initialized before each test by init().
    FileSystem fs = null;

    /**
     * Runs before every test method (@Before): builds a Configuration and
     * obtains a FileSystem client for the configured cluster.
     *
     * @throws Exception if the client cannot be created
     */
    @Before
    public void init() throws Exception {
        // Reads the classpath xxx-site.xml config files and loads them into conf.
        Configuration conf = new Configuration();

        // Values set in code override anything read from the config files.
        conf.set("fs.defaultFS", "hdfs://HadoopMaster:9000/");

        // Obtain a concrete FileSystem client instance, acting as user "hadoop".
        fs = FileSystem.get(new URI("hdfs://HadoopMaster:9000/"), conf, "hadoop");
    }

    /**
     * Uploads a local file using the lower-level stream API.
     *
     * <p>Fix: both streams are now closed via try-with-resources; the
     * original never closed them, leaking file handles and risking an
     * unflushed, truncated HDFS file.
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void upload() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://HadoopMaster:9000/");

        FileSystem fs = FileSystem.get(conf);

        Path dst = new Path("hdfs://HadoopMaster:9000/aa/qingshu.txt");

        // try-with-resources guarantees both ends are flushed and closed.
        try (FSDataOutputStream os = fs.create(dst);
             FileInputStream is = new FileInputStream("c:/qingshu.txt")) {
            IOUtils.copy(is, os);
        }
    }

    /**
     * Uploads a local file using the high-level convenience API, which
     * handles stream management internally.
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void upload2() throws Exception, IOException {
        fs.copyFromLocalFile(new Path("c:/qingshu.txt"),
                new Path("hdfs://HadoopMaster:9000/aaa/bbb/ccc/qingshu2.txt"));
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @throws Exception on any I/O or connection failure
     */
    @Test
    public void download() throws Exception {
        fs.copyToLocalFile(new Path("hdfs://HadoopMaster:9000/aa/qingshu2.txt"),
                new Path("c:/qingshu2.txt"));
    }

    /**
     * Prints file information two ways: listFiles (files only, built-in
     * recursion) and listStatus (files and directories, no recursion).
     *
     * @throws FileNotFoundException    if the path does not exist
     * @throws IllegalArgumentException if the path is invalid
     * @throws IOException              on connection failure
     */
    @Test
    public void listFiles() throws FileNotFoundException, IllegalArgumentException, IOException {
        // listFiles lists file entries only, with recursive traversal built in.
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), true);

        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            Path filePath = file.getPath();
            String fileName = filePath.getName();
            System.out.println(fileName);
        }

        System.out.println("---------------------------------");

        // listStatus lists both files and directories, but does not recurse.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus status : listStatus) {
            String name = status.getPath().getName();
            System.out.println(name + (status.isDirectory() ? " is dir" : " is file"));
        }
    }

    /**
     * Creates a directory tree on HDFS (parents are created as needed).
     *
     * @throws IllegalArgumentException if the path is invalid
     * @throws Exception                on connection failure
     */
    @Test
    public void mkdir() throws IllegalArgumentException, Exception {
        fs.mkdirs(new Path("/aaa/bbb/ccc"));
    }

    /**
     * Deletes a file or directory on HDFS ({@code true} = recursive).
     *
     * @throws IllegalArgumentException if the path is invalid
     * @throws IOException              on connection failure
     */
    @Test
    public void rm() throws IllegalArgumentException, IOException {
        fs.delete(new Path("/aa"), true);
    }

    /**
     * Standalone entry point: downloads a file from HDFS to the local disk.
     *
     * <p>Fix: both streams are now closed via try-with-resources; the
     * original leaked them.
     *
     * @param args unused
     * @throws Exception on any I/O or connection failure
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://HadoopMaster:9000/");

        FileSystem fs = FileSystem.get(conf);

        try (FSDataInputStream is = fs.open(new Path("/jdk-7u65-linux-i586.tar.gz"));
             FileOutputStream os = new FileOutputStream("c:/jdk7.tgz")) {
            IOUtils.copy(is, os);
        }
    }
}
package zhouls.bigdata.myWholeHadoop.HDFS.hdfs1;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Standalone uploader: copies a local file onto the HDFS cluster as user
 * "hadoop".
 *
 * <p>Fixes from the original: the URI was written as
 * {@code hdfs://HadoopMaster/9000} — a '/' where the ':' before the port
 * must be — so it addressed host {@code HadoopMaster} on the default port
 * with the bogus path {@code /9000}; the copy destination reused the same
 * malformed string instead of a real HDFS file path; and the FileSystem
 * handle was never closed.
 */
public class HdfsUtilHA {

    /**
     * Uploads C:/test.txt to /test.txt on the cluster.
     *
     * @param args unused
     * @throws Exception on any I/O or connection failure
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // ':' before the port — the original had "hdfs://HadoopMaster/9000".
        FileSystem fs = FileSystem.get(new URI("hdfs://HadoopMaster:9000"), conf, "hadoop");
        try {
            // Destination must be an HDFS path, not the bare cluster authority.
            fs.copyFromLocalFile(new Path("C:/test.txt"), new Path("/test.txt"));
        } finally {
            fs.close();
        }
    }
}
本文转自大数据躺过的坑博客园博客,原文链接:http://www.cnblogs.com/zlslch/p/6175628.html,如需转载请自行联系原作者