Operating HDFS with Hadoop's Own Classes
Posted by Mr.He
package hdfs;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class App2 {
    static final String PATH = "hdfs://chaoren:9000/";
    static final String DIR = "/d1";
    static final String FILE = "/d1/hello";

    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = getFileSystem();
        // Create a directory, equivalent to: hadoop fs -mkdir /d1
        mkdir(fileSystem);
        // Upload a file, equivalent to: hadoop fs -put src dest
        putData(fileSystem);
        // Download a file, equivalent to: hadoop fs -get src dest
        //getData(fileSystem);
        // List the contents of a directory
        list(fileSystem);
        // Delete a directory
        //remove(fileSystem);
    }

    // List every entry under the root directory and print its basic metadata.
    private static void list(FileSystem fileSystem) throws IOException {
        final FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            String isDir = fileStatus.isDir() ? "directory" : "file";
            final String permission = fileStatus.getPermission().toString();
            final short replication = fileStatus.getReplication();
            final long len = fileStatus.getLen();
            final String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + permission + "\t" + replication + "\t" + len + "\t" + path);
        }
    }

    // Open the HDFS file and stream its contents to standard output.
    private static void getData(FileSystem fileSystem) throws IOException {
        final FSDataInputStream in = fileSystem.open(new Path(FILE));
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    // Copy a local file into HDFS by writing it through an output stream.
    private static void putData(FileSystem fileSystem) throws IOException, FileNotFoundException {
        final FSDataOutputStream out = fileSystem.create(new Path(FILE));
        final FileInputStream in = new FileInputStream("H:/kuaipan/hadoop/classes/yy131009/day2/readme.txt");
        IOUtils.copyBytes(in, out, 1024, true);
    }

    // Delete the directory recursively.
    private static void remove(FileSystem fileSystem) throws IOException {
        fileSystem.delete(new Path(DIR), true);
    }

    // Create the directory (including any missing parents).
    private static void mkdir(FileSystem fileSystem) throws IOException {
        fileSystem.mkdirs(new Path(DIR));
    }

    // Obtain a FileSystem handle for the NameNode given by PATH.
    private static FileSystem getFileSystem() throws IOException, URISyntaxException {
        return FileSystem.get(new URI(PATH), new Configuration());
    }
}
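As a side note, the commented-out getData above streams the HDFS file to standard output. If you want the real equivalent of hadoop fs -get, i.e. saving the file to the local disk, a minimal sketch could look like the one below. The class name GetToLocal and the local destination path C:/tmp/hello are placeholders chosen for illustration; the sketch assumes the same hdfs://chaoren:9000/ NameNode as App2.

package hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Minimal sketch: download /d1/hello to a local file instead of printing it.
// The local path below is a placeholder; adjust it to your own machine.
public class GetToLocal {
    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://chaoren:9000/"), new Configuration());
        // copyToLocalFile does the same job as: hadoop fs -get /d1/hello C:/tmp/hello
        fileSystem.copyToLocalFile(new Path("/d1/hello"), new Path("C:/tmp/hello"));
        fileSystem.close();
    }
}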