1. First, get the HDFS connection:
package api;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class Utils {

    public static FileSystem HDFS() throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the NameNode
        conf.set("fs.defaultFS", "hdfs://potter2:9000");
        // Operate on the cluster as the "potter" user
        System.setProperty("HADOOP_USER_NAME", "potter");
        FileSystem fs = FileSystem.get(conf);
        return fs;
    }
}
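Before moving on, it can help to confirm that the connection actually works. Below is a minimal sketch (the class name ConnectionCheck is mine, and it assumes Utils sits in the same api package and that the NameNode at potter2:9000 is reachable) that simply lists the entries under the HDFS root:

package api;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

public class ConnectionCheck {

    @Test
    public void listRoot() throws Exception {
        // Reuse the helper above to connect to hdfs://potter2:9000
        FileSystem fs = Utils.HDFS();
        // Print every entry directly under the HDFS root
        for (FileStatus status : fs.listStatus(new Path("/"))) {
            System.out.println(status.getPath());
        }
        fs.close();
    }
}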
2. The main code is as follows:
package api;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

/**
 * Delete files of a particular type under a given path,
 * e.g. .class files or .txt files.
 * @author Administrator
 */
public class Dels {

    public static final String TXT = "txt";

    @Test
    public void dels() throws Exception {
        Path path = new Path("/a");
        // Get the connection
        FileSystem fs = Utils.HDFS();
        // Look up the status of the path on HDFS
        FileStatus fileStatus = fs.getFileStatus(path);
        boolean directory = fileStatus.isDirectory();
        System.out.println(directory);
        System.out.println("--------------------");
        // Recurse into directories; otherwise try to delete the single file
        if (directory) {
            Directory(path);
        } else {
            Delete(path);
        }
    }

    public void Directory(Path path) throws Exception {
        // Get the connection
        FileSystem fs = Utils.HDFS();
        // List the immediate children (files and sub-directories) of this directory
        FileStatus[] listStatus = fs.listStatus(path);
        // Walk the children looking for .txt files
        for (FileStatus fStatus : listStatus) {
            Path p = fStatus.getPath();
            System.out.println(p + "***********");
            // Files are handed to Delete (which checks the suffix);
            // sub-directories are recursed into
            if (fStatus.isFile()) {
                Delete(p);
            } else {
                Directory(p);
            }
        }
    }

    public void Delete(Path path) throws Exception {
        FileSystem fs = Utils.HDFS();
        // Get the file name from the path
        String name = path.getName();
        System.out.println(name);
        // Delete only if the name really ends with ".txt"
        if (name.endsWith("." + TXT)) {
            fs.delete(path, true);
        }
    }
}
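As a side note, the manual recursion in Directory() can also be replaced with FileSystem.listFiles(path, true), which hands back a RemoteIterator over every file below the path. The following is only a sketch of that alternative (the class name DelsWithListFiles is mine; it assumes the same /a directory and .txt suffix as above):

package api;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Test;

public class DelsWithListFiles {

    @Test
    public void delTxtFiles() throws Exception {
        FileSystem fs = Utils.HDFS();
        // listFiles with recursive=true walks every file under /a for us,
        // so no hand-written directory recursion is needed
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/a"), true);
        while (it.hasNext()) {
            Path p = it.next().getPath();
            if (p.getName().endsWith(".txt")) {
                // false: we are deleting a single file, nothing to recurse into
                fs.delete(p, false);
            }
        }
        fs.close();
    }
}

The trade-off is that listFiles only returns files, so this version cannot act on empty sub-directories, whereas the hand-rolled recursion above could be extended to do so.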