hadoop: HDFS API examples

Posted: 2022-05-22 15:33:20

With the HDFS API you can read and write files and directories on HDFS. Using this API you could even build a simple knock-off cloud drive, as shown in the screenshot below:

(Screenshot: a makeshift cloud-drive UI built on the HDFS API)
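To compile the code below, the Hadoop client library must be on the classpath. A minimal Maven sketch (the version number here is an assumption; pick one that matches your cluster):

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <!-- assumption: use the version that matches your cluster -->
    <version>2.6.0</version>
</dependency>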

For convenience, the common file read/write operations are wrapped in a utility class:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

/**
 * HDFS utility class.
 * Author: 菩提树下的杨过 (http://yjmyzz.cnblogs.com)
 * Since: 2015-05-21
 *
 * Note: each method obtains the FileSystem via FileSystem.get(conf) and
 * closes it when done, which is fine for simple, single-threaded use.
 */
public class HDFSUtil {

    private HDFSUtil() {
    }

    /**
     * Check whether a path exists.
     */
    public static boolean exists(Configuration conf, String path) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs.exists(new Path(path));
    }

    /**
     * Create a file with the given byte content.
     */
    public static void createFile(Configuration conf, String filePath, byte[] contents) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(filePath);
        FSDataOutputStream outputStream = fs.create(path);
        outputStream.write(contents);
        outputStream.close();
        fs.close();
    }

    /**
     * Create a file with the given string content.
     */
    public static void createFile(Configuration conf, String filePath, String fileContent) throws IOException {
        createFile(conf, filePath, fileContent.getBytes());
    }

    /**
     * Upload a local file to HDFS.
     * Note: the first argument (delSrc=true) deletes the local source after a
     * successful copy; the second (overwrite=true) replaces any existing remote file.
     */
    public static void copyFromLocalFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path localPath = new Path(localFilePath);
        Path remotePath = new Path(remoteFilePath);
        fs.copyFromLocalFile(true, true, localPath, remotePath);
        fs.close();
    }

    /**
     * Delete a file or directory.
     *
     * @param recursive also delete children when the path is a directory
     */
    public static boolean deleteFile(Configuration conf, String remoteFilePath, boolean recursive) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        boolean result = fs.delete(new Path(remoteFilePath), recursive);
        fs.close();
        return result;
    }

    /**
     * Delete a file or directory (recursively, if it has children).
     */
    public static boolean deleteFile(Configuration conf, String remoteFilePath) throws IOException {
        return deleteFile(conf, remoteFilePath, true);
    }

    /**
     * Rename a file.
     */
    public static boolean renameFile(Configuration conf, String oldFileName, String newFileName) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path oldPath = new Path(oldFileName);
        Path newPath = new Path(newFileName);
        boolean result = fs.rename(oldPath, newPath);
        fs.close();
        return result;
    }

    /**
     * Create a directory.
     */
    public static boolean createDirectory(Configuration conf, String dirName) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path dir = new Path(dirName);
        boolean result = fs.mkdirs(dir);
        fs.close();
        return result;
    }

    /**
     * List all files (excluding directories) under the given path.
     * The caller owns the FileSystem and must keep it open while consuming
     * the returned iterator, since entries are fetched lazily.
     */
    public static RemoteIterator<LocatedFileStatus> listFiles(FileSystem fs, String basePath, boolean recursive) throws IOException {
        return fs.listFiles(new Path(basePath), recursive);
    }

    /**
     * List files under the given path (non-recursive).
     * Note: the FileSystem is intentionally NOT closed here; the returned
     * iterator fetches entries lazily and would fail on a closed FileSystem.
     */
    public static RemoteIterator<LocatedFileStatus> listFiles(Configuration conf, String basePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs.listFiles(new Path(basePath), false);
    }

    /**
     * List the files and subdirectories directly under a directory (non-recursive).
     */
    public static FileStatus[] listStatus(Configuration conf, String dirPath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fileStatuses = fs.listStatus(new Path(dirPath));
        fs.close();
        return fileStatuses;
    }

    /**
     * Read the content of a file as a string.
     */
    public static String readFile(Configuration conf, String filePath) throws IOException {
        String fileContent = null;
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(filePath);
        InputStream inputStream = null;
        ByteArrayOutputStream outputStream = null;
        try {
            inputStream = fs.open(path);
            outputStream = new ByteArrayOutputStream(inputStream.available());
            IOUtils.copyBytes(inputStream, outputStream, conf);
            fileContent = outputStream.toString();
        } finally {
            IOUtils.closeStream(inputStream);
            IOUtils.closeStream(outputStream);
            fs.close();
        }
        return fileContent;
    }
}

A quick test:

@Test
public void test() throws IOException {
    Configuration conf = new Configuration();
    String newDir = "/test";

    // 01. test: check whether the path exists
    if (HDFSUtil.exists(conf, newDir)) {
        System.out.println(newDir + " already exists!");
    } else {
        // 02. test: create a directory
        boolean result = HDFSUtil.createDirectory(conf, newDir);
        if (result) {
            System.out.println(newDir + " created successfully!");
        } else {
            System.out.println(newDir + " creation failed!");
        }
    }

    String fileContent = "Hi,hadoop. I love you";
    String newFileName = newDir + "/myfile.txt";

    // 03. test: create a file
    HDFSUtil.createFile(conf, newFileName, fileContent);
    System.out.println(newFileName + " created successfully");

    // 04. test: read file content
    System.out.println(newFileName + " content:\n" + HDFSUtil.readFile(conf, newFileName));

    // 05. test: list all entries under the root directory
    FileStatus[] dirs = HDFSUtil.listStatus(conf, "/");
    System.out.println("--subdirectories under the root directory---");
    for (FileStatus s : dirs) {
        System.out.println(s);
    }

    // 06. test: list all files (recursively)
    FileSystem fs = FileSystem.get(conf);
    RemoteIterator<LocatedFileStatus> files = HDFSUtil.listFiles(fs, "/", true);
    System.out.println("--files under the root directory---");
    while (files.hasNext()) {
        System.out.println(files.next());
    }
    fs.close();

    // 07. test: delete the directory
    boolean isDeleted = HDFSUtil.deleteFile(conf, newDir);
    System.out.println(newDir + (isDeleted ? " has been deleted" : " deletion failed"));
}

Note: when testing, don't forget to put a core-site.xml file under the resources directory; otherwise, when run from the IDE, the code has no way of knowing which HDFS cluster to connect to.
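A minimal core-site.xml sketch (the NameNode address below matches the output further down; replace it with your own. On very old 1.x clusters the property is named fs.default.name instead):

<?xml version="1.0"?>
<configuration>
    <!-- tells the client which NameNode to talk to -->
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://172.28.20.102:9000</value>
    </property>
</configuration>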

Output:

/test already exists!
/test/myfile.txt created successfully
/test/myfile.txt content:
Hi,hadoop. I love you
--subdirectories under the root directory---
FileStatus{path=hdfs://172.28.20.102:9000/jimmy; isDirectory=true; modification_time=1432176691550; access_time=0; owner=hadoop; group=supergroup; permission=rwxrwxrwx; isSymlink=false}
FileStatus{path=hdfs://172.28.20.102:9000/test; isDirectory=true; modification_time=1432181331362; access_time=0; owner=jimmy; group=supergroup; permission=rwxr-xr-x; isSymlink=false}
FileStatus{path=hdfs://172.28.20.102:9000/user; isDirectory=true; modification_time=1431931797244; access_time=0; owner=hadoop; group=supergroup; permission=rwxr-xr-x; isSymlink=false}
--files under the root directory---
LocatedFileStatus{path=hdfs://172.28.20.102:9000/jimmy/input/README.txt; isDirectory=false; length=1366; replication=1; blocksize=134217728; modification_time=1431922483851; access_time=1432174134018; owner=hadoop; group=supergroup; permission=rw-r--r--; isSymlink=false}
LocatedFileStatus{path=hdfs://172.28.20.102:9000/jimmy/output/_SUCCESS; isDirectory=false; length=0; replication=3; blocksize=134217728; modification_time=1432176692454; access_time=1432176692448; owner=jimmy; group=supergroup; permission=rw-r--r--; isSymlink=false}
LocatedFileStatus{path=hdfs://172.28.20.102:9000/jimmy/output/part-r-00000; isDirectory=false; length=1306; replication=3; blocksize=134217728; modification_time=1432176692338; access_time=1432176692182; owner=jimmy; group=supergroup; permission=rw-r--r--; isSymlink=false}
LocatedFileStatus{path=hdfs://172.28.20.102:9000/test/myfile.txt; isDirectory=false; length=21; replication=3; blocksize=134217728; modification_time=1432181331601; access_time=1432181331362; owner=jimmy; group=supergroup; permission=rw-r--r--; isSymlink=false}
/test has been deleted

Using spring-mvc together with the HDFS API, I also built a knock-off of Hadoop's built-in file-browser UI (only the file-listing feature is finished):

(Screenshot: file-listing page of the spring-mvc HDFS web client)
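The listing page essentially boils down to one controller method. A minimal sketch of what it could look like (the class, mapping, and view names here are made up for illustration; only the HDFSUtil call comes from the code above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;

import java.io.IOException;

@Controller
public class HdfsBrowseController {

    // hypothetical mapping: /browse?path=/some/dir lists the entries of that directory
    @RequestMapping("/browse")
    public String browse(@RequestParam(value = "path", defaultValue = "/") String path,
                         Model model) throws IOException {
        Configuration conf = new Configuration(); // picks up core-site.xml from the classpath
        FileStatus[] entries = HDFSUtil.listStatus(conf, path);
        model.addAttribute("path", path);
        model.addAttribute("entries", entries); // the view iterates these to render the table
        return "browse"; // hypothetical view name
    }
}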

The source code is hosted on the taobao open-source platform; feel free to take a look:

http://code.taobao.org/p/hdfs-web-client/src/trunk/