HDFS API基本操作

网友投稿 232 2022-11-24

HDFS API基本操作

对HDFS API基本操作都是通过 org.apache.hadoop.fs.FileSystem类进行的,以下是一些常见的操作:

package HdfsAPI;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import junit.framework.TestCase;

/**
 * Basic HDFS operations via {@link org.apache.hadoop.fs.FileSystem}:
 * mkdir, create/write, rename, upload, list, and block-location lookup.
 *
 * <p>NOTE(review): this class mixes JUnit 3 ({@code extends TestCase}) with
 * JUnit 4 {@code @Test} annotations. The superclass is kept here so the
 * external interface is unchanged, but one of the two styles should be
 * dropped — with JUnit 4, {@code extends TestCase} is unnecessary.
 */
public class HDFSAPI extends TestCase {

    /** HDFS NameNode URI; all tests talk to this cluster. */
    public static String hdfsUrl = "hdfs://master:8020";

    /** Creates the directory /liguodong/hdfs (parents included). */
    @Test
    public void testHDFSmkdir() throws IOException {
        Configuration conf = new Configuration();
        // try-with-resources: FileSystem is Closeable and was previously leaked
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            Path path = new Path("/liguodong/hdfs");
            fs.mkdirs(path);
        }
    }

    /** Creates a file and writes a short greeting into it. */
    @Test
    public void testHDFSmkFile() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            Path path = new Path("/liguodong/hdfs/liguodong.txt");
            // The output stream was never closed in the original; without a
            // close() the data may never be flushed to the DataNodes.
            try (FSDataOutputStream fdos = fs.create(path)) {
                fdos.write("hello hadoop".getBytes());
            }
        }
    }

    /** Renames liguodong.txt to love.txt and prints whether it succeeded. */
    @Test
    public void testHDFSRenameFile() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            Path path = new Path("/liguodong/hdfs/liguodong.txt");
            Path renamePath = new Path("/liguodong/hdfs/love.txt");
            System.out.println(fs.rename(path, renamePath));
        }
    }

    /** Uploads a local file using the high-level copyFromLocalFile helper. */
    @Test
    public void testHDFSPutLocalFile1() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            // Debugging on Windows, hence the Windows-style source path;
            // on Linux a Linux path is required.
            Path srcpath = new Path("g:/liguodong.txt");
            Path destpath = new Path("/liguodong/hdfs");
            fs.copyFromLocalFile(srcpath, destpath);
        }
    }

    /** Uploads a local file by manually streaming bytes into HDFS. */
    @Test
    public void testHDFSPutLocalFile2() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            // Debugging on Windows, hence the Windows-style source path;
            // on Linux a Linux path is required.
            String srcpath = "g:/oncemore.txt";
            Path destpath = new Path("/liguodong/hdfs/kkk.txt");
            InputStream is = new BufferedInputStream(
                    new FileInputStream(new File(srcpath)));
            FSDataOutputStream fdos = fs.create(destpath);
            // Buffer fixed from the odd 4094 to 4096; the final 'true' tells
            // IOUtils to close both streams (the original leaked both).
            IOUtils.copyBytes(is, fdos, 4096, true);
        }
    }

    /** Lists every entry directly under /liguodong/hdfs. */
    @Test
    public void testHDFSListFiles() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            Path path = new Path("/liguodong/hdfs");
            FileStatus[] files = fs.listStatus(path);
            for (FileStatus file : files) {
                System.out.println(file.getPath().toString());
            }
        }
    }

    /** Prints the hostnames holding each block of a file. */
    @Test
    public void testHDFSGetBlockInfo() throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf)) {
            Path path = new Path("/liguodong/hdfs/kkk.txt");
            FileStatus filestatus = fs.getFileStatus(path);
            BlockLocation[] blkLoc =
                    fs.getFileBlockLocations(filestatus, 0, filestatus.getLen());
            for (BlockLocation loc : blkLoc) {
                // Hoisted out of the loop: getHosts() was called twice per
                // iteration in the original.
                String[] hosts = loc.getHosts();
                for (String host : hosts) {
                    // Hostname of each replica of this block; with one block
                    // and replication 3 this prints e.g. slave2, slave1, slave5.
                    System.out.println(host);
                }
            }
        }
    }
}

版权声明:本文内容由网络用户投稿,版权归原作者所有,本站不拥有其著作权,亦不承担相应法律责任。如果您发现本站中有涉嫌抄袭或描述失实的内容,请联系我们jiasou666@gmail.com 处理,核实后本网站将在24小时内删除侵权内容。

上一篇:小疆智控RS232转PROFINET网关产品简介
下一篇:HDFS数据完整性
相关文章

 发表评论

暂时没有评论,来抢沙发吧~