Hadoop Editor


The program below is a small console-driven "editor" for HDFS: it prompts the user for a file name and a line of content, creates the directory /hadoopusers/diaoling and the file if they do not exist, appends the content, and reads it back through the Hadoop FileSystem API.

package hadoopbianjiqi;

import java.io.*;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class main {
    public static Scanner scan = new Scanner(System.in);

    // Check whether a path exists on HDFS
    public static boolean test(Configuration conf, String path) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs.exists(new Path(path));
    }

    // Create a directory
    public static boolean mkdir(Configuration conf, String remoteDir) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path dirPath = new Path(remoteDir);
        boolean result = fs.mkdirs(dirPath);
        fs.close();
        return result;
    }

    // Create an empty file
    public static void touchz(Configuration conf, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path remotePath = new Path(remoteFilePath);
        FSDataOutputStream outputStream = fs.create(remotePath);
        outputStream.close();
        fs.close();
    }

    // Delete a file (non-recursive)
    public static boolean rm(Configuration conf, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path remotePath = new Path(remoteFilePath);
        boolean result = fs.delete(remotePath, false);
        fs.close();
        return result;
    }

    // Read a file (prints the first line only)
    public static void cat(Configuration conf, String filePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path file = new Path(filePath);
        FSDataInputStream getIt = fs.open(file);
        BufferedReader d = new BufferedReader(new InputStreamReader(getIt));
        String content = d.readLine();
        System.out.println(content);
        d.close();
        fs.close();
    }

    // Append content to the end of a file
    public static void appendContentToFile(Configuration conf, String content, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path remotePath = new Path(remoteFilePath);
        FSDataOutputStream out = fs.append(remotePath);
        out.write(content.getBytes());
        out.close();
        fs.close();
    }

    // Append the first line of file 1 to the end of file 2
    public static void appendContentToFile2(Configuration conf, String remoteFilePath, String remoteFilePath2) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path file = new Path(remoteFilePath);
        FSDataInputStream getIt = fs.open(file);
        BufferedReader d = new BufferedReader(new InputStreamReader(getIt));
        String content1 = d.readLine();
        Path remotePath = new Path(remoteFilePath2);
        FSDataOutputStream out = fs.append(remotePath);
        out.write(content1.getBytes());
        d.close();
        out.close();
        fs.close();
    }

    public static void main(String[] args) {
        int n = 0;
        while (n != 4) {  // the original looped on n != 5 even though 4 is the exit option
            System.out.println("Please choose a function");
            System.out.println(" 1-Create\n 2-Delete\n 3-Edit\n 4-Exit\n");
            n = scan.nextInt();
            scan.nextLine(); // consume the newline left behind by nextInt()
            switch (n) {
            case 1: {
                try {
                    touchz();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                break;
            }
            case 2: {
                // delete: left unimplemented in the original; see the sketch below
                break;
            }
            case 3: {
                // edit: left unimplemented in the original; see the sketch below
                break;
            }
            }
        }
        System.out.println("Exited the system");
    }

    private static void touchz() throws IOException {
        Configuration conf = new Configuration();
        // Keep append() working on clusters with few DataNodes
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        conf.set("fs.default.name", "hdfs://192.168.198.130:8020");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        System.out.println("Please enter the content");
        String remoteDir = "/hadoopusers/diaoling";
        String content = scan.nextLine();
        System.out.println("Please enter the file name");
        String name = scan.nextLine();
        String remoteFilePath = "/hadoopusers/diaoling/" + name + ".txt";
        try {
            // Create the directory if it does not exist
            if (!main.test(conf, remoteDir)) {
                main.mkdir(conf, remoteDir);
                System.out.println("Created directory " + remoteDir);
            } else {
                System.out.println("Directory " + remoteDir + " already exists");
            }
            // Create the file if it does not exist
            if (!main.test(conf, remoteFilePath)) {
                main.touchz(conf, remoteFilePath);
                System.out.println("Created file " + remoteFilePath);
            } else {
                System.out.println(remoteFilePath + " already exists");
            }
            // Write the content into the file, then read it back
            main.appendContentToFile(conf, content, remoteFilePath);
            main.cat(conf, remoteFilePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
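Menu options 2 (delete) and 3 (edit) are empty stubs in the original, even though the class already defines rm() and appendContentToFile() helpers that could back them. Below is a minimal sketch of one way to wire them up. The MenuSketch class and its deleteFile()/editFile() methods are my own additions, not part of the original post; the HDFS address and the /hadoopusers/diaoling directory are simply reused from the code above.

package hadoopbianjiqi;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

// Sketch: back the "delete" and "edit" menu options with the existing helpers.
public class MenuSketch {
    // Same client configuration as the original touchz() helper
    private static Configuration hdfsConf() {
        Configuration conf = new Configuration();
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        conf.set("fs.default.name", "hdfs://192.168.198.130:8020");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return conf;
    }

    // Option 2: delete /hadoopusers/diaoling/<name>.txt if it exists
    static void deleteFile() throws IOException {
        System.out.println("Please enter the file name to delete");
        String name = main.scan.nextLine();
        String path = "/hadoopusers/diaoling/" + name + ".txt";
        Configuration conf = hdfsConf();
        if (main.test(conf, path) && main.rm(conf, path)) {
            System.out.println("Deleted " + path);
        } else {
            System.out.println(path + " does not exist or could not be deleted");
        }
    }

    // Option 3: append one more line to an existing file, then print it back
    static void editFile() throws IOException {
        System.out.println("Please enter the file name to edit");
        String name = main.scan.nextLine();
        String path = "/hadoopusers/diaoling/" + name + ".txt";
        Configuration conf = hdfsConf();
        if (!main.test(conf, path)) {
            System.out.println(path + " does not exist");
            return;
        }
        System.out.println("Please enter the content to append");
        String content = main.scan.nextLine();
        // The original appendContentToFile() writes no trailing newline, so add one here;
        // note that cat() only prints the first line of the file.
        main.appendContentToFile(conf, content + "\n", path);
        main.cat(conf, path);
    }
}

Two details in the configuration are worth knowing. The fs.default.name key has been deprecated in favor of fs.defaultFS since Hadoop 2.x, although the old key still works. And the two dfs.client.block.write.replace-datanode-on-failure.* settings are a common workaround for append() failing on clusters with fewer than three DataNodes, where the client cannot find a replacement node for the write pipeline.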
