Some basic Hadoop features

The three listings below cover reading a line from an HDFS file, checking whether an HDFS file exists, and the classic MapReduce word count.
import org.apache.hadoop.fs.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

// Lazily-initialized singleton wrapper around FSDataInputStream that reads a
// single line from an HDFS stream. Note that the wrapped InputStream must be
// seekable (e.g. the stream returned by FileSystem.open()), or the
// FSDataInputStream constructor will reject it.
public class hdoopduqu extends FSDataInputStream {
    private static hdoopduqu myFSDataInputStream;
    private static InputStream inputStream;

    private hdoopduqu(InputStream in) {
        super(in);
        inputStream = in;
    }

    // Double-checked locking: only the first caller's stream is wrapped;
    // later calls return the same instance and ignore their argument.
    public static hdoopduqu getInstance(InputStream in) {
        if (null == myFSDataInputStream) {
            synchronized (hdoopduqu.class) {
                if (null == myFSDataInputStream) {
                    myFSDataInputStream = new hdoopduqu(in);
                }
            }
        }
        return myFSDataInputStream;
    }

    // Reads one line from the stream passed to getInstance(), then closes it.
    // The FileSystem parameter is unused; it is kept from the original code,
    // where it was apparently meant for reopening the file (see the commented
    // line below).
    public static String readline(FileSystem fileSystem) {
        try {
            // FSDataInputStream inputStream = fileSystem.open(remotePath);
            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
            String line = bufferedReader.readLine();
            bufferedReader.close();
            inputStream.close();
            return line;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
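A minimal sketch of how the class above might be driven, not part of the original article: it assumes the same NameNode address used in the next listing and a hypothetical HDFS file /user/hadoop/test.txt.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class hdoopduquTest { // hypothetical driver class
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.198.130:8020");
        FileSystem fs = FileSystem.get(conf);
        // fs.open() returns an FSDataInputStream, which is seekable and can
        // therefore be wrapped by hdoopduqu
        FSDataInputStream in = fs.open(new Path("/user/hadoop/test.txt")); // hypothetical path
        hdoopduqu.getInstance(in);
        System.out.println(hdoopduqu.readline(fs)); // prints the file's first line
        fs.close();
    }
}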
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Checks whether a file exists on HDFS.
public class ifexit {
    public static void main(String[] args) {
        try {
            // A relative path such as "tmp" resolves against the current
            // user's HDFS home directory (e.g. /user/<username>/tmp)
            String fileName = "tmp";
            Configuration conf = new Configuration();
            // Address of the HDFS NameNode
            conf.set("fs.defaultFS", "hdfs://192.168.198.130:8020");
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            FileSystem fs = FileSystem.get(conf);
            if (fs.exists(new Path(fileName))) {
                System.out.println("File exists");
            } else {
                System.out.println("File does not exist");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
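The FileSystem handle above is never closed. As a sketch of a tidier variant (the class name ifexit2 and the try-with-resources form are not from the original), the same check can be written as:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ifexit2 { // hypothetical variant
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.198.130:8020");
        // FileSystem implements Closeable, so try-with-resources
        // guarantees the handle is released
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println(fs.exists(new Path("tmp"))
                    ? "File exists" : "File does not exist");
        }
    }
}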
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
// The classic MapReduce word-count job: the mapper tokenizes each input
// line and emits (word, 1); the reducer sums the counts per word.
public class WordCount {

    public WordCount() {
    }

    public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {
        private static final IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                context.write(word, one);
            }
        }
    }

    // Also used as the combiner, since summing is associative and commutative.
    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = (new GenericOptionsParser(conf, args)).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: wordcount <in> [<in>...] <out>");
            System.exit(2);
        }
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        for (int i = 0; i < otherArgs.length - 1; ++i) {
            FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
        }
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
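To run the job, package the class into a jar and submit it with the hadoop launcher, for example: hadoop jar wordcount.jar WordCount /input /output (the jar name and paths here are examples). Note that the output directory must not exist beforehand; FileOutputFormat fails the job if it does.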