It's actually quite simple: as long as the configuration is correct, it just works.
Let's go straight to the code.
package com.younglibin.hadoop;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSystem {

    /** Copies a local file into HDFS at the given remote path. */
    public static void uploadLocalFile2HDFS(Configuration config,
            String localFile, String remoteFile) throws IOException {
        FileSystem hdfs = FileSystem.get(config);
        Path src = new Path(localFile);
        Path dst = new Path(remoteFile);
        hdfs.copyFromLocalFile(src, dst);
        hdfs.close();
    }

    /** Creates a new HDFS file and writes the given string content into it. */
    public static void createNewHDFSFile(String toCreateFilePath, String content)
            throws IOException {
        Configuration config = new Configuration();
        FileSystem hdfs = FileSystem.get(config);
        FSDataOutputStream os = hdfs.create(new Path(toCreateFilePath));
        os.write(content.getBytes("UTF-8"));
        os.close();
        hdfs.close();
    }

    /** Deletes the file (or directory, recursively) at the given HDFS path. */
    public static boolean deleteHDFSFile(String dst) throws IOException {
        Configuration config = new Configuration();
        FileSystem hdfs = FileSystem.get(config);
        Path path = new Path(dst);
        boolean isDeleted = hdfs.delete(path, true);
        hdfs.close();
        return isDeleted;
    }

    /** Reads an entire HDFS file into a byte array. */
    public static byte[] readHDFSFile(String dst) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Check that the file exists before opening it.
        Path path = new Path(dst);
        if (fs.exists(path)) {
            FSDataInputStream is = fs.open(path);
            // Use the file length to size the read buffer.
            FileStatus stat = fs.getFileStatus(path);
            byte[] buffer = new byte[(int) stat.getLen()];
            is.readFully(0, buffer);
            is.close();
            fs.close();
            return buffer;
        } else {
            throw new Exception("the file is not found.");
        }
    }
}
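A quick usage sketch of these helpers (the demo class name and file paths are hypothetical; the NameNode address is the one used later in this post). Note that only uploadLocalFile2HDFS takes a Configuration; the other helpers build their own, so they rely on a core-site.xml with fs.default.name being on the classpath:

package com.younglibin.hadoop;

import org.apache.hadoop.conf.Configuration;

public class HDFSystemDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the NameNode (see the note below on the
        // "Wrong FS" error you get without this).
        conf.set("fs.default.name", "hdfs://172.16.236.11:9000");

        // Upload a local file, read it back, then delete it.
        // (createNewHDFSFile/readHDFSFile/deleteHDFSFile create their own
        // Configuration, so they pick up the NameNode address from the
        // core-site.xml on the classpath.)
        HDFSystem.uploadLocalFile2HDFS(conf, "/tmp/hello.txt",
                "/user/libin/input/hello.txt");
        byte[] data = HDFSystem.readHDFSFile("/user/libin/input/hello.txt");
        System.out.println(new String(data, "UTF-8"));
        HDFSystem.deleteHDFSFile("/user/libin/input/hello.txt");
    }
}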
Below is the same WordCount program as before, reworked a little: each run takes a local file, first uploads it to the server (HDFS), and then runs the job against it.
Note that when uploading the file I added the line conf.set("fs.default.name", "hdfs://172.16.236.11:9000"); without it the file cannot be found and the job fails with:

Hadoop HDFS Wrong FS: hdfs:/ expected file:///

Reference: http://www.cnblogs.com/bingofworld/archive/2013/06/09/3129299.html
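To make the failure mode concrete: with a bare Configuration the client defaults to the local file system, so HDFS paths are rejected. A minimal sketch (same NameNode address as above):

Configuration conf = new Configuration();
// Without the following line, FileSystem.get(conf) returns the local
// file system (file:///), and any hdfs:// path fails with
// "Wrong FS: hdfs:/ expected file:///".
conf.set("fs.default.name", "hdfs://172.16.236.11:9000");
FileSystem fs = FileSystem.get(conf); // now talks to HDFS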
Here's the code:
public static void main(String[] args) throws Exception {
    // Package the compiled classes in bin/ into a temporary jar so the
    // job can ship them to the cluster.
    File jarFile = EJob.createTempJar("bin");
    EJob.addClasspath("/home/libin/software/hadoop/hadoop-1.2.1/conf");
    ClassLoader classLoader = EJob.getClassLoader();
    Thread.currentThread().setContextClassLoader(classLoader);

    Configuration conf = new Configuration();
    conf.set("mapred.job.tracker", "172.16.236.11:9001");
    // Required so that paths resolve against HDFS instead of the local
    // file system (see the "Wrong FS" note above).
    conf.set("fs.default.name", "hdfs://172.16.236.11:9000");

    // Upload the local input file to HDFS before running the job.
    String fileName = "local";
    HDFSystem.uploadLocalFile2HDFS(conf,
            new File(fileName).getAbsolutePath(),
            "hdfs://172.16.236.11:9000/user/libin/input/" + fileName);
    args = new String[] { "/user/libin/input/" + fileName,
            "/user/libin/output/wordcount" + System.currentTimeMillis() };

    String[] otherArgs = new GenericOptionsParser(conf, args)
            .getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }

    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    // Use the temp jar built above rather than a prebuilt job jar.
    ((JobConf) job.getConfiguration()).setJar(jarFile.toString());
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    // HDFS paths always use "/"; File.separator would break on Windows.
    FileInputFormat.addInputPath(job, new Path(
            "hdfs://172.16.236.11:9000" + otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(
            "hdfs://172.16.236.11:9000" + otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
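For completeness, the TokenizerMapper and IntSumReducer referenced above are the stock classes from the Hadoop WordCount example, nothing custom; they sit as static nested classes inside WordCount and look like this:

public static class TokenizerMapper extends
        Mapper<Object, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    // Emit (word, 1) for every whitespace-separated token in the line.
    public void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            context.write(word, one);
        }
    }
}

public static class IntSumReducer extends
        Reducer<Text, IntWritable, Text, IntWritable> {
    private IntWritable result = new IntWritable();

    // Sum the counts for each word; also used as the combiner above.
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable val : values) {
            sum += val.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}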