1. Add the following configuration parameters in the main program:
Configuration conf = new Configuration();
// 1. Set the default file system the job will access at runtime
conf.set("fs.defaultFS", HADOOP_ROOT_PATH);
// 2. Set where the job will be submitted to run
conf.set("yarn.resourcemanager.hostname", "hadoop1");
conf.set("mapreduce.framework.name", "yarn");
// 3. If the job submission client runs on Windows, this cross-platform parameter is required
conf.set("mapreduce.app-submission.cross-platform", "true");

Job job = Job.getInstance(conf);

// 1. Set the location of the jar file (backslashes in the Windows path must be escaped)
job.setJar("D:\\HadoopStudy\\Workspace\\hdfs24\\hdfs24_fat.jar");
// job.setJarByClass(WordCountMain.class);

// 2. Set the Mapper and Reducer implementation classes this job will use
job.setMapperClass(WordCountMapper.class);
job.setReducerClass(WordcountReducer.class);

// 3. Set the key/value types produced by the Mapper and Reducer
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);

// 4. Set the input path of the data set and the output path for the final result
Path output = new Path(HADOOP_OUTPUT_PATH);
FileSystem fs = FileSystem.get(new URI(HADOOP_ROOT_PATH), conf);
if (fs.exists(output)) {
    // The output path must not exist, so delete it if present
    fs.delete(output, true);
}
FileInputFormat.setInputPaths(job, new Path(HADOOP_INPUT_PATH));
FileOutputFormat.setOutputPath(job, output);

// 5. Set the number of reduce tasks to launch
job.setNumReduceTasks(2);

// 6. Submit the job to YARN and wait for completion
boolean res = job.waitForCompletion(true);
System.out.println("OK");
System.exit(res ? 0 : -1);
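The snippet above references the constants HADOOP_ROOT_PATH, HADOOP_INPUT_PATH, and HADOOP_OUTPUT_PATH, plus several Hadoop classes, without showing their declarations. A minimal sketch of what the surrounding driver class might look like is below; the hdfs://hadoop1:9000 URI and the /wordcount/* paths are assumptions for illustration (they are not given in the original program), so adjust them to match your cluster's fs.defaultFS and data layout:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountMain {
    // Assumed values: replace with your cluster's fs.defaultFS and HDFS paths
    private static final String HADOOP_ROOT_PATH = "hdfs://hadoop1:9000";
    private static final String HADOOP_INPUT_PATH = "/wordcount/input";
    private static final String HADOOP_OUTPUT_PATH = "/wordcount/output";

    public static void main(String[] args) throws Exception {
        // ... the configuration and job-submission code shown above goes here ...
    }
}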
2. Configure the hosts file on Windows so that the hostname used in the YARN configuration can be resolved (in my configuration above it is hadoop1).
Edit the hosts file at C drive -> Windows -> System32 -> drivers -> etc -> hosts
and add the following line at the bottom:
10.1.7.96 hadoop1
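To verify that the entry takes effect before submitting a job, you can run a quick resolution check on the Windows machine. This is a minimal sketch using the JDK's InetAddress; the hostname hadoop1 matches the entry above:

import java.net.InetAddress;

// Quick check that "hadoop1" resolves via the Windows hosts file.
// If this throws UnknownHostException, the hosts entry is missing or wrong.
public class HostCheck {
    public static void main(String[] args) throws Exception {
        InetAddress addr = InetAddress.getByName("hadoop1");
        System.out.println("hadoop1 resolves to " + addr.getHostAddress()); // expect 10.1.7.96
    }
}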
If this entry is missing, the following error is thrown:
Exception in thread "main" java.net.UnknownHostException: Invalid host name: local host is: (unknown); destination host is: "hadoop1":8032; java.net.UnknownHostException; For more details see: http://wiki.apache.org/hadoop/UnknownHost
    at sun.reflect.GeneratedConstructorAccessor5.newInstance(Unknown Source)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:801)
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:744)
    at org.apache.hadoop.ipc.Client$Connection.<init>(Client.java:445)
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1522)
    at org.apache.hadoop.ipc.Client.call(Client.java:1373)
    at org.apache.hadoop.ipc.Client.call(Client.java:1337)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
    at com.sun.proxy.$Proxy13.getNewApplication(Unknown Source)
    at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.getNewApplication(ApplicationClientProtocolPBClientImpl.java:217)
    at sun.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:398)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:335)
    at com.sun.proxy.$Proxy14.getNewApplication(Unknown Source)
    at org.apache.hadoop.yarn.cl