  • MapReduce Programming Series, Part 3: Data Deduplication

    1. Project name: Dedup (package com.dedup)

    2. Program code (the idea: each mapper emits the whole input line as its output key; the shuffle groups identical keys, so the reducer sees every duplicate exactly once and writes the key a single time):

    package com.dedup;
    
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.GenericOptionsParser;
    
    public class Dedup {
        //map: copies the input value into the output key and emits it directly; note the parameter types and count
        public static class Map extends Mapper<Object, Text, Text, Text>{
            private final Text line = new Text();
            //note the parameter types and count
            @Override
            public void map(Object key, Text value, Context context) throws IOException, InterruptedException{
                System.out.println("mapper.......");
                System.out.println("key:"+key+"  value:"+value);
                //copy the bytes into the reusable buffer; the original "line = value;" merely aliased the object the framework reuses
                line.set(value);
                context.write(line, new Text(" "));
                System.out.println("line:"+ line +" value"+ value +"  context:" + context);
            }
        }
        //reduce: copies the input key to the output key and emits it once; duplicate lines arrive grouped under a single key
        public static class Reduce extends Reducer<Text, Text, Text, Text>{
            //note the parameter types and count
            @Override
            public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException{
                System.out.println("reducer.......");
                System.out.println("key:"+key+"  values:"+values);
                context.write(key, new Text(" "));
                System.out.println("key:"+key+"  values"+values+"  context:"+context);
            }
        }
    
        public static void main(String[] args) throws Exception{
            Configuration conf = new Configuration();
            String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
            if(otherArgs.length != 2){
                System.err.println("Usage: dedup <in> <out>");
                System.exit(2);
            }
            Job job = new Job(conf, "Data Deduplication");
            job.setJarByClass(Dedup.class);
    
            job.setMapperClass(Map.class);
            job.setReducerClass(Reduce.class);
    
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
    
            FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
            FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));        
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
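
    The single-space Text value the mapper emits exists only to satisfy the (Text, Text) signature; every one of those padding values still travels through sort and shuffle (they are included in the "Map output materialized bytes=266" counter below), and TextOutputFormat appends them after a tab in the output. A minimal sketch of the same logic with NullWritable values instead — an alternative, not the original post's code — which needs one extra import (org.apache.hadoop.io.NullWritable) and job.setOutputValueClass(NullWritable.class):

        //Hedged alternative: identical dedup semantics, but nothing besides the key is shuffled or written.
        public static class NullValueMap extends Mapper<Object, Text, Text, NullWritable>{
            private final Text line = new Text();
            @Override
            public void map(Object key, Text value, Context context) throws IOException, InterruptedException{
                line.set(value);
                context.write(line, NullWritable.get());
            }
        }
        public static class NullValueReduce extends Reducer<Text, NullWritable, Text, NullWritable>{
            @Override
            public void reduce(Text key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException{
                context.write(key, NullWritable.get());    //one output line per distinct key
            }
        }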

    3. Test data (16 records in total, of which 12 are distinct):

    file1:
    2006-6-9 a
    2006-6-10 b
    2006-6-11 c
    2006-6-12 d
    2006-6-13 a
    2006-6-14 b
    2006-6-15 c
    2006-6-11 c
     
    file2:
    2006-6-9 b
    2006-6-10 a
    2006-6-11 b
    2006-6-12 d
    2006-6-13 a
    2006-6-14 c
    2006-6-15 d
    2006-6-11 c
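
    Both files must sit in the HDFS directory passed as <in> before the job starts. One hedged way to stage them from Java with the standard FileSystem API — the directory and local paths here are assumptions for illustration, not from the original post:

        import org.apache.hadoop.conf.Configuration;
        import org.apache.hadoop.fs.FileSystem;
        import org.apache.hadoop.fs.Path;

        public class StageInput {
            public static void main(String[] args) throws Exception {
                Configuration conf = new Configuration();
                FileSystem fs = FileSystem.get(conf);    //fs.defaultFS, e.g. hdfs://localhost:9000
                Path in = new Path("dedup_input");       //assumed input dir under the user's HDFS home
                fs.mkdirs(in);
                //local source paths are placeholders; point them at wherever file1/file2 actually live
                fs.copyFromLocalFile(new Path("/tmp/file1"), new Path(in, "file1"));
                fs.copyFromLocalFile(new Path("/tmp/file2"), new Path(in, "file2"));
                fs.close();
            }
        }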
     
    4. Run log (a local-mode run; the mapper......./reducer....... lines are the debug prints from the code above):
    14/09/21 16:51:16 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
    14/09/21 16:51:16 WARN mapred.JobClient: No job jar file set.  User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
    14/09/21 16:51:16 INFO input.FileInputFormat: Total input paths to process : 2
    14/09/21 16:51:16 WARN snappy.LoadSnappy: Snappy native library not loaded
    14/09/21 16:51:16 INFO mapred.JobClient: Running job: job_local_0001
    14/09/21 16:51:16 INFO util.ProcessTree: setsid exited with exit code 0
    14/09/21 16:51:16 INFO mapred.Task:  Using ResourceCalculatorPlugin : org.apache.hadoop.util.LinuxResourceCalculatorPlugin@2e9aa770
    14/09/21 16:51:16 INFO mapred.MapTask: io.sort.mb = 100
    14/09/21 16:51:16 INFO mapred.MapTask: data buffer = 79691776/99614720
    14/09/21 16:51:16 INFO mapred.MapTask: record buffer = 262144/327680
    mapper.......
    key:0  value:2006-6-9 a
    line:2006-6-9 a value2006-6-9 a  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:11  value:2006-6-10 b
    line:2006-6-10 b value2006-6-10 b  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:23  value:2006-6-11 c
    line:2006-6-11 c value2006-6-11 c  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:35  value:2006-6-12 d
    line:2006-6-12 d value2006-6-12 d  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:47  value:2006-6-13 a
    line:2006-6-13 a value2006-6-13 a  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:59  value:2006-6-14 b
    line:2006-6-14 b value2006-6-14 b  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:71  value:2006-6-15 c
    line:2006-6-15 c value2006-6-15 c  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    mapper.......
    key:83  value:2006-6-11 c
    line:2006-6-11 c value2006-6-11 c  context:org.apache.hadoop.mapreduce.Mapper$Context@2d3b0087
    14/09/21 16:51:16 INFO mapred.MapTask: Starting flush of map output
    14/09/21 16:51:16 INFO mapred.MapTask: Finished spill 0
    14/09/21 16:51:16 INFO mapred.Task: Task:attempt_local_0001_m_000000_0 is done. And is in the process of commiting
    14/09/21 16:51:17 INFO mapred.JobClient:  map 0% reduce 0%
    14/09/21 16:51:19 INFO mapred.LocalJobRunner:
    14/09/21 16:51:19 INFO mapred.Task: Task 'attempt_local_0001_m_000000_0' done.
    14/09/21 16:51:19 INFO mapred.Task:  Using ResourceCalculatorPlugin : org.apache.hadoop.util.LinuxResourceCalculatorPlugin@3697e580
    14/09/21 16:51:19 INFO mapred.MapTask: io.sort.mb = 100
    14/09/21 16:51:19 INFO mapred.MapTask: data buffer = 79691776/99614720
    14/09/21 16:51:19 INFO mapred.MapTask: record buffer = 262144/327680
    mapper.......
    key:0  value:2006-6-9 b
    line:2006-6-9 b value2006-6-9 b  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:11  value:2006-6-10 a
    line:2006-6-10 a value2006-6-10 a  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:23  value:2006-6-11 b
    line:2006-6-11 b value2006-6-11 b  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:35  value:2006-6-12 d
    line:2006-6-12 d value2006-6-12 d  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:47  value:2006-6-13 a
    line:2006-6-13 a value2006-6-13 a  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:59  value:2006-6-14 c
    line:2006-6-14 c value2006-6-14 c  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:71  value:2006-6-15 d
    line:2006-6-15 d value2006-6-15 d  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    mapper.......
    key:83  value:2006-6-11 c
    line:2006-6-11 c value2006-6-11 c  context:org.apache.hadoop.mapreduce.Mapper$Context@319af5dd
    14/09/21 16:51:19 INFO mapred.MapTask: Starting flush of map output
    14/09/21 16:51:19 INFO mapred.MapTask: Finished spill 0
    14/09/21 16:51:19 INFO mapred.Task: Task:attempt_local_0001_m_000001_0 is done. And is in the process of commiting
    14/09/21 16:51:20 INFO mapred.JobClient:  map 100% reduce 0%
    14/09/21 16:51:22 INFO mapred.LocalJobRunner:
    14/09/21 16:51:22 INFO mapred.Task: Task 'attempt_local_0001_m_000001_0' done.
    14/09/21 16:51:22 INFO mapred.Task:  Using ResourceCalculatorPlugin : org.apache.hadoop.util.LinuxResourceCalculatorPlugin@3c844c07
    14/09/21 16:51:22 INFO mapred.LocalJobRunner:
    14/09/21 16:51:22 INFO mapred.Merger: Merging 2 sorted segments
    14/09/21 16:51:22 INFO mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 258 bytes
    14/09/21 16:51:22 INFO mapred.LocalJobRunner:
    reducer.......
    key:2006-6-10 a  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-10 a  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-10 b  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-10 b  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-11 b  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-11 b  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-11 c  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-11 c  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-12 d  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-12 d  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-13 a  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-13 a  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-14 b  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-14 b  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-14 c  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-14 c  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-15 c  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-15 c  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-15 d  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-15 d  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-9 a  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-9 a  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    reducer.......
    key:2006-6-9 b  values:org.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78
    key:2006-6-9 b  valuesorg.apache.hadoop.mapreduce.ReduceContext$ValueIterable@9c8fd78  context:org.apache.hadoop.mapreduce.Reducer$Context@52767ce8
    14/09/21 16:51:22 INFO mapred.Task: Task:attempt_local_0001_r_000000_0 is done. And is in the process of commiting
    14/09/21 16:51:22 INFO mapred.LocalJobRunner:
    14/09/21 16:51:22 INFO mapred.Task: Task attempt_local_0001_r_000000_0 is allowed to commit now
    14/09/21 16:51:22 INFO output.FileOutputCommitter: Saved output of task 'attempt_local_0001_r_000000_0' to hdfs://localhost:9000/user/hadoop/dedup_output
    14/09/21 16:51:25 INFO mapred.LocalJobRunner: reduce > reduce
    14/09/21 16:51:25 INFO mapred.Task: Task 'attempt_local_0001_r_000000_0' done.
    14/09/21 16:51:26 INFO mapred.JobClient:  map 100% reduce 100%
    14/09/21 16:51:26 INFO mapred.JobClient: Job complete: job_local_0001
    14/09/21 16:51:26 INFO mapred.JobClient: Counters: 22
    14/09/21 16:51:26 INFO mapred.JobClient:   Map-Reduce Framework
    14/09/21 16:51:26 INFO mapred.JobClient:     Spilled Records=32
    14/09/21 16:51:26 INFO mapred.JobClient:     Map output materialized bytes=266
    14/09/21 16:51:26 INFO mapred.JobClient:     Reduce input records=16
    14/09/21 16:51:26 INFO mapred.JobClient:     Virtual memory (bytes) snapshot=0
    14/09/21 16:51:26 INFO mapred.JobClient:     Map input records=16
    14/09/21 16:51:26 INFO mapred.JobClient:     SPLIT_RAW_BYTES=232
    14/09/21 16:51:26 INFO mapred.JobClient:     Map output bytes=222
    14/09/21 16:51:26 INFO mapred.JobClient:     Reduce shuffle bytes=0
    14/09/21 16:51:26 INFO mapred.JobClient:     Physical memory (bytes) snapshot=0
    14/09/21 16:51:26 INFO mapred.JobClient:     Reduce input groups=12
    14/09/21 16:51:26 INFO mapred.JobClient:     Combine output records=0
    14/09/21 16:51:26 INFO mapred.JobClient:     Reduce output records=12
    14/09/21 16:51:26 INFO mapred.JobClient:     Map output records=16
    14/09/21 16:51:26 INFO mapred.JobClient:     Combine input records=0
    14/09/21 16:51:26 INFO mapred.JobClient:     CPU time spent (ms)=0
    14/09/21 16:51:26 INFO mapred.JobClient:     Total committed heap usage (bytes)=813170688
    14/09/21 16:51:26 INFO mapred.JobClient:   File Input Format Counters
    14/09/21 16:51:26 INFO mapred.JobClient:     Bytes Read=190
    14/09/21 16:51:26 INFO mapred.JobClient:   FileSystemCounters
    14/09/21 16:51:26 INFO mapred.JobClient:     HDFS_BYTES_READ=475
    14/09/21 16:51:26 INFO mapred.JobClient:     FILE_BYTES_WRITTEN=122061
    14/09/21 16:51:26 INFO mapred.JobClient:     FILE_BYTES_READ=1665
    14/09/21 16:51:26 INFO mapred.JobClient:     HDFS_BYTES_WRITTEN=166
    14/09/21 16:51:26 INFO mapred.JobClient:   File Output Format Counters
    14/09/21 16:51:26 INFO mapred.JobClient:     Bytes Written=166
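
    The counters summarize the deduplication: Map input records=16 lines went in, the shuffle grouped them into Reduce input groups=12 distinct keys, and Reduce output records=12 lines came out, i.e. four duplicates were removed. Those numbers can also be read programmatically after waitForCompletion(true); a hedged sketch, assuming a Hadoop 2.x+ client where the org.apache.hadoop.mapreduce.TaskCounter enum exists (the 1.x release these logs came from names its framework counters differently):

        //Hedged sketch: call from the driver after job.waitForCompletion(true), before exiting.
        //Assumes Hadoop 2.x+ (org.apache.hadoop.mapreduce.TaskCounter and Counters).
        static void printDedupCounters(org.apache.hadoop.mapreduce.Job job) throws Exception {
            org.apache.hadoop.mapreduce.Counters counters = job.getCounters();
            long in  = counters.findCounter(org.apache.hadoop.mapreduce.TaskCounter.MAP_INPUT_RECORDS).getValue();
            long out = counters.findCounter(org.apache.hadoop.mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS).getValue();
            System.out.println("input records:      " + in);     //16 in the run above
            System.out.println("distinct lines:     " + out);    //12 in the run above
            System.out.println("duplicates removed: " + (in - out));
        }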
    5. Output (12 distinct lines; Text keys sort lexicographically as bytes, which is why 2006-6-10 precedes 2006-6-9):
    2006-6-10 a    
    2006-6-10 b    
    2006-6-11 b    
    2006-6-11 c    
    2006-6-12 d    
    2006-6-13 a    
    2006-6-14 b    
    2006-6-14 c    
    2006-6-15 c    
    2006-6-15 d    
    2006-6-9 a    
    2006-6-9 b
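
    The same sorted-distinct result can be reproduced off-cluster, which makes a quick sanity check against the job output. A minimal sketch, assuming local copies of the two input files named file1 and file2 in the working directory:

        import java.io.IOException;
        import java.nio.charset.StandardCharsets;
        import java.nio.file.Files;
        import java.nio.file.Paths;
        import java.util.TreeSet;

        public class LocalDedupCheck {
            public static void main(String[] args) throws IOException {
                //TreeSet keeps one copy of each line in lexicographic order,
                //mirroring the shuffle's sort plus the reducer's one-write-per-key behavior
                TreeSet<String> distinct = new TreeSet<>();
                for (String f : new String[]{"file1", "file2"}) {    //assumed local file names
                    distinct.addAll(Files.readAllLines(Paths.get(f), StandardCharsets.UTF_8));
                }
                distinct.forEach(System.out::println);
            }
        }

    Run against the test data from section 3, this prints the same 12 lines, in the same order, as the job.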
  • Original article: https://www.cnblogs.com/yangyquin/p/5021166.html