zoukankan      html  css  js  c++  java
  • MapReduce单表关联学习~

     首先考虑表的自连接,其次是列的设置,最后是结果的整理.

    文件内容:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Objects;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;
    
    
    public class STjoin extends Configured implements Tool {
        public static int time = 0;
      //map将输入分割成child和parent,然后正序输出一次作为右表,反序输出一次作为左表
      //需要注意的是在输出的value中必须加上左右表区别标志
    public static class Map extends Mapper<Object,Text,Text,Text>{ public void map(Object key,Text value,Context context) throws IOException, InterruptedException{ String childname = new String(); String parentname = new String(); String relationtype = new String(); String line = value.toString(); int i = 0;
           //文件以空格分隔
    while(line.charAt(i) != ' '){ i++; }
           //拆分child 和 parent String[] values
    = {line.substring(0,i),line.substring(i+1)}; if(values[0].compareTo("child") != 0){ childname = values[0]; parentname = values[1];
             //左右表区分标志 relationtype
    = "1"; context.write(new Text(values[1]),new Text(relationtype + "+" + childname + "+" + parentname)); relationtype = "2"; context.write(new Text(values[0]),new Text(relationtype + "+" + childname + "+" + parentname)); } } } public static class Reduce extends Reducer<Text,Text,Text,Text>{ public void reduce(Text key,Iterable<Text> values,Context context) throws IOException,InterruptedException{
            //输出表头
    if(time == 0){ context.write(new Text("grandchild"),new Text("grandparent")); time++; } int grandchildnum = 0; String grandchild[] = new String[10]; int grandparentnum = 0; String grandparent[] = new String[10]; Iterator ite = values.iterator(); while(ite.hasNext()){ String record = ite.next().toString(); int len = record.length(); int i = 2; if(len == 0){ continue; } char relationtype = record.charAt(0); String childname = new String(); String parentname = new String(); while(record.charAt(i) != '+'){ childname = childname + record.charAt(i); i++; } i = i+1; while(i<len){ parentname = parentname + record.charAt(i); i++; } if(relationtype == '1') { grandchild[grandchildnum] = childname; ; grandchildnum++; }else{ grandparent[grandparentnum] = parentname; grandparentnum++; } } if(grandparentnum != 0 && grandchildnum != 0){ for(int m = 0;m<grandchildnum;m++){ for(int n = 0;n<grandparentnum;n++){ System.out.println(grandchild[m] + " " + grandparent[n]); context.write(new Text(grandchild[m]),new Text(grandparent[n])); } } } } } public int run(String[] args) throws Exception{ Configuration aaa = new Configuration(); Job job = Job.getInstance(aaa); String InputPaths = "/usr/local/idea-IC-139.1117.1/Hadoop/out/datainput/child-parent.txt"; String OutputPath = "/usr/local/idea-IC-139.1117.1/Hadoop/out/dataout/"; job.setJarByClass(Sort.class); job.setJobName("Sort"); job.setMapperClass(Map.class); job.setReducerClass(Reduce.class); FileInputFormat.setInputPaths(job, new Path(InputPaths)); FileOutputFormat.setOutputPath(job, new Path(OutputPath)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(org.apache.hadoop.mapreduce.lib.output.TextOutputFormat.class); boolean success = job.waitForCompletion(true); return success ? 0 : 1; } public static void main(String[] args) throws Exception{ int ret = ToolRunner.run(new STjoin(), args); System.exit(ret); } }

    输出结果:

    参考:《Hadoop实战》

  • 相关阅读:
    “键鼠耕耘,IT家园”,博客园2010T恤正式发布
    解决jQuery冲突问题
    上周热点回顾(5.31-6.6)
    博客园电子期刊2010年5月刊发布啦
    上周热点回顾(6.7-6.13)
    Chrome/5.0.375.70 处理 <pre></pre> 的 Bug
    [转]C# MemoryStream和BinaryFormatter
    [转]Android adb不是内部或外部命令 问题解决
    [转]HttpWebRequest解析 作用 介绍
    财富中文网 2010年世界500强排行榜(企业名单)
  • 原文地址:https://www.cnblogs.com/yangsy0915/p/5484219.html
Copyright © 2011-2022 走看看