  • MapReduce: reading files on HDFS and building a word-frequency inverted index in HBase

    The data files on HDFS are T0, T1, and T2 (no file extension):

    T0:

    What has come into being in him was life, and the life was the light of all people. 
    The light shines in the darkness, and the darkness did not overcome it. Enter through the narrow gate;
    for the gate is wide and the road is easy that leads to destruction, and there are many who take it.
    For the gate is narrow and the road is hard that leads to life, and there are few who find it

    T1:

    Where, O death, is your victory? Where, O death, is your sting? The sting of death is sin, and.
    The power of sin is the law. But thanks be to God, who gives us the victory through our Lord Jesus Christ.
    The grass withers, the flower fades, when the breath of the LORD blows upon it; surely the people are grass.
    The grass withers, the flower fades; but the word of our God will stand forever.

    T2:

    What has come into being in him was life, and the life was the light of all people. 
    The light shines in the darkness, and the darkness did not overcome it. Enter through the narrow gate;
    for the gate is wide and the road is easy that leads to destruction, and there are many who take it.
    For the gate is narrow and the road is hard that leads to life, and there are few who find it.

    The implementation code is as follows:

    package com.pro.bq;
    
    import java.io.IOException;
    import java.util.StringTokenizer;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.mapreduce.TableReducer;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileSplit;
    import org.apache.hadoop.util.GenericOptionsParser;
    
    
    public class DataFromHdfs {
        public static class LocalMap extends Mapper<Object, Text, Text, Text>
        {
            private FileSplit split=null;
            private Text keydata=null;
            public void map(Object key, Text value,Context context)
                    throws IOException, InterruptedException {
                
                // record which input file this line came from
                split=(FileSplit) context.getInputSplit();
                StringTokenizer tokenStr=new StringTokenizer(value.toString());
                while(tokenStr.hasMoreTokens())
                {
                    String token=tokenStr.nextToken();
                    // strip trailing punctuation so that "gate;" and "gate" count as the same word
                    token=token.replaceAll("[,.;?]+$", "");
                    // use just the file name (T0 / T1 / T2) as the document id
                    String fileName=split.getPath().getName();
                    // emit "word:file" -> 1; the combiner sums these per word/file pair
                    keydata=new Text(token+":"+fileName);
                    context.write(keydata, new Text("1"));
                }
            }
        }
        public static class LocalCombiner extends Reducer<Text, Text, Text, Text>
        {
    
            public void reduce(Text key, Iterable<Text> values,Context context)
                    throws IOException, InterruptedException {
                // split the map key "word:file" back into the word and the file name
                int index=key.toString().indexOf(":");
                Text keydata=new Text(key.toString().substring(0, index));
                String filename=key.toString().substring(index+1);
                // count how many times this word occurred in this file
                int sum=0;
                for(Text val:values)
                {
                    sum++;
                }
                // emit word -> "file:count" for the final reducer
                context.write(keydata, new Text(filename+":"+String.valueOf(sum)));
            }
        }
        public static class TableReduce extends TableReducer<Text, Text, ImmutableBytesWritable>
        {
    
            public void reduce(Text key, Iterable<Text> values,Context context)
                    throws IOException, InterruptedException {
                // one value per file the word occurs in, each of the form "file:count"
                for(Text val:values)
                {
                    int index=val.toString().indexOf(":");
                    String filename=val.toString().substring(0, index);
                    int sum=Integer.parseInt(val.toString().substring(index+1));
                    // the row key is the word itself
                    String row=key.toString();
                    Put put=new Put(Bytes.toBytes(row));
    //                put.add(Bytes.toBytes("word"), Bytes.toBytes("content"), Bytes.toBytes(key.toString()));
                    put.add(Bytes.toBytes("filesum"), Bytes.toBytes("filename"), Bytes.toBytes(filename));
                    put.add(Bytes.toBytes("filesum"), Bytes.toBytes("count"), Bytes.toBytes(String.valueOf(sum)));
                    // note: a word occurring in several files writes the same row and the same
                    // two qualifiers, so later cells overwrite earlier ones unless the column
                    // family keeps multiple versions
                    context.write(new ImmutableBytesWritable(Bytes.toBytes(row)), put);
                }
    
            }
        }
        public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
            Configuration conf=new Configuration();
            conf=HBaseConfiguration.create(conf);
            // conf.set("hbase.zookeeper.quorum.", "localhost"); 
            String hdfsPath="hdfs://localhost:9000/user/haduser/";
            String[] argsStr=new String[]{hdfsPath+"input/reverseIndex"};
            String[] otherArgs=new GenericOptionsParser(conf, argsStr).getRemainingArgs();
            Job job=new Job(conf);
            job.setJarByClass(DataFromHdfs.class);
            
            job.setMapperClass(LocalMap.class);
            job.setCombinerClass(LocalCombiner.class);
            job.setReducerClass(TableReduce.class);
            
            job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class); // the combiner's input/output types must match the map output types
            
        // the "index" table must be created in HBase beforehand, otherwise the job fails
            TableMapReduceUtil.initTableReducerJob("index", TableReduce.class, job);
            
            FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
            System.exit(job.waitForCompletion(true)?0:1);
        }
    }
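
    To verify the result from Java instead of the shell, the table can be scanned with the same-generation HBase client API used above. This is a minimal sketch; the class name ScanIndex is made up for the example, and it assumes the job has already populated the "index" table:

    package com.pro.bq;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanIndex {
        public static void main(String[] args) throws IOException {
            Configuration conf=HBaseConfiguration.create();
            HTable table=new HTable(conf, "index");
            // scan only the column family the job writes to
            Scan scan=new Scan();
            scan.addFamily(Bytes.toBytes("filesum"));
            ResultScanner scanner=table.getScanner(scan);
            for(Result r:scanner)
            {
                // the row key is the word; the two cells hold the file name and the count
                String word=Bytes.toString(r.getRow());
                String filename=Bytes.toString(r.getValue(Bytes.toBytes("filesum"), Bytes.toBytes("filename")));
                String count=Bytes.toString(r.getValue(Bytes.toBytes("filesum"), Bytes.toBytes("count")));
                System.out.println(word+" -> "+filename+":"+count);
            }
            scanner.close();
            table.close();
        }
    }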

    Before running, create the "index" table from the HBase shell (the column family name must match the "filesum" used in the code): create 'index','filesum'
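
    If each word should keep one (filename, count) pair per file, a possible workaround for the overwrite behavior noted in the reducer is to let the column family retain several versions when creating the table, for example:

    create 'index', {NAME => 'filesum', VERSIONS => 3}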

    After the program has run, execute the shell command scan 'index' to check the result:
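
    The output has one row per word, carrying the two cells written by the reducer; an illustrative excerpt (the entry for "grass" matches the hand count above, timestamps elided):

    hbase(main):002:0> scan 'index'
    ROW                    COLUMN+CELL
     grass                 column=filesum:count, timestamp=..., value=3
     grass                 column=filesum:filename, timestamp=..., value=T1
    ...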

  • Original article: https://www.cnblogs.com/wzyj/p/3565172.html