  • Hadoop_25_Implementing a Log-Cleaning Program with MapReduce

    1. Requirement:

      Parse and split each field of the web access log records, drop invalid records, and generate the filtered access-request data needed for the various KPI statistics.
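
      The parser below assumes the common Apache/Nginx combined log format. A hypothetical sample line (WebLogParser splits such a line on spaces, so e.g. arr[0] is the client IP and arr[8] the status code):

    194.237.142.21 - - [18/Sep/2013:06:49:18 +0000] "GET /wp-content/uploads/2013/07/rstudio-git3.png HTTP/1.1" 304 0 "-" "Mozilla/4.0 (compatible;)"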

    2. Implementation:

    a) Define a bean to hold the fields of a log record

    package cn.bigdta.hdfs.weblog;
    
    public class WebLogBean {
        
        private String remote_addr;// client IP address
        private String remote_user;// client user name; "-" if not available
        private String time_local;// access time and time zone
        private String request;// requested URL and HTTP protocol
        private String status;// response status code; 200 means success
        private String body_bytes_sent;// size of the response body sent to the client
        private String http_referer;// the page the request was linked from
        private String http_user_agent;// client browser information

        private boolean valid = true;// whether the record is valid
    
        
        
        public String getRemote_addr() {
            return remote_addr;
        }
    
        public void setRemote_addr(String remote_addr) {
            this.remote_addr = remote_addr;
        }
    
        public String getRemote_user() {
            return remote_user;
        }
    
        public void setRemote_user(String remote_user) {
            this.remote_user = remote_user;
        }
    
        public String getTime_local() {
            return time_local;
        }
    
        public void setTime_local(String time_local) {
            this.time_local = time_local;
        }
    
        public String getRequest() {
            return request;
        }
    
        public void setRequest(String request) {
            this.request = request;
        }
    
        public String getStatus() {
            return status;
        }
    
        public void setStatus(String status) {
            this.status = status;
        }
    
        public String getBody_bytes_sent() {
            return body_bytes_sent;
        }
    
        public void setBody_bytes_sent(String body_bytes_sent) {
            this.body_bytes_sent = body_bytes_sent;
        }
    
        public String getHttp_referer() {
            return http_referer;
        }
    
        public void setHttp_referer(String http_referer) {
            this.http_referer = http_referer;
        }
    
        public String getHttp_user_agent() {
            return http_user_agent;
        }
    
        public void setHttp_user_agent(String http_user_agent) {
            this.http_user_agent = http_user_agent;
        }
    
        public boolean isValid() {
            return valid;
        }
    
        public void setValid(boolean valid) {
            this.valid = valid;
        }
        
        
        @Override
        public String toString() {
            // fields are joined with "\001" (Ctrl-A), the default Hive field delimiter
            StringBuilder sb = new StringBuilder();
            sb.append(this.valid);
            sb.append("\001").append(this.remote_addr);
            sb.append("\001").append(this.remote_user);
            sb.append("\001").append(this.time_local);
            sb.append("\001").append(this.request);
            sb.append("\001").append(this.status);
            sb.append("\001").append(this.body_bytes_sent);
            sb.append("\001").append(this.http_referer);
            sb.append("\001").append(this.http_user_agent);
            return sb.toString();
        }
    }
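
     Because "\001" is Hive's default field delimiter, the cleaned output can later be loaded into a Hive table without further conversion. A minimal sketch (not in the original post) of how a downstream consumer could split a record back apart:

    // Minimal sketch: split one cleaned output record back into its fields.
    // Field order follows WebLogBean.toString(): [0]=valid, [1]=remote_addr, [2]=remote_user, ...
    public static String[] splitRecord(String record) {
        return record.split("\001");
    }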

     b) Define a parser to parse and filter the raw web access log records

    package cn.bigdta.hdfs.weblog;
    
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.Locale;
    
    public class WebLogParser {
    
        static SimpleDateFormat sd1 = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.US);
    
        static SimpleDateFormat sd2 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    
        public static WebLogBean parser(String line) {
            WebLogBean webLogBean = new WebLogBean();
            String[] arr = line.split(" ");
            if (arr.length > 11) {
                webLogBean.setRemote_addr(arr[0]);
                webLogBean.setRemote_user(arr[1]);
                webLogBean.setTime_local(parseTime(arr[3].substring(1)));
                webLogBean.setRequest(arr[6]);
                webLogBean.setStatus(arr[8]);
                webLogBean.setBody_bytes_sent(arr[9]);
                webLogBean.setHttp_referer(arr[10]);
    
                if (arr.length > 12) {
                    webLogBean.setHttp_user_agent(arr[11] + " " + arr[12]);
                } else {
                    webLogBean.setHttp_user_agent(arr[11]);
                }
                if (Integer.parseInt(webLogBean.getStatus()) >= 400) {// status codes of 400 and above indicate an HTTP error
                    webLogBean.setValid(false);
                }
            } else {
                webLogBean.setValid(false);
            }
            return webLogBean;
        }
    
        public static String parseTime(String dt) {
    
            String timeString = "";
            try {
                Date parse = sd1.parse(dt);
                timeString = sd2.format(parse);
    
            } catch (ParseException e) {
                e.printStackTrace();
            }
            return timeString;
        }
    }
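
     Note that the static SimpleDateFormat instances are not thread-safe; that is acceptable here because each map task calls the parser from a single thread. A quick local sanity check for the parser (not part of the original post), using the hypothetical sample line from above:

    package cn.bigdta.hdfs.weblog;

    // A quick local sanity check for WebLogParser (not part of the original post)
    public class WebLogParserTest {
        public static void main(String[] args) {
            String line = "194.237.142.21 - - [18/Sep/2013:06:49:18 +0000] \"GET /wp-content/uploads/2013/07/rstudio-git3.png HTTP/1.1\" 304 0 \"-\" \"Mozilla/4.0 (compatible;)\"";
            WebLogBean bean = WebLogParser.parser(line);
            // expected: valid == true, time_local formatted as "2013-09-18 06:49:18"
            System.out.println(bean.isValid() + " -> " + bean);
        }
    }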

     c) The MapReduce program

    package cn.bigdta.hdfs.weblog;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    
    public class WeblogPreProcess {
    
        static class WeblogPreProcessMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
            Text k = new Text();
            NullWritable v = NullWritable.get();
    
            @Override
            protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    
                String line = value.toString();
                WebLogBean webLogBean = WebLogParser.parser(line);
                // a static-resource filter could be plugged in here (see the sketch after this listing)
                /* WebLogParser.filterStaticResource(webLogBean); */
                if (!webLogBean.isValid())
                    return;
                k.set(webLogBean.toString());
                context.write(k, v);
            }
        }
    
        public static void main(String[] args) throws Exception {
    
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
    
            job.setJarByClass(WeblogPreProcess.class);
    
            job.setMapperClass(WeblogPreProcessMapper.class);
    
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(NullWritable.class);
    
            FileInputFormat.setInputPaths(job, new Path("F:/weblog"));
            FileOutputFormat.setOutputPath(job, new Path("F:/weblogOut"));
    
            System.exit(job.waitForCompletion(true) ? 0 : 1);
    
        }
    }
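
     The commented-out filterStaticResource call in the mapper is not shown in the original post. A minimal sketch of such a filter, which could be added to WebLogParser (the prefix list is an assumption and would be tuned to the site's actual URL layout):

    // Hypothetical static-resource filter (not in the original post)
    public static void filterStaticResource(WebLogBean bean) {
        String[] staticPrefixes = {"/js/", "/css/", "/img/", "/images/"};
        for (String prefix : staticPrefixes) {
            if (bean.getRequest().startsWith(prefix)) {
                bean.setValid(false);// exclude static-resource requests from KPI statistics
                return;
            }
        }
    }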

     Log file download: https://pan.baidu.com/s/17oOaA_S5RRDKjFhCqMm40A
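
     The input and output paths in main() are hard-coded local Windows paths, so the job runs in local mode from an IDE. To submit it to a cluster, the two paths would typically be taken from the command line instead, e.g.:

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));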

  • Original article: https://www.cnblogs.com/yaboya/p/9253491.html