  • Hadoop HA: reading the NameNode in the active state

    Approach 1: point a plain Configuration at each candidate NameNode in turn and ask HAUtil which one is active.

    package com.xxx.hadoop;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.HAUtil;
    
    import java.io.IOException;
    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    
    /**
     * @Description:
     * @CreatedDate: 2018/1/5 10:01
     * @Author: wangel.lee
     */
    public class TestNodeActive {
        public static void main(String[] args) throws IOException {
            getActiveNameNode();
        }
    
        /**
         * Check which of the Hadoop HA NameNodes is currently active
         * @throws IOException
         */
        public static void getActiveNameNode() throws IOException {
            String[] mapredJobTrackers = {"hdfs://10.1.1.12:8030", "hdfs://10.1.1.11:8030"};
            String[] fsDefaultNames = {"hdfs://10.1.1.12:8020", "hdfs://10.1.1.11:8020"};
    
            String hivePath = "";
            for (int i = 0; i < fsDefaultNames.length; i++) {
                Configuration conf = createConfigurationDefault(mapredJobTrackers[i], fsDefaultNames[i]);
                FileSystem fileSystem = FileSystem.get(conf);
                InetSocketAddress active = null;
                try {
                    active = HAUtil.getAddressOfActive(fileSystem);
                } catch (Exception e) {
                    System.out.println("hostname:" + mapredJobTrackers[i] + " is not active.");
                }
                if (active == null) continue;
                InetAddress address = active.getAddress();
                hivePath = "hdfs://" + address.getHostAddress() + ":" + active.getPort();
                break;
            }
            System.out.println(hivePath);
        }
    
        private static Configuration createConfigurationDefault(String mapredJobTracker, String fsDefaultName) throws IOException {
            Configuration conf = new Configuration();
            // Needed when testing remotely from Windows
            conf.set("mapred.job.tracker", mapredJobTracker);
            conf.set("fs.default.name", fsDefaultName);
            System.setProperty("hadoop.home.dir", "E:\\ProgramFiles\\java\\hadoop\\hadoop-common-2.2.0-bin-master");
    
            return conf;
        }
    }
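
    A minimal usage sketch (the class name UseActiveNameNode and the variable activeUri are illustrative, not part of the original code): the address printed by TestNodeActive can be fed straight back into FileSystem.get to talk to the active NameNode directly.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    
    public class UseActiveNameNode {
        public static void main(String[] args) throws Exception {
            // The address resolved by TestNodeActive, e.g. "hdfs://10.1.1.12:8020"
            String activeUri = "hdfs://10.1.1.12:8020";
            Configuration conf = new Configuration();
            conf.set("fs.default.name", activeUri);
            // FileSystem is Closeable, so try-with-resources cleans up the connection
            try (FileSystem fs = FileSystem.get(conf)) {
                System.out.println("connected to: " + fs.getUri());
            }
        }
    }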

    Approach 2: let the HDFS client resolve the active NameNode itself through the HA nameservice configuration.

    package com.yanheap.hadoop;
    
    
    import com.alibaba.druid.util.StringUtils;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.HAUtil;
    
    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    import java.net.URI;
    
    /**
     * @Description:
     * @CreatedDate: 2018/1/5 14:48
     * @Author: wangel.lee
     */
    public class ZookeeperActive {
    
        private static String clusterName = "nameservice1";
        private static final String HADOOP_URL = "hdfs://"+clusterName;
        private static Configuration conf;
        static {
            conf = new Configuration();
            conf.set("fs.defaultFS", HADOOP_URL);
            conf.set("dfs.nameservices", clusterName);
            conf.set("dfs.ha.namenodes." + clusterName, "nn1,nn2");
            // HA configuration: when one NameNode becomes standby, the client logs the
            // failure and automatically fails over to the other NameNode.
            conf.set("dfs.namenode.rpc-address." + clusterName + ".nn1", "10.1.1.12:8020");
            conf.set("dfs.namenode.rpc-address." + clusterName + ".nn2", "10.1.1.11:8020");
            conf.set("dfs.client.failover.proxy.provider." + clusterName,
                    "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        }
    
        /**
         * Resolve the HA cluster's active NameNode
         * @throws IOException
         */
        private static void getActive() throws IOException {
            FileSystem fileSystem = FileSystem.get(URI.create(HADOOP_URL), conf);
    
            long l1 = System.currentTimeMillis();
    
            InetSocketAddress active = null;
            try {
                active = HAUtil.getAddressOfActive(fileSystem);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (active == null) {
                System.out.println("no active namenode found");
                return;
            }
            InetAddress address = active.getAddress();
            String hivePath = "hdfs://" + address.getHostAddress() + ":" + active.getPort();
    
            System.out.println("-------------------------------------->" + hivePath);
            System.out.println("-------------------------------------->" + (System.currentTimeMillis() - l1));
        }
    
    
        /**
         * Read a file from HDFS
         * @throws IOException
         */
        private static void readFile() throws IOException {
            FileSystem fileSystem = FileSystem.get(URI.create(HADOOP_URL), conf);
            FSDataInputStream in = null;
            BufferedReader br = null;
    
            try {
                Path srcPath = new Path("/tmp/media_info.csv");
                in = fileSystem.open(srcPath);
                br = new BufferedReader(new InputStreamReader(in));
                String line = null;
                while (null != (line = br.readLine())) {
                    if (!StringUtils.isEmpty(line)) {
                        System.out.println("-------->:" + line);
                    }
                }
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
            } finally {
                if (br != null) br.close();
                fileSystem.close();
            }
        }
    
        public static void main(String[] args)throws IOException  {
            //getActive();
            readFile();
        }
    
    }
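
    For completeness, a minimal write sketch under the same HA configuration; the target path /tmp/ha_write_test.txt is made up for illustration. The method slots into ZookeeperActive above and needs one extra import, org.apache.hadoop.fs.FSDataOutputStream.

    /**
     * Sketch: write a small file through the HA client. Because fs.defaultFS
     * points at the nameservice, the client locates the active NameNode itself.
     */
    private static void writeFile() throws IOException {
        FileSystem fileSystem = FileSystem.get(URI.create(HADOOP_URL), conf);
        Path dstPath = new Path("/tmp/ha_write_test.txt"); // hypothetical test path
        try (FSDataOutputStream out = fileSystem.create(dstPath, true)) {
            out.writeBytes("hello from the HA client\n");
        } finally {
            fileSystem.close();
        }
    }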

    In addition, running Hadoop locally on Windows requires environment-variable support. The link below provides a download package with the Hadoop command binaries; after downloading, add an environment variable that references it.

    Link: https://pan.baidu.com/s/1eRIHmdO  Password: ly38
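
    As a per-JVM alternative to a system-wide environment variable, the same thing can be done in code before the first FileSystem call, as Approach 1 already does; a sketch, assuming the binaries are unpacked at the path used there (adjust to your local install):

    // Per-JVM alternative to the HADOOP_HOME environment variable.
    // Must run before the first FileSystem/Configuration use.
    static {
        if (System.getProperty("hadoop.home.dir") == null) {
            System.setProperty("hadoop.home.dir",
                    "E:\\ProgramFiles\\java\\hadoop\\hadoop-common-2.2.0-bin-master");
        }
    }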
