zoukankan      html  css  js  c++  java
  • HDFS API操作

    package test;
    import org.apache.hadoop.fs.*;
    import org.apache.commons.io.IOUtils;
    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.net.MalformedURLException;
    import java.net.URI;
    import java.net.URL;
    import java.text.SimpleDateFormat;
    
    import org.apache.hadoop.conf.Configuration;
    
    public class Test extends FSDataInputStream {
        private static Configuration conf ;
        static{
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        }
        public static void Config(){
            //配置文件
            conf= new Configuration();
            //conf.set("fs.defaultFS","hdfs://hadoop102:8020");//使用默认配置可连接成功
            conf.set("fs.hdfs.impl","org.apache.hadoop.hdfs.DistributedFileSystem");
        }
    
        public static int ReadLine(String path) throws IOException {
            URI uri= URI.create("hdfs://hadoop102:8020/user/atguigu/txt2");
            FileSystem fs = FileSystem.get(uri,conf);
            Path file = new Path(uri);
            FSDataInputStream getIt = fs.open(file);
            BufferedReader d = new BufferedReader(new InputStreamReader(getIt));
    
            String content;// = d.readLine(); //读取文件一行
            if((content=d.readLine())!=null){
                System.out.println(content);
            }
            //  System.out.println(content);
            d.close(); //关闭文件
            fs.close(); //关闭hdfs
            return 0;
        }
    
        public static void PrintFile() throws MalformedURLException, IOException{
            String FilePath="hdfs://hadoop102:8020/user/atguigu/txt2";//读取的为HDFS目录:hadoop dfs -ls /查看HDFS目录
            InputStream in=null;
            in=new URL(FilePath).openStream();
            IOUtils.copy(in,System.out);
    
        }
        public static void lsDir(Configuration conf,String remoteDir){
            URI uri= URI.create("hdfs://hadoop102:8020/user");
            try(FileSystem fs=FileSystem.get(uri,conf)){
                Path dirPath=new Path(uri);
                //递归获取目录下的所有文件
                RemoteIterator<LocatedFileStatus>remoteIterator=fs.listFiles(
                        dirPath,true);
                //输出每个文件的信息
                while(remoteIterator.hasNext()){
                    FileStatus s= remoteIterator.next();
                    System.out.printf("路径:"+s.getPath().toString());
                    System.out.printf("权限:"+s.getPermission().toString());
                    System.out.println("大小:"+s.getLen());
                    //返回的是时间戳,转化为时间日期格式
                    Long timeStamp=s.getModificationTime();
                    SimpleDateFormat format=new SimpleDateFormat("" +
                            "yyyy-MM-dd HH:mm:ss");
                    String date=format.format(timeStamp);
                    System.out.printf("时间:"+date);
                    System.out.println();
                }
            }catch(IOException e) {
                e.printStackTrace();
            }
        }
        //创建文件
        public static void createFile() throws IOException {
            URI uri= URI.create("hdfs://hadoop102:8020/user/atguigu/new");
            FileSystem fs = FileSystem.get(uri,conf);
            fs.mkdirs(new Path(uri));
            fs.close();
        }
        //文件上传,注意权限问题
        public static void putFile() throws IOException {
            URI uri= URI.create("hdfs://hadoop102:8020/user/atguigu/");
            FileSystem fs = FileSystem.get(uri,conf);
            fs.copyFromLocalFile(false,false,new Path("F:\file2.txt"),new Path(uri));
            fs.close();
        }
        //文件下载 只写F:\会报错,要具体到文件
        public static void downloadFile() throws IOException {
            URI uri= URI.create("hdfs://hadoop102:8020/user/atguigu/txt2/");
            FileSystem fs = FileSystem.get(uri,conf);
            fs.copyToLocalFile(false,new Path(uri),new Path("F:\txt2.txt"),true);
            fs.close();
        }
        //文件删除
        public static void deleteFile() throws IOException {
            URI uri= URI.create("hdfs://hadoop102:8020/user/atguigu/file2.txt/");
            FileSystem fs = FileSystem.get(uri,conf);
            fs.delete(new Path(uri),false);
            fs.close();
        }
        public static void main(String[] arg) throws IOException{
            Test.Config();//配置conf
    //        Test.ReadLine("/user/hadoop/txt2");
    //        Test.PrintFile();//读取HDFS下的文件
    //        //递归读取目录下的所有文件信息
    //        lsDir(conf,"/");
    //        putFile();
    //        downloadFile();
    //        deleteFile();
            createFile();
        }
        public Test(InputStream in) {
            super(in);
        }
    }
  • 相关阅读:
    swift把颜色转成图片
    第四篇:断路器(Hystrix)
    第三篇: 服务消费者(Feign)
    第二篇:服务消费者(RestTemplate+ribbon)
    第一篇:服务的注册与发现Eureka(Finchley版本)
    递归打印目录层次(java版)
    zuul熔断代码
    Window安装Erlang环境
    移动一根火柴使等式成立js版本(递归)
    mysql 存储过程 游标嵌套
  • 原文地址:https://www.cnblogs.com/fengchuiguobanxia/p/15309406.html
Copyright © 2011-2022 走看看