  • Essential Java operations on HDFS

    Essential HDFS operations
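
    All of the snippets below talk to the NameNode at hdfs://hadoop101:8020 as user root through the Hadoop FileSystem API. A minimal sketch of the imports they rely on (assuming the standard hadoop-client artifact is on the classpath):

        import java.io.BufferedReader;
        import java.io.File;
        import java.io.FileInputStream;
        import java.io.IOException;
        import java.io.InputStreamReader;
        import java.net.URI;
        import java.net.URISyntaxException;
        import org.apache.hadoop.conf.Configuration;
        import org.apache.hadoop.fs.FSDataInputStream;
        import org.apache.hadoop.fs.FSDataOutputStream;
        import org.apache.hadoop.fs.FileStatus;
        import org.apache.hadoop.fs.FileSystem;
        import org.apache.hadoop.fs.LocatedFileStatus;
        import org.apache.hadoop.fs.Path;
        import org.apache.hadoop.fs.RemoteIterator;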

    Create a directory

    // Create a directory
        public static void mkdir(String filePath) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path path = new Path(filePath);
            fs.mkdirs(path);
            System.out.println("Directory created: " + filePath);
            fs.close();
        }

    Create a file

    // Create a file
        public static void createFile(String remoteFilePath) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path remotePath = new Path(remoteFilePath);
            FSDataOutputStream outputStream = fs.create(remotePath);
            outputStream.close();
            fs.close();
            System.out.println("File created: " + remoteFilePath);
        }

    Delete a file

    // Delete a file
        public static void deleteFile(String filePath) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path path = new Path(filePath);
            // delete() removes the path immediately; the second argument enables recursive deletion
            if (fs.delete(path, true)) {
                System.out.println("File deleted: " + filePath);
            } else {
                System.out.println("Failed to delete file: " + filePath);
            }
            fs.close();
        }

    Move a file to the local file system

    // Move a file from HDFS to the local file system
        public static void moveToLocalFile(String remoteFilePath, String localFilePath) throws IOException, InterruptedException, URISyntaxException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path remotePath = new Path(remoteFilePath);
            Path localPath = new Path(localFilePath);
            fs.moveToLocalFile(remotePath, localPath);
            fs.close();
        }

    Display the contents of a file

    // Display the contents of a file
        public static void cat(String file) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path filePath = new Path(file);
            if (fs.exists(filePath)) {
                FSDataInputStream in = fs.open(filePath);
                BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String content = null;
                while ((content = br.readLine()) != null) {
                    System.out.println(content);
                }
                br.close();
            } else {
                System.out.println("File " + filePath + " doesn't exist.");
            }
            fs.close();
        }

    Move a file within HDFS

    // Move (rename) a file within HDFS
        public static void moveFile(String srcPath, String dirPath) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            if (fs.exists(new Path(dirPath))) {
                System.out.println("Destination already exists: " + dirPath);
                fs.close();
                return;
            }
            if (fs.rename(new Path(srcPath), new Path(dirPath))) {
                System.out.println("File moved successfully.");
            } else {
                System.out.println("Failed to move file.");
            }
            fs.close();
        }

    Upload and download

    // Upload a local file to HDFS
        public static void copyFromLocalFile(String localPath, String remotePath) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            fs.copyFromLocalFile(new Path(localPath), new Path(remotePath));
            System.out.println("Upload succeeded!");
            fs.close();
        }

        // Download a file from HDFS to the local file system
        public static void downFromHdfs(String src, String dst) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path dstPath = new Path(dst);
            // Arguments: whether to delete the source, source path, destination path,
            // whether to use the raw local file system (true avoids writing .crc checksum files)
            fs.copyToLocalFile(false, new Path(src), dstPath, true);
            fs.close();
            System.out.println("File downloaded to " + dst);
        }

    Append content to the end of a file

    // Append content to the end of a file
        public static void addContentToTail(String filePath, String content, boolean head) throws IOException, InterruptedException, URISyntaxException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path path = new Path(filePath);
            FSDataOutputStream ops = fs.append(path);
            ops.write(content.getBytes());
            // The head flag suppresses this message when the method is reused by addContentToHead
            if (!head) {
                System.out.println("Content appended to the end of the file.");
            }
            ops.close();
            fs.close();
        }
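
    Note: fs.append() requires a cluster that supports appends; on a pseudo-distributed or very small cluster (fewer than three DataNodes) it can fail with a "Failed to replace a bad datanode" error. A sketch of the client-side Configuration commonly used to relax that check (an assumption about your cluster's defaults, not part of the original code):

        // Sketch only: relax the datanode replacement policy so append() works on small clusters
        Configuration conf = new Configuration();
        conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), conf, "root");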

    Append the contents of a local file to the end of an HDFS file

    // Append the contents of a local file to the end of an HDFS file
        public static void addFileToTail(String localFilePath, String remoteFilePath) throws IOException, InterruptedException, URISyntaxException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path remotePath = new Path(remoteFilePath);
            // Input stream for the local file whose contents will be appended
            FileInputStream inps = new FileInputStream(localFilePath);
            // Output stream that appends to the end of the HDFS file
            FSDataOutputStream ops = fs.append(remotePath);
            byte[] buffer = new byte[1024];
            int read = -1;
            while ((read = inps.read(buffer)) > 0) {
                ops.write(buffer, 0, read);
            }
            ops.close();
            inps.close();
            fs.close();
        }
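
    The manual read/write loop above can also be replaced by Hadoop's own copy helper; a sketch of the same body, assuming org.apache.hadoop.io.IOUtils is imported:

        // Alternative sketch: let IOUtils copy the stream instead of a manual buffer loop
        try (FileInputStream in = new FileInputStream(localFilePath);
             FSDataOutputStream out = fs.append(remotePath)) {
            IOUtils.copyBytes(in, out, 4096, false); // 4 KB buffer; streams are closed by try-with-resources
        }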

    Prepend content to the beginning of an HDFS file

    // Prepend content to the beginning of a file
        public static void addContentToHead(String filePath, String content) throws IOException, InterruptedException, URISyntaxException {
            // Path of a temporary local file
            String localFilePath = new File("").getCanonicalPath() + "\\web\\text\\xlf.txt";
            // Move the HDFS file to the temporary local location
            moveToLocalFile(filePath, localFilePath);
            // Recreate the HDFS file (empty)
            createFile(filePath);
            // Write the new content first, then append the original file contents after it
            addContentToTail(filePath, content, true);
            addFileToTail(localFilePath, filePath);
            System.out.println("Content prepended to the beginning of the file.");
        }

    Check whether a file exists in HDFS

    // Check whether a file exists; create it if it does not
        public static void existAndCreate(String path) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path path1 = new Path(path);
            if (fs.exists(path1)) {
                System.out.println("File already exists!");
            } else {
                FSDataOutputStream ops = fs.create(path1);
                ops.close();
            }
            fs.close();
        }

    List file information under a directory (recursively)

    /* List the files directly under a directory */
        public static void printFile(String file) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            FileStatus[] statuses = fs.listStatus(new Path(file));
            for (FileStatus s : statuses) {
                System.out.println("Permissions: " + s.getPermission() + "; block size: " + s.getBlockSize()
                        + "; path: " + s.getPath() + "; last modified: " + s.getModificationTime());
            }
            fs.close();
        }

        /* Recursively list the files under a directory */
        public static void printFileInfo(String file) throws URISyntaxException, IOException, InterruptedException {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:8020"), new Configuration(), "root");
            Path path = new Path(file);
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(path, true);
            while (iterator.hasNext()) {
                FileStatus s = iterator.next();
                System.out.println("Permissions: " + s.getPermission() + "; block size: " + s.getBlockSize()
                        + "; path: " + s.getPath() + "; last modified: " + s.getModificationTime());
            }
            fs.close();
        }
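
    A small driver showing how the methods above might be called together; the HDFS and local paths here are illustrative only and should be adapted to your own cluster and machine:

        public static void main(String[] args) throws Exception {
            mkdir("/user/root/demo");
            createFile("/user/root/demo/a.txt");
            addContentToTail("/user/root/demo/a.txt", "hello hdfs\n", false);
            cat("/user/root/demo/a.txt");
            copyFromLocalFile("D:\\tmp\\local.txt", "/user/root/demo/local.txt");
            printFile("/user/root/demo");
            deleteFile("/user/root/demo/a.txt");
        }
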
  • Original post: https://www.cnblogs.com/xiaofengzai/p/13820724.html