zoukankan      html  css  js  c++  java
  • java操作HDFS

    
    

     

    创建文件:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;
    
    import java.io.IOException;
    
    public class FileMaker {
    
        /**
         * Demonstrates creating HDFS directories: once with the default
         * permissions and once with an explicit {@link FsPermission}.
         *
         * @param args unused
         * @throws IOException if the filesystem cannot be reached or the
         *         directories cannot be created
         */
        public static void main(String[] args) throws IOException {
    
            Configuration configuration = new Configuration();
    
            // FileSystem is Closeable — try-with-resources guarantees release
            // (the original never closed it).
            try (FileSystem fileSystem = FileSystem.get(configuration)) {
    
                // BUG FIX: new Path("") throws IllegalArgumentException
                // ("Can not create a Path from an empty string");
                // a concrete directory path is required.
                fileSystem.mkdirs(new Path("/user/hadoop-twq/cmd/dir_a"));
    
                // Owner rwx, group x, others none (i.e. mode 710).
                fileSystem.mkdirs(new Path("/user/hadoop-twq/cmd/dir_b"),
                        new FsPermission(FsAction.ALL, FsAction.EXECUTE, FsAction.NONE));
            }
        }
    }

    读取文件:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    import java.io.*;
    import java.net.URI;
    
    public class FileReader {
    
        /**
         * Reads a text file from HDFS line by line and prints each line
         * to standard output.
         *
         * @param args unused
         * @throws IOException if the file cannot be opened or read
         */
        public static void main(String[] args) throws IOException {
            String dest = "hdfs://master:9999/user/hadoop-twq/cmd/java_writer.txt";
    
            Configuration configuration = new Configuration();
            // try-with-resources closes the whole reader chain AND the
            // FileSystem. The original leaked everything on exception and
            // never closed fileSystem or bufferedReader at all.
            try (FileSystem fileSystem = FileSystem.get(URI.create(dest), configuration);
                 FSDataInputStream in = fileSystem.open(new Path(dest));
                 BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in))) {
    
                // NOTE(review): InputStreamReader uses the platform default
                // charset here — pass an explicit charset if the file is
                // known to be UTF-8.
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }

    写入文件:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    import java.io.IOException;
    import java.io.OutputStream;
    import java.net.URI;
    import java.nio.charset.StandardCharsets;
    
    /**
     * 需要执行
     * hadoop fs -chmod 757 hdfs://master:9999/user/hadoop-twq/cmd/
     */
    
    public class FileWriter {
    
        /**
         * Creates (or overwrites) a file on HDFS and writes a short
         * UTF-8 string into it.
         *
         * @param args unused
         * @throws IOException if the file cannot be created or written
         */
        public static void main(String[] args) throws IOException {
            String content = "this is an example";
            String dest = "hdfs://master:9999/user/hadoop-twq/cmd/java_writer.txt";
    
            Configuration configuration = new Configuration();
            // try-with-resources closes the output stream and the FileSystem
            // even on exception — the original leaked both on failure and
            // never closed fileSystem.
            try (FileSystem fileSystem = FileSystem.get(URI.create(dest), configuration);
                 FSDataOutputStream out = fileSystem.create(new Path(dest))) {
    
                // StandardCharsets.UTF_8 instead of the "UTF-8" string:
                // compile-checked and no UnsupportedEncodingException path.
                out.write(content.getBytes(StandardCharsets.UTF_8));
            }
        }
    }

    删除文件:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.net.URI;
    
    public class FileDeleter {
    
        /**
         * Deletes a single HDFS file (non-recursively), then shows a
         * recursive directory delete.
         *
         * @param args unused
         * @throws IOException if the filesystem cannot be reached
         */
        public static void main(String[] args) throws IOException {
            String dest = "hdfs://master:9999/user/hadoop-twq/cmd/java_writer.txt";
    
            Configuration configuration = new Configuration();
            // FileSystem is Closeable — the original never closed it.
            try (FileSystem fileSystem = FileSystem.get(URI.create(dest), configuration)) {
    
                // recursive=false: only valid for files / empty directories.
                // Surface the result instead of silently ignoring it.
                boolean deleted = fileSystem.delete(new Path(dest), false);
                System.out.println("deleted " + dest + ": " + deleted);
    
                // BUG FIX: new Path("") throws IllegalArgumentException
                // ("Can not create a Path from an empty string"). A recursive
                // delete needs a concrete directory path; recursive=true
                // removes the directory and everything beneath it.
                fileSystem.delete(new Path("/user/hadoop-twq/cmd/to_remove"), true);
            }
        }
    }
     
  • 相关阅读:
    linux-shell编程-1-简介
    linux-tail
    linux-grep
    linux-sort
    linux-sed
    linux-awk
    函数调用
    选择结构和循环结构
    列表字典集合常用函数
    datetime模块
  • 原文地址:https://www.cnblogs.com/tesla-turing/p/11957502.html
Copyright © 2011-2022 走看看