
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.util.Progressable;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    import java.io.BufferedInputStream;
    import java.io.BufferedReader;
    import java.io.BufferedWriter;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.OutputStreamWriter;
    import java.net.URI;

    /**
     * Hadoop HDFS Java API operations.
     */
    public class HDFSApp {

        public static final String HDFS_PATH = "hdfs://192.168.223.132:9000";

        FileSystem fileSystem = null;
        Configuration configuration = null;


        /**
         * Create an HDFS directory.
         */
        @Test
        public void mkdir() throws Exception {
            fileSystem.mkdirs(new Path("/kr"));
        }

        /**
         * Create a file and write a line into it.
         */
        @Test
        public void create() throws Exception {
            FSDataOutputStream output = fileSystem.create(new Path("/kr/hdfstest1.txt"));
            output.write("1605-1 123456 HDFS".getBytes());
            output.flush();
            output.close();
        }

        /**
         * View the contents of an HDFS file (prints only the first line).
         */
        @Test
        public void cat() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/hdfsapi/test/hdfstest2.txt"));
            BufferedReader in = new BufferedReader(new InputStreamReader(fin, "UTF-8"));
            System.out.println(in.readLine());
            in.close();
        }
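
        // Illustrative sketch (not in the original post): cat() above prints only the
        // first line. Hadoop's IOUtils.copyBytes streams the entire file; the 'false'
        // flag leaves System.out open, so only the HDFS stream is closed here.
        @Test
        public void catWholeFile() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/kr/hdfstest1.txt"));
            try {
                IOUtils.copyBytes(fin, System.out, 1024, false);
            } finally {
                fin.close();
            }
        }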


        /**
         * Copy the first line of one HDFS file into another HDFS file.
         */
        @Test
        public void copy() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/kr/hdfstest1.txt"));
            BufferedReader in = new BufferedReader(new InputStreamReader(fin, "UTF-8"));
            FSDataOutputStream fout = fileSystem.create(new Path("/hdfsapi/test/hdfstest2.txt"));
            BufferedWriter out = new BufferedWriter(new OutputStreamWriter(fout, "UTF-8"));
            out.write(in.readLine());
            out.flush();
            out.close();
            in.close();
        }

        /**
         * Rename an HDFS file.
         */
        @Test
        public void rename() throws Exception {
            Path oldPath = new Path("/hdfsapi/test/a.txt");
            Path newPath = new Path("/hdfsapi/test/b.txt");
            fileSystem.rename(oldPath, newPath);
        }

        /**
         * Upload a local file to HDFS.
         */
        @Test
        public void copyFromLocalFile() throws Exception {
            Path localPath = new Path("E:/data/input.txt");
            Path hdfsPath = new Path("/hdfsapi/test");
            fileSystem.copyFromLocalFile(localPath, hdfsPath);
        }

        /**
         * Upload a large local file to HDFS with a progress callback.
         */
        @Test
        public void copyFromLocalFileWithProgress() throws Exception {
            InputStream in = new BufferedInputStream(
                    new FileInputStream(
                            new File("/Users/rocky/source/spark-1.6.1/spark-1.6.1-bin-2.6.0-cdh5.5.0.tgz")));

            FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/spark-1.6.1.tgz"),
                    new Progressable() {
                        public void progress() {
                            System.out.print("."); // progress indicator
                        }
                    });

            IOUtils.copyBytes(in, output, 4096, true); // 'true' closes both streams when done
        }


        /**
         * Download an HDFS file to the local filesystem.
         */
        @Test
        public void copyToLocalFile() throws Exception {
            Path localPath = new Path("/Users/rocky/tmp/h.txt");
            Path hdfsPath = new Path("/hdfsapi/test/hello.txt");
            fileSystem.copyToLocalFile(hdfsPath, localPath);
        }
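
        // Illustrative variant (not in the original post): on Windows hosts without the
        // native Hadoop binaries, plain copyToLocalFile can fail while writing local
        // .crc checksum files. The four-argument overload with useRawLocalFileSystem=true
        // bypasses the checksummed local filesystem. The local path below is hypothetical.
        @Test
        public void copyToLocalFileRaw() throws Exception {
            fileSystem.copyToLocalFile(false, new Path("/hdfsapi/test/hello.txt"),
                    new Path("E:/data/hello.txt"), true);
        }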

        /**
         * List all entries directly under a directory.
         */
        @Test
        public void listFiles() throws Exception {
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));

            for (FileStatus fileStatus : fileStatuses) {
                String isDir = fileStatus.isDirectory() ? "directory" : "file";
                short replication = fileStatus.getReplication();
                long len = fileStatus.getLen();
                String path = fileStatus.getPath().toString();

                System.out.println(isDir + " " + replication + " " + len + " " + path);
            }
        }
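
        // Illustrative sketch (not in the original post): listStatus() is not recursive.
        // FileSystem.listFiles(path, true) walks the whole tree and returns a
        // RemoteIterator<LocatedFileStatus> (both types come in via org.apache.hadoop.fs.*).
        @Test
        public void listFilesRecursively() throws Exception {
            RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path("/"), true);
            while (files.hasNext()) {
                LocatedFileStatus status = files.next();
                System.out.println(status.getLen() + " " + status.getPath());
            }
        }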

        /**
         * Delete a path recursively. Note: as written this removes everything
         * under the HDFS root; point it at a test directory in practice.
         */
        @Test
        public void delete() throws Exception {
            fileSystem.delete(new Path("/"), true);
        }


        @Before
        public void setUp() throws Exception {
            configuration = new Configuration();
            // Connect to HDFS as user "keke" via FileSystem.get(URI, Configuration, user).
            fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "keke");
            System.out.println("HDFSApp.setUp");
        }

        @After
        public void tearDown() throws Exception {
            if (fileSystem != null) {
                fileSystem.close(); // release the client connection
            }
            configuration = null;
            fileSystem = null;

            System.out.println("HDFSApp.tearDown");
        }

    }
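
Outside of JUnit, the same client can be driven from a plain main method. What follows is a minimal sketch, assuming the same HDFS address and user as above (the target path and class name are hypothetical); guarding delete() with exists() avoids the pitfall in the delete() test, which recursively removes whatever path it is given.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    import java.net.URI;

    public class SafeDelete {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(
                    new URI("hdfs://192.168.223.132:9000"), new Configuration(), "keke");
            Path target = new Path("/hdfsapi/test"); // hypothetical target path
            // 'true' makes the delete recursive; check existence first.
            if (fs.exists(target)) {
                fs.delete(target, true);
            }
            fs.close();
        }
    }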

Original source: https://www.cnblogs.com/lijing925/p/9733039.html