  • Common HDFS file API operations

    1. Common file API operations

    package cn.luxh.app.util;
    
    import java.io.IOException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class HDFSUtil {
        
        /**
         * Copy a file from the local filesystem to HDFS.
         * @param srcFilePath path of the local source file
         * @param dstFilePath destination path on HDFS
         * @throws IOException
         */
        public static void copyFile2HDFS(String srcFilePath,String dstFilePath) throws IOException{
            Configuration conf = new Configuration();
    
            Path src = new Path(srcFilePath);
            Path dst = new Path(dstFilePath);
            
            FileSystem hdfs = dst.getFileSystem(conf);
            
            hdfs.copyFromLocalFile(src, dst);
            
            FileStatus[] files = hdfs.listStatus(dst);
            if (files != null) {
                for (FileStatus file : files) {
                    System.out.println("the file is: " + file.getPath().getName());
                }
            } else {
                System.out.println("no files");
            }
            hdfs.close();
        }
        
        /**
         * Create a file on HDFS with the given content.
         * @param content text to write into the new file
         * @param dstFile destination path on HDFS
         * @throws IOException
         */
        public static void createFileInHDFS(String content,String dstFile) throws IOException {
            Configuration conf = new Configuration();
            Path dst = new Path(dstFile);
            FileSystem hdfs = null;
            FSDataOutputStream out = null;
            try {
                hdfs = dst.getFileSystem(conf);
                out = hdfs.create(dst);
                out.write(content.getBytes("UTF-8")); // writeBytes() would drop the high byte of each char
                out.flush();
            } catch (IOException e) {
                e.printStackTrace();
                throw new IOException(e);
            } finally {
                if (out != null) {
                    try {
                        out.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                if (hdfs != null) {
                    hdfs.close();
                }
            }
            
        }
        
        /**
         * Rename a file on HDFS.
         * @param originalFile current path of the file
         * @param newFile new path of the file
         * @throws IOException
         */
        public static void renameFileInHDFS(String originalFile,String newFile) throws IOException {
            Configuration conf = new Configuration();
            Path originalPath = new Path(originalFile);
            Path newPath = new Path(newFile);
            FileSystem hdfs = newPath.getFileSystem(conf);
            boolean flag = hdfs.rename(originalPath, newPath);
            hdfs.close();
            System.out.println("is renamed: " + flag);
        }
        
        /**
         * Print the last modification time of a file.
         * @param dstFile path of the file on HDFS
         * @throws IOException
         */
        public static void getFileLastModifyTime(String dstFile) throws IOException{
            Configuration conf = new Configuration();
            Path dstPath = new Path(dstFile);
            FileSystem hdfs =  dstPath.getFileSystem(conf);
            FileStatus file = hdfs.getFileStatus(dstPath);
            long time = file.getModificationTime();
            hdfs.close();
            System.out.println("the last modify time is : "+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(time)));
            
        }
        
        /**
         * Check whether a file exists on HDFS.
         * @param dstFile path of the file on HDFS
         * @throws IOException
         */
        public static void checkFileIsExists(String dstFile) throws IOException {
            Configuration conf = new Configuration();
            Path dstPath = new Path(dstFile);
            FileSystem hdfs =  dstPath.getFileSystem(conf);
            boolean flag = hdfs.exists(dstPath);
            hdfs.close();
            System.out.println("is the file exists:"+flag);
        }
        
        /**
         * Print the hosts storing the blocks of a file.
         * @param dstFile path of the file on HDFS
         * @throws IOException
         */
        public static void getFileLocations(String dstFile) throws IOException {
            Configuration conf = new Configuration();
            Path dstPath = new Path(dstFile);
            FileSystem hdfs =  dstPath.getFileSystem(conf);
            FileStatus file = hdfs.getFileStatus(dstPath);
            BlockLocation[] blkLocations = hdfs.getFileBlockLocations(file, 0, file.getLen());
            if (blkLocations != null) {
                for (BlockLocation blk : blkLocations) {
                    for (String host : blk.getHosts()) {
                        System.out.println("the block location's host is: " + host);
                    }
                }
            }
            
            hdfs.close();
        }
        
        /**
         * Delete a file (or directory, recursively) from HDFS.
         * @param dstFile path to delete
         * @throws IOException
         */
        public static void deleteFile(String dstFile) throws IOException {
            Configuration conf = new Configuration();
            Path dstPath = new Path(dstFile);
            FileSystem hdfs =  dstPath.getFileSystem(conf);
            boolean flag = hdfs.delete(dstPath, true);
            hdfs.close();
            System.out.println("is deleted: " + flag);
        }
    }
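
    The class above covers copying, creating, renaming, checking, locating and deleting files, but not reading one back. The sketch below shows a possible read helper in the same style; the method name readFileFromHDFS is not part of the original class, and it would additionally need imports for java.io.ByteArrayOutputStream, org.apache.hadoop.fs.FSDataInputStream and org.apache.hadoop.io.IOUtils.

    /**
     * Read a file from HDFS and return its content as a UTF-8 string.
     * (Sketch only; not part of the original HDFSUtil class.)
     */
    public static String readFileFromHDFS(String dstFile) throws IOException {
        Configuration conf = new Configuration();
        Path dst = new Path(dstFile);
        FileSystem hdfs = dst.getFileSystem(conf);
        try (FSDataInputStream in = hdfs.open(dst);
             ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            // Copy the whole stream; the final 'false' leaves closing to try-with-resources.
            IOUtils.copyBytes(in, bos, conf, false);
            return bos.toString("UTF-8");
        } finally {
            hdfs.close();
        }
    }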

    2. Tests

    package cn.luxh.app.test;
    
    import java.io.IOException;
    
    import org.junit.Test;
    
    import cn.luxh.app.util.HDFSUtil;
    
    public class HDFSTester {
        
        @Test
        public void testCopyFile2HDFS() throws IOException {
            String srcFilePath = "/home/coder/words.txt";
            String dstFilePath = "hdfs://h1:9000/user/coder/in";
            HDFSUtil.copyFile2HDFS(srcFilePath, dstFilePath);
        }
        
        @Test
        public void testCreateFileInHDFS() throws IOException {
            String content = "hey,Hadoop.";
            String dstFile = "hdfs://h1:9000/user/coder/in/hey";
            HDFSUtil.createFileInHDFS(content, dstFile);
        }
        
        @Test
        public void testRenameFileInHDFS() throws IOException {
            String originalFile = "hdfs://h1:9000/user/coder/in/hey";
            String newFile = "hdfs://h1:9000/user/coder/in/hey_hadoop";
            HDFSUtil.renameFileInHDFS(originalFile, newFile);
        }
        
        @Test
        public void testGetFileLastModifyTime() throws IOException {
            String dstFile = "hdfs://h1:9000/user/coder/in/hey_hadoop";
            HDFSUtil.getFileLastModifyTime(dstFile);
        }
        
        @Test
        public void testCheckFileIsExists() throws IOException {
            String dstFile = "hdfs://h1:9000/user/coder/in/hey_hadoop";
            HDFSUtil.checkFileIsExists(dstFile);
        }
        
        @Test
        public void testGetFileLocations() throws IOException {
            String dstFile = "hdfs://h1:9000/user/coder/in/hey_hadoop";
            HDFSUtil.getFileLocations(dstFile);
        }
        
        @Test
        public void testDeleteFile() throws IOException {
            String dstFile = "hdfs://h1:9000/user/coder/output";
            HDFSUtil.deleteFile(dstFile);
        }
    }
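
    The tests hardcode the cluster URI hdfs://h1:9000 into every path. As a sketch of an alternative (the host and port are simply the values used in the tests above and will differ per cluster), the default filesystem can instead be set in the Configuration, via fs.defaultFS on Hadoop 2.x or fs.default.name on older releases, which lets the code use plain paths such as /user/coder/in:

    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://h1:9000");   // or place core-site.xml on the classpath
    FileSystem fs = FileSystem.get(conf);         // resolved from fs.defaultFS
    for (FileStatus file : fs.listStatus(new Path("/user/coder/in"))) {
        System.out.println(file.getPath().getName());
    }
    fs.close();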
  • Original post: https://www.cnblogs.com/luxh/p/2996726.html