经常用到,在这里做个备忘。
如果是单机版 CentOS(参照给力星的 Hadoop 配置教程),要在 core-site.xml 里把 fs.defaultFS 配置成 ip:9000,而不是 localhost:9000,否则 Windows 客户端无法识别。
package com.xuliugen.hdfs;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Minimal interactive HDFS "personal cloud drive" console tool.
 *
 * <p>Presents a numeric menu and performs one HDFS operation per run:
 * create/delete a directory, upload/download/read/list/delete files.
 * Every HDFS location is entered by the user as a full URI
 * (e.g. {@code hdfs://192.168.51.227:9000/path}).
 *
 * <p>NOTE(review): the lower-case class name {@code success} violates Java
 * naming conventions but is kept so existing callers/launch scripts still work.
 */
public class success {

    /**
     * Single shared scanner over System.in, never closed.
     *
     * <p>BUG FIX: the original created a fresh {@code Scanner(System.in)} for
     * every prompt and closed some of them. Closing a Scanner closes the
     * underlying {@code System.in}, so every subsequent read failed.
     */
    private static final Scanner STDIN = new Scanner(System.in);

    /**
     * Prints {@code msg}, reads the next whitespace-delimited token from
     * stdin, echoes it back (same echo text as the original), and returns it.
     */
    private static String prompt(String msg) {
        System.out.println(msg);
        String value = STDIN.next();
        System.out.println("输入的数据为:" + value);
        return value;
    }

    /**
     * Creates a directory on HDFS.
     *
     * <p>Asks for the cluster root URI (e.g. {@code hdfs://ip:9000}) and a
     * directory name, then concatenates them — so the directory name must
     * start with {@code /} exactly as in the original behavior.
     *
     * @throws IOException if the HDFS call fails
     */
    public static void creat_dir() throws IOException {
        String rootPath = prompt("请输入云文件的地址");
        String dirName = prompt("请输入要创建的目录名");
        Path p = new Path(rootPath + dirName);
        Configuration conf = new Configuration();
        FileSystem fs = p.getFileSystem(conf);
        try {
            // mkdirs creates all missing parents, like `mkdir -p`
            boolean created = fs.mkdirs(p);
            System.out.println(created);
        } finally {
            fs.close(); // BUG FIX: original leaked fs when mkdirs threw
        }
        System.out.println("目录已经创建完成");
    }

    /**
     * Recursively deletes a directory (and everything inside it) at the full
     * HDFS URI entered by the user. Errors are printed, not rethrown,
     * matching the original contract (no {@code throws} clause).
     */
    public static void delect_dir() {
        String uri = prompt("请输入要删除的内容");
        Path path = new Path(uri);
        Configuration conf = new Configuration();
        try {
            FileSystem fs = path.getFileSystem(conf);
            try {
                // second argument true => recursive delete
                boolean deleted = fs.delete(path, true);
                System.out.println(deleted);
            } finally {
                fs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Copies a file or directory from HDFS to the local filesystem.
     *
     * @throws IOException if the copy fails
     */
    public static void download() throws IOException {
        String hdfsPath = prompt("请输入原目录");
        String localPath = prompt("请输入要下载到的目录");
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        try {
            fs.copyToLocalFile(new Path(hdfsPath), new Path(localPath));
        } finally {
            fs.close(); // BUG FIX: original leaked fs when the copy threw
        }
        System.out.println("下载过程完成");
    }

    /**
     * Deletes a single file (recursively, so a directory URI also works) at
     * the full HDFS URI entered by the user. Errors are printed, not
     * rethrown, matching the original contract.
     */
    public static void file_delect() {
        String uri = prompt("请输入要删除的具体内容");
        Path path = new Path(uri);
        Configuration conf = new Configuration();
        try {
            FileSystem fs = path.getFileSystem(conf);
            try {
                boolean deleted = fs.delete(path, true);
                System.out.println(deleted);
            } finally {
                fs.close();
            }
            System.out.println("文件已删除");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Lists the immediate children of an HDFS directory, printing each entry
     * prefixed with {@code File:} or {@code Dir:}.
     *
     * @throws IOException if the listing fails
     */
    public static void Listfiles() throws IOException {
        String uri = prompt("列出目录下的文件或目录,请输入地址");
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), cfg);
        try {
            for (FileStatus f : fs.listStatus(new Path(uri))) {
                if (f.isFile()) {
                    System.out.println("File:" + f.getPath().toString());
                } else {
                    System.out.println("Dir:" + f.getPath().toString());
                }
            }
        } finally {
            fs.close(); // BUG FIX: original never closed fs in this method
        }
    }

    /**
     * Streams the content of an HDFS file to stdout.
     *
     * @throws IOException if the filesystem cannot be obtained
     */
    public static void ReadFile() throws IOException {
        // BUG FIX: original prompt said "请输入要下载到的目录" — a copy-paste
        // from download(); this method reads a file, so ask for a file URI.
        String uri = prompt("请输入要读取的文件地址");
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), cfg);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            // false => do not close System.out after copying
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        } finally {
            IOUtils.closeStream(in);
            fs.close(); // BUG FIX: original never closed fs in this method
        }
    }

    /**
     * Uploads a local file to HDFS under a user-chosen destination URI
     * (which also determines the uploaded file's name).
     *
     * @throws IOException if the local file cannot be read or the upload fails
     */
    public static void shangchuan() throws IOException {
        String local = prompt("请输入要上传的具体文件名称");
        String dest = prompt("请输入到达哪个云盘的目录下并改名称为");
        InputStream in = new BufferedInputStream(new FileInputStream(local));
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dest), cfg);
        try {
            OutputStream out = fs.create(new Path(dest));
            // true => copyBytes closes both in and out when done (the
            // original's extra closeStream(in) afterwards was redundant)
            IOUtils.copyBytes(in, out, 4096, true);
        } finally {
            fs.close();
        }
        System.out.println("上传完成");
    }

    /**
     * Entry point: shows the menu, reads one choice, runs that operation.
     *
     * @param args unused
     * @throws IOException propagated from the selected operation
     */
    public static void main(String[] args) throws IOException {
        System.out.println("欢迎来到自定义小云盘的世界");
        System.out.println("请输入您的需求前的数字");
        System.out.println("0:创建目录");
        System.out.println("1:删除目录");
        System.out.println("2:下载云盘文件到本地");
        System.out.println("3:列出所输入路径下的所有内容");
        System.out.println("4:云盘文件的删除");
        System.out.println("5:阅读云盘内容");
        System.out.println("6:上传本地文件到云盘");
        System.out.println("7:退出");
        int choice = 0;
        // BUG FIX: guard with hasNextInt() — the original used hasNext() and
        // then nextInt(), which threw InputMismatchException on non-numeric
        // input instead of falling through to the default branch.
        if (STDIN.hasNextInt()) {
            choice = STDIN.nextInt();
            System.out.println("输入的数据为:" + choice);
        } else if (STDIN.hasNext()) {
            STDIN.next(); // discard the non-numeric token; choice stays 0
        }
        switch (choice) {
            case 0: creat_dir(); break;
            case 1: delect_dir(); break;
            case 2: download(); break;
            case 3: Listfiles(); break;
            case 4: file_delect(); break;
            case 5: ReadFile(); break;
            case 6:
                shangchuan();
                break;
            case 7:
                // BUG FIX: the menu labels 7 as 退出 (exit), but the original
                // called main(null), restarting the menu instead of exiting.
                break;
            default:
                System.out.println("default");
                break;
        }
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.xuliugen.kafka</groupId>
    <artifactId>kafka.demo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.12</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.12</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.8.5</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.8.5</version>
        </dependency>
    </dependencies>
</project>