Simple file write operation:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class writeFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            // If the configuration files have not been copied into the bin folder,
            // the following two lines are required:
            //conf.set("fs.defaultFS", "hdfs://localhost:9000");
            //conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            FileSystem fs = FileSystem.get(conf);
            byte[] buffer = "Hello world!".getBytes();
            String filename = "hdfs://localhost:9000/user/hadoop/file5.txt";

            // Create the file on HDFS and write the buffer into it
            FSDataOutputStream os = fs.create(new Path(filename));
            os.write(buffer, 0, buffer.length);

            System.out.println("create " + filename + " successfully");
            os.close();
            fs.close();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}
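Note that FileSystem.create(Path) overwrites an existing file by default. A minimal sketch of guarding against that with fs.exists() is shown below; the class name WriteIfAbsent is an illustrative assumption and not part of the original example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative sketch (class name is an assumption): only create the file
// when it does not already exist, since create() overwrites by default.
public class WriteIfAbsent {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            FileSystem fs = FileSystem.get(conf);

            Path path = new Path("/user/hadoop/file5.txt");
            if (fs.exists(path)) {
                System.out.println(path + " already exists, skipping write");
            } else {
                FSDataOutputStream os = fs.create(path);
                os.write("Hello world!".getBytes());
                os.close();
                System.out.println("create " + path + " successfully");
            }
            fs.close();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}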
Simple file read operation:
import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class readFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            FileSystem fs = FileSystem.get(conf);

            Path file = new Path("hdfs://localhost:9000/user/hadoop/file5.txt");

            // Open the file and read its first line
            FSDataInputStream is = fs.open(file);
            BufferedReader bd = new BufferedReader(new InputStreamReader(is));
            String content = bd.readLine();

            System.out.println(content);

            bd.close();
            fs.close();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}
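The readLine() call above only prints the first line of the file. If the whole file is needed, one option is to copy the input stream to standard output with Hadoop's IOUtils.copyBytes(); the sketch below assumes the same file path, and the class name ReadWholeFile is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Illustrative sketch (class name is an assumption): dump the entire file
// to standard output instead of reading a single line.
public class ReadWholeFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            FileSystem fs = FileSystem.get(conf);

            FSDataInputStream is = fs.open(new Path("/user/hadoop/file5.txt"));
            // Copy the stream to stdout with a 4 KB buffer; false = do not
            // close the streams automatically, we close them explicitly below.
            IOUtils.copyBytes(is, System.out, 4096, false);

            is.close();
            fs.close();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}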