创建用户实操
1.创建用户god
useradd god
passwd god
2.设置ssh免密
ssh-copy-id -i id_dsa node02
3.修改hdfs-site.xml中的ssh路径,并且分发
4.切换到god用户,启动HDFS
start-dfs.sh
用户权限实操
node01:
su god
hdfs dfs -mkdir /temp
hdfs dfs -chown god:ooxx /temp
hdfs dfs -chmod 770 /temp

node04:
root:
useradd good
groupadd ooxx
usermod -a -G ooxx good
id good
su good
hdfs dfs -mkdir /temp/abc    <失败
hdfs groups
输出: good:    <因为hdfs已经启动了,它不知道操作系统后来又创建了用户和组

*node01:
root:
useradd good
groupadd ooxx
usermod -a -G ooxx good
su god
hdfs dfsadmin -refreshUserToGroupsMappings

node04:
good:
hdfs groups
输出: good : good ooxx
HDFS-API实操
1.添加环境变量
HADOOP_USER_NAME god
2.启动IDEA,创建一个Maven Project
Maven网址
https://mvnrepository.com/
hdfs的pom文件:
hadoop是由common,hdfs,yarn,mapreduce组成,maven中导包2.6.5版本
实际测试:
package com.littlepage.hadoop.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.IOUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.*; import java.net.URI; public class TestHDFS { public Configuration conf=null; public FileSystem fs=null; @Before public void conn(){ try { conf=new Configuration(true);//true,为加载配置文件 //fs=FileSystem.get(conf); //fs返回是参考了fs.defaltFS标签中的值,去环境变量取一个HADOOP_USER_NAME取一个值 fs=FileSystem.get(URI.create("hdfs://mycluster/"),conf,"root"); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } @Test public void mkdir() throws IOException { Path dir=new Path("/cccc"); fs.mkdirs(dir); System.out.println("ok"); } @Test public void update() throws IOException { InputStream is= new BufferedInputStream(new FileInputStream(new File("data/hello.txt"))); Path path=new Path("/xxx/out.txt"); FSDataOutputStream outFile = fs.create(path); IOUtils.copyBytes(is,outFile,conf,true); } @Test public void block() throws IOException { //获得区块 Path file=new Path("/data.txt"); FileStatus fsta=fs.getFileStatus(file); ; BlockLocation[] bl=fs.getFileBlockLocations(fsta,0,fsta.getLen()); for (BlockLocation b:bl) { System.out.println(b); } //这里可以进行计算,计算向数据移动 FSDataInputStream in=fs.open(file); in.seek(bl.length); for(int i=0;i<100;i++){ System.out.print((char)in.readByte()); } } @After public void close(){ try { fs.close(); } catch (IOException e) { e.printStackTrace(); } } }