"""Demo of common pyhdfs operations against an HA HDFS cluster.

Covers: reading a file, appending to a file, copying between the local
filesystem and HDFS, and basic directory operations. Requires a reachable
NameNode pair on port 9870 (WebHDFS).
"""
import contextlib

import pyhdfs

# Two NameNodes listed for HA failover; user_name sets the WebHDFS identity.
fs = pyhdfs.HdfsClient(hosts='name-node1:9870,name-node2:9870', user_name='hdfs')

# --- Reading a file ---
# closing() guarantees the HTTP response is released even if a read fails
# (the original left this response unclosed).
with contextlib.closing(fs.open('/tmp/README.txt')) as response:
    response.read()        # read the whole remaining content
    response.readline()    # returns b'' here since read() consumed everything
    response.seek(0)       # rewind to start; seek() requires an offset

with contextlib.closing(fs.open('/fruit/apple')) as f:
    f.read()

# --- Writing (append to an existing file) ---
fs.append("/user/hadoop/test.csv", "0,2,0 ")

# --- Copying between local filesystem and HDFS ---
fs.copy_from_local('/opt/test/wangbin/addr.py', '/tmp/addr.py')
# NOTE(review): placeholder paths — fill in (local_src, hdfs_dest) before use.
fs.copy_to_local('', '')

# --- Directory operations ---
fs.listdir("/user/hadoop/")
fs.mkdirs('/user/hadoop/newdir')   # mkdirs() requires a path argument
fs.exists('/path')
# NOTE(review): placeholder paths — fill in before use; recursive=True
# removes a non-empty directory tree.
fs.delete('')
fs.delete('', recursive=True)