一、环境问题
maven
<dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>4.13.2</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.7.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.7.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.7.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
    <version>2.7.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-common</artifactId>
    <version>2.7.1</version>
</dependency>
复制代码
二、提示无权限
vim hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/hadoop/tmp/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/hadoop/tmp/dfs/data</value>
</property>
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
</property>
</configuration>
三、API 操作
package hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
public class hdfsapi {
/** * ./hadoop fs -ls / * @throws IOException */@Testpublic void createFolder() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("/test2"); fs.mkdirs(path, FsPermission.getDirDefault()); System.out.println("finished");}
/** * ./hadoop fs -ls /test2/ * @throws IOException */@Testpublic void createFile() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("/test2/a1.txt"); FSDataOutputStream out = fs.create(path); out.write("11111".getBytes());
System.out.println("finished");}
/** * ./hadoop fs -ls /test2/ * @throws IOException */@Testpublic void renameFile() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("/test2/a1.txt"); Path path2=new Path("/test2/a2.txt"); boolean result = fs.rename(path, path2);
System.out.println(result);}
/** * ./hadoop fs -ls /test2/ * @throws IOException */@Testpublic void uploadFile() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("F:\\5、开发语言\\java\\it.cast\\knowledge\\src\\main\\resources\\1.txt"); Path path2=new Path("/test2/a3.txt"); fs.copyFromLocalFile(path,path2);
System.out.println("finished.");}
/** * 上传文件方式2 显示进度 * @throws IOException */@Testpublic void uploadFile2() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); InputStream in =new BufferedInputStream(new FileInputStream(new File("D:\\software\\CodeSmithGeneratoTemplates.7z"))); FSDataOutputStream out=fs.create(new Path("/test2/a4.7z"),new Progressable(){ @Override public void progress(){ System.out.println("..."); } }); IOUtils.copyBytes(in,out,4096);
System.out.println("finished.");}
/** * ./hadoop fs -ls /test2/ * @throws IOException */@Testpublic void getFileList() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("/test2"); FileStatus[] fileStatuses = fs.listStatus(path); for (FileStatus fileStatus:fileStatuses) { System.out.println(fileStatus.getPath()); }}
/** * ./hadoop fs -ls /test2/ * @throws IOException */@Testpublic void getFileBlock() throws IOException { Configuration conf=new Configuration(); FileSystem fs=FileSystem.get(URI.create("hdfs://192.168.253.128:9000"),conf); Path path=new Path("/test2/a4.7z"); FileStatus fileStatus=fs.getFileStatus(path); BlockLocation[] fileBlockLocations = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen()); for(BlockLocation loc:fileBlockLocations){ for(int i=0;i<loc.getHosts().length;i++){ System.out.println(loc.getHosts()[i]); } }}
复制代码
}
评论