Java操作HDFS
```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Date;
public class TestHdfsApi {
// Thread-safe and reusable, unlike SimpleDateFormat; cache as a constant.
private static final DateTimeFormatter TIMESTAMP_FORMAT =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
/**
 * Demonstrates basic HDFS operations through the Java API:
 * create a directory, (optionally) upload/download a file, then recursively
 * list files and print their block locations, owner, name, permissions and
 * last-modified timestamp.
 *
 * @param args unused
 * @throws IOException if communication with the NameNode/DataNodes fails
 */
public static void main(String[] args) throws IOException {
Configuration conf = new Configuration();//org.apache.hadoop.conf
// Point the client at the cluster's NameNode.
conf.set("fs.defaultFS","hdfs://master01:9000");
// FileSystem is Closeable; try-with-resources guarantees the handle is
// released even when one of the operations below throws (the original
// only closed it on the happy path).
try (FileSystem fs = FileSystem.get(conf)) {
boolean created = fs.mkdirs(new Path("/shujia/bigdata19/"));
System.out.println(created);
// Upload a file (use an absolute local path):
// fs.copyFromLocalFile(new Path("F://software//IdeaProjects//bigdata19-project//bigdata19-hadoop//src//data//students.txt"),new Path("/shujia/bigdata19/students.txt"));
// Download a file:
// fs.copyToLocalFile(new Path("/shujia/bigdata19/students.txt"),new Path("F://software//IdeaProjects//bigdata19-project//bigdata19-hadoop//src//data//students2.txt"));
// Recursively list every file under the directory.
RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/shujia/bigdata19"), true);
while (listFiles.hasNext()){
LocatedFileStatus fileStatus = listFiles.next();
// Print the datanode host:port pairs that hold each block of the file.
BlockLocation[] blockLocations = fileStatus.getBlockLocations();
for (BlockLocation location : blockLocations) {
System.out.println(Arrays.toString(location.getNames()));
}
// Owner of the file.
System.out.println(fileStatus.getOwner());
// File name (last path component).
System.out.println(fileStatus.getPath().getName());
// Permission bits, e.g. rw-r--r--.
FsPermission permission = fileStatus.getPermission();
System.out.println(permission);
// BUG FIX: the original called getAccessTime() although the intent
// (per its comment) was the last-modified time.
long modificationTime = fileStatus.getModificationTime();
System.out.println(modificationTime);
System.out.println(TIMESTAMP_FORMAT.format(
        Instant.ofEpochMilli(modificationTime).atZone(ZoneId.systemDefault())));
}
}
}
}
```
原创文章,作者:ItWorker,如若转载,请注明出处:https://blog.ytso.com/281727.html