// Source: blog post by ylnsf, 2024-03-22 19:41:46 (visible to all, 3 reads).
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
public class HDFStest1 {

    /**
     * Base HDFS directory. The trailing "/" makes each student ID a proper
     * subdirectory; the original constant lacked it, so "hdfsDir" + studentId
     * collapsed into the single path component "hdfsDir102202113".
     */
    private static final String hdfsDir = "hdfsDir/"; // HDFS directory

    /**
     * Demonstrates basic HDFS operations end to end: create a per-student
     * directory, upload a local file, overwrite it with generated content,
     * read it back to the console, then delete the directory.
     *
     * <p>All streams and the {@link FileSystem} handle are managed with
     * try-with-resources so they are released even when an operation throws
     * (the original closed them only on the happy path).
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String studentId = "102202113";                       // student ID
        String studentName = "xuxuanyu";                      // name in pinyin
        String localFilePath = "/home/xuxuanyu/xuxuanyu.txt"; // file to upload
        String hdfsDirPath = hdfsDir + studentId;
        String hdfsFilePath = hdfsDirPath + "/" + studentName + ".txt";

        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            // Create the per-student directory on HDFS if it is not there yet.
            Path dir = new Path(hdfsDirPath);
            if (!fs.exists(dir)) {
                fs.mkdirs(dir);
            } else {
                System.out.println("Directory already exists: " + hdfsDirPath);
            }

            // Upload the local file to HDFS. NOTE(review): the fs.create()
            // below immediately overwrites this file; the upload is kept to
            // exercise copyFromLocalFile, matching the original behavior.
            fs.copyFromLocalFile(new Path(localFilePath), new Path(hdfsFilePath));

            // Overwrite the file with one generated line stamped with today's
            // date. ISO_LOCAL_DATE renders "yyyy-MM-dd", the same pattern the
            // original produced via the non-thread-safe SimpleDateFormat.
            String content = studentName + "-" + studentId + " added on "
                    + LocalDate.now().format(DateTimeFormatter.ISO_LOCAL_DATE);
            try (FSDataOutputStream out = fs.create(new Path(hdfsFilePath))) {
                out.writeBytes(content);
            }

            // Read the file back and echo each line to the console. Charset is
            // explicit: the original relied on the platform default.
            try (FSDataInputStream in = fs.open(new Path(hdfsFilePath));
                 BufferedReader reader = new BufferedReader(
                         new InputStreamReader(in, StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }

            // Recursive delete removes the directory and everything inside it.
            fs.delete(dir, true);
        } catch (IOException e) {
            // Surface the failure; a real application would log and propagate.
            e.printStackTrace();
        }
    }
}