文件简单写操作:
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Demo: writes the string "Hello world!" to a file on HDFS.
 *
 * <p>Expects the Hadoop configuration files to be on the classpath;
 * otherwise uncomment the two {@code conf.set(...)} lines below.
 */
public class writeFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            // If the config files were not added to the bin folder,
            // the following two lines are needed:
            //conf.set("fs.defaultFS","hdfs://localhost:9000" );
            //conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            String filename = "hdfs://localhost:9000/user/hadoop/file5.txt";
            // Explicit charset: the no-arg getBytes() uses the platform
            // default encoding, which is not portable across machines.
            byte[] buffer = "Hello world!".getBytes(StandardCharsets.UTF_8);
            // try-with-resources closes the output stream and the
            // FileSystem even if the write throws (the original leaked
            // both on any exception before os.close()).
            try (FileSystem fs = FileSystem.get(conf);
                 FSDataOutputStream os = fs.create(new Path(filename))) {
                os.write(buffer, 0, buffer.length);
            }
            // Report success only after the stream has been flushed and
            // closed, so a failed close is not reported as success.
            System.out.println("create " + filename + " successfully");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
文件简单读操作:
import java.io.BufferedReader;
import java.io.InputStreamReader; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.nio.charset.StandardCharsets;

/**
 * Demo: opens a file on HDFS and prints its first line.
 *
 * <p>Connects to the NameNode at {@code hdfs://localhost:9000}.
 */
public class readFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            Path file = new Path("hdfs://localhost:9000/user/hadoop/file5.txt");
            // try-with-resources closes the reader, the input stream and
            // the FileSystem even on exception (the original leaked all
            // three if readLine or open threw).
            try (FileSystem fs = FileSystem.get(conf);
                 FSDataInputStream is = fs.open(file);
                 BufferedReader bd = new BufferedReader(
                         new InputStreamReader(is, StandardCharsets.UTF_8))) {
                // Intentionally reads only the first line, matching the
                // original demo's behavior.
                String content = bd.readLine();
                System.out.println(content);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}