package com.tdxx.hadoop.sequencefile;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;

public class SequenceFileStudy {

    /**
     * Write the given key/value pairs to a sequence file. The writer is
     * created once and every pair is appended before it is closed, because
     * createWriter truncates any existing file at filePath; re-opening the
     * writer per record would keep only the last pair.
     *
     * @param filePath output file path
     * @param conf     Hadoop configuration
     * @param keys     record keys
     * @param vals     record values, parallel to keys
     */
    public static void write2SequenceFile(String filePath, Configuration conf,
            LongWritable[] keys, LongWritable[] vals) {
        FileSystem fs = null;
        Writer writer = null;
        try {
            fs = FileSystem.get(conf);
            Path path = new Path(filePath);
            writer = SequenceFile.createWriter(fs, conf, path,
                    LongWritable.class, LongWritable.class);
            // append every pair before the single writer is closed
            for (int i = 0; i < keys.length; i++) {
                writer.append(keys[i], vals[i]);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(writer); // null-safe close
        }
    }
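
    // A minimal read-back sketch (not part of the original example), assuming
    // the same local configuration. It uses the classic SequenceFile.Reader
    // constructor that mirrors the createWriter call above; reader.next()
    // returns false once the file is exhausted.
    public static void readSequenceFile(String filePath, Configuration conf) {
        SequenceFile.Reader reader = null;
        try {
            FileSystem fs = FileSystem.get(conf);
            reader = new SequenceFile.Reader(fs, new Path(filePath), conf);
            LongWritable key = new LongWritable();
            LongWritable val = new LongWritable();
            while (reader.next(key, val)) {
                System.out.println(key.get() + "\t" + val.get());
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(reader);
        }
    }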

    /** Build a local-mode configuration: in-process job runner, local file system. */
    private static Configuration getDefaultConf() {
Configuration conf = new Configuration();
conf.set("mapred.job.tracker", "local");
conf.set("fs.default.name", "file:///");
return conf;
    }
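
    // An alternative write sketch (not part of the original example) using the
    // Writer.Option overload of createWriter, which replaces the deprecated
    // (fs, conf, path, keyClass, valClass) form in Hadoop 2.x and later.
    public static void writeWithOptions(String filePath, Configuration conf,
            LongWritable[] keys, LongWritable[] vals) throws IOException {
        Writer writer = null;
        try {
            writer = SequenceFile.createWriter(conf,
                    Writer.file(new Path(filePath)),
                    Writer.keyClass(LongWritable.class),
                    Writer.valueClass(LongWritable.class));
            for (int i = 0; i < keys.length; i++) {
                writer.append(keys[i], vals[i]);
            }
        } finally {
            IOUtils.closeStream(writer);
        }
    }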
    /**
     * Generate 20 pairs (key i, value i + 1) and write them to the
     * sequence file in a single pass.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String filePath = "data/longValue.sequence"; // output file path
        // generate the data
        LongWritable[] keys = new LongWritable[20];
        LongWritable[] values = new LongWritable[20];
        for (int i = 1; i <= 20; i++) {
            keys[i - 1] = new LongWritable(i);
            values[i - 1] = new LongWritable(i + 1);
        }
        // write everything to the sequence file in one pass
        write2SequenceFile(filePath, getDefaultConf(), keys, values);
    }
}
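
// Expected read-back (via the readSequenceFile sketch above): 20 tab-separated
// pairs, "1\t2" through "20\t21", since each value is its key plus one.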