现在的位置: 首页 > 综合 > 正文

mahout classify 输入编程

2014年01月26日 ⁄ 综合 ⁄ 共 1223字 ⁄ 字号 评论关闭

package com.unicom.classifiers;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;
public class DataToSeq {

    /**
     * Converts a tab-separated text file into a Hadoop {@code SequenceFile} of
     * ({@code Text} category, {@code Text} description) pairs — the input
     * format expected by Mahout classifier training jobs.
     *
     * <p>Each input line must be {@code category<TAB>description}; lines that
     * do not contain a tab are silently skipped. The sequence file is written
     * to {@code <outputDir>/chunk-0}.
     *
     * @param args {@code args[0]} = path of the input data file,
     *             {@code args[1]} = output directory for the sequence file
     * @throws Exception on any I/O or Hadoop filesystem failure
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Arguments is [-i data file] [-o sequence file]");
            return;
        }
        String inputFileName = args[0];
        String outputDirName = args[1];
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        int count = 0;
        // try-with-resources guarantees both resources are closed even when an
        // append/read fails part-way through (the original leaked on error).
        // SequenceFile.createWriter replaces the deprecated Writer constructor;
        // the input is read explicitly as UTF-8 instead of relying on the
        // platform default charset (FileReader).
        try (Writer writer = SequenceFile.createWriter(fs, configuration,
                     new Path(outputDirName + "/chunk-0"), Text.class, Text.class);
             BufferedReader reader = new BufferedReader(new InputStreamReader(
                     new FileInputStream(inputFileName), StandardCharsets.UTF_8))) {
            Text key = new Text();
            Text value = new Text();
            String line;
            while ((line = reader.readLine()) != null) {
                // Split on the FIRST tab only — the description itself may
                // contain further tabs.
                String[] tokens = line.split("\t", 2);
                if (tokens.length != 2) {
                    continue; // malformed line: skip rather than abort the run
                }
                String category = tokens[0];    // classifier label
                String description = tokens[1]; // free-text document body
                key.set(category);
                value.set(description);
                writer.append(key, value);
                count++;
            }
        }
        // Original printed "wrote Nentries" — missing space fixed.
        System.out.println("wrote " + count + " entries");
    }

}

抱歉!评论已关闭.