有点挑战,但有趣的话题

时间:2015-10-30 05:27:23

标签: hadoop mapreduce

我们必须编写一个 InputFormat 和 RecordReader,用于读取以 '$' 字符(而不是换行符)分隔的文本字符串。
现在假设我们有一个像这样的数据集 $ <some data> $ <some data>

我们希望使用第一个 $ 作为分隔符,同时必须排除中间出现的 "$" 字符,使其不被当作分隔符。

您能否提出必要的功能或必要的步骤来实现这一目标?
提前谢谢

1 个答案:

答案 0(得分:1):

输入文件

$Aniruddha Sinha$23$Hadoop$mapreduce$Kishore$30$Hadoop$mapreduce

输出文件

Aniruddha Sinha,23,Hadoop,mapreduce
Kishore,30,Hadoop,mapreduce

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SkipBadRecords;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

  /**
   * Mapper that groups consecutive '$'-delimited input records into rows of
   * four comma-joined fields and emits each completed row with a count of 1.
   *
   * <p>Relies on the custom record delimiter ({@code
   * textinputformat.record.delimiter = "$"}) configured in {@link #main}, so
   * each {@link #map} call receives one '$'-separated token as its value.
   * The field buffer and counter are carried across map() calls, which works
   * because a single mapper instance processes its split's records in order.
   */
  public static class TokenizerMapper
       extends Mapper<Object, Text, Text, IntWritable>{

    // Number of '$'-separated fields that make up one logical output row.
    private static final int FIELDS_PER_RECORD = 4;

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();
    // Fields buffered so far for the current row (always < FIELDS_PER_RECORD
    // on entry to map, since it is reset to 0 after each emitted row).
    private int count = 0;
    // Accumulates the comma-joined fields of the current row.
    private StringBuilder sb = new StringBuilder();

    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      String field = value.toString();
      // Input that starts with '$' yields an empty first record; skipping it
      // keeps the four-field grouping aligned with the intended rows
      // (e.g. "Aniruddha Sinha,23,Hadoop,mapreduce").
      if (field.isEmpty()) {
        return;
      }
      sb.append(field).append(',');
      count++;
      if (count == FIELDS_PER_RECORD) {
        // Drop the trailing comma before emitting the completed row.
        word.set(sb.substring(0, sb.length() - 1));
        context.write(word, one);
        count = 0;
        sb.setLength(0);
      }
    }
  }


  /**
   * Configures and submits the job.
   *
   * <p>Optional arguments: {@code args[0]} = input path, {@code args[1]} =
   * output path. When omitted, the original hard-coded paths are used, so
   * existing invocations keep working.
   */
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Split input records on '$' instead of newline.
    conf.set("textinputformat.record.delimiter", "$");
    // Must be applied to conf BEFORE Job.getInstance(): the Job takes a copy
    // of the Configuration, so mutations made afterwards are silently ignored.
    SkipBadRecords.setMapperMaxSkipRecords(conf, 10);
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    Path input = new Path(args.length > 0 ? args[0] : "/home/kishore/input");
    Path output = new Path(args.length > 1 ? args[1] : "/home/kishore/output");
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}