package com.hadoop.mapreduce;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * An OutputFormat for LZO index jobs. Keys are the paths of the .lzo files
 * being indexed and values are the byte offsets of their compressed blocks;
 * all actual writing is delegated to {@link LzoIndexRecordWriter}, which
 * opens the index file next to each .lzo file rather than under a job
 * output directory.
 */
public class LzoIndexOutputFormat extends OutputFormat<Path, LongWritable> {

  @Override
  public RecordWriter<Path, LongWritable> getRecordWriter(TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException {
    return new LzoIndexRecordWriter(taskAttemptContext);
  }

  // Nothing to check: there is no job-level output directory to validate,
  // since each index file is written alongside its .lzo file.
  @Override
  public void checkOutputSpecs(JobContext job) throws FileAlreadyExistsException, IOException {
  }

  // A totally no-op output committer, because the LzoIndexRecordWriter opens a file on the side
  // and writes to that instead.
  @Override
  public OutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException {
    return new OutputCommitter() {
      @Override
      public void setupJob(JobContext jobContext) throws IOException {
      }

      @Override
      public void cleanupJob(JobContext jobContext) throws IOException {
      }

      @Override
      public void setupTask(TaskAttemptContext taskAttemptContext) throws IOException {
      }

      @Override
      public void commitTask(TaskAttemptContext taskAttemptContext) throws IOException {
      }

      @Override
      public void abortTask(TaskAttemptContext taskAttemptContext) throws IOException {
      }

      // Since no output flows through the committer, tasks never need a
      // commit phase.
      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskAttemptContext) throws IOException {
        return false;
      }
    };
  }
}
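
// ---------------------------------------------------------------------------
// Usage sketch: a minimal driver showing how this OutputFormat could be wired
// into a map-only indexing job. This class is an illustrative assumption, not
// part of this file's API; it presumes hadoop-lzo's LzoSplitInputFormat (which
// emits (Path, LongWritable) pairs for each compressed block) is available in
// this package. In the real project, DistributedLzoIndexer handles this wiring.
// ---------------------------------------------------------------------------
class LzoIndexJobSketch {
  public static void main(String[] args) throws Exception {
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    org.apache.hadoop.mapreduce.Job job =
        org.apache.hadoop.mapreduce.Job.getInstance(conf, "lzo-index-sketch");
    job.setJarByClass(LzoIndexJobSketch.class);

    // Map-only: the record reader emits (file path, block offset) pairs, the
    // default identity Mapper forwards them, and LzoIndexRecordWriter writes
    // each offset into the index file beside the .lzo file.
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(Path.class);
    job.setOutputValueClass(LongWritable.class);
    job.setInputFormatClass(LzoSplitInputFormat.class); // assumed from hadoop-lzo
    job.setOutputFormatClass(LzoIndexOutputFormat.class);

    // args[0]: directory or file containing the .lzo data to index.
    org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(job, new Path(args[0]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}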