import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.util.*;

// output key is going to be the candidate number
public class combinecounter extends Reducer<IntWritable, catval, IntWritable, LongWritable> {

    @Override
    protected void reduce(IntWritable inkey, Iterable<catval> vals, Context collector)
            throws IOException, InterruptedException {
        Iterator<catval> values = vals.iterator();
        long ccount = 0;
        // catval first = values.next();
        // ccount += first.count;
        // int filenum = first.filenum;
        // String suffix = first.suffix;
        // String prefix = first.prefix;
        String[] Fix = new String[10000]; // evens for prefix, odds for suffix; assumes linenum < 5000
        int maxfix = 0;
        while (values.hasNext()) {
            catval current = values.next();
            ccount += current.count;
            // Strings are immutable, so storing the reference is safe even though
            // Hadoop reuses the same catval object on every iterator step
            Fix[current.linenum * 2] = current.prefix;
            // at this point, check suffix+prefix for more cats
            // ccount += mapcounter.countcats(suffix+prefix);
            // suffix = current.suffix; // set new suffix
            Fix[current.linenum * 2 + 1] = current.suffix;
            if (current.linenum * 2 + 1 > maxfix) {
                maxfix = current.linenum * 2 + 1;
            }
        }
        // stitch each line's suffix (odd slot) to the next line's prefix (even slot)
        // and count matches that were split across the line boundary -- assumed
        // completion of the truncated loop, mirroring the commented-out call above
        for (int i = 1; i < maxfix; i += 2) {
            if (Fix[i] != null && Fix[i + 1] != null) {
                ccount += mapcounter.countcats(Fix[i] + Fix[i + 1]);
            }
        }
        // assumed: emit the combined total for this candidate
        collector.write(inkey, new LongWritable(ccount));
    }
}
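
// The catval value type and the countcats helper are referenced above but not
// defined in this file. The sketches below are assumptions added for
// completeness: the field names and the countcats(String) signature come from
// the usage in the reducer, while the Writable wire format and the countcats
// body are guesses, not the original classes.
class catval implements Writable {
    long count;         // matches found entirely inside this line
    int filenum;        // source file number (unused by the reducer)
    int linenum;        // line number, used to order prefix/suffix fragments
    String prefix = ""; // fragment at the start of the line
    String suffix = ""; // fragment at the end of the line

    public void write(java.io.DataOutput out) throws IOException {
        out.writeLong(count);
        out.writeInt(filenum);
        out.writeInt(linenum);
        out.writeUTF(prefix);
        out.writeUTF(suffix);
    }

    public void readFields(java.io.DataInput in) throws IOException {
        count = in.readLong();
        filenum = in.readInt();
        linenum = in.readInt();
        prefix = in.readUTF();
        suffix = in.readUTF();
    }
}

// Minimal sketch of the mapper-side helper: counts non-overlapping occurrences
// of the literal "cat" (the search term is an assumption) in the stitched
// suffix+prefix string.
class mapcounter {
    static long countcats(String s) {
        long n = 0;
        for (int i = s.indexOf("cat"); i >= 0; i = s.indexOf("cat", i + 3)) {
            n++;
        }
        return n;
    }
}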