From bf8890cffa2085c1ff7040b0630729aeb65938cb Mon Sep 17 00:00:00 2001
From: Roger Chen
Date: Wed, 1 Feb 2017 20:25:21 +0800
Subject: [PATCH] Correct indentation issues

---
 .../stanford/cs246/wordcount/WordCount.java | 94 +++++++++----------
 1 file changed, 47 insertions(+), 47 deletions(-)

diff --git a/src/main/java/edu/stanford/cs246/wordcount/WordCount.java b/src/main/java/edu/stanford/cs246/wordcount/WordCount.java
index 2ab2f22..ee04505 100644
--- a/src/main/java/edu/stanford/cs246/wordcount/WordCount.java
+++ b/src/main/java/edu/stanford/cs246/wordcount/WordCount.java
@@ -19,58 +19,58 @@ import org.apache.hadoop.util.ToolRunner;
 
 public class WordCount extends Configured implements Tool {
-   public static void main(String[] args) throws Exception {
-      System.out.println(Arrays.toString(args));
-      int res = ToolRunner.run(new Configuration(), new WordCount(), args);
-      
-      System.exit(res);
-   }
+    public static void main(String[] args) throws Exception {
+        System.out.println(Arrays.toString(args));
+        int res = ToolRunner.run(new Configuration(), new WordCount(), args);
 
-   @Override
-   public int run(String[] args) throws Exception {
-      System.out.println(Arrays.toString(args));
-      Job job = new Job(getConf(), "WordCount");
-      job.setJarByClass(WordCount.class);
-      job.setOutputKeyClass(Text.class);
-      job.setOutputValueClass(IntWritable.class);
+        System.exit(res);
+    }
 
-      job.setMapperClass(Map.class);
-      job.setReducerClass(Reduce.class);
+    @Override
+    public int run(String[] args) throws Exception {
+        System.out.println(Arrays.toString(args));
+        Job job = new Job(getConf(), "WordCount");
+        job.setJarByClass(WordCount.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(IntWritable.class);
 
-      job.setInputFormatClass(TextInputFormat.class);
-      job.setOutputFormatClass(TextOutputFormat.class);
+        job.setMapperClass(Map.class);
+        job.setReducerClass(Reduce.class);
 
-      FileInputFormat.addInputPath(job, new Path(args[0]));
-      FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setInputFormatClass(TextInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
 
-      job.waitForCompletion(true);
-      
-      return 0;
-   }
-   
-   public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
-      private final static IntWritable ONE = new IntWritable(1);
-      private Text word = new Text();
+        FileInputFormat.addInputPath(job, new Path(args[0]));
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
 
-      @Override
-      public void map(LongWritable key, Text value, Context context)
-            throws IOException, InterruptedException {
-         for (String token: value.toString().split("\\s+")) {
-            word.set(token);
-            context.write(word, ONE);
-         }
-      }
-   }
+        job.waitForCompletion(true);
 
-   public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
-      @Override
-      public void reduce(Text key, Iterable<IntWritable> values, Context context)
-            throws IOException, InterruptedException {
-         int sum = 0;
-         for (IntWritable val : values) {
-            sum += val.get();
-         }
-         context.write(key, new IntWritable(sum));
-      }
-   }
+        return 0;
+    }
+
+    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
+        private final static IntWritable ONE = new IntWritable(1);
+        private Text word = new Text();
+
+        @Override
+        public void map(LongWritable key, Text value, Context context)
+                throws IOException, InterruptedException {
+            for (String token: value.toString().split("\\s+")) {
+                word.set(token);
+                context.write(word, ONE);
+            }
+        }
+    }
+
+    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
+        @Override
+        public void reduce(Text key, Iterable<IntWritable> values, Context context)
+                throws IOException, InterruptedException {
+            int sum = 0;
+            for (IntWritable val : values) {
+                sum += val.get();
+            }
+            context.write(key, new IntWritable(sum));
+        }
+    }
 
 }
\ No newline at end of file