Slide 1

Slide 1 text

Map Reduce Ruby &

Slide 2

Slide 2 text

@vanstee github.com/vanstee Patrick Van Stee highgroove.com

Slide 3

Slide 3 text

• Map Reduce in 5 Seconds  • Hadoop as a Black Box  • Wukong and Examples  • Demo Time

Slide 4

Slide 4 text

TBs of data Cheap Servers Problem Solution Simplify Queries Distribute Work

Slide 5

Slide 5 text

[1.2, 3.4, 5.6].map(&:round).reduce(:+) # => 10  — Input → Map → Reduce → Output

Slide 6

Slide 6 text

Map Reduce Input Output

Slide 7

Slide 7 text

Hadoop Java Fault Tolerant Map Distributed File System Reduce Open Source

Slide 8

Slide 8 text

Framework

Slide 9

Slide 9 text

Framework Hadoop Data Script Result Map Reduce

Slide 10

Slide 10 text

# Round each number, then total them: 1 + 3 + 6
[1.2, 3.4, 5.6].map { |n| n.round }.inject(:+) # => 10

Slide 11

Slide 11 text

import java.io.IOException; import java.util.StringTokenizer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; public class Sum extends Configured implements Tool { public static class Map extends Mapper { private IntWritable number = new IntWritable(); @Override protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); StringTokenizer tokenizer = new StringTokenizer(line); while (tokenizer.hasMoreTokens()) { number.set(tokenizer.nextToken().toDouble().round()); context.write(number); } } } public static class Reduce extends Reducer { @Override protected void reduce(Iterable values, Context context) throws IOException, InterruptedException { int sum = 0; for (IntWritable value : values) { sum += value.get(); } context.write(new IntWritable(sum)); } } @Override public int run(String[] args) throws Exception { Configuration conf = getConf(); Job job = Job.getInstance(conf, "sum"); job.setJarByClass(getClass()); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setMapperClass(Map.class); job.setCombinerClass(Reduce.class); job.setReducerClass(Reduce.class); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(TextOutputFormat.class); FileInputFormat.setInputPaths(job, new Path(args[0])); 
FileOutputFormat.setOutputPath(job, new Path(args[1])); boolean success = job.waitForCompletion(true); return success ? 0 : 1; } public static void main(String[] args) throws Exception { int r = ToolRunner.run(new Sum(), args); System.exit(r); } }

Slide 12

Slide 12 text

import java.io.IOException; import java.util.StringTokenizer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; public class Sum extends Configured implements Tool { public static class Map extends Mapper { private IntWritable number = new IntWritable(); @Override protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); StringTokenizer tokenizer = new StringTokenizer(line); while (tokenizer.hasMoreTokens()) { number.set(tokenizer.nextToken().toDouble().round()); context.write(number); } } } public static class Reduce extends Reducer { @Override protected void reduce(Iterable values, Context context) throws IOException, InterruptedException { int sum = 0; for (IntWritable value : values) { sum += value.get(); } context.write(new IntWritable(sum)); } } @Override public int run(String[] args) throws Exception { Configuration conf = getConf(); Job job = Job.getInstance(conf, "sum"); job.setJarByClass(getClass()); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setMapperClass(Map.class); job.setCombinerClass(Reduce.class); job.setReducerClass(Reduce.class); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(TextOutputFormat.class); FileInputFormat.setInputPaths(job, new Path(args[0])); 
FileOutputFormat.setOutputPath(job, new Path(args[1])); boolean success = job.waitForCompletion(true); return success ? 0 : 1; } public static void main(String[] args) throws Exception { int r = ToolRunner.run(new Sum(), args); System.exit(r); } } OMG WTF BBQ

Slide 13

Slide 13 text

Wukong: Ruby + the Java Streaming API

Slide 14

Slide 14 text

require 'wukong'

# Streaming MapReduce job: rounds each input number and sums them —
# the Wukong equivalent of [1.2, 3.4, 5.6].map(&:round).reduce(:+).

# Emits ['sum', round(n)] for every input line, so every value
# arrives at a single reducer under the shared 'sum' key.
class Mapper < Wukong::Streamer::LineStreamer
  def process(line)
    yield ['sum', line.to_f.round]
  end
end

# Gathers all values for a key and emits their total.
class Reducer < Wukong::Streamer::ListReducer
  # Fixed: Wukong invokes finalize with no arguments — the original
  # `def finalize(line)` would raise ArgumentError when called by the
  # framework. NOTE(review): confirm against the Wukong version in use.
  def finalize
    # inject instead of Array#sum: sum is Ruby >= 2.4 only.
    yield [key, values.map(&:to_i).inject(0, :+)]
  end
end

Wukong::Script.new(Mapper, Reducer).run

Slide 15

Slide 15 text

Usage: bin/round_and_sum --run=<local or hadoop> <input> <output>

Test locally with numbers.txt:
  bin/round_and_sum --run=local numbers.txt output

Run on a 100 node cluster with 100 TB of input:
  bin/round_and_sum --run=hadoop \
    hdfs://datanode/numbers-*.txt \
    hdfs://datanode/output \
    --jobtracker=jobtracker

Slide 16

Slide 16 text

DEMO

Slide 17

Slide 17 text

Use the same Gems & Rubies on All Workers

Slide 18

Slide 18 text

Ruby is slow...

Slide 19

Slide 19 text

?

Slide 20

Slide 20 text

Hack Night