#!/bin/sh
# Run a map-only Hadoop Streaming job: stage the input data and two zipped
# Python modules into HDFS, stream 'input' through mapper.py (no reducer),
# then fetch the job output locally and remove the HDFS copies.
#
# Requires: input/, mapper.py, my_module.zip, other_module.zip in the CWD.

# Abort on the first failed command or use of an unset variable, so a failed
# upload or job submission does not fall through to the cleanup/delete steps.
set -eu

HADOOP_HOME=/usr/lib/hadoop
HADOOP_VERSION=0.20.2-cdh3u1
JAR=contrib/streaming/hadoop-streaming-$HADOOP_VERSION.jar
HSTREAMING="$HADOOP_HOME/bin/hadoop jar $HADOOP_HOME/$JAR"

IN=input
OUT=output

# Stage the job input and the module archives into HDFS.
hadoop dfs -put "$IN" "$IN"
hadoop dfs -put my_module.zip my_module.zip
hadoop dfs -put other_module.zip other_module.zip

# NOTE: $HSTREAMING is intentionally unquoted — it holds the hadoop binary
# plus its 'jar <path>' arguments and must word-split (POSIX sh has no arrays).
$HSTREAMING \
  -input "$IN" \
  -output "$OUT" \
  -mapper "python mapper.py" \
  -reducer "NONE" \
  -file mapper.py \
  -cacheArchive 'hdfs:///user/simianer/my_module.zip#my_module' \
  -cacheArchive 'hdfs:///user/simianer/other_module.zip#other_module' \
  -jobconf mapred.reduce.tasks=30  # 30 here instead of 3

# Fetch results locally, then clean up the HDFS input and output.
hadoop dfs -get "$OUT" .
hadoop dfs -rm "$IN"
hadoop dfs -rmr "$OUT"