hadoop_tests
#!/bin/bash
##################################################################################################################
# TestDFSIO writes its result file to the local file system, so the hdfs user
# must have write permission on the directory this script is run from.
##################################################################################################################
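# One way to satisfy that (a suggestion, not part of the original script) is to
# launch it from a directory that is world-writable anyway:
#   cd /tmp && bash /path/to/hadoop_tests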
export HADOOP_LIBS=/usr/hdp/current/hadoop-mapreduce-client
export JAR_EXAMPLES=$HADOOP_LIBS/hadoop-mapreduce-examples.jar
export JAR_JOBCLIENT=$HADOOP_LIBS/hadoop-mapreduce-client-jobclient.jar
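# Optional sanity check (an addition, not in the original gist): fail fast if
# the HDP mapreduce jars are not at the expected paths.
for jar in "$JAR_EXAMPLES" "$JAR_JOBCLIENT"; do
  [ -f "$jar" ] || { echo "Missing jar: $jar" >&2; exit 1; }
done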
sorting(){
  echo "############################################"
  echo "Running sorting tests.."
  echo "############################################"
  CMD="hadoop jar $JAR_EXAMPLES randomwriter /user/ambari-qa/sortInputDir"
  echo "TEST 1: $CMD"
  su hdfs -c "$CMD" 1> sorting-writer-time.log 2> sorting-writer.log
  CMD="hadoop jar $JAR_EXAMPLES sort /user/ambari-qa/sortInputDir /user/ambari-qa/sortOutputDir"
  echo "TEST 2: $CMD"
  su hdfs -c "$CMD" 1> sorting-sort-time.log 2> sorting-sort.log
  CMD="hadoop jar $JAR_JOBCLIENT testmapredsort -sortInput /user/ambari-qa/sortInputDir -sortOutput /user/ambari-qa/sortOutputDir"
  echo "TEST 3: $CMD"
  su hdfs -c "$CMD" 1> sorting-test-time.log 2> sorting-test.log
}
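# Note: both randomwriter and sort fail if their output directory already
# exists; a re-run helper (hypothetical, not part of the original gist) could
# clean up first:
sorting_cleanup(){
  su hdfs -c "hadoop fs -rm -r -skipTrash /user/ambari-qa/sortInputDir /user/ambari-qa/sortOutputDir"
}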
shuffle(){
  echo "############################################"
  echo "Running shuffle tests.."
  echo "############################################"
  CMD="hadoop jar $JAR_JOBCLIENT loadgen -m 200 -r 150 -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text"
  echo "TEST 1: $CMD"
  su hdfs -c "$CMD" 1> shuffle-time.log 2> shuffle.log
}
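# loadgen with no input directory generates random Text key/value data in its
# 200 maps and pushes it through 150 reduces, stressing the shuffle phase
# between the two.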
smalljobs(){
  echo "############################################"
  echo "Running smalljobs tests.."
  echo "############################################"
  CMD="hadoop jar $JAR_JOBCLIENT mrbench -baseDir /user/ambari-qa/smallJobsBenchmark -numRuns 30 -maps 10 -reduces 5 -inputLines 10 -inputType ascending"
  echo "TEST 1: $CMD"
  su hdfs -c "$CMD" 1> smalljobs-time.log 2> smalljobs.log
}
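# mrbench runs the same tiny job 30 times (10 maps, 5 reduces, 10 input lines)
# and reports the average runtime, which approximates the framework's per-job
# overhead and small-job latency.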
dfsio(){
  echo "############################################"
  echo "Running DFSIO tests.."
  echo "############################################"
  CMD="hadoop jar $JAR_JOBCLIENT TestDFSIO -write -nrFiles 1000 -fileSize 1024"
  echo "TEST 1: $CMD"
  su hdfs -c "$CMD" 2> dfsio-write.log
  CMD="hadoop jar $JAR_JOBCLIENT TestDFSIO -read -nrFiles 1000 -fileSize 1024"
  echo "TEST 2: $CMD"
  su hdfs -c "$CMD" 2> dfsio-read.log
}
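# TestDFSIO writes and then re-reads 1000 files of 1024 MB each (roughly 1 TB)
# under /benchmarks/TestDFSIO on HDFS; the data can be removed afterwards with
# the built-in clean switch:
#   su hdfs -c "hadoop jar $JAR_JOBCLIENT TestDFSIO -clean"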
terasort(){
  echo "############################################"
  echo "Running terasort tests.."
  echo "############################################"
  CMD="hadoop jar $JAR_EXAMPLES teragen -Dmapreduce.job.maps=250 10000000000 /user/ambari-qa/terasort-input"
  echo "TEST 1: $CMD"
  su hdfs -c "$CMD" 1> terasort-teragen-time.log 2> terasort-teragen.log
  CMD="hadoop jar $JAR_EXAMPLES terasort -Dmapreduce.job.reduces=200 /user/ambari-qa/terasort-input /user/ambari-qa/terasort-output"
  echo "TEST 2: $CMD"
  su hdfs -c "$CMD" 1> terasort-terasort-time.log 2> terasort-terasort.log
  CMD="hadoop jar $JAR_EXAMPLES teravalidate /user/ambari-qa/terasort-output /user/ambari-qa/terasort-report"
  echo "TEST 3: $CMD"
  su hdfs -c "$CMD" 1> terasort-teravalidate-time.log 2> terasort-teravalidate.log
}
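# teragen emits fixed 100-byte rows, so the 10000000000 rows requested above
# produce roughly 1 TB of terasort input.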
slive(){
  echo "############################################"
  echo "Running slive tests.."
  echo "############################################"
  echo "TEST 1: CREATE"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,67108864 -append 0,uniform -baseDir /user/ambari-qa/S1-Live -blockSize 67108864,67108864 -create 100,uniform -delete 0,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 0,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 0,uniform -reduces 10 -rename 0,uniform -replication 1,3 -resFile /tmp/1405594474 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-create.log
  echo "TEST 2: READ"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,67108864 -append 0,uniform -baseDir /user/ambari-qa/S2-Live -blockSize 67108864,67108864 -create 0,uniform -delete 0,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 0,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 100,uniform -reduces 10 -rename 0,uniform -replication 1,3 -resFile /tmp/1405594511 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-read.log
  echo "TEST 3: APPEND"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,1 -append 100,uniform -baseDir /user/ambari-qa/S3-Live -blockSize 67108864,67108864 -create 0,uniform -delete 0,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 0,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 0,uniform -reduces 10 -rename 0,uniform -replication 1,3 -resFile /tmp/1405594549 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-append.log
  echo "TEST 4: MKDIR"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,67108864 -append 0,uniform -baseDir /user/ambari-qa/S4-Live -blockSize 67108864,67108864 -create 0,uniform -delete 0,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 100,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 0,uniform -reduces 10 -rename 0,uniform -replication 1,3 -resFile /tmp/1405594588 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-mkdir.log
  echo "TEST 5: RENAME"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,67108864 -append 0,uniform -baseDir /user/ambari-qa/S5-Live -blockSize 67108864,67108864 -create 0,uniform -delete 0,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 0,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 0,uniform -reduces 10 -rename 100,uniform -replication 1,3 -resFile /tmp/1405594624 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-rename.log
  echo "TEST 6: DELETE"
  CMD="hadoop org.apache.hadoop.fs.slive.SliveTest -appendSize 1,67108864 -append 0,uniform -baseDir /user/ambari-qa/S6-Live -blockSize 67108864,67108864 -create 0,uniform -delete 100,uniform -dirSize 16 -duration 3 -files 10000 -ls 0,uniform -maps 20 -mkdir 0,uniform -ops 10 -packetSize 65536 -readSize 1,4294967295 -read 0,uniform -reduces 10 -rename 0,uniform -replication 1,3 -resFile /tmp/1405594741 -seed 12345678 -sleep 100,1000 -writeSize 1,67108864"
  su hdfs -c "$CMD" 2> slive-delete.log
}
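# Each SliveTest run above weights exactly one operation at 100% (create, read,
# append, mkdir, rename, delete) and all others at 0%, isolating a single
# file-system operation per suite; -resFile names the per-run result report.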
tests=("sorting" "shuffle" "dfsio" "terasort" "smalljobs" "slive")
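# Interactive menu: prompts for one suite, runs it once, then falls through to
# the log archiving step below.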
select test in "${tests[@]}"; do
  case $test in
    "sorting") sorting;;
    "shuffle") shuffle;;
    "smalljobs") smalljobs;;
    "dfsio") dfsio;;
    "terasort") terasort;;
    "slive") slive;;
  esac
  break
done
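# Bundle every per-test log from the current directory for easy sharing.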
tar -zcvf logs.tar.gz *.log