How to run spark-shell against a specified script file
The wrapper script below passes a Scala script file to spark-shell through the -i option; if no readable file is given, it simply starts an interactive shell on YARN.

#!/bin/sh
# Wrapper for launching spark-shell on YARN (client mode).
# If the first argument is a non-empty, readable file, execute it via -i;
# otherwise start a plain interactive shell.
cur=$(dirname "$0")

spark_home=/opt/spark
hadoop_home=/opt/hadoop

file=$1

if [ -s "$file" ] ; then
    "$spark_home/bin/spark-shell" --num-executors 18 --executor-cores 2 --master yarn --deploy-mode client -i "$file"
else
    "$spark_home/bin/spark-shell" --num-executors 18 --executor-cores 2 --master yarn --deploy-mode client
fi
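
For reference, a minimal sketch of the kind of Scala script that could be passed to the wrapper. The file name sample.scala and the computation itself are made up for illustration; spark-shell already provides the spark (SparkSession) and sc (SparkContext) bindings, so the script can use them directly.

// sample.scala -- hypothetical script executed with spark-shell -i
// Build a trivial DataFrame and print an aggregate, just to show the flow.
val df = spark.range(0, 100)                       // single "id" column, 0..99
val total = df.selectExpr("sum(id)").first().getLong(0)
println(s"sum of ids = $total")
// -i loads the script and then stays at the interactive prompt,
// so an explicit exit is usually added when the job should terminate here.
System.exit(0)

Assuming the wrapper above is saved as, say, run-spark-shell.sh (name is arbitrary), it would be invoked as ./run-spark-shell.sh sample.scala; running it with no argument drops into the normal interactive spark-shell instead.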