#!/bin/bash

# Native build dependencies for numpy, lxml, and cffi.
sudo yum -y install blas-devel lapack-devel libxslt-devel libffi-devel libxml2-devel

# Give the hadoop user its alias file and load it, plus virtualenvwrapper, on login.
sudo chown hadoop:hadoop ~/.aliasrc
echo 'source ~/.aliasrc' >> /home/hadoop/.bashrc
echo 'source /usr/local/bin/virtualenvwrapper.sh' >> /home/hadoop/.bashrc
source ~/.bashrc
# Source virtualenvwrapper directly as well, in case .bashrc is skipped for
# non-interactive shells; mkvirtualenv/workon are shell functions it defines.
source /usr/local/bin/virtualenvwrapper.sh

# Create and activate the working virtualenv.
mkvirtualenv envtmp
workon envtmp

# Install Python packages offline from the local ./python_source/ mirror.
pushd /home/hadoop/source/
pip install --no-index --find-links="./python_source/" numpy==1.9.2
pip install --no-index --find-links="./python_source/" six==1.8.0
pip install --no-index --find-links="./python_source/" -r requirements.txt
pip install virtualenvwrapper
pip install PIL --allow-external PIL --allow-unverified PIL

# Unpack the application distribution.
mkdir -p ~/Programs/
tar zxvf newdist.tar.gz -C ~/Programs/
popd

# Replace every occurrence of the "sparkS" placeholder with "master" throughout
# the unpacked distribution.
pushd /home/hadoop/Programs/
grep -lr sparkS /home/hadoop/Programs/newdist | xargs sed -i "s/sparkS/master/g"
popd

# Build and install the bundled extensions into the virtualenv.
pushd /home/hadoop/Programs/newdist/
(cd JpegUtils/pyjpeg/ && python setup.py install)
(cd JpegUtils/pysteg/ && python setup.py install)
(cd ImageR/ && python setup.py install)
# Make Spark's Python bindings importable inside the virtualenv via a .pth file.
echo "/home/hadoop/Programs/lib/spark/python" > "$WORKON_HOME/envtmp/lib/python2.7/site-packages/pyspark.pth"
popd
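
# Optional sanity check (a minimal sketch, not part of the original setup):
# assuming the envtmp virtualenv and the pyspark.pth created above, confirm
# that the pinned packages resolve and that Spark's python dir is on sys.path.
workon envtmp
python - <<'EOF'
import imp
import numpy, six
print("numpy " + numpy.__version__)   # expect the pinned 1.9.2
print("six " + six.__version__)       # expect the pinned 1.8.0
# The .pth file should make the pyspark package discoverable (Python 2.7 era).
print("pyspark found at " + imp.find_module("pyspark")[1])
EOF
deactivate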