CentOS Scrapyd web crawler init script (w/ chkconfig support)
#!/bin/bash
#
# scrapyd        This shell script enables the scrapyd server on boot
#
# Author:        Aaron Silber <[email protected]>
#
# chkconfig:     - 50 01
#
# description:   Autostart scrapyd web scraper framework daemon
# processname:   scrapyd
#

# Source the init function library.
. /etc/rc.d/init.d/functions

RETVAL=0
PID='/var/run/scrapyd.pid'
VIRTUALENV='/usr/local/python27/bin/activate'
OPTIONS="--pidfile=$PID"

start() {
    # Activate the virtualenv so the scrapyd binary is on PATH.
    source "$VIRTUALENV"
    if [ -f "$PID" ]; then
        echo -n $"Scrapyd already running."
        RETVAL=0
    else
        echo -n $"Attempting to start scrapyd service... "
        scrapyd $OPTIONS &
        RETVAL=$?
    fi
    echo
}

stop() {
    source "$VIRTUALENV"
    if [ -f "$PID" ]; then
        echo -n $"Attempting to stop scrapyd service... "
        kill "$(cat "$PID")"
        RETVAL=$?
        # Wait for the pidfile to disappear so a restart does not
        # see a stale pidfile and skip the start step.
        for i in $(seq 1 10); do
            [ -f "$PID" ] || break
            sleep 1
        done
        rm -f "$PID"
    else
        echo -n $"Scrapyd not running"
        RETVAL=0
    fi
    echo
}

restart() {
    stop
    start
}

case "$1" in
    start)
        start
        ;;
    stop)
        stop
        ;;
    restart|force-reload)
        restart
        ;;
    reload)
        ;;
    status)
        if [ -f "$PID" ]; then
            echo $"Scrapyd is running."
            RETVAL=0
        else
            echo $"Scrapyd is not running."
            RETVAL=3
        fi
        ;;
    *)
        echo $"Usage: $0 {start|stop|status|restart|reload|force-reload}"
        exit 1
esac
exit $RETVAL
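A rough sketch of how to install and enable the script on a CentOS 6 box with chkconfig, assuming the file is saved locally as scrapyd and the virtualenv path above matches your environment:

# Copy the init script into place and register it with chkconfig (paths are assumptions).
sudo cp scrapyd /etc/init.d/scrapyd
sudo chmod 755 /etc/init.d/scrapyd
sudo chkconfig --add scrapyd
sudo chkconfig scrapyd on       # start on boot
sudo service scrapyd start      # start it now
sudo service scrapyd status

The "chkconfig: - 50 01" header is what chkconfig reads to set the start/stop priorities when the service is added.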
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Hey,
thanks for the script! Did you use it to run scrapyd on CentOS?
I am struggling with running scrapyd on CentOS. I could install and start the scrapyd server, but adding a Scrapy crawler to it does not work. I always get a "scrapy - no active project" error. Do you have any experience with running scrapyd on CentOS?
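For reference, the "no active project" message usually means scrapy or scrapyd-deploy is being run outside a Scrapy project directory, i.e. one that does not contain a scrapy.cfg. As a rough sketch (project name and URL below are placeholders, not from the script above), the scrapy.cfg at the project root needs a [deploy] section before the project can be uploaded to scrapyd:

# scrapy.cfg at the root of the Scrapy project
[settings]
default = myproject.settings

[deploy]
url = http://localhost:6800/
project = myproject

With that in place, running scrapyd-deploy (from the scrapyd-client package) from the project root, e.g. scrapyd-deploy default -p myproject, should build an egg and register the project with the running scrapyd instance, after which it appears under the /listprojects.json and /listspiders.json endpoints.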