#!/bin/bash # Version 1.14a Incorporated a patch by Tim Rosmus to allow configuration of the "mail" command. Also includes fixes for GREP variable usage. Thanks Tim. 14a: added TMPDIR variable check so it may be configured from my_rules_du_jour or config file. # Version 1.13a Added an alternate method suggested bo Rubin Bennett. Instead of using a wrapper script, simply source the configuration files from one of a few possible places in /etc. You should use either a configuration file or use my_rules_du_jour; not both. Changed name of WStearn's RANDOM RULESET to avoid SA marking any RDJ messages :) ## This file updates SpamAssassin RuleSet files from the internet. ## ## It is important that you *only* automatically update ## RuleSet files from sources that you trust and that you ## always *TEST* your configurations. ## ## Rules du Jour will NOTIFY you when new versions of Rules du Jour are ## released, but does NOT AUTOMATICALLY UPGRADE itself. ## ## I strongly suggest also retrieving and configuring "My Rules du Jour" ## (my_rules_du_jour -- from the same place you retrieved this script) ## in order to store your own custom configuration in a permanent manner. ## Using my_rules_du_jour to launch rules_du_jour allows you to ## overwrite rules_du_jour whenever a new version is released without ## clobbering your own configuration. ## Note: When running this script interactively, debug mode is enabled to allow you to view the results. # Ignore steps 1 and 2 if you use my_rules_du_jour. Instead, follow the similar instructions in that script. # Usage instructions: # 1) Choose rulesets to update (TRUSTED_RULESETS below) # 2) Configure Local SpamAssassin settings (SA_DIR, MAIL_ADDRESS, SA_RESTART below) # 3) Run this script periodically (manually or crontab) # 3a) To run manually, first make it executable (chmod +x rules_du_jour) then execute (./rules_du_jour) # 3b) To run via cron, edit your cron (crontab -e) and add a line such as this: # 28 1 * * * /root/bin/rules_du_jour # The crontab line above runs /root/bin/rules_du_jour at 1:28AM every day. (choose a different time, please) # Make sure the user who's crontab you are editing has permission to write files to the SA config dir. # Important Note: Jennifer's rulesets are no longer available from emtinc.net and are no longer # included in the standard RDJ config. At some point, they may be re-added to RDJ, depending on # circumstances. # Note: Do not use POPCORN if you use BACKHAIR (POPCORN is deprecated and was merged into BACKHAIR). # The following lines are an alternate persistent configuration method to my_rules_du_jour. Use this # method (undocumented but very trivial), or use my_rules_du_jour. Do not use both. # TODO: Need a better naming convention... I don't like the mixed case. Undersxcores # Reads persistent configuration files from /etc, if one exists. for i in /etc/rulesdujour /etc/sysconfig/RulesDuJour /etc/sysconfig/rulesdujour /etc/rulesdujour/config ; do [ -f $i ] && source $i ; done; # Choose Rulesets from this list: # BIGEVIL TRIPWIRE ANTIDRUG EVILNUMBERS # IMPORTANT: Edit this line to choose which RuleSets to update [ "${TRUSTED_RULESETS}" ] || \ TRUSTED_RULESETS="MRWIGGLY BIGEVIL TRIPWIRE ANTIDRUG EVILNUMBERS SARE_RANDOM"; #### Local SpamAssassin/system Settings #### #### Modify these to match your system. #### [ "${SA_DIR}" ] || SA_DIR="/etc/spamassassin"; # Change this to your SA local config # directory, probably /etc/mail/spamassassin. # For amavisd chrooted, this may be: # /var/amavisd/etc/mail/spamassassin [ "${MAIL_ADDRESS}" ] || MAIL_ADDRESS="root"; # Where do Email notifications go [ "${SINGLE_EMAIL_ONLY}" ] || \ SINGLE_EMAIL_ONLY=""; # Set this to "true" to send only one notification # email per RDJ run with "interesting" # activity. Set to "" to send a separate # for each interesting activity. [ "${SA_LINT}" ] || SA_LINT="spamassassin --lint"; # Command used to lint the rules [ "${SA_RESTART}" ] || \ SA_RESTART="/etc/init.d/spamassassin restart"; # Command used to restart spamd # May be /etc/rc.d/init.d/spamassassin restart # For amavisd, may be /etc/init.d/amavisd restart # For minedefang, may be /etc/init.d/mimedefang restart [ "${WGET}" ] || WGET="wget -N"; # Location (and parameters) of the wget program [ "${PERL}" ] || PERL="perl"; # Location of the perl program [ "${GREP}" ] || GREP="grep"; # Location of the grep program # (solaris users may want to point this to gnu grep) [ "${MAILCMD}" ] || MAILCMD="mail"; # Location of the mail program # that takes and understand the -s flag # DEBUG="true"; # Uncomment this to force debug mode on (or use -D) #### End Local SpamAssassin Settings #### [ "${TMPDIR}" ] || TMPDIR="${SA_DIR}/RulesDuJour"; # Where we store old rulesets. If you delete # this directory, RuleSets may be detected as # out of date the next time you run rules_du_jour. RDJ_URL="http://sandgnat.com/rdj/rules_du_jour"; # URL to update this script #### CF Files information #### # These are bash Array Variables ("man bash" for more information) [ ${CF_URLS} ] || declare -a CF_URLS; # Array that contains URLs of the files. [ ${CF_FILES} ] || declare -a CF_FILES; # Local name of the CF file; eg: bigevil.cf [ ${CF_NAMES} ] || declare -a CF_NAMES; # Happy Name of CF file; eg: "Big Evil" [ ${PARSE_NEW_VER_SCRIPTS} ] || \ declare -a PARSE_NEW_VER_SCRIPTS; # Command to run on the file to retrieve new version info [ ${CF_MUNGE_SCRIPTS} ] || declare -a CF_MUNGE_SCRIPTS; # This (optionally) modifies the file; eg: lower scores ######################################### #### Begin Rules File Registry #### ######################################### # If you add more RuleSets to your own registry, please contribute the settings to the www.exit0.us wiki # http://www.exit0.us/index.php/RulesDuJourRuleSets #### Here are settings for Tripwire. #### TRIPWIRE=0; # Index of Tripwire data into the arrays is 0 CF_URLS[0]="http://www.rulesemporium.com/rules/99_FVGT_Tripwire.cf"; CF_FILES[0]="tripwire.cf"; CF_NAMES[0]="TripWire"; PARSE_NEW_VER_SCRIPTS[0]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[0]="nothing necessary for this ruleset."; #### Here are settings for Big Evil. #### BIGEVIL=1; # Index of Big Evil is 1 CF_URLS[1]="http://www.rulesemporium.com/rules/bigevil.cf"; CF_FILES[1]="bigevil.cf"; CF_NAMES[1]="Big Evil"; PARSE_NEW_VER_SCRIPTS[1]="head -1"; # CF_MUNGE_SCRIPTS[1]="nothing necessary for this ruleset."; ### INDEX NUMBERS 2-6 ARE RESERVED. DO NOT USE. # NOTE: As of 2004-02-26, backhair, weedsk, and chickenpox updates are no longer available. # BACKHAIR=3; # Index of Backhair is 3 # WEEDS1=4; # Index of Weeds Set 1 is 4 # WEEDS2=5; # Index of Weeds Set 2 is 5 # CHICKENPOX=6; # Index of ChickenPox is 6 #### Here are settings for AntiDrug. #### ANTIDRUG=7; # Index of antidrug is 7 CF_URLS[7]="http://mywebpages.comcast.net/mkettler/sa/antidrug.cf" CF_FILES[7]="antidrug.cf"; CF_NAMES[7]="Matt Kettler's AntiDrug"; PARSE_NEW_VER_SCRIPTS[7]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[7]="nothing for this ruleset."; #### Here are settings for evilnumber #### EVILNUMBERS=8; # Index of evilnumbers data into the arrays is 8 CF_URLS[8]="http://www.rulesemporium.com/rules/evilnumbers.cf"; CF_FILES[8]="evilnumbers.cf"; CF_NAMES[8]="EvilNumber"; PARSE_NEW_VER_SCRIPTS[8]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[8]="nothing for this ruleset."; #### Here are settings for sa-blacklist #### BLACKLIST=9; # Index of sa-blacklist data into the arrays is 9 CF_URLS[9]="http://www.stearns.org/sa-blacklist/sa-blacklist.current"; CF_FILES[9]="blacklist.cf"; CF_NAMES[9]="William Stearn's sa-blacklist"; PARSE_NEW_VER_SCRIPTS[9]="${GREP} -i '^#.*sa-blacklist: 200' | sort | tail -1"; # CF_MUNGE_SCRIPTS[9]="nothing for this ruleset."; #### Here are settings for sa-blacklist-uri #### BLACKLIST_URI=10; # Index of sa-blacklist-uri data into the arrays is 10 CF_URLS[10]="http://www.stearns.org/sa-blacklist/sa-blacklist.current.uri.cf"; CF_FILES[10]="blacklist-uri.cf"; CF_NAMES[10]="William Stearn's URI blacklist"; PARSE_NEW_VER_SCRIPTS[10]="${GREP} -i '^#.*sa-blacklist.uri: 200' | sort | tail -1"; # CF_MUNGE_SCRIPTS[10]="nothing for this ruleset."; #### Here are settings for sa-blacklist-random #### RANDOMVAL=11; # Index of sa-blacklist-random data into the arrays is 11 CF_URLS[11]="http://www.stearns.org/sa-blacklist/random.current.cf"; CF_FILES[11]="random.cf"; CF_NAMES[11]="William Stearn's RANDOM WORD Ruleset"; PARSE_NEW_VER_SCRIPTS[11]="${GREP} -i '^#release' | tail -1"; # CF_MUNGE_SCRIPTS[11]="nothing for this ruleset."; #### Here are settings for Tim Jackson's (et al) bogus virus warnings #### BOGUSVIRUS=12; # Index of bogus-virus-warnings data into the arrays is 12 CF_URLS[12]="http://www.timj.co.uk/linux/bogus-virus-warnings.cf"; CF_FILES[12]="bogus-virus-warnings.cf"; CF_NAMES[12]="Tim Jackson's (et al) bogus virus warnings"; PARSE_NEW_VER_SCRIPTS[12]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[12]="nothing for this ruleset."; #### Here are settings for Chris Santerre's MrWiggly.cf #### MRWIGGLY=13; # Index of MrWiggly.cf data into the arrays is 13 CF_URLS[13]="http://www.rulesemporium.com/rules/mr_wiggly.cf"; CF_FILES[13]="mr_wiggly.cf"; CF_NAMES[13]="MrWiggly Ruleset"; PARSE_NEW_VER_SCRIPTS[13]="${GREP} -i '^# SARE rule' | tail -1"; # CF_MUNGE_SCRIPTS[13]="nothing for this ruleset."; #### Here are settings for sare_adult #### SARE_ADULT=14; # Index of sare_adult.cf data into the arrays is 14 CF_URLS[14]="http://www.rulesemporium.com/rules/70_sare_adult.cf" CF_FILES[14]="70_sare_adult.cf"; CF_NAMES[14]="SARE Adult Content Ruleset"; PARSE_NEW_VER_SCRIPTS[14]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[14]="nothing for this ruleset."; #### Here are settings for sare_fraud_post25x #### SARE_FRAUD=15; # Index of sare_fraud_post25x data into the arrays is 15 CF_URLS[15]="http://www.rulesemporium.com/rules/99_sare_fraud_post25x.cf" CF_FILES[15]="99_sare_fraud_post25x.cf"; CF_NAMES[15]="SARE Fraud Detection Ruleset (for SA ver. 2.5x and greater)"; PARSE_NEW_VER_SCRIPTS[15]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[15]="nothing for this ruleset."; #### Here are settings for sare_fraud_pre25x #### SARE_FRAUD_PRE25X=16; # Index of sare_fraud_pre25x data into the arrays is 16 CF_URLS[16]="http://www.rulesemporium.com/rules/99_sare_fraud_pre25x.cf" CF_FILES[16]="99_sare_fraud_pre25x.cf"; CF_NAMES[16]="SARE Fraud Detection Ruleset (for SA prior to ver. 2.5x)"; PARSE_NEW_VER_SCRIPTS[16]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[16]="nothing for this ruleset."; #### Here are settings for sare_biz_market_learn_post25x #### SARE_BML=17; # Index of sare_biz_market_learn_post25x data into the arrays is 17 CF_URLS[17]="http://www.rulesemporium.com/rules/72_sare_bml_post25x.cf" CF_FILES[17]="72_sare_bml_post25x.cf"; CF_NAMES[17]="SARE BIZ/Marketing/Learning Ruleset (for SA ver. 2.5x and greater)"; PARSE_NEW_VER_SCRIPTS[17]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[17]="nothing for this ruleset."; #### Here are settings for sare_biz_market_learn_pre25x #### SARE_BML_PRE25X=18; # Index of sare_biz_market_learn_pre25x data into the arrays is 18 CF_URLS[18]="http://www.rulesemporium.com/rules/71_sare_bml_pre25x.cf" CF_FILES[18]="71_sare_bml_pre25x.cf"; CF_NAMES[18]="SARE BIZ/Marketing/Learning Ruleset (for SA prior to ver. 2.5x)"; PARSE_NEW_VER_SCRIPTS[18]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[18]="nothing for this ruleset."; #### Here are settings for ratware #### SARE_RATWARE=19; # Index of ratware data into the arrays is 19 CF_URLS[19]="http://www.rulesemporium.com/rules/70_sare_ratware.cf" CF_FILES[19]="70_sare_ratware.cf"; OLD_CF_FILES[19]="ratware.cf"; CF_NAMES[19]="SARE Ratware Detection Ruleset"; PARSE_NEW_VER_SCRIPTS[19]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[19]="nothing for this ruleset."; #### Here are settings for sare_spoof #### SARE_SPOOF=20; # Index of sare_spoof data into the arrays is 20 CF_URLS[20]="http://www.rulesemporium.com/rules/70_sare_spoof.cf" CF_FILES[20]="70_sare_spoof.cf"; CF_NAMES[20]="SARE Spoof Ruleset for SpamAssassin"; PARSE_NEW_VER_SCRIPTS[20]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[20]="nothing for this ruleset."; #### Here are settings for sare_bayes_poison_nxm #### SARE_BAYES_POISON_NXM=21; # Index of sare_bayes_poison_nxm data into the arrays is 21 CF_URLS[21]="http://www.rulesemporium.com/rules/70_sare_bayes_poison_nxm.cf" CF_FILES[21]="70_sare_bayes_poison_nxm.cf"; CF_NAMES[21]="SARE 70_sare_bayes_poison_nxm.cf Ruleset for SpamAssassin"; PARSE_NEW_VER_SCRIPTS[21]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[21]="nothing for this ruleset."; #### Here are settings for sare_oem #### SARE_OEM=22; # Index of sare_oem data into the arrays is 22 CF_URLS[22]="http://www.rulesemporium.com/rules/70_sare_oem.cf" CF_FILES[22]="70_sare_oem.cf"; CF_NAMES[22]="SARE OEM Ruleset for SpamAssassin"; PARSE_NEW_VER_SCRIPTS[22]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[22]="nothing for this ruleset."; #### Here are settings for sare_random #### SARE_RANDOM=23; # Index of sare_random data into the arrays is 23 CF_URLS[23]="http://www.rulesemporium.com/rules/70_sare_random.cf" CF_FILES[23]="70_sare_random.cf"; CF_NAMES[23]="SARE Random Ruleset for SpamAssassin 2.5x and higher"; PARSE_NEW_VER_SCRIPTS[23]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[23]="nothing for this ruleset."; #### Here are settings for sare_header_abuse #### SARE_HEADER_ABUSE=24; # Index of sare_header_abuse data into the arrays is 24 CF_URLS[24]="http://www.rulesemporium.com/rules/70_sare_header_abuse.cf" CF_FILES[24]="70_sare_header_abuse.cf"; OLD_CF_FILES[24]="header_abuse.cf"; CF_NAMES[24]="Ruleset for header abuse"; PARSE_NEW_VER_SCRIPTS[24]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[24]="nothing for this ruleset."; #### Here are settings for coding_html #### SARE_CODING_HTML=25; # Don't use. Use SARE_CODING instead. Kept for backwards compatibility. SARE_CODING=25; # Index of coding_html data into the arrays is 25 CF_URLS[25]="http://www.rulesemporium.com/rules/70_sare_html.cf" CF_FILES[25]="70_sare_html.cf"; OLD_CF_FILES[25]="coding_html.cf"; CF_NAMES[25]="Ruleset for html coding abuse"; PARSE_NEW_VER_SCRIPTS[25]="${PERL} -ne 'print if /^\s*#.*(vers?|version|rev|revision)[:\.\s]*[0-9]/i ;' | sort | tail -1"; # CF_MUNGE_SCRIPTS[25]="nothing for this ruleset."; ######################################### #### End Rules File Registry #### ######################################### # Do not update beyond this line unless you know what you are doing. ######################################### #### Begin rules update code #### ######################################### # if invoked with -D, enable DEBUG here. [ "$1" = "-D" ] && DEBUG="true"; # if running interactively, enable DEBUG here. [ -t 0 ] && DEBUG="true"; # If we're not running interactively, add a random delay here. This should # help reduce spikes on the servers hosting the rulesets (Thanks, Bob) MAXDELAY=3600; DELAY=0; [ ! -t 0 ] && [ ${MAXDELAY} -gt 0 ] && let DELAY="${RANDOM} % ${MAXDELAY}"; [ "${DEBUG}" ] && [ ${DELAY} -gt 0 ] && echo "Probably running from cron... sleeping for a random interval (${DELAY} seconds)"; [ ${DELAY} -gt 0 ] && sleep ${DELAY}; # Save old working dir OLDDIR=`pwd`; # This variable is used to indicate if we should restart spamd. Currently empty (false). RESTART_REQUIRED=""; # This variable is used to indicate if we should send an email notification when all is said and done. # It is toggled on whenever an "interesting" event happens (404, rule updated, etc) QUEUE_SINGLE_EMAIL=""; # The beginnings of an email and/or debug summary text MESSAGES="RulesDuJour Run Summary on `hostname`:"; [ ! -e ${TMPDIR} ] && mkdir ${TMPDIR}; cd ${TMPDIR}; [ ! "${DONT_CHECK_FOR_RDJ_UPDATES}" ] && { if [ -f ${TMPDIR}/rules_du_jour ] ; then ${WGET} ${RDJ_URL} > ${TMPDIR}/wget.log 2>&1; ${GREP} 'saved' ${TMPDIR}/wget.log > /dev/null; DOWNLOADED=$?; [ ${DOWNLOADED} = 0 ] && { NEWVER=`${GREP} "^# Version" ${TMPDIR}/rules_du_jour`; MSG_RDJ_UPDATED="Rules Du Jour has an update available.\nNew version of Rules Du Jour is ${NEWVER}\nNew version of Rules Du Jour was downloaded to ${TMPDIR}/rules_du_jour\n"; [ "${SINGLE_EMAIL_ONLY}" ] && QUEUE_SINGLE_EMAIL="true" || echo -e "${MSG_RDJ_UPDATED}" | \ ${MAILCMD} -s "RulesDuJour/`hostname`: new Rules Du Jour version available." ${MAIL_ADDRESS}; MESSAGES="${MESSAGES}\n${MSG_RDJ_UPDATED}"; } else ${WGET} ${RDJ_URL} > ${TMPDIR}/wget.log 2>&1; fi } for RULESET_NAME in ${TRUSTED_RULESETS} ; do # Set up some array variables INDEX=${!RULESET_NAME}; if [ ! "${INDEX}" ] ; then MSG_INVALID_RULENAME="\nNo index found for ruleset named ${RULESET_NAME}. Check that this ruleset is still valid."; [ "${DEBUG}" ] && echo -e "${MSG_INVALID_RULENAME}"; MESSAGES="${MESSAGES}\n${MSG_INVALID_RULENAME}"; else CF_URL=${CF_URLS[${INDEX}]}; CF_FILE=${CF_FILES[${INDEX}]}; OLD_CF_FILES=${OLD_CF_FILES[${INDEX}]}; CF_NAME=${CF_NAMES[${INDEX}]}; PARSE_NEW_VER_SCRIPT=${PARSE_NEW_VER_SCRIPTS[${INDEX}]}; CF_MUNGE_SCRIPT=${CF_MUNGE_SCRIPTS[${INDEX}]}; # Get the filename the author chose. CF_BASENAME=`basename ${CF_URL}`; DATE=`date +"%Y%m%d-%H%M"` if [ "${DEBUG}" ] ; then # Dump the variables to stdout echo ""; echo "------ ${RULESET_NAME} ------"; echo "RULESET_NAME=${RULESET_NAME}"; echo "INDEX=${INDEX}"; echo "CF_URL=${CF_URL}"; echo "CF_FILE=${CF_FILE}"; echo "CF_NAME=${CF_NAME}"; echo "PARSE_NEW_VER_SCRIPT=${PARSE_NEW_VER_SCRIPT}"; echo "CF_MUNGE_SCRIPT=${CF_MUNGE_SCRIPT}"; fi [ "${DEBUG}" ] && [ -f ${TMPDIR}/${CF_BASENAME} ] && echo "Old ${CF_BASENAME} already existed in ${TMPDIR}..."; [ "${DEBUG}" ] && [ ! -f ${TMPDIR}/${CF_BASENAME} ] && \ [ ! -f ${SA_DIR}/${CF_FILE} ] && echo "This is the first time downloading ${CF_BASENAME}..."; [ "${DEBUG}" ] && [ ! -f ${TMPDIR}/${CF_BASENAME} ] && [ -f ${SA_DIR}/${CF_FILE} ] && \ echo "Copying from ${SA_DIR}/${CF_FILE} to ${TMPDIR}/${CF_BASENAME}..."; [ ! -f ${TMPDIR}/${CF_BASENAME} ] && [ -f ${SA_DIR}/${CF_FILE} ] && \ cp ${SA_DIR}/${CF_FILE} ${TMPDIR}/${CF_BASENAME} && \ touch -r ${SA_DIR}/${CF_FILE} ${TMPDIR}/${CF_BASENAME}; [ "${DEBUG}" ] && echo "Retrieving file from ${CF_URL}..."; # send wget output to a temp file for grepping ${WGET} ${CF_URL} > ${TMPDIR}/wget.log 2>&1; ${GREP} 'saved' ${TMPDIR}/wget.log > /dev/null; DOWNLOADED=$?; ${GREP} 'ERROR 4[0-9][0-9]' ${TMPDIR}/wget.log > /dev/null; WAS404=$?; ${GREP} -i 'failed: ' ${TMPDIR}/wget.log > /dev/null; FAILED=$?; [ ! ${DOWNLOADED} = 0 ] && DOWNLOADED=; # Unset DOWNLOADED if the file was already current [ ! ${WAS404} = 0 ] && WAS404=; # Unset WAS404 if the file didn't return 404. [ ! ${FAILED} = 0 ] && FAILED=; # Unset FAILED if wget succeded # Append these errors to a variable to be mailed to the admin (later in script) [ "${FAILED}" ] && RULES_THAT_404ED="${RULES_THAT_404ED}\n${CF_NAME} had an unknown error:\n`cat ${TMPDIR}/wget.log`"; [ "${WAS404}" ] && RULES_THAT_404ED="${RULES_THAT_404ED}\n${CF_NAME} not found (404) at ${CF_URL}"; [ "${DEBUG}" ] && [ ${WAS404} ] && echo "Got 404 from ${CF_NAME} (${CF_URL})..."; [ "${DEBUG}" ] && [ ! ${WAS404} ] && ([ "${DOWNLOADED}" ] && \ echo "New version downloaded..." || \ echo "${CF_BASENAME} was up to date (skipped downloading of ${CF_URL})..."); # If we downloaded a new version, or if we have the current version, # but it is not installed, copy or munge to CF_FILE.2 if ([ "${DOWNLOADED}" ] || \ ( [ ! -f ${SA_DIR}/${CF_FILE} ] && \ [ -f ${TMPDIR}/${CF_BASENAME} ]) ) ; then if [ "${CF_MUNGE_SCRIPT}" ] ; then [ "${DEBUG}" ] && echo "Munging output using command: ${CF_MUNGE_SCRIPT}"; sh -c "${CF_MUNGE_SCRIPT}" < ${TMPDIR}/${CF_BASENAME} > ${TMPDIR}/${CF_BASENAME}.2; else cp ${TMPDIR}/${CF_BASENAME} ${TMPDIR}/${CF_BASENAME}.2; fi # Set munged file to same timestamp as downloaded file... touch -r ${TMPDIR}/${CF_BASENAME} ${TMPDIR}/${CF_BASENAME}.2; fi # Update SA config dir if this is the first time we've seen the ruleset, or if the ruleset has changed. if ( [ -f ${TMPDIR}/${CF_BASENAME}.2 ] && \ ( [ ! -f ${SA_DIR}/${CF_FILE} ] || \ ! cmp -s ${TMPDIR}/${CF_BASENAME}.2 ${SA_DIR}/${CF_FILE} ) ); then # If we know the source rules file has had its name changed, delete the old filename and install the new one. for OLD_CF_FILE in ${OLD_CF_FILES} ; do if [ -f ${SA_DIR}/${OLD_CF_FILE} ] ; then MSG_FILENAME_CHANGED="\n*** ${CF_NAME} has changed names from ${OLD_CF_FILE} to ${CF_FILE}.\nBecause of the name change I am removing the old file and installing the new file."; [ "${DEBUG}" ] && echo -e "${MSG_FILENAME_CHANGED}"; MESSAGES="${MESSAGES}\n${MSG_FILENAME_CHANGED}"; [ "${SINGLE_EMAIL_ONLY}" ] && QUEUE_SINGLE_EMAIL="true" || \ echo -e ${MSG_FILENAME_CHANGED} | ${MAILCMD} -s "RulesDuJour/`hostname`: ${CF_NAME} RuleSet's name has changed" ${MAIL_ADDRESS} UNDO_COMMAND="${UNDO_COMMAND} mv -f ${TMPDIR}/${OLD_CF_FILE}.2 ${SA_DIR}/${OLD_CF_FILE};"; mv -f ${SA_DIR}/${OLD_CF_FILE} ${TMPDIR}/${OLD_CF_FILE}.2; fi done [ "${DEBUG}" ] && [ ! -f ${SA_DIR}/${CF_FILE} ] && echo "Installing new ruleset from ${TMPDIR}/${CF_BASENAME}.2" ; [ "${DEBUG}" ] && [ -f ${SA_DIR}/${CF_FILE} ] && echo "Old version ${SA_DIR}/${CF_FILE} differs from new version ${TMPDIR}/${CF_BASENAME}.2" && echo "Backing up old version..."; [ -f ${SA_DIR}/${CF_FILE} ] && mv -f ${SA_DIR}/${CF_FILE} ${TMPDIR}/${CF_FILE}.${DATE}; # Save the command that can be used to undo this change, if rules won't --lint UNDO_COMMAND="${UNDO_COMMAND} mv -f ${SA_DIR}/${CF_FILE} ${TMPDIR}/${CF_BASENAME}.2;"; [ -f ${TMPDIR}/${CF_FILE}.${DATE} ] && \ UNDO_COMMAND="${UNDO_COMMAND} mv -f ${TMPDIR}/${CF_FILE}.${DATE} ${SA_DIR}/${CF_FILE};" || \ UNDO_COMMAND="${UNDO_COMMAND} rm -f ${SA_DIR}/${CF_FILE};"; [ "${DEBUG}" ] && [ -f ${TMPDIR}/${CF_BASENAME}.2 ] && echo "Installing new version..."; [ -f ${TMPDIR}/${CF_BASENAME}.2 ] && mv -f ${TMPDIR}/${CF_BASENAME}.2 ${SA_DIR}/${CF_FILE}; NEWVER=`sh -c "cat ${SA_DIR}/${CF_FILE} | ${PARSE_NEW_VER_SCRIPT}"`; MSG_CHANGED="\n${CF_NAME} has changed on `hostname`.\nVersion line: ${NEWVER}"; MESSAGES="${MESSAGES}\n${MSG_CHANGED}"; [ "${DEBUG}" ] && echo -e "${MSG_CHANGED}"; [ "${SINGLE_EMAIL_ONLY}" ] && QUEUE_SINGLE_EMAIL="true" || \ echo -e ${MSG_CHANGED} | ${MAILCMD} -s "RulesDuJour/`hostname`: ${CF_NAME} RuleSet has been updated" ${MAIL_ADDRESS} RESTART_REQUIRED="true"; fi fi done # Cleanup, lint, email admin if required, restart SA if required [ -f ${TMPDIR}/wget.log ] && rm -f ${TMPDIR}/wget.log; [ "${RULES_THAT_404ED}" ] && { MSG_404S="The following rules had 404 (not found) errors:${RULES_THAT_404ED}"; [ "${SINGLE_EMAIL_ONLY}" ] && QUEUE_SINGLE_EMAIL="true" || \ echo -e "${MSG_404S}" | ${MAILCMD} -s "RulesDuJour/`hostname`: 404 errors" ${MAIL_ADDRESS}; MESSAGES="${MESSAGES}\n\n${MSG_404S}"; } [ "${RESTART_REQUIRED}" ] && { [ "${DEBUG}" ] && echo "Attempting to --lint the rules."; ${SA_LINT} > /dev/null 2>&1; LINTFAILED=$?; [ "${LINTFAILED}" = "0" ] && LINTFAILED=; # Unset LINTFAILED if lint didn't fail. # Lint failed. Run the undo commands, send administrative notification if [ ${LINTFAILED} ] ; then LINTERRORS=`${SA_LINT} 2>&1`; WARNMSG="***WARNING***: ${SA_LINT} failed.\nRolling configuration files back, not restarting SpamAssassin.\nRollback command is: ${UNDO_COMMAND}\n\nLint output: ${LINTERRORS}\n"; MESSAGES="${MESSAGES}\n\n${WARNMSG}"; sh -c "${UNDO_COMMAND}" && RESTART_REQUIRED= ; [ "${SINGLE_EMAIL_ONLY}" ] && QUEUE_SINGLE_EMAIL="true" || \ echo -e "${WARNMSG}" | ${MAILCMD} -s "RulesDuJour/`hostname`: lint failed. Updates rolled back." ${MAIL_ADDRESS}; else [ "${DEBUG}" ] && echo "Restarting SpamAssassin using: sh -c \"${SA_RESTART}\""; sh -c "${SA_RESTART}" > /dev/null 2>&1; fi } [ "${DEBUG}" ] && [ ! "${RESTART_REQUIRED}" ] && echo "No files updated; No restart required."; [ "${DEBUG}" ] && echo -e "\n\n\n\n\nRules Du Jour Run Summary:${MESSAGES}"; # Send the single consolidated notification email here. [ "${SINGLE_EMAIL_ONLY}" ] && [ "${QUEUE_SINGLE_EMAIL}" ] && \ echo -e "${MESSAGES}" | ${MAILCMD} -s "RulesDuJour Run Summary on `hostname`" ${MAIL_ADDRESS}; cd ${OLDDIR};