Up clam ex.sh
<pre>
#!/bin/sh
# update_clamd_extra_sigs.sh v0.2 by Dan Larsson <dl|at|tyfon|dot|net>
# ==============================================================================
# LICENSE
# ==============================================================================
# "THE BEER-WARE LICENSE" (Revision 42):
# <dl|at|tyfon|dot|net> wrote this file. As long as you retain this
# notice you can do whatever you want with this stuff. If we meet
# some day, and you think this stuff is worth it, you can buy me a
# beer in return. Dan Larsson
#
# ==============================================================================
# VERSION INFO
# ==============================================================================
# v0.2 - 2008-08-23
#   * Don't use non-word chars in function names
#   * Changed the SecuriteInfo signature source url
#     (Thanks to Bill Landry for pointing out the above two)
#   * Cleaned up and added comments
#   * Misc cosmetic changes
#
# v0.1 - 2008-08-22
#   * Initial release, branched from v1.4 of Bill Landry's
#     ss-msrbl.sh script
#
# ==============================================================================
# README
# ==============================================================================
# In order to run this script you need to have curl, rsync and clamd installed
# on your machine as well as the basic set of unix-like tools (e.g. awk, sed,
# cat, cp, gunzip, etc.).
#
# If this script fails to run on your system or you have made improvements that
# you wish to share, you're welcome to drop me a line.
#
# ==============================================================================
# USAGE
# ==============================================================================
# Using this script is easy: just configure the parameters, save the changes
# and execute it from the prompt (or via cron). Should you want to add
# additional signature databases, simply add their download urls to the
# appropriate section(s) below and you're done! Naturally, it's just as easy
# to remove and edit :-) No script coding necessary!
#
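# For example, a crontab entry along these lines (the install path is only
# an illustration; adjust it to wherever you saved this script) runs the
# script once an hour:
#
#   42 * * * *   /usr/local/sbin/update_clamd_extra_sigs.sh >/dev/null 2>&1
#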
# ==============================================================================
# SIGNATURE SOURCES
# ==============================================================================
# SaneSecurity (phish.ndb, scam.ndb)
# http://www.sanesecurity.com/clamav/usage.htm
#
# SecuriteInfo (vx.hdb)
# http://www.securiteinfo.com/services/clamav_unofficial_malwares_signatures.shtml
#
# MalwareBlockList (mbl.db)
# http://www.malware.com.br/clamav.txt
#
# MSRBL (MSRBL-Images.hdb, MSRBL-SPAM.ndb)
# http://www.msrbl.com/site/msrblimagesdownload
# http://www.msrbl.com/site/msrblspamdownload
#
# ==============================================================================
# SOURCE ARGUMENTS ( see below for more info on source arguments )
# ==============================================================================
# Name                Value     Comment
# ------------------- --------- ------------------------------------------------
# fetch_interval      integer   Forced delay in seconds between download
#                               attempts
# target_file         string    Use this name for the signature database
#                               (instead of extracting it from the source file)
################################################################################
# SCRIPT USER EDIT SECTION - SET PROGRAM PATHS AND OTHER VARIABLES             #
################################################################################
# *** COMMENT OUT THE BELOW LINE WHEN YOU HAVE CONFIGURED THIS SCRIPT ***
#script_not_configured=1
# Set and export the command searchpaths
PATH=/root/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
# Set path to the ClamAV database dir location as well as
# the clamd user and group account
clamd_dbdir=/var/lib/clamav
clamd_user=clamav
clamd_group=clamav
# Set path to the clamd pidfile
# (comment out to disable forced database reload)
clamd_pidfile=/var/run/clamav/clamd.pid
# Set backup and temp working directory paths
# (edit to meet your own needs)
backup_dir=/var/backups/clamav
tmp_dir=/var/tmp/clamdb
rsync_dir=/var/tmp/rsync
# HTTP source urls - *MUST* be HTTP urls, one url per line
# (add/remove/modify urls as per preference and/or need;
# to disable, comment out the below six lines)
http_source_urls="
http://www.sanesecurity.com/clamav/phishsigs/phish.ndb.gz
http://www.sanesecurity.com/clamav/scamsigs/scam.ndb.gz
http://clamav.securiteinfo.com/vx.hdb.gz
http://www.malware.com.br/cgi/submit?action=list_clamav,fetch_interval=86400,target_file=mbl.db
"
# RSYNC source urls - *MUST* be RSYNC urls, one url per line
# (add/remove/modify urls as per preference and/or need;
# to disable, comment out the below four lines)
rsync_source_urls="
rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-Images.hdb
rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-SPAM.ndb
"
# Arguments can be appended to the source_url; if you do so,
# separate them from the source url and each other with commas
# ( e.g. scheme://hostname/path,arg1=123,arg2=abc )
# Please note that it's very important you null their value when
# they've served their purpose; not doing so will lead to weird
# results.
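# For example, the MalwareBlockList url in the http section above appends
# ',fetch_interval=86400,target_file=mbl.db', which limits that download to
# at most once per day and saves the database as mbl.db instead of deriving
# the name from the source url.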
# Enable random sleeping before processing
# - recommended when running via cron!
# (to disable this, comment out the below line)
#sleep_enabled=1
# Show each slept second visually
# - disabled when running via cron.
# (to disable this, comment out the below line)
sleep_visual=1
# Compress all downloaded *source files* with gzip
# (to disable this, comment out the below line)
keep_sources_gzipped=1
################################################################################
# END OF SCRIPT USER EDIT SECTION - YOU SHOULD NOT NEED TO EDIT ANYTHING BELOW #
################################################################################
# Initializes the source arguments
sarg_init() {
    sarg_init_success=
    if [ -n "$source_args" ] ; then
        for arg in `echo $source_args | sed 's/,/ /g'` ; do
            eval $arg
            sarg_init_success=1
        done
        source_url=`echo $source_url | awk -F, '{print $1}'`
    fi
    source_args=
}
# Runs all source argument subroutines. If you add your own
# checks/processing, add them to this function.
sarg_process() {
    # Check for a fetch interval restriction
    if [ -n "$fetch_interval" ] && ! sarg_check_fetch_interval ; then
        echo
        echo Skipped due to interval restriction in effect
        continue
    fi
    # Insert your own argument processing here
}
# Handles processing of the "$fetch_interval" source argument.
# Returns 0 when fetching is permitted ( i.e. elapsed seconds
# since the last fetch is equal to or greater than $fetch_interval )
# and 1 when fetching is not permitted.
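# ( A stamp file holds nothing but the epoch time of the last successful
#   fetch as printed by 'date +%s', i.e. a single line like '1219500000';
#   the value here is only illustrative )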
sarg_check_fetch_interval() {
    local fetch_lastrun fetch_rundiff fetch_stampfile fetch_thisrun
    fetch_stampfile=$backup_dir/FETCHSTAMP.$source_file
    fetch_thisrun=`date +%s`
    fetch_stampfile_old=$backup_dir/LASTRUN.$source_file
    if [ -f $fetch_stampfile_old ] ; then
        mv $fetch_stampfile_old $fetch_stampfile
    fi
    if [ -f $fetch_stampfile ] ; then
        fetch_lastrun=`cat $fetch_stampfile`
        fetch_rundiff=$(($fetch_thisrun - $fetch_lastrun))
        if [ $fetch_rundiff -lt $fetch_interval ] ; then
            fetch_interval=
            return 1
        fi
    fi
    echo $fetch_thisrun > $fetch_stampfile
    fetch_interval=
    return 0
}
# Shows the source header
# (the below function also calls the source argument
# init and processing functions)
show_source_header() {
    sarg_init
    echo
    echo ====================================================
    echo Processing signature database: $target_file
    echo ====================================================
    # Process arguments if any are set for this source
    [ -n "$sarg_init_success" ] && sarg_process
}
#### actual script execution begins here ####
if [ -n "$script_not_configured" ] ; then
    echo '*** SCRIPT NOT CONFIGURED ***'
    echo Please take the time to configure this script before running it.
    echo When you have, comment out the \'script_not_configured=1\' line at
    echo the top in the user editables section and execute the script again
    exit 1
fi
echo "Script started: "`date`
# Check to see if the working directories exist.
# If not, create them. Otherwise, ignore and proceed with the script
mkdir -p $tmp_dir $rsync_dir $backup_dir
# Change working directory to the ClamAV database directory
cd $clamd_dbdir
# Get the timestamp from the previous run if it exists and
# update it.
stamp_lastrun=0
stamp_thisrun=`date +%s`
if [ -f $backup_dir/LASTRUN ] ; then
    stamp_lastrun=`cat $backup_dir/LASTRUN`
fi
echo $stamp_thisrun > $backup_dir/LASTRUN
# To "play nice" with the source servers don't run more frequently
# than once every hour. Also, attempt to keep off any peak crontimes
# by adding a randomized (between 30 seconds and 10 minutes) sleep period.
# --- Idea inspired by Rick Cooper's "UpdateSaneSecurity" script.
# ( You can of course disable the sleep feature by commenting out the
# 'sleep_enabled=1' line in the user editables section above )
if [ -n "$sleep_enabled" ] ; then
    # Calculate if we have run in the last hour. If we have, add the
    # remainder to the sleep time
    sleep_forced=0
    if [ $stamp_lastrun -gt 0 ] ; then
        stamp_rundiff=$(($stamp_thisrun - $stamp_lastrun))
        if [ $stamp_rundiff -lt 3600 ] ; then
            sleep_forced=$((3600 - $stamp_rundiff))
        fi
    fi
    # NOTE:
    # Please note that I'm very well aware of the $RANDOM variable, however
    # since it is not a FreeBSD sh(1) native variable (which is the O/S and
    # shell I'm running this script under) I'm staying off that path. Feel
    # free to implement and use the $RANDOM method, if you want to :-)
    # Get a random number between 30 and 600. First attempt this by using
    # the jot(1) utility (installed by default on *BSD systems)...
    sleep_random=`jot -r 1 30 600 2>/dev/null`
    # ...if jot(1) failed, attempt another (more portable?) method
    if [ -z "$sleep_random" ] ; then
        sleep_random=0
        while [ $sleep_random -lt 30 ] || [ $sleep_random -gt 600 ] ; do
            sleep_random=`head -1 /dev/urandom | od -N 1 | awk '$2~/^0/{ print $2 / 1 }'`
        done
    fi
    # Add the two values together and sleep for that number of seconds.
    # If the $TERM variable isn't set we're probably running from cron, so
    # disable visual sleeping in that case
    sleep_forced=$(($sleep_forced + $sleep_random))
    echo ====================================================
    echo Sleeping $sleep_forced seconds before proceeding...
    echo ====================================================
    if [ -n "$TERM" -a -n "$sleep_visual" ] ; then
        while [ $sleep_forced -gt 0 ] ; do
            sleep_forced=$(($sleep_forced - 1))
            echo -n .
            sleep 1
        done
        echo
    else
        sleep $sleep_forced
    fi
fi
# Process http:// urls
for source_url in $http_source_urls ; do
    source_file=`basename $source_url | awk -F, '{print $1}'`
    source_args=`basename $source_url | sed "s/^$source_file//;s/^,//"`
    target_file=`echo $source_file | sed 's/\.gz$//'`
    # If the source and target filenames are equal, the source is not gzipped
    # (this will have to be expanded upon if/when additional forms of source
    # compression are to be supported).
    source_not_gzipped=
    if [ $source_file = $target_file ] ; then
        source_not_gzipped=1
    fi
    # Remove any non-word characters from the source filename.
    # We need this since it's used in various file operations
    source_file=`echo $source_file | sed 's/[^[:alnum:]\.-]/_/g'`
    # Produce the source header
    show_source_header
    # Check for an existing database file. If it exists then run an
    # update check. Otherwise, just download and extract the database file.
    if [ ! -s $target_file ] ; then
        # Redirect stderr to stdout while downloading the file.
        ( curl -L -R -o $tmp_dir/$source_file $source_url 2>&1 )
        # If the source isn't gzipped, compress it if $keep_sources_gzipped
        # is non-empty
        if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
            test -s $tmp_dir/$source_file && \
                gzip -9f $tmp_dir/$source_file && \
                source_file=${source_file}.gz
        fi
        # Validate the source file through a series of tests.
        # If all tests succeed install the source and database files
        # in the ClamAV database directory ($clamd_dbdir).
        test -s $tmp_dir/$source_file && \
            gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
            clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
            mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
            do_clamd_reload=$(($do_clamd_reload + 1))
    else
        # Select which file to use as a timestamp reference.
        source_timeref=$source_file
        if [ -n "$keep_sources_gzipped" -a -f ${source_file}.gz ] || \
           [ ! -f $source_file -a -f ${source_file}.gz ] ; then
            source_timeref=${source_file}.gz
        fi
        # Redirect stderr to stdout while downloading the source file; tell curl
        # to use $source_timeref as a timestamp reference
        ( curl -L -R -z $source_timeref -o $tmp_dir/$source_file $source_url 2>&1 )
        # If the source isn't gzipped...
        if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
            test -s $tmp_dir/$source_file && \
                gzip -9f $tmp_dir/$source_file && \
                source_file=${source_file}.gz
        fi
        # Validate the source file...
        test -s $tmp_dir/$source_file && \
            gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
            clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
            cp -f -p $target_file $backup_dir && \
            mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
            do_clamd_reload=$(($do_clamd_reload + 1))
    fi
done
# Process rsync:// urls
for source_url in $rsync_source_urls ; do
    source_file=`basename $source_url | awk -F, '{print $1}'`
    source_args=`basename $source_url | sed "s/^$source_file//;s/^,//"`
    target_file=$source_file
    # Produce the source header
    show_source_header
    # Check for an existing database file. If it exists then run an
    # update check. Otherwise, just download the database file.
    if [ ! -s $target_file ] ; then
        # Redirect stderr to stdout while downloading the file.
        ( rsync -t --stats $source_url $rsync_dir/$target_file 2>&1 )
        # Validate the source file through a series of tests.
        # If all tests succeed install the source and database files
        # in the ClamAV database directory ($clamd_dbdir).
        cp -p $rsync_dir/$target_file $tmp_dir && \
            test -s $tmp_dir/$target_file && \
            clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
            mv -f $tmp_dir/$target_file . && \
            do_clamd_reload=$(($do_clamd_reload + 1))
    else
        # Download the source file...
        ( rsync -tu --stats $source_url $rsync_dir/$target_file 2>&1 )
        # Validate the source file...
        test $rsync_dir/$target_file -nt $target_file && \
            cp -p $rsync_dir/$target_file $tmp_dir && \
            test -s $tmp_dir/$target_file && \
            clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
            cp -f -p $target_file $backup_dir && \
            mv -f $tmp_dir/$target_file . && \
            do_clamd_reload=$(($do_clamd_reload + 1))
    fi
done
# Give the clamd user/group ownership of the database directory
chown -R $clamd_user:$clamd_group $clamd_dbdir
# Remove any leftover files in the $tmp_dir working directory
# (should only happen when a corrupted database is detected)
rm -f $tmp_dir/*
# Reload the clamd databases if $clamd_pidfile and $do_clamd_reload
# are both non-empty
if [ -n "$clamd_pidfile" -a -n "$do_clamd_reload" ] ; then
    echo
    echo ====================================================
    echo Reloading the ClamAV databases \($do_clamd_reload updated\)
    echo ====================================================
    kill -USR2 `cat $clamd_pidfile`
fi
echo
echo "Script ended: "`date`
exit $?
</pre>
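Once the script has been configured and run (by hand or from cron), the extra signature databases end up next to ClamAV's official ones in the database directory. A quick way to verify, using the default paths from the script above:

<pre>
ls -l /var/lib/clamav
# you should see e.g. phish.ndb, scam.ndb, vx.hdb, mbl.db,
# MSRBL-Images.hdb and MSRBL-SPAM.ndb next to main.cvd and daily.cvd
</pre>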
[[Category:Mail]]