#!/bin/sh
# (Recovered from the Alpine Linux wiki page "Up clam ex.sh"; wiki diff
# duplication and markup removed.)

# update_clamd_extra_sigs.sh v0.2 by Dan Larsson <dl|at|tyfon|dot|net>
# ==============================================================================
# LICENSE
# ==============================================================================
# "THE BEER-WARE LICENSE" (Revision 42):
# <dl|at|tyfon|dot|net> wrote this file. As long as you retain this
# notice you can do whatever you want with this stuff. If we meet
# some day, and you think this stuff is worth it, you can buy me a
# beer in return. Dan Larsson
#
# ==============================================================================
# VERSION INFO
# ==============================================================================
# v0.2 - 2008-08-23
#  * Don't use non-word chars in function names
#  * Changed the SecuriteInfo signature source url
#    (Thanks to Bill Landry for pointing out the above two)
#  * Cleaned up and added comments
#  * Misc cosmetic changes
#
# v0.1 - 2008-08-22
#  * Initial release, branched from v1.4 of Bill Landry's
#    ss-msrbl.sh script
#
# ==============================================================================
# README
# ==============================================================================
# In order to run this script you need to have curl, rsync and clamd installed
# on your machine as well as the basic set of unix-like tools (i.e. awk, sed,
# cat, cp, gunzip etc...).
#
# If this script fails to run on your system or you have made improvements that
# you wish to share, you're welcome to drop me a line.
#
# ==============================================================================
# USAGE
# ==============================================================================
# Using this script is easy, just configure the parameters, save the changes
# and execute from the prompt (or via cron). Should you want to add additional
# signature databases simply add their download urls to the appropriate
# section(s) here below and you're done! Naturally, it's just as easy to remove
# and edit :-) No script coding necessary!
#
# ==============================================================================
# SIGNATURE SOURCES
# ==============================================================================
# SaneSecurity (phish.ndb, scam.ndb)
# http://www.sanesecurity.com/clamav/usage.htm
#
# SecuriteInfo (vx.hdb)
# http://www.securiteinfo.com/services/clamav_unofficial_malwares_signatures.shtml
#
# MalwareBlockList (mbl.db)
# http://www.malware.com.br/clamav.txt
#
# MSRBL (MSRBL-Images.hdb, MSRBL-SPAM.ndb)
# http://www.msrbl.com/site/msrblimagesdownload
# http://www.msrbl.com/site/msrblspamdownload
#
# ==============================================================================
# SOURCE ARGUMENTS ( see below for more info on source arguments )
# ==============================================================================
# Name                Value     Comment
# ------------------- --------- ------------------------------------------------
# fetch_interval      integer   Forced delay in seconds between download
#                               attempts
# target_file         string    Use this name for the signature database
#                               (instead of extracting it from the source file)


################################################################################
#      SCRIPT USER EDIT SECTION - SET PROGRAM PATHS AND OTHER VARIABLES        #
################################################################################

# *** COMMENT OUT THE BELOW LINE WHEN YOU HAVE CONFIGURED THIS SCRIPT ***
#script_not_configured=1

# Set and export the command searchpaths
PATH=/root/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH

# Set path to ClamAV database dir location as well as
# the clamd user and group account
clamd_dbdir=/var/lib/clamav
clamd_user=clamav
clamd_group=clamav

# Set path to the clamd pidfile
# (comment out to disable forced database reload)
clamd_pidfile=/var/run/clamav/clamd.pid

# Set backup and temp working directory paths
# (edit to meet your own needs)
backup_dir=/var/backups/clamav
tmp_dir=/var/tmp/clamdb
rsync_dir=/var/tmp/rsync

# HTTP source urls - *MUST* be HTTP urls, one url per line
# (add/remove/modify urls as per preference and/or need,
# to disable, comment out the below six lines)
http_source_urls="
  http://www.sanesecurity.com/clamav/phishsigs/phish.ndb.gz
  http://www.sanesecurity.com/clamav/scamsigs/scam.ndb.gz
  http://clamav.securiteinfo.com/vx.hdb.gz
  http://www.malware.com.br/cgi/submit?action=list_clamav,fetch_interval=86400,target_file=mbl.db
"

# RSYNC source urls - *MUST* be RSYNC urls, one url per line
# (add/remove/modify urls as per preference and/or need,
# to disable, comment out the below four lines)
rsync_source_urls="
  rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-Images.hdb
  rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-SPAM.ndb
"

# Arguments can be appended to the source_url, if you do so
# separate them from the source url and each other with commas
# ( e.g. scheme://hostname/path,arg1=123,arg2=abc )
# Please note that it's very important you null their value when
# they've served their purpose, not doing so will lead to weird
# results

# Enable random sleeping before processing
# - recommended when running via cron!
# (to disable this comment out the below line)
#sleep_enabled=1

# Show each slept second visually
# - disabled when running via cron.
# (to disable this comment out the below line)
sleep_visual=1

# Compress all downloaded *source files* with gzip
# (to disable this comment out the below line)
keep_sources_gzipped=1

################################################################################
# END OF SCRIPT USER EDIT SECTION - YOU SHOULD NOT NEED TO EDIT ANYTHING BELOW #
################################################################################


# Initializes the source arguments.
# Splits the comma-separated "$source_args" string into name=value
# pairs and evaluates each one into a shell variable, then strips any
# argument suffix from "$source_url" (everything after the first comma).
# Sets "$sarg_init_success" to 1 when at least one argument was
# evaluated, and always nulls "$source_args" afterwards so a stale
# value cannot leak into the next source.
# NOTE(review): the eval means source urls are trusted input by design.
sarg_init() {
  sarg_init_success=
  if [ -n "$source_args" ] ; then
    for arg in `echo $source_args | sed 's/,/ /g'` ; do
      eval $arg
      sarg_init_success=1
    done
    source_url=`echo $source_url | awk -F, '{print $1}'`
  fi
  source_args=
}


# Runs all source argument subroutines. If you add your own
# checks/processing add them to this function.
# NOTE(review): uses "continue", so this must only ever be invoked
# from inside the per-source download loops below (via
# show_source_header); calling it elsewhere is an error.
sarg_process() {
  # Check for fetch interval restriction
  if [ -n "$fetch_interval" ] && ! sarg_check_fetch_interval ; then
    echo
    echo Skipped due to interval restriction in effect
    continue
  fi
  # Insert your own argument processing here
}


# Handles processing of the "$fetch_interval" source argument.
# Returns 0 when fetching is permitted ( i.e. elapsed seconds
# since last fetch is equal or greater than $fetch_interval ),
# respectively returns 1 when fetching is not permitted.
# Nulls "$fetch_interval" on both paths so a stale value cannot
# leak into the next source's processing, and records the current
# epoch time in a per-source stampfile under "$backup_dir" whenever
# fetching is permitted.
sarg_check_fetch_interval() {
  local fetch_lastrun fetch_rundiff fetch_stampfile fetch_thisrun
  fetch_stampfile=$backup_dir/FETCHSTAMP.$source_file
  fetch_thisrun=`date +%s`

  # Migrate a stampfile left over from the old naming scheme
  fetch_stampfile_old=$backup_dir/LASTRUN.$source_file
  if [ -f $fetch_stampfile_old ] ; then
    mv $fetch_stampfile_old $fetch_stampfile
  fi

  if [ -f $fetch_stampfile ] ; then
    fetch_lastrun=`cat $fetch_stampfile`
    fetch_rundiff=$(($fetch_thisrun - $fetch_lastrun))
    if [ $fetch_rundiff -lt $fetch_interval ] ; then
      fetch_interval=
      return 1
    fi
  fi
  echo $fetch_thisrun > $fetch_stampfile
  fetch_interval=
  return 0
}


# Shows the source header banner for "$target_file".
# This function also drives the source-argument machinery: it calls
# sarg_init first, and then sarg_process when sarg_init reported that
# at least one argument was evaluated for this source.
show_source_header() {
  sarg_init
  echo
  echo ====================================================
  echo Processing signature database: $target_file
  echo ====================================================
  # Process arguments if any are set for this source
  [ -n "$sarg_init_success" ] && sarg_process
}


#### actual script execution begins here ####
# Refuse to run until the user has reviewed the edit section above and
# commented out the 'script_not_configured=1' line.
if [ -n "$script_not_configured" ] ; then
  echo '*** SCRIPT NOT CONFIGURED ***'
  echo Please take the time to configure this script before running it.
  echo When you have, comment out the \'script_not_configured=1\' line at
  echo the top in the user editables section and execute the script again
  exit 1
fi
echo "Script started: "`date`


# Check to see if the working directories exist.
# If not, create them. Otherwise, ignore and proceed with script
mkdir -p $tmp_dir $rsync_dir $backup_dir

# Change working directory to ClamAV database directory
# (the download loops install files via "mv ... ." relative to here)
cd $clamd_dbdir

# Get the timestamp from the previous run if it exists and
# update it. $stamp_lastrun stays 0 on the very first run, which
# disables the forced-sleep remainder calculation below.
stamp_lastrun=0
stamp_thisrun=`date +%s`
if [ -f $backup_dir/LASTRUN ] ; then
  stamp_lastrun=`cat $backup_dir/LASTRUN`
fi
echo $stamp_thisrun > $backup_dir/LASTRUN


# To "play nice" with the source servers don't run more frequently
# than once every hour. Also, attempt to keep off any peak crontimes
# by adding a randomized (between 30 seconds and 10 minutes) sleep period.
#  --- Idea inspired by Rick Cooper's "UpdateSaneSecurity" script.
# ( You can of course disable the sleep-feature by commenting out the
# 'sleep_enabled=1' line in the user editables section here above )
if [ -n "$sleep_enabled" ] ; then
  # Calculate if we have run in the last hour. If we have add the
  # remainder to the sleep time
  sleep_forced=0
  if [ $stamp_lastrun -gt 0 ] ; then
    stamp_rundiff=$(($stamp_thisrun - $stamp_lastrun))
    if [ $stamp_rundiff -lt 3600 ] ; then
      sleep_forced=$((3600 - $stamp_rundiff))
    fi
  fi

  # NOTE:
  # Please note that I'm very well aware of the $RANDOM variable, however
  # since it is not a FreeBSD sh(1) native variable (which is the O/S and
  # shell I'm running this script under) I'm staying off that path. Feel
  # free to implement and use the $RANDOM method, if you want to :-)

  # Get a random number between 30 and 600. First attempt this by using
  # the jot(1) utility (installed by default on *BSD systems)...
  sleep_random=`jot -r 1 30 600 2>/dev/null`

  # ...if jot(1) failed attempt another (more portable?) method:
  # keep sampling one byte from /dev/urandom until it lands in range
  if [ -z "$sleep_random" ] ; then
    sleep_random=0
    while [ $sleep_random -lt 30 ] || [ $sleep_random -gt 600 ] ; do
      sleep_random=`head -1 /dev/urandom | od -N 1 | awk '$2~/^0/{ print $2 / 1 }'`
    done
  fi

  # Add the two values together and sleep for that amount of seconds.
  # If the $TERM variable isn't set we're probably running from cron so
  # disable visual sleeping in that case
  sleep_forced=$(($sleep_forced + $sleep_random))
  echo ====================================================
  echo Sleeping $sleep_forced seconds before proceeding...
  echo ====================================================
  if [ -n "$TERM" -a -n "$sleep_visual" ] ; then
    while [ $sleep_forced -gt 0 ] ; do
      sleep_forced=$(($sleep_forced - 1))
      echo -n .
      sleep 1
    done
    echo
  else
    sleep $sleep_forced
  fi
fi


# Process http://urls
for source_url in $http_source_urls ; do
  # Split "basename,arg1=..,arg2=.." into filename and argument string
  source_file=`basename $source_url | awk -F, '{print $1}'`
  source_args=`basename $source_url | sed "s/^\$source_file//;s/^,//"`
  target_file=`echo $source_file | sed 's/\.gz$//'`

  # If the source and target filenames are equal the source is not gzipped
  # (this will have to be expanded upon if/when additional forms of source
  # compression are to be supported).
  source_not_gzipped=
  if [ $source_file = $target_file ] ; then
    source_not_gzipped=1
  fi

  # Remove any non-word characters from the source filename.
  # We need this since it's used in various file operations
  source_file=`echo $source_file | sed 's/[^[:alnum:]\.-]/_/g'`

  # Produce the source header (also evaluates any source arguments;
  # note that "target_file" may be overridden by a source argument here)
  show_source_header

  # Check for an existing database file. If it exists then run an
  # update check. Otherwise, just download and extract the database file.
  if [ ! -s $target_file ] ; then
    # Redirect stderr to stdout while downloading the file.
    ( curl -L -R -o $tmp_dir/$source_file $source_url 2>&1 )

    # If the source isn't gzipped, compress it if $keep_sources_gzipped
    # is non-empty
    if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
      test -s $tmp_dir/$source_file && \
        gzip -9f $tmp_dir/$source_file && \
        source_file=${source_file}.gz
    fi

    # Validate the source file through a series of tests.
    # If all tests succeed install the source and database files
    # in the ClamAV database directory ($clamd_dbdir).
    test -s $tmp_dir/$source_file && \
      gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
      clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
      mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
      do_clamd_reload=$(($do_clamd_reload + 1))
  else
    # Select which file to use as a timestamp reference: prefer the
    # gzipped copy when we keep sources gzipped or only it exists.
    source_timeref=$source_file
    if [ -n "$keep_sources_gzipped" -a -f ${source_file}.gz ] || \
      [ ! -f $source_file -a -f ${source_file}.gz ] ; then
      source_timeref=${source_file}.gz
    fi

    # Redirect stderr to stdout while downloading the source file, tell curl
    # to use $source_timeref as a timestamp reference (only fetch if newer)
    ( curl -L -R -z $source_timeref -o $tmp_dir/$source_file $source_url 2>&1 )

    # If the source isn't gzipped, compress it if $keep_sources_gzipped
    # is non-empty
    if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
      test -s $tmp_dir/$source_file && \
        gzip -9f $tmp_dir/$source_file && \
        source_file=${source_file}.gz
    fi

    # Validate the source file, back up the current database and
    # install the new source and database files.
    test -s $tmp_dir/$source_file && \
      gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
      clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
      cp -f -p $target_file $backup_dir && \
      mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
      do_clamd_reload=$(($do_clamd_reload + 1))
  fi
done


# Process rsync://urls
for source_url in $rsync_source_urls ; do
  # Split "basename,arg1=..,arg2=.." into filename and argument string;
  # rsync sources are never compressed so target == source
  source_file=`basename $source_url | awk -F, '{print $1}'`
  source_args=`basename $source_url | sed "s/^\$source_file//;s/^,//"`
  target_file=$source_file

  # Produce the source header (also evaluates any source arguments)
  show_source_header

  # Check for an existing database file. If it exists then run an
  # update check. Otherwise, just download and extract the database file.
  if [ ! -s $target_file ] ; then
    # Redirect stderr to stdout while downloading the file.
    ( rsync -t --stats $source_url $rsync_dir/$target_file 2>&1 )

    # Validate the source file through a series of tests.
    # If all tests succeed install the source and database files
    # in the ClamAV database directory ($clamd_dbdir).
    cp -p $rsync_dir/$target_file $tmp_dir && \
      test -s $tmp_dir/$target_file && \
      clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
      mv -f $tmp_dir/$target_file . && \
      do_clamd_reload=$(($do_clamd_reload + 1))
  else
    # Download the source file, -u skips it unless the remote is newer
    ( rsync -tu --stats $source_url $rsync_dir/$target_file 2>&1 )

    # Validate the source file, back up the current database and
    # install the new one — only when the download is newer (-nt)
    test $rsync_dir/$target_file -nt $target_file && \
      cp -p $rsync_dir/$target_file $tmp_dir && \
      test -s $tmp_dir/$target_file && \
      clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
      cp -f -p $target_file $backup_dir && \
      mv -f $tmp_dir/$target_file . && \
      do_clamd_reload=$(($do_clamd_reload + 1))
  fi
done


# Set appropriate file access permissions
chown -R $clamd_user:$clamd_group $clamd_dbdir

# Remove any leftover files in the $tmp_dir working directory
# (should only happen when a corrupted database is detected)
rm -f $tmp_dir/*

# Reload the clamd database if $clamd_pidfile and $do_clamd_reload
# are both non-empty ($do_clamd_reload counts updated databases and
# is only ever set by the download loops above)
if [ -n "$clamd_pidfile" -a -n "$do_clamd_reload" ] ; then
  echo
  echo ====================================================
  echo Reloading the ClamAV databases \($do_clamd_reload updated\)
  echo ====================================================
  # SIGUSR2 tells clamd to re-read its database directory
  kill -USR2 `cat $clamd_pidfile`
fi

echo
echo "Script ended: "`date`

exit $?
</pre>

Revision as of 08:48, 21 May 2009

#!/bin/sh

# update_clamd_extra_sigs.sh v0.2 by Dan Larsson <dl|at|tyfon|dot|net>
# ==============================================================================
# LICENSE
# ==============================================================================
# "THE BEER-WARE LICENSE" (Revision 42):
# <dl|at|tyfon|dot|net> wrote this file. As long as you retain this
# notice you can do whatever you want with this stuff. If we meet
# some day, and you think this stuff is worth it, you can buy me a
# beer in return. Dan Larsson
#
# ==============================================================================
# VERSION INFO
# ==============================================================================
# v0.2 - 2008-08-23
#   * Don't use non-word chars in function names
#   * Changed the SecuriteInfo signature source url
#     (Thank's to Bill Landry for pointing out the above two)
#   * Cleaned up and added comments
#   * Misc cosmetic changes
#
# v0.1 - 2008-08-22
#   * Initial release, branched from v1.4 of Bill Landry's
#     ss-msrbl.sh script
#
# ==============================================================================
# README
# ==============================================================================
# In order to run this script you need to have curl, rsync and clamd installed
# on your machine as well as the basic set of unix-like tools (e.g. awk, sed,
# cat, cp, gunzip etc...).
#
# If this script fails to run on your system or you have made improvements that
# you wish to share, you're welcome to drop me a line.
#
# ==============================================================================
# USAGE
# ==============================================================================
# Using this script is easy, just configure the parameters, save the changes
# and execute from the prompt (or via cron). Should you want to add additional
# signature databases simply add their download urls to the appropriate
# section(s) here below and you're done! Naturally, it's just as easy to remove
# and edit :-) No script coding necessary!
#
# ==============================================================================
# SIGNATURE SOURCES
# ==============================================================================
# SaneSecurity (phish.ndb, scam.ndb)
# http://www.sanesecurity.com/clamav/usage.htm
#
# SecuriteInfo (vx.hdb)
# http://www.securiteinfo.com/services/clamav_unofficial_malwares_signatures.shtml
#
# MalwareBlockList (mbl.db)
# http://www.malware.com.br/clamav.txt
#
# MSRBL (MSRBL-Images.hdb, MSRBL-SPAM.ndb)
# http://www.msrbl.com/site/msrblimagesdownload
# http://www.msrbl.com/site/msrblspamdownload
#
# ==============================================================================
# SOURCE ARGUMENTS ( see below for more info on source arguments )
# ==============================================================================
# Name                Value        Comment
# ------------------- --------- ------------------------------------------------
# fetch_interval      integer   Forced delay in seconds between download
#                               attempts
# target_file         string    Use this name for the signature database
#                               (instead of extracting it from the source file)

################################################################################
#       SCRIPT USER EDIT SECTION - SET PROGRAM PATHS AND OTHER VARIABLES       #
################################################################################

# *** COMMENT OUT THE BELOW LINE WHEN YOU HAVE CONFIGURED THIS SCRIPT ***
#script_not_configured=1

# Set and export the command searchpaths
# (must contain curl, rsync, clamscan and the basic unix tools)
PATH=/root/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH

# Set path to ClamAV database dir location as well as
# the clamd user and group account
clamd_dbdir=/var/lib/clamav
clamd_user=clamav
clamd_group=clamav

# Set path to the clamd pidfile
# (comment out to disable forced database reload)
clamd_pidfile=/var/run/clamav/clamd.pid

# Set backup and temp working directory paths
# (edit to meet your own needs)
backup_dir=/var/backups/clamav
tmp_dir=/var/tmp/clamdb
rsync_dir=/var/tmp/rsync

# HTTP source urls - *MUST* be HTTP urls, one url per line
# (add/remove/modify urls as per preference and/or need,
# to disable, comment out the below six lines)
http_source_urls="
   http://www.sanesecurity.com/clamav/phishsigs/phish.ndb.gz
   http://www.sanesecurity.com/clamav/scamsigs/scam.ndb.gz
   http://clamav.securiteinfo.com/vx.hdb.gz
   http://www.malware.com.br/cgi/submit?action=list_clamav,fetch_interval=86400,target_file=mbl.db
"

# RSYNC source urls - *MUST* be RSYNC urls, one url per line
# (add/remove/modify urls as per preference and/or need,
# to disable, comment out the below four lines)
rsync_source_urls="
   rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-Images.hdb
   rsync://rsync.mirror.msrbl.com/msrbl/MSRBL-SPAM.ndb
"

# Arguments can be appended to the source_url, if you do so
# separate them from the source url and each other with commas
# ( e.g. scheme://hostname/path,arg1=123,arg2=abc )
# Please note that it's very important you null their value when
# they've served their purpose, not doing so will lead to weird
# results

# Enable random sleeping before processing
# - recommended when running via cron!
# (to disable this comment out the below line)
#sleep_enabled=1

# Show each slept second visually
# - disabled when running via cron.
# (to disable this comment out the below line)
sleep_visual=1

# Compress all downloaded *source files* with gzip
# (to disable this comment out the below line)
keep_sources_gzipped=1

################################################################################
# END OF SCRIPT USER EDIT SECTION - YOU SHOULD NOT NEED TO EDIT ANYTHING BELOW #
################################################################################

# Initializes the source arguments: evaluates each comma-separated
# name=value pair held in $source_args, flags that any were found via
# $sarg_init_success, strips the trailing argument list off
# $source_url and finally nulls $source_args.
sarg_init() {
   sarg_init_success=
   if [ -n "$source_args" ] ; then
      for sarg_pair in $(echo $source_args | tr ',' ' ') ; do
         eval $sarg_pair
         sarg_init_success=1
      done
      source_url=$(echo $source_url | cut -d, -f1)
   fi
   source_args=
}

# Runs all source argument subroutines. If you add your own
# checks/processing add them to this function.
# NOTE(review): the 'continue' below is executed from inside a
# function that is called from within the url processing loops;
# propagating break/continue out of a function works in ash /
# FreeBSD sh (the shells this script targets) but bash only warns
# and ignores it -- confirm before running this under bash.
sarg_process() {
   # Check for fetch interval restriction
   if [ -n "$fetch_interval" ] && ! sarg_check_fetch_interval ; then
      echo
      echo Skipped due to interval restriction in effect
      continue
   fi
   # Insert your own argument processing here
}

# Handles processing of the "$fetch_interval" source argument
# Returns 0 when fetching is permitted ( i.e. elapsed seconds
# since last fetch is equal or greater than $fetch_interval )
# respectively returns 1 when fetching is not permitted.
sarg_check_fetch_interval() {
   local fetch_lastrun fetch_rundiff fetch_stampfile fetch_thisrun
   fetch_stampfile=$backup_dir/FETCHSTAMP.$source_file
   fetch_thisrun=`date +%s`

   fetch_stampfile_old=$backup_dir/LASTRUN.$source_file
   if [ -f $fetch_stampfile_old ] ; then
      mv $fetch_stampfile_old $fetch_stampfile
   fi

   if [ -f $fetch_stampfile ] ; then
      fetch_lastrun=`cat $fetch_stampfile`
      fetch_rundiff=$(($fetch_thisrun - $fetch_lastrun))
      if [ $fetch_rundiff -lt $fetch_interval ] ; then
         fetch_interval=
         return 1
      fi
   fi
   echo $fetch_thisrun > $fetch_stampfile
   fetch_interval=
   return 0
}

# Prints the banner for the signature source currently being
# processed ($target_file). Also initializes the source arguments
# via sarg_init and, when any were present, runs them through
# sarg_process.
show_source_header() {
   sarg_init
   echo ""
   echo "===================================================="
   echo "Processing signature database: $target_file"
   echo "===================================================="
   # Process arguments if any are set for this source
   if [ -n "$sarg_init_success" ] ; then
      sarg_process
   fi
}

#### actual script execution begins here ####
# Refuse to run while the guard variable from the user edit
# section is still set.
if [ -n "$script_not_configured" ] ; then
   echo "*** SCRIPT NOT CONFIGURED ***"
   echo "Please take the time to configure this script before running it."
   echo "When you have, comment out the 'script_not_configured=1' line at"
   echo "the top in the user editables section and execute the script again"
   exit 1
fi
echo "Script started: "`date`

# Check to see if the working directories exist.
# If not, create them. Otherwise, ignore and proceed with script
# (-p makes mkdir silently accept already existing directories)
mkdir -p $tmp_dir $rsync_dir $backup_dir

# Change working directory to ClamAV database directory
# NOTE(review): the cd result is unchecked; if $clamd_dbdir is
# missing the rest of the script runs in the wrong directory --
# consider 'cd $clamd_dbdir || exit 1'
cd $clamd_dbdir

# Get the timestamp (seconds since the epoch) from the previous
# run if it exists and update it. $stamp_lastrun is consumed by
# the sleep throttling section below.
stamp_lastrun=0
stamp_thisrun=`date +%s`
if [ -f $backup_dir/LASTRUN ] ; then
   stamp_lastrun=`cat $backup_dir/LASTRUN`
fi
echo $stamp_thisrun > $backup_dir/LASTRUN

# To "play nice" with the source servers don't run more frequently
# than once every hour. Also, attempt to keep off any peak crontimes
# by adding a randomized (between 30 seconds and 10 minutes) sleep period.
#  --- Idea inspired by Rick Cooper's "UpdateSaneSecurity" script.
# ( You can of course disable the sleep-feature by commenting out the
# 'sleep_enabled=1' line in the user editables section here above )
if [ -n "$sleep_enabled" ] ; then
   # Calculate if we have run in the last hour. If we have add the
   # remainder of the full hour to the sleep time
   sleep_forced=0
   if [ $stamp_lastrun -gt 0 ] ; then
      stamp_rundiff=$(($stamp_thisrun - $stamp_lastrun))
      if [ $stamp_rundiff -lt 3600 ] ; then
         sleep_forced=$((3600 - $stamp_rundiff))
      fi
   fi

   # NOTE:
   # Please note that I'm very well aware of the $RANDOM variable, however
   # since it is not a FreeBSD sh(1) native variable (which is the O/S and
   # shell I'm running this script under) I'm staying off that path. Feel
   # free to implement and use the $RANDOM method, if you want to :-)

   # Get a random number between 30 and 600. First attempt this by using
   # the jot(1) utility (installed by default on *BSD systems)...
   sleep_random=`jot -r 1 30 600 2>/dev/null`

   # ...if jot(1) failed attempt another (more portable?) method:
   # read a byte from /dev/urandom via od(1) and retry until the
   # printed value falls within the 30-600 range.
   # NOTE(review): od prints the byte in octal but awk's '/ 1'
   # treats it as a decimal string; the while-loop range guard
   # still keeps the final value usable -- confirm if reusing
   if [ -z "$sleep_random" ] ; then
      sleep_random=0
      while [ $sleep_random -lt 30 ] || [ $sleep_random -gt 600 ] ; do
         sleep_random=`head -1 /dev/urandom | od -N 1 | awk '$2~/^0/{ print $2 / 1 }'`
      done
   fi

   # Add the two values together and sleep for that amount of seconds.
   # If the $TERM variable isn't set we're probably running from cron so
   # disable visual sleeping (one dot printed per slept second) then
   sleep_forced=$(($sleep_forced + $sleep_random))
   echo ====================================================
   echo Sleeping $sleep_forced seconds before proceeding...
   echo ====================================================
   if [ -n "$TERM" -a -n "$sleep_visual" ] ; then
      while [ $sleep_forced -gt 0 ] ; do
         sleep_forced=$(($sleep_forced - 1))
         echo -n .
         sleep 1
      done
      echo
   else
      sleep $sleep_forced
   fi
fi

# Process http://urls
for source_url in $http_source_urls ; do
   # $source_file is the url basename without any trailing ,arg=...
   # list, $source_args is that ,arg=... list (may be empty) and
   # $target_file is the database name with any .gz suffix stripped
   source_file=`basename $source_url | awk -F, '{print $1}'`
   source_args=`basename $source_url | sed "s/^\$source_file//;s/^,//"`
   target_file=`echo $source_file | sed 's/\.gz$//'`

   # If the source and target filenames are equal the source is not gzipped
   # (this will have to be expanded upon if/when additional forms of source
   # compression are to be supported).
   source_not_gzipped=
   if [ $source_file = $target_file ] ; then
      source_not_gzipped=1
   fi

   # Remove any non-word characters from the source filename.
   # We need this since it's used in various file operations
   source_file=`echo $source_file | sed 's/[^[:alnum:]\.-]/_/g'`

   # Produce the source header (this also evaluates the source
   # arguments and may 'continue' to the next url when a fetch
   # interval restriction is in effect)
   show_source_header

   # Check for an existing database file. If it exists then run an
   # update check. Otherwise, just download and extract the database file.
   if [ ! -s $target_file ] ; then
      # Redirect stderr to stdout while downloading the file.
      ( curl -L -R -o $tmp_dir/$source_file $source_url 2>&1 )

      # If the source isn't gzipped, compress it if $keep_sources_gzipped
      # is non-empty
      if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
         test -s $tmp_dir/$source_file && \
            gzip -9f $tmp_dir/$source_file && \
            source_file=${source_file}.gz
      fi

      # Validate the source file through a series of tests
      # (non-empty, decompresses cleanly, loads into clamscan).
      # If all tests succeed install the source and database files
      # in the ClamAV database directory ($clamd_dbdir, the cwd).
      # NOTE(review): when the source was never gzipped and
      # $keep_sources_gzipped is unset, $source_file equals
      # $target_file and mv below receives the same path twice --
      # confirm the duplicate argument is harmless here
      test -s $tmp_dir/$source_file && \
         gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
         clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
         mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
         do_clamd_reload=$(($do_clamd_reload + 1))
   else
      # Select which file to use as a timestamp reference
      # (prefer the gzipped copy when one exists)
      source_timeref=$source_file
      if [ -n "$keep_sources_gzipped" -a -f ${source_file}.gz ] || \
         [ ! -f $source_file -a -f ${source_file}.gz ] ; then
         source_timeref=${source_file}.gz
      fi

      # Redirect stderr to stdout while downloading the source file, tell curl
      # to use $source_timeref as a timestamp reference (-z: only fetch
      # when the remote copy is newer than the local reference file)
      ( curl -L -R -z $source_timeref -o $tmp_dir/$source_file $source_url 2>&1 )

      # If the source isn't gzipped...
      if [ -n "$keep_sources_gzipped" -a -n "$source_not_gzipped" ] ; then
         test -s $tmp_dir/$source_file && \
            gzip -9f $tmp_dir/$source_file && \
            source_file=${source_file}.gz
      fi

      # Validate the source file...
      # (same chain as above plus a backup of the current database)
      test -s $tmp_dir/$source_file && \
         gunzip -cdf $tmp_dir/$source_file > $tmp_dir/$target_file && \
         clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
         cp -f -p $target_file $backup_dir && \
         mv -f $tmp_dir/$target_file $tmp_dir/$source_file . && \
         do_clamd_reload=$(($do_clamd_reload + 1))
   fi
done

# Process rsync://urls
for source_url in $rsync_source_urls ; do
   # Rsync sources are never compressed, so the target name is the
   # source name (any ,arg=... list is split off as for http urls)
   source_file=`basename $source_url | awk -F, '{print $1}'`
   source_args=`basename $source_url | sed "s/^\$source_file//;s/^,//"`
   target_file=$source_file

   # Produce the source header (also evaluates source arguments)
   show_source_header

   # Check for an existing database file. If it exists then run an
   # update check. Otherwise, just download and extract the database file.
   if [ ! -s $target_file ] ; then
      # Redirect stderr to stdout while downloading the file.
      ( rsync -t --stats $source_url $rsync_dir/$target_file 2>&1 )

      # Validate the source file through a series of tests:
      # copy it into the working dir, make sure it is non-empty and
      # loads cleanly into clamscan, then install it in the ClamAV
      # database directory ($clamd_dbdir, the cwd)
      cp -p $rsync_dir/$target_file $tmp_dir && \
      test -s $tmp_dir/$target_file && \
         clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
         mv -f $tmp_dir/$target_file . && \
         do_clamd_reload=$(($do_clamd_reload + 1))
   else
      # Download the source file...
      # (-u makes rsync skip the transfer if the local copy is newer)
      ( rsync -tu --stats $source_url $rsync_dir/$target_file 2>&1 )

      # Validate the source file...
      # (only when the downloaded copy is newer than the installed
      # database; the current database is backed up first)
      test $rsync_dir/$target_file -nt $target_file && \
         cp -p $rsync_dir/$target_file $tmp_dir && \
         test -s $tmp_dir/$target_file && \
            clamscan --quiet -d $tmp_dir/$target_file - < /dev/null && \
            cp -f -p $target_file $backup_dir && \
            mv -f $tmp_dir/$target_file . && \
            do_clamd_reload=$(($do_clamd_reload + 1))
   fi
done

# Set appropriate file access permissions so the clamd account can
# read everything in the database directory
chown -R $clamd_user:$clamd_group $clamd_dbdir

# Remove any leftover files in the $tmp_dir working directory
# (should only happen when a corrupted database is detected)
# NOTE(review): this expands to 'rm -f /*' if $tmp_dir is ever
# empty -- consider '${tmp_dir:?}/*' before reusing this snippet
rm -f $tmp_dir/*

# Reload the clamd database if $clamd_pidfile and $do_clamd_reload
# are both non-empty ($do_clamd_reload counts the databases that
# were updated above)
if [ -n "$clamd_pidfile" -a -n "$do_clamd_reload" ] ; then
   echo
   echo ====================================================
   echo Reloading the ClamAV databases \($do_clamd_reload updated\)
   echo ====================================================
   # SIGUSR2 asks the running clamd to reload its databases
   kill -USR2 `cat $clamd_pidfile`
fi

echo
echo "Script ended: "`date`

# Propagate the exit status of the last command executed
exit $?