#!/usr/bin/perl -T -w

# <@LICENSE>
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to you under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </@LICENSE>

use strict;
use warnings;
use re 'taint';

my $VERSION = 'svnunknown';
if ('$Id: sa-update.raw 1900642 2022-05-07 06:01:02Z hege $' =~ ':') {
  # Subversion keyword "$Id: sa-update.raw 1900642 2022-05-07 06:01:02Z hege $" has been successfully expanded.
  # Doesn't happen with automated launchpad builds:
  # https://bugs.launchpad.net/launchpad/+bug/780916
  $VERSION = &Mail::SpamAssassin::Version . ' / svn' .
             (split(/\s+/, '$Id: sa-update.raw 1900642 2022-05-07 06:01:02Z hege $'))[2];
}

my $PREFIX          = '/usr';                    # substituted at 'make' time
my $DEF_RULES_DIR   = '/usr/share/spamassassin'; # substituted at 'make' time
my $LOCAL_RULES_DIR = '/etc/spamassassin';       # substituted at 'make' time
my $LOCAL_STATE_DIR = '/var/lib/spamassassin';   # substituted at 'make' time
use lib '/usr/share/perl5';                      # substituted at 'make' time

# We want to do a small amount of macro processing during channel installs,
# based on the values as passed in via 'make'
my %MACRO_VALUES = (
  'VERSION'         => '4.000000',
  'CONTACT_ADDRESS' => 'the administrator of that system',
  'PREFIX'          => '/usr',
  'DEF_RULES_DIR'   => '/usr/share/spamassassin',
  'LOCAL_RULES_DIR' => '/etc/spamassassin',
  'LOCAL_STATE_DIR' => '/var/lib/spamassassin',
  'INSTALLSITELIB'  => '/usr/share/perl5',
);

# Standard perl modules
use Errno qw(ENOENT EACCES);
use IO::File qw(O_RDONLY O_WRONLY O_RDWR O_CREAT O_EXCL);
use File::Spec;
use File::Path;
use Getopt::Long;
use Pod::Usage;
use Config;
use POSIX qw(locale_h setsid sigprocmask _exit);

POSIX::setlocale(LC_TIME,'C');

BEGIN {   # see comments in "spamassassin.raw" for doco
  my @bin = File::Spec->splitpath($0);
  my $bin = ($bin[0] ? File::Spec->catpath(@bin[0..1], '') : $bin[1])
            || File::Spec->curdir;

  if (-e $bin.'/lib/Mail/SpamAssassin.pm'
      || !-e '/usr/share/perl5/Mail/SpamAssassin.pm' )
  {
    my $searchrelative;
    if ($searchrelative && $bin eq '../' && -e '../blib/lib/Mail/SpamAssassin.pm') {
      unshift ( @INC, '../blib/lib' );
    }
    else {
      foreach ( qw(lib ../lib/site_perl ../lib/spamassassin ../share/spamassassin/lib)) {
        my $dir = File::Spec->catdir( $bin, split ( '/', $_ ) );
        if ( -f File::Spec->catfile( $dir, "Mail", "SpamAssassin.pm" ) ) {
          unshift ( @INC, $dir );
          last;
        }
      }
    }
  }
}

# These are the non-standard required modules
use Net::DNS;
use Archive::Tar 1.23;
use IO::Zlib 1.04;
use Mail::SpamAssassin::Logger qw(:DEFAULT info log_message);

our ($have_lwp, $io_socket_module_name,
     $have_inet4, $use_inet4, $have_inet6, $use_inet6,
     $have_sha256, $have_sha512);

BEGIN {
  # Deal with optional modules
  eval {
    require Digest::SHA; Digest::SHA->import(qw(sha256_hex sha512_hex)); 1
  } and do { $have_sha256=1; $have_sha512=1 }
  or die "Unable to verify file hashes!
You must install a modern version of Digest::SHA.";

  $have_lwp = eval { require LWP::UserAgent; };

  if (eval { require IO::Socket::IP }) {         # handles IPv6 and IPv4
    $io_socket_module_name = 'IO::Socket::IP';
  } elsif (eval { require IO::Socket::INET6 }) { # handles IPv6 and IPv4
    $io_socket_module_name = 'IO::Socket::INET6';
  } elsif (eval { require IO::Socket::INET }) {  # IPv4 only
    $io_socket_module_name = 'IO::Socket::INET';
  }

  $have_inet4 =  # can we create a PF_INET socket?
    defined $io_socket_module_name && eval {
      my $sock = $io_socket_module_name->new(LocalAddr => '0.0.0.0', Proto => 'tcp');
      $sock->close or die "error closing socket: $!"  if $sock;
      $sock ? 1 : undef;
    };

  $have_inet6 =  # can we create a PF_INET6 socket?
    defined $io_socket_module_name &&
    $io_socket_module_name ne 'IO::Socket::INET' && eval {
      my $sock = $io_socket_module_name->new(LocalAddr => '::', Proto => 'tcp');
      $sock->close or die "error closing socket: $!"  if $sock;
      $sock ? 1 : undef;
    };
}

# These should already be available
use Mail::SpamAssassin;
use Mail::SpamAssassin::Util qw(untaint_var untaint_file_path
                                proc_status_ok exit_status_str
                                am_running_on_windows
                                secure_tmpfile secure_tmpdir);

# Make the main dbg() accessible in our package w/o an extra function
*dbg=\&Mail::SpamAssassin::dbg;
sub dbg;

$| = 1;  # autoflushing STDOUT makes verbose output consistent with warnings

# Clean up PATH appropriately
Mail::SpamAssassin::Util::clean_path_in_taint_mode();

##############################################################################

# Default list of GPG keys allowed to sign update releases
#
# pub 4096R/5244EC45 2005-12-20
#     Key fingerprint = 5E54 1DC9 59CB 8BAC 7C78 DFDC 4056 A61A 5244 EC45
# uid updates.spamassassin.org Signing Key <release@spamassassin.org>
# sub 4096R/24F434CE 2005-12-20
#
# note for gpg newbs: these are "long" gpg keyids. It's common to also
# use the last 8 hex digits as a shorter keyid string.
#
my %valid_GPG = (
  '0C2B1D7175B852C64B3CDC716C55397824F434CE' => 1,
  '5E541DC959CB8BAC7C78DFDC4056A61A5244EC45' => 1,
);

# Default list of channels to update against
#
my @channels = ( 'updates.spamassassin.org' );

my $IGNORE_MIRBY_OLDER_THAN = (24 * 60 * 60 * 7);  # 1 week

##############################################################################

my %opt;
@{$opt{'gpgkey'}} = ();
@{$opt{'channel'}} = ();
my $GPG_ENABLED = 1;

$opt{'gpghomedir'} = File::Spec->catfile($LOCAL_RULES_DIR, 'sa-update-keys');

Getopt::Long::Configure(
  qw(bundling no_getopt_compat no_auto_abbrev no_ignore_case));

GetOptions(
  'debug|D:s'          => \$opt{'debug'},
  'version|V'          => \$opt{'version'},
  'help|h|?'           => \$opt{'help'},
  'verbose|v+'         => \$opt{'verbose'},
  'checkonly'          => \$opt{'checkonly'},
  'allowplugins'       => \$opt{'allowplugins'},
  'reallyallowplugins' => \$opt{'reallyallowplugins'},
  'refreshmirrors'     => \$opt{'refreshmirrors'},
  'forcemirror=s'      => \$opt{'forcemirror'},
  'httputil=s'         => \$opt{'httputil'},
  'score-multiplier=s' => \$opt{'score-multiplier'},
  'score-limit=s'      => \$opt{'score-limit'},

  # allow multiple of these on the commandline
  'gpgkey=s'           => $opt{'gpgkey'},
  'gpghomedir=s'       => \$opt{'gpghomedir'},
  'channel=s'          => $opt{'channel'},

  'install=s'          => \$opt{'install'},
  'import=s'           => \$opt{'import'},
  'gpgkeyfile=s'       => \$opt{'gpgkeyfile'},
  'channelfile=s'      => \$opt{'channelfile'},
  'updatedir=s'        => \$opt{'updatedir'},
  'gpg!'               => \$GPG_ENABLED,
  '4'                  => sub { $opt{'force_pf'} = 'inet' },
  '6'                  => sub { $opt{'force_pf'} = 'inet6' },

  # backward compatibility
  'usegpg'             => \$GPG_ENABLED,
) or print_usage_and_exit();

if ( defined $opt{'help'} ) {
  print_usage_and_exit("For more information read the sa-update man page.\n", 0);
}
if ( defined $opt{'version'} ) {
  print_version();
  exit(0);
}

if ( $opt{'allowplugins'} && !$opt{'reallyallowplugins'} ) {
  warn "Security warning: dangerous option --allowplugins used:\n".
       "- there should never be need to use this option, see man sa-update(1)\n".
       "- specify --reallyallowplugins to allow activating plugins\n";
  exit 2;
}

$use_inet4 = $have_inet4 && ( !$opt{'force_pf'} || $opt{'force_pf'} eq 'inet' );
$use_inet6 = $have_inet6 && ( !$opt{'force_pf'} || $opt{'force_pf'} eq 'inet6' );

if ( $opt{'force_pf'} && $opt{'force_pf'} eq 'inet' && !$have_inet4 ) {
  warn "Option -4 specified but support for the ".
       "INET protocol family is not available.\n";
}
if ( $opt{'force_pf'} && $opt{'force_pf'} eq 'inet6' && !$have_inet6 ) {
  warn "Option -6 specified but support for the ".
       "INET6 protocol family is not available.\n";
}

if ( defined $opt{'httputil'} && $opt{'httputil'} !~ /^(curl|wget|fetch|lwp)$/ ) {
  warn "Invalid parameter for --httputil, curl|wget|fetch|lwp wanted\n";
}

if ( defined $opt{'score-multiplier'} && $opt{'score-multiplier'} !~ /^\d+(?:\.\d+)?$/ ) {
  die "Invalid parameter for --score-multiplier, integer or float expected.\n";
}
if ( defined $opt{'score-limit'} && $opt{'score-limit'} !~ /^\d+(?:\.\d+)?$/ ) {
  die "Invalid parameter for --score-limit, integer or float expected.\n";
}

# Figure out what version of SpamAssassin we're using, and also figure out the
# reverse of it for the DNS query. Handle x.yyyzzz as well as x.yz.
my $SAVersion = $Mail::SpamAssassin::VERSION;
if ($SAVersion =~ /^(\d+)\.(\d{3})(\d{3})$/) {
  $SAVersion = join(".", $1+0, $2+0, $3+0);
}
elsif ($SAVersion =~ /^(\d)\.(\d)(\d)$/) {
  $SAVersion = "$1.$2.$3";
}
else {
  die "fatal: SpamAssassin version number '$SAVersion' is in an unknown format!\n";
}
my $RevSAVersion = join(".", reverse split(/\./, $SAVersion));

# set debug areas, if any specified (only useful for command-line tools)
$opt{'debug'} ||= 'all'  if (defined $opt{'debug'});

# Find the default site rule directory, also setup debugging and other M::SA bits
my $SA = Mail::SpamAssassin->new({
  debug            => $opt{'debug'},
  local_tests_only => 1,
  dont_copy_prefs  => 1,
  PREFIX           => $PREFIX,
  DEF_RULES_DIR    => $DEF_RULES_DIR,
  LOCAL_RULES_DIR  => $LOCAL_RULES_DIR,
  LOCAL_STATE_DIR  => $LOCAL_STATE_DIR,
});

if (defined $opt{'updatedir'}) {
  $opt{'updatedir'} = untaint_file_path($opt{'updatedir'});
}
else {
  $opt{'updatedir'} = $SA->sed_path('__local_state_dir__/__version__');
}

# check only disabled gpg
# https://issues.apache.org/SpamAssassin/show_bug.cgi?id=5854
if ( defined $opt{'checkonly'}) {
  $GPG_ENABLED=0;
  dbg("gpg: Disabling gpg requirement due to checkonly flag.");
}

dbg("generic: sa-update version $VERSION");
dbg("generic: using update directory: $opt{'updatedir'}");

# doesn't really display useful things for this script, but we do want
# a module/version listing, etc. sa-update may be used for older versions
# of SA that don't include this function, so eval around it.
eval { $SA->debug_diagnostics(); 1; }; $SA->finish(); # untaint the command-line args; since the root user supplied these, and # we're not a setuid script, we trust them foreach my $optkey (keys %opt) { next if ref $opt{$optkey}; untaint_var(\$opt{$optkey}); } ############################################################################## # Deal with gpg-related options if (@{$opt{'gpgkey'}}) { $GPG_ENABLED = 1; foreach my $key (@{$opt{'gpgkey'}}) { unless (is_valid_gpg_key_id($key)) { dbg("gpg: invalid gpgkey parameter $key"); next; } $key = uc $key; dbg("gpg: adding key id $key"); $valid_GPG{$key} = 1; } } if (defined $opt{'gpgkeyfile'}) { $GPG_ENABLED = 1; open(GPG, $opt{'gpgkeyfile'}) or die "cannot open $opt{'gpgkeyfile'} for reading: $!\n"; dbg("gpg: reading in gpgfile ".$opt{'gpgkeyfile'}); while(my $key = <GPG>) { chomp $key; $key =~ s/#.*$//; # remove comments $key =~ s/^\s+//; # remove leading whitespace $key =~ s/\s+$//; # remove tailing whitespace next if $key eq ''; # skip empty lines unless (is_valid_gpg_key_id($key)) { dbg("gpg: invalid key id $key"); next; } $key = uc $key; dbg("gpg: adding key id $key"); $valid_GPG{$key} = 1; } close(GPG) or die "cannot close $opt{'gpgkeyfile'}: $!"; } # At this point, we need to know where GPG is ... my $GPGPath; if ($GPG_ENABLED || $opt{'import'}) { # find GPG in the PATH # bug 4958: for *NIX it's "gpg", in Windows it's "gpg.exe" $GPGPath = 'gpg' . $Config{_exe}; dbg("gpg: Searching for '$GPGPath'"); if ($GPGPath = Mail::SpamAssassin::Util::find_executable_in_env_path($GPGPath)) { dbg("gpg: found $GPGPath"); # bug 5030: if GPGPath has a space, put it in quotes if ($GPGPath =~ / /) { $GPGPath =~ s/"/\\"/g; $GPGPath = qq/"$GPGPath"/; dbg("gpg: path changed to $GPGPath"); } } else { die "error: gpg required but not found! It is not recommended, but you can use \"sa-update\" with the --no-gpg to skip the verification. \n"; } # GPG was found, and we've been asked to import a key only if ( $opt{'import'} ) { my $ex = import_gpg_key($opt{'import'}); exit $ex; } # does the sa-update keyring exist? 
if not, import it if(!-f File::Spec->catfile($opt{'gpghomedir'}, "trustdb.gpg")) { import_default_keyring(); # attempt to continue even if this fails, anyway } # specify which keys are trusted dbg("gpg: release trusted key id list: ".join(" ", keys %valid_GPG)); # convert fingerprint gpg ids to keyids foreach (keys %valid_GPG) { my $id = substr $_, -8; $valid_GPG{$id} = 1; } } ############################################################################## # Deal with channel-related options if (defined $opt{'channel'} && scalar @{$opt{'channel'}} > 0) { @channels = @{$opt{'channel'}}; } if (defined $opt{'channelfile'}) { open(CHAN, $opt{'channelfile'}) or die "cannot open $opt{'channelfile'} for reading: $!\n"; dbg("channel: reading in channelfile ".$opt{'channelfile'}); @channels = (); while(my $chan = <CHAN>) { chomp $chan; $chan =~ s/#.*$//; # remove comments $chan =~ s/^\s+//; # remove leading whitespace $chan =~ s/\s+$//; # remove tailing whitespace next if $chan eq ''; # skip empty lines $chan = lc $chan; dbg("channel: adding $chan"); push(@channels, $chan); } close(CHAN) or die "cannot close $opt{'channelfile'}: $!"; } # untaint the channel listing for(my $ind = 0; $ind < @channels; $ind++) { local($1); # bug 5061: prevent random taint flagging of $1 if ($channels[$ind] =~ /^([a-zA-Z0-9._-]+)$/) { untaint_var(\$channels[$ind]); } else { dbg("channel: skipping invalid channel: $channels[$ind]"); splice @channels, $ind, 1; $ind--; # the next element takes the place of the deleted one } } my ($res, $ua); if ($opt{'install'}) { if (scalar @channels > 1) { die "fatal: --install cannot be used with multiple --channel switches.\n"; } } else { $res = Net::DNS::Resolver->new(); $res->force_v4(1) if $have_inet4 && $opt{'force_pf'} && $opt{'force_pf'} eq 'inet'; } # Generate a temporary file to put channel content in for later use ... my ($content_file, $tfh) = secure_tmpfile(); $tfh or die "fatal: could not create temporary channel content file: $!\n"; close $tfh or die "cannot close temporary channel content file $content_file: $!"; undef $tfh; my $lint_failures = 0; my $channel_failures = 0; my $channel_successes = 0; # Use a temporary directory for all update channels my $UPDTmp; # we only need to lint the site pre files once my $site_pre_linted = 0; # Go ahead and loop through all of the channels foreach my $channel (@channels) { dbg("channel: attempting channel $channel"); my %preserve_files; # Convert the channel to a nice-for-filesystem version my $nicechannel = $channel; $nicechannel =~ tr/A-Za-z0-9-/_/cs; my $UPDDir = File::Spec->catfile($opt{'updatedir'}, $nicechannel); my $CFFile = "$UPDDir.cf"; my $PREFile = "$UPDDir.pre"; if (-d $UPDDir) { dbg("channel: using existing directory $UPDDir"); } else { # create the dir, if it doesn't exist dbg("channel: creating directory $UPDDir"); mkpath([$UPDDir], 0, 0777) or die "channel: cannot create channel directory $UPDDir: $!\n"; } dbg("channel: channel cf file $CFFile"); dbg("channel: channel pre file $PREFile"); my $instfile; if ($opt{'install'}) { $instfile = $opt{'install'}; dbg("channel: installing from file $instfile"); } my($mirby, $mirby_force_reload, $mirby_file_is_ok); my $mirby_path = File::Spec->catfile($UPDDir, "MIRRORED.BY"); # try to read metadata from channel.cf file my $currentV = -1; if (!open(CF, $CFFile)) { dbg("channel: error opening file %s: %s", $CFFile, $!) unless $! 
== ENOENT; } else { while(<CF>) { local($1,$2); last unless /^# UPDATE\s+([A-Za-z]+)\s+(\S+)/; my($type, $value) = (lc $1,$2); dbg("channel: metadata $type = $value, from file $CFFile"); if ($type eq 'version') { $value =~ /^(\d+)/; $currentV = $1; } } close(CF) or die "cannot close $CFFile: $!"; } # obtain a version number which should be installed my $newV; if ($instfile) { # the /.*/ ensures we use the 3-digit string nearest to the end of string, # otherwise we might pick up something from the middle of the directory path local($1); if ($instfile !~ /(?:.*\D|^)(\d{3,})/) { # this is a requirement die "channel: $channel: --install file $instfile does not contain a 3-digit version number!\n"; } $newV = $1; if ( defined $opt{'checkonly'} ) { dbg("channel: $channel: --install and --checkonly, claiming update available"); $channel_successes++; next; } } else { # not an install file, get the latest version number from network # Setup the channel version DNS query my $DNSQ = "$RevSAVersion.$channel"; my $dnsV = join(' ', do_dns_query($DNSQ)); local($1); if (defined $dnsV && $dnsV =~ /^(\d+)/) { $newV = untaint_var($1) if (!defined $newV || $1 > $newV); dbg("dns: $DNSQ => $dnsV, parsed as $1"); } # Not getting a response isn't a failure, there may just not be any updates # for this SA version yet. if (!defined $newV) { my @mirs = do_dns_query("mirrors.$channel"); if (defined shift @mirs) { dbg("channel: no updates available, skipping channel"); } else { channel_failed("channel '$channel': no 'mirrors.$channel' record found"); } next; } # If this channel hasn't been installed before, or it's out of date, # keep going. Otherwise, skip it. if ($currentV >= $newV) { dbg("channel: current version is $currentV, new version is $newV, ". "skipping channel"); next; } print "Update available for channel $channel: $currentV -> $newV\n" if $opt{'verbose'}; # If we are only checking for update availability, exit now if ( defined $opt{'checkonly'} ) { dbg("channel: $channel: update available, not downloading ". 
"in checkonly mode"); $channel_successes++; next; } } # we need a directory we control that we can use to avoid loading any rules # when we lint the site pre files, we might as well use the channel temp dir dbg("channel: preparing temp directory for new channel"); if (!$UPDTmp) { $UPDTmp = secure_tmpdir(); dbg("channel: created tmp directory $UPDTmp"); } else { dbg("channel: using existing tmp directory $UPDTmp"); if (!clean_update_dir($UPDTmp)) { die "channel: attempt to clean update temp dir failed, aborting"; } } # lint the site pre files (that will be used when lint checking the channel) # before downloading the channel update unless ($site_pre_linted) { dbg("generic: lint checking site pre files once before attempting channel updates"); unless (lint_check_dir(File::Spec->catfile($UPDTmp, "doesnotexist"))) { dbg("generic: lint of site pre files failed, cannot continue"); print "Lint of site pre files failed, cannot continue\n" if $opt{'verbose'}; $lint_failures++; last; } dbg("generic: lint check of site pre files succeeded, continuing with channel updates"); $site_pre_linted = 1; } my $content; my $SHA512; my $SHA256; my $GPG; if ($instfile) { dbg("channel: using --install files $instfile\{,.asc,.sha512,.sha256\}"); $content = read_install_file($instfile); if ( -f "$instfile.sha512" ) { $SHA512 = read_install_file($instfile.".sha512"); } if ( -f "$instfile.sha256" ) { $SHA256 = read_install_file($instfile.".sha256"); } $GPG = read_install_file($instfile.".asc") if $GPG_ENABLED; } else { # not an install file, obtain fresh rules from network dbg("channel: protocol family available: %s%s", join(',', $have_inet4 ? 'inet' : (), $have_inet6 ? 'inet6' : ()), $opt{'force_pf'} ? '; force '.$opt{'force_pf'} : '' ); # test if the MIRRORED.BY file for this channel exists, # is nonempty, and is reasonably fresh my(@mirr_stat_list) = stat($mirby_path); if (!@mirr_stat_list) { if ($! == ENOENT) { dbg("channel: no mirror file %s, will fetch it", $mirby_path); } else { # protection error, misconfiguration, file system error, ... warn "error: error accessing mirrors file $mirby_path: $!\n"; channel_failed("channel '$channel': error accessing mirrors file $mirby_path: $!"); next; } } elsif (-z _) { dbg("channel: file %s is empty, refreshing mirrors file", $mirby_path); $mirby_force_reload = 1; } elsif ($opt{'refreshmirrors'}) { dbg("channel: --refreshmirrors used, forcing mirrors file refresh ". 
"on channel $channel"); $mirby_force_reload = 1; } elsif (time - $mirr_stat_list[9] > $IGNORE_MIRBY_OLDER_THAN) { dbg("channel: file %s is too old, refreshing mirrors file", $mirby_path); $mirby_file_is_ok = 1; # mirrors file seems fine, but is old $mirby_force_reload = 1; } else { # mirror file $mirby_path exists, is nonempty, and is reasonably fresh $mirby_file_is_ok = 1; } if (!$mirby_file_is_ok || $mirby_force_reload) { # fetch a fresh list of mirrors dbg("channel: DNS lookup on mirrors.$channel"); my @mirrors = do_dns_query("mirrors.$channel"); unless (@mirrors) { warn "error: no mirror data available for channel $channel\n"; channel_failed("channel '$channel': MIRRORED.BY file URL was not in DNS"); next; } # make sure requests spread randomly Mail::SpamAssassin::Util::fisher_yates_shuffle(\@mirrors); foreach my $mirror (@mirrors) { my ($result_fname, $http_ok) = http_get($mirror, $UPDDir, $mirby_path, $mirby_force_reload); if (!$http_ok) { dbg("channel: no mirror data available for channel %s from %s", $channel, $mirror); next; } $mirby = read_content($result_fname, 0); if ($mirby) { dbg("channel: MIRRORED.BY file for channel %s retrieved", $channel); $mirby_file_is_ok = 1; $mirby_force_reload = 0; $preserve_files{$mirby_path} = 1; # set file creation time to now, otherwise we'll keep refreshing # (N.B.: curl preserves time of a downloaded file) my $now = time; utime($now, $now, $mirby_path) or warn "error: error setting creation time of $mirby_path: $!\n"; last; } } if ($mirby_force_reload) { # not refreshed? warn "error: unable to refresh mirrors file for channel $channel, ". "using old file\n"; } } if (!$mirby_file_is_ok) { warn "error: no mirror data available for channel $channel\n"; channel_failed("channel '$channel': MIRRORED.BY file contents were missing"); next; } elsif ($mirby) { # file contents already in memory, no need to read it from a file } elsif (!open(MIRBY, $mirby_path)) { warn "error: error opening mirrors file $mirby_path: $!\n"; channel_failed("channel '$channel': error opening mirrors file $mirby_path: $!"); next; } else { dbg("channel: reading MIRRORED.BY file %s", $mirby_path); { local $/ = undef; $mirby = <MIRBY> } close(MIRBY) or die "cannot close $mirby_path: $!"; $preserve_files{$mirby_path} = 1; } # Parse the list of mirrors dbg("channel: parsing MIRRORED.BY file for channel %s", $channel); my %mirrors; my @mirrors = split(/^/, $mirby); while(my $mirror = shift @mirrors) { chomp $mirror; if ( defined $opt{'forcemirror'} ) { $mirror = $opt{'forcemirror'}; $mirrors{$mirror}->{"weight"} = 1; dbg("channel: found mirror $mirror (forced)"); last; } $mirror =~ s/#.*$//; # remove comments $mirror =~ s/^\s+//; # remove leading whitespace $mirror =~ s/\s+$//; # remove tailing whitespace next if $mirror eq ''; # skip empty lines # We only support HTTP (and HTTPS) right now if ($mirror !~ m{^https?://}i) { dbg("channel: skipping non-HTTP mirror: $mirror"); next; } dbg("channel: found mirror $mirror"); my @data; ($mirror,@data) = split(/\s+/, $mirror); $mirror =~ s{/+\z}{}; # http://example.com/updates/ -> .../updates $mirrors{$mirror}->{weight} = 1; foreach (@data) { my($k,$v) = split(/=/, $_, 2); $mirrors{$mirror}->{$k} = $v; } } unless (%mirrors) { warn "error: no mirrors available for channel $channel\n"; channel_failed("channel '$channel': no mirrors available"); next; } # Now that we've laid the foundation, go grab the appropriate files # my $path_content = File::Spec->catfile($UPDDir, "$newV.tar.gz"); my $path_sha512 = File::Spec->catfile($UPDDir, 
"$newV.tar.gz.sha512"); my $path_sha256 = File::Spec->catfile($UPDDir, "$newV.tar.gz.sha256"); my $path_asc = File::Spec->catfile($UPDDir, "$newV.tar.gz.asc"); # Loop through all available mirrors, choose from them randomly # if any get fails, choose another mirror to retry _all_ files again # sleep few seconds on retries my $download_ok = 0; while (my $mirror = choose_mirror(\%mirrors)) { my ($result_fname, $http_ok); # Grab the data hash for this mirror, then remove it from the list my $mirror_info = $mirrors{$mirror}; delete $mirrors{$mirror}; # Make sure we start without files from existing tries unlink($path_content); unlink($path_sha512); unlink($path_sha256); unlink($path_asc); my $sleep_sec = 2; if (!check_mirror_af($mirror)) { my @my_af; push(@my_af, "IPv4") if $use_inet4; push(@my_af, "IPv6") if $use_inet6; push(@my_af, "no IP service") if !@my_af; dbg("reject mirror %s: no common address family (%s), %s", $mirror, join(" ", @my_af), %mirrors ? "sleeping $sleep_sec sec and trying next" : 'no mirrors left'); sleep($sleep_sec) if %mirrors; next; } dbg("channel: selected mirror $mirror"); # Actual archive file ($result_fname, $http_ok) = http_get("$mirror/$newV.tar.gz", $UPDDir); if (!$http_ok || !-s $result_fname) { dbg("channel: failed to get $newV.tar.gz from mirror $mirror, %s", %mirrors ? "sleeping $sleep_sec sec and trying next" : 'no mirrors left'); sleep($sleep_sec) if %mirrors; next; } # if GPG is enabled, the GPG detached signature of the archive file if ($GPG_ENABLED) { ($result_fname, $http_ok) = http_get("$mirror/$newV.tar.gz.asc", $UPDDir); if (!$http_ok || !-s $result_fname) { dbg("channel: No GPG/asc file available from $mirror, %s", %mirrors ? "sleeping $sleep_sec sec and trying next" : 'no mirrors left'); sleep($sleep_sec) if %mirrors; next; } } else { # SHA512 of the archive file ($result_fname, $http_ok) = http_get("$mirror/$newV.tar.gz.sha512", $UPDDir); if (!$http_ok || !-s $result_fname) { # If not found, try SHA256 instead ($result_fname, $http_ok) = http_get("$mirror/$newV.tar.gz.sha256", $UPDDir); if (!$http_ok || !-s $result_fname) { dbg("channel: No sha512 or sha256 file available from $mirror, %s", %mirrors ? "sleeping $sleep_sec sec and trying next" : 'no mirrors left'); sleep($sleep_sec) if %mirrors; next; } } } $download_ok = 1; last; } if ($download_ok) { if (-s $path_content) { $content = read_content($path_content, 1); # binary $preserve_files{$path_content} = 1; } if (-s $path_sha512) { $SHA512 = read_content($path_sha512, 0); # ascii $preserve_files{$path_sha512} = 1; } if (-s $path_sha256) { $SHA256 = read_content($path_sha256, 0); # ascii $preserve_files{$path_sha256} = 1; } if (-s $path_asc) { $GPG = read_content($path_asc, 0); # ascii $preserve_files{$path_asc} = 1; } } } unless ($content && (($GPG_ENABLED && $GPG) || (!$GPG_ENABLED && ($SHA512 || $SHA256)))) { if ($instfile) { channel_failed("channel '$channel': missing checksum files $instfile\{,.sha512,.sha256\}"); } else { channel_failed("channel '$channel': could not find working mirror"); } next; } if ( $SHA512 ) { # Validate the SHA512 signature { local($1); $SHA512 =~ /^([a-fA-F0-9]{128})\b/; $SHA512 = defined $1 ? lc($1) : 'INVALID'; } my $digest = sha512_hex($content); dbg("sha512: verification wanted: $SHA512"); dbg("sha512: verification result: $digest"); unless ($digest eq $SHA512) { channel_failed("channel '$channel': SHA512 verification failed"); next; } } if ( $SHA256 ) { # Validate the SHA256 signature { local($1); $SHA256 =~ /^([a-fA-F0-9]{64})\b/; $SHA256 = defined $1 ? 
lc($1) : 'INVALID'; } my $digest = sha256_hex($content); dbg("sha256: verification wanted: $SHA256"); dbg("sha256: verification result: $digest"); unless ($digest eq $SHA256) { channel_failed("channel '$channel': SHA256 verification failed"); next; } } # Write the content out to a temp file for GPG/Archive::Tar interaction dbg("channel: populating temp content file %s", $content_file); open(TMP, ">$content_file") or die "fatal: cannot create content temp file $content_file: $!\n"; binmode TMP or die "fatal: cannot set binmode on content temp file $content_file: $!\n"; print TMP $content or die "fatal: cannot write to content temp file $content_file: $!\n"; close TMP or die "fatal: cannot close content temp file $content_file: $!\n"; # to sign : gpg -bas file # to verify: gpg --verify --batch --no-tty --status-fd=1 -q --logger-fd=1 file.asc file # look for : [GNUPG:] GOODSIG 6C55397824F434CE updates.spamassassin.org [...] # [GNUPG:] VALIDSIG 0C2B1D7175B852C64B3CDC716C55397824F434CE [...] # [GNUPG:] NO_PUBKEY 6C55397824F434CE if ($GPG) { dbg("gpg: populating temp signature file"); my $sig_file; ($sig_file, $tfh) = secure_tmpfile(); $tfh or die "fatal: couldn't create temp file for GPG signature: $!\n"; binmode $tfh or die "fatal: cannot set binmode on temp file for GPG signature: $!\n"; print $tfh $GPG or die "fatal: cannot write temp file for GPG signature: $!\n"; close $tfh or die "fatal: cannot close temp file for GPG signature: $!\n"; undef $tfh; dbg("gpg: calling gpg"); my $gpghome = interpolate_gpghomedir(); # TODO: we could also use "--keyserver pgp.mit.edu" or similar, # to autodownload missing keys... my $CMD = "$GPGPath $gpghome --verify --batch ". "--no-tty --status-fd=1 -q --logger-fd=1"; unless (open(CMD, "$CMD $sig_file $content_file|")) { unlink $sig_file or warn "error: cannot unlink $sig_file: $!\n"; die "fatal: couldn't execute $GPGPath: $!\n"; } # Determine the fate of the signature my $signer = ''; my $missingkeys = ''; while(my $GNUPG = <CMD>) { chop $GNUPG; dbg ("gpg: $GNUPG"); if ($GNUPG =~ /^gpg: fatal:/) { warn $GNUPG."\n"; # report bad news } local($1); if ($GNUPG =~ /^\Q[GNUPG:]\E NO_PUBKEY \S+(\S{8})$/) { $missingkeys .= $1." "; } next unless ($GNUPG =~ /^\Q[GNUPG:]\E (?:VALID|GOOD)SIG (\S{8,40})/); my $key = $1; # we want either a keyid (8) or a fingerprint (40) if (length $key > 8 && length $key < 40) { substr($key, 8) = ''; } # use the longest match we can find $signer = $key if length $key > length $signer; } my $errno = 0; close CMD or $errno = $!; proc_status_ok($?,$errno) or warn("gpg: process '$GPGPath' finished: ". exit_status_str($?,$errno)."\n"); unlink $sig_file or warn "cannot unlink $sig_file: $!\n"; if ($signer) { my $keyid = substr $signer, -8; dbg("gpg: found signature made by key $signer"); if (exists $valid_GPG{$signer}) { dbg("gpg: key id $signer is release trusted"); } elsif (exists $valid_GPG{$keyid}) { dbg("gpg: key id $keyid is release trusted"); } else { dbg("gpg: key id $keyid is not release trusted"); $signer = undef; } } unless ($signer) { warn "error: GPG validation failed!\n"; if ($missingkeys) { warn <<ENDOFVALIDATIONERR; The update downloaded successfully, but it was not signed with a trusted GPG key. Instead, it was signed with the following keys: $missingkeys Perhaps you need to import the channel's GPG key? For example: wget https://spamassassin.apache.org/updates/GPG.KEY sa-update --import GPG.KEY ENDOFVALIDATIONERR } else { warn <<ENDOFVALIDATIONERR; The update downloaded successfully, but the GPG signature verification failed. 
ENDOFVALIDATIONERR } channel_failed("channel '$channel': GPG validation failed"); next; } } # OK, we're all validated at this point, install the new version dbg("channel: file verification passed, testing update"); dbg("channel: extracting archive"); if (!taint_safe_archive_extract($UPDTmp, $content_file)) { channel_failed("channel '$channel': archive extraction failed"); next; } # check --lint if (!lint_check_dir($UPDTmp)) { channel_failed("channel '$channel': lint check of update failed"); next; } dbg("channel: lint check succeeded, extracting archive to $UPDDir..."); my @totry = ( { 'try' => sub { if (-d $UPDDir) { # ok that worked, too late to stop now! At this stage, if there are # errors, we have to attempt to carry on regardless, since we've already # blown away the old ruleset. dbg("channel: point of no return for existing $UPDDir"); # clean out the previous channel files, if they exist if (-f $PREFile && ! unlink $PREFile ) { warn("channel: attempt to rm channel pre file failed, attempting to continue anyway: $!"); } if (-f $CFFile && ! unlink $CFFile ) { warn("channel: attempt to rm channel cf file failed, attempting to continue anyway: $!"); } if (!clean_update_dir($UPDDir, \%preserve_files)) { warn("channel: attempt to rm channel directory failed, attempting to continue anyway"); } } else { # create the dir, if it doesn't exist dbg("channel: creating $UPDDir"); mkpath([$UPDDir], 0, 0777) or die "channel: cannot create channel directory $UPDDir: $!\n"; # ok, that test worked. it's now likely that the .cf's will # similarly be ok to rename, too. Too late to stop from here on dbg("channel: point of no return for new $UPDDir"); } return 1; }, 'rollback' => sub { dbg("channel: attempting to remove the channel and update directories"); # bug 4941: try to get rid of the empty directories to avoid leaving SA # with no rules. rmdir $UPDDir or dbg("channel: error removing dir %s: %s", $UPDDir, $!); rmdir $opt{'updatedir'} or dbg("channel: error removing dir %s: %s", $opt{'updatedir'}, $!); }, }, { 'try' => sub { # extract the files again for the last time if (!taint_safe_archive_extract($UPDDir, $content_file)) { channel_failed("channel '$channel': archive extraction failed"); return 0; } return 1; }, 'rollback' => sub { dbg("channel: attempting to clean out the channel update directory"); # bug 4941: try to get rid of the empty directories to avoid leaving SA # with no rules. if (!clean_update_dir($UPDDir, \%preserve_files)) { warn "channel: attempt to clean up failed extraction also failed!\n"; } }, }, { 'try' => sub { if ($instfile) { dbg("channel: not creating MIRRORED.BY file due to --install"); return 1; } # The $mirby_path file should have already been created by http_get # and preserved past clean_update_dir() # # # Write out the mirby file, not fatal if it doesn't work # dbg("channel: creating MIRRORED.BY file"); # open(MBY, ">$mirby_path") # or die "cannot create a new MIRRORED.BY file: $!\n"; # print MBY $mirby or die "error writing to $mirby_path: $!"; # close(MBY) or die "cannot close $mirby_path: $!"; return 1; }, 'rollback' => sub { }, }, { 'try' => sub { # the last step is to create the .cf and .pre files to include the # channel files my @CF; my @PRE; dbg("channel: creating update cf/pre files"); # Put in whatever metadata we need push(@CF, "# UPDATE version $newV\n"); # Find all of the cf and pre files opendir(DIR, $UPDDir) or die "fatal: cannot access $UPDDir: $!\n"; my @files; while(my $file = readdir(DIR)) { next if $file eq '.' 
|| $file eq '..'; untaint_var(\$file); my $path = File::Spec->catfile($UPDDir, $file); next unless (-f $path); # shouldn't ever happen push(@files, $file); } # bug 5371: ensure files are sorted foreach my $file ( sort @files ) { if ($file =~ /\.cf$/) { push(@CF, "include $nicechannel/$file\n"); } elsif ($file =~ /\.pre$/) { push(@PRE, "include $nicechannel/$file\n"); } else { next; } dbg("channel: adding $file"); } closedir(DIR) or die "cannot close directory $UPDDir: $!"; # Finally, write out the files to include the update files if (!write_channel_file($PREFile, \@PRE)) { channel_failed("channel '$channel': writing of $PREFile failed"); return 0; } if (!write_channel_file($CFFile, \@CF)) { channel_failed("channel '$channel': writing of $CFFile failed"); return 0; } # if all went fine, remove the .tar.gz, .sha* and .asc files delete_files( grep(!m{/\QMIRRORED.BY\E\z}, keys %preserve_files) ); $channel_successes++; dbg("channel: update complete"); return 1; }, 'rollback' => sub { }, }, ); my $error; my $eval_stat; for(my $elem = 0; $elem <= $#totry; $elem++) { my $success; eval { $success = &{$totry[$elem]->{'try'}}(); 1; } or do { $eval_stat = $@ ne '' ? $@ : "errno=$!"; chomp $eval_stat; }; if (!$success) { $error = $elem; $eval_stat = "step $elem unsuccessful" if !defined $eval_stat; last; } } if (defined $error) { dbg("channel: channel failed, attempting rollback: %s", $eval_stat); for(my $elem = $error; $elem >= 0; $elem--) { &{$totry[$elem]->{'rollback'}}(); } } } ############################################################################## # clean out the temp dir if ($UPDTmp) { dbg("generic: cleaning up temporary directory/files"); if (!clean_update_dir($UPDTmp)) { warn "error: unable to clean out the files in $UPDTmp\n"; } } # clear out the temp files if they still exist foreach ( $content_file, $UPDTmp ) { next unless defined $_; my $stat_errn = stat($_) ? 0 : 0+$!; next if $stat_errn == ENOENT; if ($stat_errn != 0) { warn "error: cannot access $_: $!\n"; } elsif (-d _) { rmdir $_ or warn "error: cannot remove directory $_: $!\n"; } elsif (-f _) { unlink $_ or warn "error: cannot remove file $_: $!\n"; } else { warn "error: '$_' isn't a file nor a directory, skipping\n"; } } my $exit; if ($lint_failures) { # 2: lint of site pre files failed, cannot continue $exit = 2; } elsif (!$channel_failures) { # 0: updates found and successfully applied # 1: no updates were needed (success with nothing to do) $exit = $channel_successes ? 0 : 1; } else { # at least some failures # 3: some failures, but at least one channel succeeded # 4 or higher means all channels failed $exit = $channel_successes ? 
3 : 4; } dbg("diag: updates complete, exiting with code $exit"); if ($opt{'verbose'}) { if (!$exit) { if (defined $opt{'checkonly'}) { print "Update was available, but not installed in checkonly mode\n"; } else { print "Update was available, and was downloaded and installed successfully\n"; } } elsif ($exit == 1) { print "Update finished, no fresh updates were available\n"; } elsif ($exit == 3) { print "Update of at least one channel finished, other updates failed\n"; } else { print "Update failed, exiting with code $exit\n"; } } exit $exit; ############################################################################## sub read_install_file { my ($file) = @_; open (IN, "<$file") or die "cannot open $file\n"; my $all; { local $/ = undef; $all = <IN> } close IN or die "cannot close $file: $!"; defined $all && $all ne '' or die "empty file $file\n"; return $all; } ############################################################################## sub write_channel_file { my ($filename, $contents) = @_; return 1 unless @{$contents}; if (open(FILE, ">$filename")) { print FILE @{$contents} or die "cannot write to $filename: $!"; close FILE or return 0; return 1; } return 0; } ############################################################################## sub channel_failed { my $reason = shift; warn("$reason, channel failed\n"); $channel_failures++; } ############################################################################## sub taint_safe_archive_extract { my $todir = shift; my $input = shift; my $tfh = IO::Zlib->new($input, "rb"); $tfh or die "fatal: couldn't read content tmpfile $content_file: $!\n"; my $tar = Archive::Tar->new($tfh); $tar or die "fatal: couldn't open tar archive!\n"; # stupid Archive::Tar is not natively taint-safe! duh. # return $tar->extract(); # instead, get the file list, untaint, and extract one-by-one. my @files = $tar->list_files(); foreach my $file (@files) { next if ($file =~ /^\/$/); # ignore dirs local($1); $file =~ /^([-\.\,\/a-zA-Z0-9_]+)$/; my $outfname = $1; $outfname =~ s/\.\.\//__\//gs; # avoid "../" dir traversal attacks $outfname = File::Spec->catfile($todir, $outfname); dbg "extracting: $outfname"; if (!open OUT, ">".$outfname) { warn "error: failed to create $outfname: $!"; goto failed; } else { my $content = $tar->get_content($file); if ($outfname =~ /\.(?:pre|cf)$/) { # replace macros in the update files if it's a .pre or .cf local($1); $content =~ s/\@\@([^\@]+)\@\@/$MACRO_VALUES{$1} || "\@\@$1\@\@"/ge; # also, if --allowplugins is not specified, comment out # all loadplugin or tryplugin lines (and others that can load code) if ( !$opt{'allowplugins'} ) { $content =~ s{^\s*( loadplugin | tryplugin | \S+_modules? | \S+_factory | dcc_(?:path|options) | pyzor_(?:path|options) | extracttext_external )\s} {#(commented by sa-update, no --allowplugins switch specified)# $1}gmx; } # other stuff never allowed for safety $content =~ s/^\s*(dns_server)/#(commented by sa-update, not allowed)# $1/gm; # adjust scores if ($opt{'score-multiplier'} || $opt{'score-limit'}) { my $adjust_score = sub { my @scores = split(/\s+/, $_[1]); my $touched = 0; foreach (@scores) { next if $_ == 0; # Can't adjust if zero.. my $old = $_; $_ = $_ * $opt{'score-multiplier'} if $opt{'score-multiplier'}; $_ = $opt{'score-limit'} if $opt{'score-limit'} && $_ > $opt{'score-limit'}; if ($old != $_) { if ($_ == 0) { # Prevent zeroing scores $_ = $old < 0 ? "-0.001" : "0.001" } else { $_ = sprintf("%.3f", $_); } $touched++ if $old != $_; } } if ($touched) { return $_[0].join(' ', @scores)." 
#(score adjusted by sa-update, $_[1])#".$_[2]; } else { return $_[0].$_[1].$_[2]; } }; $content =~ s/^(\s*score\s+\w+\s+)(-?\d+(?:\.\d+)?(?:\s+-?\d+(?:\.\d+)?)*)(.*)$ /$adjust_score->($1,$2,$3)/igmex; } } print OUT $content or do { warn "error writing to $outfname: $!"; goto failed }; close OUT or do { warn "error: write failed to $outfname: $!"; goto failed } } } return @files; failed: return; # undef = failure } ############################################################################## # Do a generic DNS query sub do_dns_query { my($query, $rr_type) = @_; $rr_type = 'TXT' if !defined $rr_type; my $RR = $res->query($query, $rr_type); my @result; # NOTE: $rr->rdatastr returns the result encoded in a DNS zone file # format, i.e. enclosed in double quotes if a result contains whitespace # (or other funny characters), and may use \DDD encoding or \X quoting as # per RFC 1035. Using $rr->txtdata instead avoids this unnecessary encoding # step and a need for decoding by a caller, returning an unmodified string. # Caveat: in case of multiple RDATA <character-string> fields contained # in a resource record (TXT, SPF, HINFO), starting with Net::DNS 0.69 # the $rr->txtdata in a list context returns these strings as a list. # The $rr->txtdata in a scalar context always returns a single string # with <character-string> fields joined by a single space character as # a separator. The $rr->txtdata in Net::DNS 0.68 and older returned # such joined space-separated string even in a list context. # # From Net::DNS maintainers (Willem Toorop, NLnet Labs): # I encourage you to use txtdata for getting the values of # <version>.updates.spamassassin.org and mirros.updates.spamassassin.org. # As those records have only a single rdata field, txtdata would return # the same value since Net::DNS 0.34. # if ($RR) { foreach my $rr ($RR->answer) { next if !$rr; # no answer records, only rcode next if $rr->type ne $rr_type; # scalar context! my $text = $rr->UNIVERSAL::can('txtdata') ? $rr->txtdata : $rr->rdatastr; push(@result,$text) if defined $text && $text ne ''; } printf("DNS %s query: %s -> %s\n", $rr_type, $query, join(", ",@result)) if $opt{'verbose'} && $opt{'verbose'} > 1; } else { dbg("dns: query failed: $query => " . $res->errorstring); printf("DNS %s query %s failed: %s\n", $rr_type, $query, $res->errorstring) if $opt{'verbose'} && $opt{'verbose'} > 1; } return @result; } ############################################################################## sub init_lwp { if ($have_inet6 && (!$opt{'force_pf'} || $opt{'force_pf'} eq 'inet6') && ($io_socket_module_name eq 'IO::Socket::IP' || $io_socket_module_name eq 'IO::Socket::INET6') ) { # LWP module has no support for IPv6. Use hotpatching, # copying IO::Socket::IP or IO::Socket::INET6 to IO::Socket::INET. # 'Borrowed' from Net::INET6Glue::INET_is_INET6 : printf("http: (lwp) hotpatching IO::Socket::INET by module %s\n", $io_socket_module_name) if $opt{'verbose'}; my $io_socket_module_hash_name = $io_socket_module_name . '::'; my $io_socket_module_path = $io_socket_module_name . '.pm'; $io_socket_module_path =~ s{::}{/}g; $INC{'IO/Socket/INET.pm'} = $INC{$io_socket_module_path}; no strict 'refs'; no warnings 'redefine'; for ( keys %{$io_socket_module_hash_name} ) { ref(my $v = $io_socket_module_hash_name->{$_}) and next; *{ 'IO::Socket::INET::'.$_ } = \&{ $io_socket_module_hash_name . $_ } if *{$v}{CODE}; } } my $ua = LWP::UserAgent->new(); $ua->agent("sa-update/$VERSION/$SAVersion"); $ua->timeout(60); # a good long timeout; 10 is too short for Coral! 
$ua->env_proxy; # if ($opt{'force_pf'}) { # # No longer needed and can be harmful as we don't know which address family # # will be picked by the IO::Socket::* module in case of multihomed servers. # # The IO::Socket::IP should choose the right protocol family automatically. # if ($have_inet4 && $opt{'force_pf'} eq 'inet') { # $ua->local_address('0.0.0.0'); # } elsif ($have_inet6 && $opt{'force_pf'} eq 'inet6') { # $ua->local_address('::'); # } # } return $ua; } # Do a GET request via HTTP for a certain URL # Use the optional time_t value to do an IMS GET sub http_get_lwp { my($url, $ims, $dir) = @_; $have_lwp or die "http_get_lwp: module LWP not available"; $ua = init_lwp() if !$ua; my $response; my $text; # retry 3 times; this works better with Coral foreach my $retries (1 .. 3) { my $request = HTTP::Request->new("GET"); $request->url($url); if (defined $ims) { my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday) = gmtime($ims); my $str = sprintf("%s, %02d %s %04d %02d:%02d:%02d GMT", qw(Sun Mon Tue Wed Thu Fri Sat)[$wday], $mday, qw(Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec)[$mon], $year + 1900, $hour, $min, $sec); $request->header('If-Modified-Since', $str); dbg("http: IMS GET request, $url, $str"); } else { dbg("http: GET request, $url"); } $response = $ua->request($request); printf("http: (lwp) %sGET %s, %s\n", defined $ims ? 'IMS ' : '', $url, !$response ? '(no response)' : $response->status_line ) if $opt{'verbose'}; if ($response->is_success) { return $response->content; } # could be a "304 not modified" or similar. # TODO: should use a special return type for "not modified" here # instead of overloading the failure return type if ($ims && $response->status_line =~ /^3/) { return; } if ($response->status_line =~ /^[45]/) { # client error or server error, makes no sense retrying return; } # include the text in the debug output; it's useful in some cases, # e.g. proxies that require authentication, diagnosing fascist # filtering false positives, etc. $text = $response->content; $text ||= "(no body)"; $text =~ s/\s+/ /gs; dbg ("http: GET $url request failed, retrying: " . $response->status_line.": ".$text); } # this should be a user-readable warning without --debug warn "http: GET $url request failed: " . $response->status_line.": ".$text."\n"; return; } # Do a GET request via HTTP for a given URL using an external program, # or fall back to LWP if no external downloading program is available. sub http_get { my($url, $dir, $suggested_out_fname, $force_reload) = @_; my $content; my $out_fname; if (defined $suggested_out_fname) { $out_fname = $suggested_out_fname; } else { local $1; $url =~ m{([^/]*)\z}s; my $url_fname = $1; $out_fname = File::Spec->catfile($dir, $url_fname); } $out_fname = untaint_var($out_fname); # construct a short filename, relative to a current working directory $dir my $out_fname_short = $out_fname; $out_fname_short =~ s{^\Q$dir\E/*}{}; printf("fetching %s\n", $url) if $opt{'verbose'} && $opt{'verbose'} > 1; dbg("http: url: %s", $url); my $out_fname_exists = -e $out_fname; dbg("http: downloading to: %s, %s", $out_fname, !$out_fname_exists ? 'new' : $force_reload ? 
'replace' : 'update'); my($ext_prog, $cmd, @args); if (defined $opt{'httputil'}) { if ($opt{'httputil'} eq 'lwp') { if (!$have_lwp) { die "http: module LWP not available, download failed"; } } else { $ext_prog = $opt{'httputil'}; $cmd = Mail::SpamAssassin::Util::find_executable_in_env_path($ext_prog); if (!defined $cmd || $cmd eq '') { die "http: $ext_prog utility not found, download failed"; } } } else { foreach my $try_prog ('curl', 'wget', 'fetch') { $cmd = Mail::SpamAssassin::Util::find_executable_in_env_path($try_prog); if (defined $cmd && $cmd ne '') { $ext_prog = $try_prog; last } } } if (defined $ext_prog && $ext_prog eq 'curl') { push(@args, qw(-s -L -O --remote-time -g --max-redirs 2 --connect-timeout 30 --max-time 300 --fail -o), $out_fname_short); push(@args, '-z', $out_fname_short) if $out_fname_exists && !$force_reload; push(@args, '-A', "sa-update/$VERSION/$SAVersion"); } elsif (defined $ext_prog && $ext_prog eq 'wget') { push(@args, qw(-q --max-redirect=2 --tries=3 --dns-timeout=20 --connect-timeout=30 --read-timeout=300)); push(@args, defined $suggested_out_fname ? ('-O', $out_fname_short) : $force_reload ? () : ('-N') ); push(@args, '-U', "sa-update/$VERSION/$SAVersion"); } elsif (defined $ext_prog && $ext_prog eq 'fetch') { push(@args, qw(-q -n -a -w 20 -m -o), $out_fname_short); push(@args, '-m') if $out_fname_exists && !$force_reload; push(@args, "--user-agent=sa-update/$VERSION/$SAVersion"); } elsif ($have_lwp) { dbg("http: no external tool for download, fallback to using LWP") if !$opt{'httputil'}; my $ims; if ($out_fname_exists && !$force_reload) { my @out_fname_stat = stat($out_fname); my $size = $out_fname_stat[7]; $ims = $out_fname_stat[9] if $size; # only if nonempty } my $out_fh = IO::File->new; $out_fh->open($out_fname,'>',0640) or die "Cannot create a file $out_fname: $!"; binmode($out_fh) or die "Can't set binmode on $out_fname: $!"; $content = http_get_lwp($url, $ims, $dir); if (!defined $content) { dbg("http: (lwp) no content downloaded from %s", $url); } else { $out_fh->print($content) or die "Error writing to $out_fname: $!"; } $out_fh->close or die "Error closing file $out_fname: $!"; return ($out_fname, 1); } else { die "http: no downloading tool available"; } # only reached if invoking an external program is needed (not lwp) if ($opt{'force_pf'}) { if ($opt{'force_pf'} eq 'inet') { push(@args, '-4') } elsif ($opt{'force_pf'} eq 'inet6') { push(@args, '-6') } } push(@args, '--', untaint_var($url)); dbg("http: %s", join(' ',$cmd,@args)); # avoid a system() call, use fork/exec to make sure we avoid invoking a shell my $pid; eval { # use eval, the fork() sometimes signals an error # instead of returning a failure status $pid = fork(); 1; } or do { $@ = "errno=$!" if $@ eq ''; chomp $@; die "http fork: $@" }; defined $pid or die "spawning $cmd failed: $!"; if (!$pid) { # child chdir($dir) or die "Can't chdir to $dir: $!"; $cmd = untaint_file_path($cmd); exec {$cmd} ($cmd,@args); die "failed to exec $cmd: $!"; } # parent waitpid($pid,0); my $child_stat = $?; dbg("http: process [%s], exit status: %s", $pid, exit_status_str($child_stat,0)); if (!$opt{'verbose'}) { # silent } elsif ($child_stat == 0) { printf("http: (%s) GET %s, success\n", $ext_prog, $url); } else { printf("http: (%s) GET %s, FAILED, status: %s\n", $ext_prog, $url, exit_status_str($child_stat,0)); } return ($out_fname, $child_stat == 0); } # Read the content of a (downloaded) file. The subroutine expects a file name # and a boolean value. 
The boolean value indicates whether the file should be # opened in "text" mode or in "binary" mode. Pass 0 for text mode, 1 for binary # mode. Returns the content of the file as a string. sub read_content { my ($file_name, $binary_mode) = @_; my $file = IO::File->new; if (!$file->open($file_name, '<')) { dbg("read_content: Cannot open file $file_name: $!"); return undef; ## no critic (ProhibitExplicitReturnUndef) } if ($binary_mode) { binmode $file; } my($number_of_bytes,$buffer); my $content = ''; while (($number_of_bytes = $file->read($buffer, 16384)) > 0) { $content .= $buffer; } if (!defined $number_of_bytes) { dbg("read_content: Error reading from file $file_name: $!"); return undef; ## no critic (ProhibitExplicitReturnUndef) } $file->close; return $content; } ############################################################################## # choose a random integer between 0 and the total weight of all mirrors # loop through the mirrors from largest to smallest weight # if random number is < largest weight, use it # otherwise, random number -= largest, remove mirror from list, try again # eventually, there'll just be 1 mirror left in $mirrors[0] and it'll be used # sub choose_mirror { my($mirror_list) = @_; # Sort the mirror list by reverse weight (largest first) my @mirrors = sort { $mirror_list->{$b}->{weight} <=> $mirror_list->{$a}->{weight} } keys %{$mirror_list}; return unless @mirrors; if (keys %{$mirror_list} > 1) { # Figure out the total weight my $weight_total = 0; foreach (@mirrors) { $weight_total += $mirror_list->{$_}->{weight}; } # Pick a random int my $value = int(rand($weight_total)); # loop until we find the right mirror, or there's only 1 left while (@mirrors > 1) { if ($value < $mirror_list->{$mirrors[0]}->{weight}) { last; } $value -= $mirror_list->{$mirrors[0]}->{weight}; shift @mirrors; } } return $mirrors[0]; } ############################################################################## sub check_mirror_af { my ($mirror) = @_; # RFC 3986: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." 
) $mirror =~ s{^([a-z][a-z0-9.+-]*)://}{}si; # strip scheme like http:// my $scheme = lc($1); # No DNS check needed for proxied connections (caveat: no_proxy is not checked) my $http_proxy = (defined $ENV{"http_proxy"} && $ENV{"http_proxy"} =~ /\S/) || (defined $ENV{"HTTP_PROXY"} && $ENV{"HTTP_PROXY"} =~ /\S/); my $https_proxy = (defined $ENV{"https_proxy"} && $ENV{"https_proxy"} =~ /\S/) || (defined $ENV{"HTTPS_PROXY"} && $ENV{"HTTPS_PROXY"} =~ /\S/); return 1 if $scheme eq "http" && $http_proxy; return 1 if $scheme eq "https" && $https_proxy; # No DNS check needed for IPv4 or IPv6 address literal return 1 if $use_inet4 && $mirror =~ m{^\d+\.\d+\.\d+\.\d+(?:[:/]|$)}; return 1 if $use_inet6 && $mirror =~ m{^\[}; $mirror =~ s{[:/].*}{}s; # strip all starting from :port or /path return 1 if $use_inet4 && do_dns_query($mirror, "A"); return 1 if $use_inet6 && do_dns_query($mirror, "AAAA"); return 0; } ############################################################################## sub print_version { printf("sa-update version %s\n running on Perl version %s\n", $VERSION, join(".", map( 0+($_||0), ( $] =~ /(\d)\.(\d{3})(\d{3})?/ )))); } ############################################################################## sub print_usage_and_exit { my ( $message, $exitval ) = @_; $exitval ||= 64; if ($exitval == 0) { print_version(); print("\n"); } pod2usage( -verbose => 0, -message => $message, -exitval => $exitval, ); } ############################################################################## sub usage { my ( $verbose, $message ) = @_; print "sa-update version $VERSION\n"; pod2usage( -verbose => $verbose, -message => $message, -exitval => 64 ); } ############################################################################## sub interpolate_gpghomedir { my $gpghome = ''; if ($opt{'gpghomedir'}) { $gpghome = $opt{'gpghomedir'}; if (am_running_on_windows()) { # windows is single-quote-phobic; bug 4958 cmt 7 $gpghome =~ s/\"/\\\"/gs; $gpghome = "--homedir=\"$gpghome\""; } else { $gpghome =~ s/\'/\\\'/gs; $gpghome = "--homedir='$gpghome'"; } } return $gpghome; } ############################################################################## sub check_gpghomedir { unless (-d $opt{gpghomedir}) { dbg("gpg: creating gpg home dir ".$opt{gpghomedir}); # use 0700 to avoid "unsafe permissions" warning mkpath([$opt{gpghomedir}], 0, 0700) or die "cannot mkpath $opt{gpghomedir}: $!"; } } ############################################################################## sub import_gpg_key { my $keyfile = shift; my $gpghome = interpolate_gpghomedir(); check_gpghomedir(); my $CMD = "$GPGPath $gpghome --batch ". "--no-tty --status-fd=1 -q --logger-fd=1 --import"; unless (open(CMD, "$CMD $keyfile|")) { die "fatal: couldn't execute $GPGPath: $!\n"; } # Determine the fate of the signature while(my $GNUPG = <CMD>) { chop $GNUPG; dbg ("gpg: $GNUPG"); if ($GNUPG =~ /^gpg: /) { warn $GNUPG."\n"; # report bad news } if ($GNUPG =~ /^IMPORTED /) { dbg("gpg: gpg key imported successfully"); } } my $errno = 0; close CMD or $errno = $!; proc_status_ok($?,$errno) or warn("gpg: process '$CMD' finished: ".exit_status_str($?,$errno)."\n"); return ($? 
>> 8); } ############################################################################## sub import_default_keyring { my $defkey = File::Spec->catfile ($DEF_RULES_DIR, "sa-update-pubkey.txt"); unless (-f $defkey) { dbg("gpg: import of default keyring failed, couldn't find sa-update-pubkey.txt"); return; } dbg("gpg: importing default keyring to ".$opt{gpghomedir}); check_gpghomedir(); import_gpg_key($defkey); } ############################################################################## sub is_valid_gpg_key_id { # either a keyid (8 bytes) or a fingerprint (40 bytes) return ($_[0] =~ /^[a-fA-F0-9]+$/ && (length $_[0] == 8 || length $_[0] == 40)); } ############################################################################## sub clean_update_dir { my($dir, $preserve_files_ref) = @_; dbg("generic: cleaning directory %s", $dir); unless (opendir(DIR, $dir)) { warn "error: cannot opendir $dir: $!\n"; dbg("generic: attempt to opendir ($dir) failed"); return; } while(my $file = readdir(DIR)) { next if $file eq '.' || $file eq '..'; my $path = File::Spec->catfile($dir, $file); if ($preserve_files_ref && $preserve_files_ref->{$path}) { dbg("generic: preserving $file"); next; } untaint_var(\$path); next unless -f $path; dbg("generic: unlinking $file"); if (!unlink $path) { warn "error: cannot remove file $path: $!\n"; closedir(DIR) or die "cannot close directory $dir: $!"; return; } } closedir(DIR) or die "cannot close directory $dir: $!"; return 1; } sub delete_files { my(@filenames) = @_; foreach my $path (@filenames) { dbg("generic: unlinking $path"); unlink $path or warn "error: cannot unlink file $path: $!\n"; } return 1; } ############################################################################## sub lint_check_dir { my $dir = shift; # due to the Logger module's globalness (all M::SA objects share the same # Logger setup), we can't change the debug level here to only include # "config" or otherwise be more terse. :( my $spamtest = Mail::SpamAssassin->new( { rules_filename => $dir, site_rules_filename => $LOCAL_RULES_DIR, ignore_site_cf_files => 1, userprefs_filename => File::Spec->catfile($dir, "doesnotexist"), local_tests_only => 1, dont_copy_prefs => 1, PREFIX => $PREFIX, DEF_RULES_DIR => $DEF_RULES_DIR, LOCAL_RULES_DIR => $LOCAL_RULES_DIR, LOCAL_STATE_DIR => $LOCAL_STATE_DIR, }); # need to kluge disabling bayes since it may try to expire the DB, and # without the proper config it's not going to be good. $spamtest->{conf}->{use_bayes} = 0; my $res = $spamtest->lint_rules(); $spamtest->finish(); return $res == 0; } ############################################################################## =head1 NAME sa-update - automate SpamAssassin rule updates =head1 SYNOPSIS B<sa-update> [options] Options: --channel channel Retrieve updates from this channel Use multiple times for multiple channels --channelfile file Retrieve updates from the channels in the file --checkonly Check for update availability, do not install --install file Install updates directly from this file. Signature verification will use "file.asc", or "file.sha512" or "file.sha256". 

##############################################################################

=head1 NAME

sa-update - automate SpamAssassin rule updates

=head1 SYNOPSIS

B<sa-update> [options]

Options:

  --channel channel       Retrieve updates from this channel
                          Use multiple times for multiple channels
  --channelfile file      Retrieve updates from the channels in the file
  --checkonly             Check for update availability, do not install
  --install file          Install updates directly from this file. Signature
                          verification will use "file.asc", "file.sha512",
                          or "file.sha256".
  --allowplugins          Allow updates to load plugin code (DANGEROUS)
  --gpgkey key            Trust the key id to sign releases
                          Use multiple times for multiple keys
  --gpgkeyfile file       Trust the key ids in the file to sign releases
  --gpghomedir path       Store the GPG keyring in this directory
  --gpg and --nogpg       Use (or do not use) GPG to verify updates
                          (--gpg is assumed by use of the above --gpgkey
                          and --gpgkeyfile options)
  --import file           Import GPG key(s) from file into sa-update's
                          keyring. Use multiple times for multiple files
  --updatedir path        Directory to place updates, defaults to the
                          SpamAssassin site rules directory
                          (default: /var/lib/spamassassin/4.000000)
  --refreshmirrors        Force the MIRRORED.BY file to be updated
  --forcemirror url       Use a specific mirror instead of downloading
                          from the official mirrors
  --httputil util         Force the download tool to use. By default the
                          first one found of these is used: curl, wget,
                          fetch, lwp
  --score-multiplier x.x  Multiply all scores from the update channel by
                          the given value (integer or float)
  --score-limit x.x       Limit all scores from the update channel to the
                          given value (integer or float). Limiting is
                          applied after any multiply operation
  -D, --debug [area=n,...]  Print debugging messages
  -v, --verbose           Be verbose, e.g. print updated channel names;
                          specify multiple times for more verbosity
  -V, --version           Print version
  -h, --help              Print usage message
  -4                      Force using the inet protocol (IPv4), not inet6
  -6                      Force using the inet6 protocol (IPv6), not inet

=head1 DESCRIPTION

sa-update automates the process of downloading and installing new rules and
configuration, based on channels.  The default channel is
I<updates.spamassassin.org>, which has updated rules since the previous
release.

Update archives are verified using GPG signatures by default.  If GPG is
disabled (not recommended), file integrity is checked with SHA512 or SHA256
checksums.

Note that C<sa-update> will not restart C<spamd> or otherwise cause a scanner
to reload the now-updated ruleset automatically.  Instead, C<sa-update> is
typically used in something like the following manner:

    sa-update && /etc/init.d/spamassassin reload

This works because C<sa-update> only returns an exit status of C<0> if it has
successfully downloaded and installed an updated ruleset.

sa-update uses the underlying operating system's umask for the updated rule
files it installs.  You may wish to run sa-update from a script that sets the
umask prior to calling sa-update.  For example:

    #!/bin/sh
    umask 022
    sa-update

=head1 OPTIONS

=over 4

=item B<--channel>

sa-update can update multiple channels at the same time.  By default, it will
only access "updates.spamassassin.org", but more channels can be specified
via this option.  If there are multiple additional channels, use the option
multiple times, once per channel, i.e.:

    sa-update --channel foo.example.com --channel bar.example.com

=item B<--channelfile>

Similar to the B<--channel> option, except the additional channels are
specified in a file instead of on the command line.  This is useful when
there are many additional channels.
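
As a sketch, assuming one channel name per line (the file name below is
illustrative):

    # channels.txt -- one channel name per line
    updates.spamassassin.org
    foo.example.com
    bar.example.com

    sa-update --channelfile channels.txt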

=item B<--checkonly>

Only check if an update is available, don't actually download and install it.
The exit code will be C<0> or C<1> as described below.

=item B<--install>

Install updates "offline", from the named tar.gz file, instead of performing
DNS lookups and HTTP invocations.

Files named B<file>.asc, B<file>.sha512, and B<file>.sha256 will be used for
the GPG signature, and the SHA512 and SHA256 checksums, respectively.

The filename provided must contain a version number of at least 3 digits,
which will be used as the channel's update version number.

Multiple B<--channel> switches cannot be used with B<--install>.  To install
multiple channels from tarballs, run C<sa-update> multiple times with
different B<--channel> and B<--install> switches, e.g.:

    sa-update --channel foo.example.com --install foo-34958.tgz
    sa-update --channel bar.example.com --install bar-938455.tgz

=item B<--allowplugins>

Allow downloaded updates to activate plugins.  The default is not to activate
plugins; any C<loadplugin> or C<tryplugin> lines will be commented out in the
downloaded update rules files.

You should never enable this for third-party update channels, since plugins
can execute unrestricted code on your system, possibly even as root!  This
also applies to official SpamAssassin updates, which have no need to include
runnable code.  Use the B<--reallyallowplugins> option to bypass the warnings
and make it work.

=item B<--gpg>, B<--nogpg>

sa-update by default will verify update archives by use of a GPG signature.
If you wish to skip GPG verification (very unsafe), you can use the
B<--nogpg> option to disable its use.  Use of the following gpgkey-related
options will override B<--nogpg> and keep GPG verification enabled.

If GPG is disabled, only SHA512 or SHA256 checksums are used to verify that
the downloaded archive has not been corrupted; this offers no security
regarding whether the downloaded archive is legitimate (i.e. unmodified by
evildoers).

Note: Only GnuPG is supported (i.e. no other PGP software).

=item B<--gpgkey>

sa-update has the concept of "release trusted" GPG keys.  When an archive is
downloaded and the signature verified, sa-update requires that the signature
be from one of these "release trusted" keys or else verification fails.  This
prevents third parties from manipulating the files on a mirror, for instance,
and signing with their own key.

By default, sa-update trusts key ids C<24F434CE> and C<5244EC45>, which are
the standard SpamAssassin release key and its sub-key.  Use this option to
trust additional keys.  See the B<--import> option for how to add keys to
sa-update's keyring.  For sa-update to use a key, it must be in sa-update's
keyring and trusted.

For multiple keys, use the option multiple times, i.e.:

    sa-update --gpgkey E580B363 --gpgkey 298BC7D0

Note: use of this option automatically enables GPG verification.

=item B<--gpgkeyfile>

Similar to the B<--gpgkey> option, except the additional keys are specified
in a file instead of on the command line.  This is extremely useful when
there are many additional keys that you wish to trust.

=item B<--gpghomedir>

Specify a directory path to use as a storage area for the C<sa-update> GPG
keyring.  By default, this is

    /etc/spamassassin/sa-update-keys

=item B<--import>

Use to import GPG key(s) from a file into the sa-update keyring, which is
located in the directory specified by B<--gpghomedir>.  Before using channels
from third-party sources, you should use this option to import the GPG key(s)
used by those channels.  You must still use the B<--gpgkey> or
B<--gpgkeyfile> options above to get sa-update to trust the imported keys.

To import multiple keys, use the option multiple times, i.e.:

    sa-update --import channel1-GPG.KEY --import channel2-GPG.KEY

Note: use of this option automatically enables GPG verification.
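
For example, enabling a hypothetical third-party channel typically means
importing its published key and then trusting it on the update run (the key
file name, key id and channel name below are made up):

    sa-update --import example-channel-GPG.KEY
    sa-update --gpgkey 0123456789ABCDEF0123456789ABCDEF01234567 \
        --channel rules.example.com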

=item B<--refreshmirrors>

Force the list of sa-update mirrors for each channel, stored in the
MIRRORED.BY file, to be updated.

By default, the MIRRORED.BY file will be cached for up to 7 days after each
time it is downloaded.

=item B<--forcemirror>

Force the download from a specific host instead of relying on the mirrors
listed in MIRRORED.BY.

=item B<--updatedir>

By default, C<sa-update> will use the system-wide rules update directory:

    /var/lib/spamassassin/4.000000

If the updates should be stored in another location, specify it here.

Note that use of this option is not recommended; if you're just using
sa-update to download updated rulesets for a scanner, and sa-update is
placing updates in the wrong directory, you probably need to rebuild
SpamAssassin with different C<Makefile.PL> arguments, instead of overriding
sa-update's runtime behaviour.

=item B<-D> [I<area,...>], B<--debug> [I<area,...>]

Produce debugging output.  If no areas are listed, all debugging information
is printed.  Diagnostic output can also be enabled for each area
individually; I<area> is the area of the code to instrument.  For example, to
produce diagnostic output on channel, gpg, and http, use:

    sa-update -D channel,gpg,http

For more information about which areas (also known as channels) are
available, please see the documentation at
L<https://wiki.apache.org/spamassassin/DebugChannels>.

=item B<-h>, B<--help>

Print help message and exit.

=item B<-V>, B<--version>

Print sa-update version and exit.

=back

=head1 EXIT CODES

In the absence of the B<--checkonly> option, an exit code of C<0> means an
update was available, and was downloaded and installed successfully.  If
B<--checkonly> was specified, an exit code of C<0> means an update is
available.

An exit code of C<1> means no fresh updates were available.

An exit code of C<2> means that at least one update is available but that a
lint check of the site pre files failed.  The site pre files must pass a lint
check before any updates are attempted.

An exit code of C<3> means that at least one update succeeded while other
channels failed.  If you are using sa-compile, you should proceed with it.

An exit code of C<4> or higher indicates that errors occurred while
attempting to download and extract updates, and that no channels were
updated.

=head1 SEE ALSO

Mail::SpamAssassin(3)
Mail::SpamAssassin::Conf(3)
spamassassin(1)
spamd(1)
<https://wiki.apache.org/spamassassin/RuleUpdates>

=head1 PREREQUISITES

C<Mail::SpamAssassin>

=head1 BUGS

See <https://issues.apache.org/SpamAssassin/>

=head1 AUTHORS

The Apache SpamAssassin(tm) Project <https://spamassassin.apache.org/>

=head1 LICENSE AND COPYRIGHT

SpamAssassin is distributed under the Apache License, Version 2.0, as
described in the file C<LICENSE> included with the distribution.

Copyright (C) 2015 The Apache Software Foundation

=cut