#! /bin/sh

# fetch a blacklist from the dcc.dcc-servers.net DCC servers

# This script should be run about twice an hour, at arbitrary minutes
#	instead of 0 and 30, to minimize pile-ups on the FTP server.

# -h homedir	set DCC home directory

# -o blklist	file of local blacklist entries

# -p pgm	fetch, wget, curl, or ftp

# -s srvr-host	host name of source of the common blacklist

# Rhyolite Software DCC 1.3.45-1.40 $Revision$
# Generated automatically from fetchblack.in by configure.


# other, probably local blacklist files
OTHER_BL=

# detach from any terminal input
exec </dev/null

DCC_HOMEDIR=/etc/dcc

DEBUG=
SRVR_BL=dcc-servers-blacklist
SRVRS=
PGM=ftp
FORCE=

USAGE="`basename $0`: [-xf] [-h homedir] [-o blklist] [-p pgm] [-s srvr-host]"
while getopts "xfh:o:p:s:" c; do
    case $c in
	x) DEBUG=yes; set -x;;			# trace execution
	f) FORCE=yes;;				# install even if unchanged
	h) DCC_HOMEDIR="$OPTARG";;
	o) OTHER_BL="$OTHER_BL $OPTARG";;	# may be repeated
	p) PGM=$OPTARG;;			# fetch, wget, curl, or ftp
	s) SRVRS="$SRVRS $OPTARG";;		# may be repeated
	*) echo "$USAGE" 1>&2; exit 1;;
    esac
done
# POSIX arithmetic expansion avoids expr exiting 1 when OPTIND is 1
shift $((OPTIND - 1))
if test "$#" -ne 0; then
    echo "$USAGE" 1>&2; exit 1
fi
# default to the well known mirrors of the common blacklist
if test -z "$SRVRS"; then
    SRVRS="http://www.dcc-servers.net/dcc http://www.rhyolite.com/anti-spam/dcc ftp://ftp.dcc-servers.net ftp://ftp.rhyolite.com"
fi
# Build the list of URLs to try.  A bare host name becomes an ftp:// URL;
# a URL that does not already end with the blacklist file name has it
# appended.
URLS=
for SRVR in $SRVRS; do
    if expr "$SRVR" : '.[hft]*tp://' >/dev/null; then
	URL=$SRVR
	case "$URL" in
	*/$SRVR_BL) ;;
	*) URL=$URL/$SRVR_BL;;
	esac
    else
	URL=ftp://$SRVR/$SRVR_BL
    fi
    URLS="$URLS $URL"
done

# dccd expects the combined blacklist at this path
TGT_BL=$DCC_HOMEDIR/blacklist
# staged copy of the combined list, built in the work directory
NEW_BL=new-blacklist
BDIR=$DCC_HOMEDIR/$SRVR_BL
FLOG=$BDIR/fetch-log

# do all of the work in a subdirectory of the DCC home directory;
# give up immediately if it cannot be created or entered instead of
# fetching into (and later installing from) some unrelated directory
if test ! -d $BDIR; then
    mkdir $BDIR || exit 1
fi
cd $BDIR || exit 1

# Fetch with whichever of fetch, wget, curl, or ftp the user chose;
# try each URL in turn until one yields a plausible file.
rm -f $SRVR_BL $FLOG
BASE_PGM=`basename "$PGM"`
if test "$BASE_PGM" = wget; then
    BASE_PGM=
    for URL in $URLS; do
	echo "$URL:" >>$FLOG
	$PGM -nd --no-host-directories --mirror --retr-symlinks \
	    --passive-ftp  $URL >>$FLOG 2>&1
	if test -s $SRVR_BL; then
	    # a DOCTYPE near the top of the file means the server sent
	    # an HTML error page instead of the blacklist
	    case "`sed -n -e 2q -e 's/.*DOCTYPE.*/HTML/p' $SRVR_BL`" in
	    '') break;;
	    *)  rm $SRVR_BL;;
	    esac
	fi
	echo >>$FLOG
    done
fi

# fetch(1) understands URLs itself
case "$BASE_PGM" in
fetch)
    BASE_PGM=
    for URL in $URLS; do
	echo "$URL:" >>$FLOG
	# identify this script to the HTTP server
	HTTP_REFERER=DCC-script; export HTTP_REFERER
	$PGM -p -m $URL >>$FLOG 2>&1
	test -s $SRVR_BL && break
	echo >>$FLOG
    done
    ;;
esac

if test "$BASE_PGM" = curl; then
    BASE_PGM=
    for URL in $URLS; do
	echo "$URL:" >>$FLOG
	# -O saves the download as ./$SRVR_BL, the last component of $URL;
	# without an output option curl streams the file to stdout, which
	# would land in $FLOG and $SRVR_BL would never be created
	$PGM --connect-timeout 30 --max-time 600 -O \
	    $URL >>$FLOG 2>&1
	# --fail does not work on at least some versions of curl,
	# so detect an HTML error page by looking for a DOCTYPE
	if test -s $SRVR_BL; then
	    if test -z "`sed -n -e 2q -e 's/.*DOCTYPE.*/HTML/p' $SRVR_BL`"; then
		break;
	    fi
	    rm $SRVR_BL
	fi
	echo >>$FLOG
    done
fi

# BSD-style ftp(1) that understands URLs on the command line
if test "$BASE_PGM" = ftp; then
    BASE_PGM=
    for URL in $URLS; do
	echo "$URL:" >>$FLOG
	# -p requests passive mode for firewalled clients
	$PGM -p $URL >>$FLOG 2>&1
	if test -s $SRVR_BL; then
	    break;
	fi
	echo >>$FLOG
    done
    # if that did not work, try ancient FTP
    if test ! -s $SRVR_BL; then
	for URL in $URLS; do
	    # split the URL into host and remote path by hand;
	    # non-ftp:// URLs produce empty strings and are skipped
	    HOST=`expr "$URL" : "ftp://\([^/]*\)/"`
	    RFILE=`expr "$URL" : "ftp://[^/]*/\(.*\)"`
	    if test -z "$RFILE" -o -z "$HOST"; then
		continue
	    fi
	    echo "$URL:" >>$FLOG
	    # drive a traditional ftp client with a scripted conversation:
	    # anonymous login with the local host name as the password,
	    # then a single get of the remote file into $SRVR_BL
	    (echo "user anonymous `hostname`"; echo "get $RFILE $SRVR_BL")    \
		| ftp -n $HOST >>$FLOG 2>&1
	    if test -s $SRVR_BL; then
		break;
	    fi
	    # some versions of ftp like to leave empty files
	    rm -f $SRVR_BL
	    echo >>$FLOG
	done
    fi
fi

if test ! -s $SRVR_BL; then
    # complain only when the list is more than a day old, and then only
    #	once per day
    OLD=
    if test -f $NEW_BL; then
	# find prints the name only when the file is younger than one day,
	# so an empty $OLD means it is time to complain again
	OLD="`find $NEW_BL -follow -mtime -1 2>/dev/null`"
    fi
    if test -z "$OLD"; then
	# refresh the timestamp to throttle complaints to one per day
	touch $NEW_BL
	# the complaint belongs on stderr; it was previously redirected
	# with "2>&1", which left it on stdout
	echo "Unable to fetch blacklist; see $FLOG" 1>&2
    fi
    exit 1
fi

# add the local list last so that it can override the common entries
rm -f $NEW_BL
cat  $SRVR_BL $OTHER_BL > $NEW_BL
# install only when something changed, unless -f forces it; compare
# against $TGT_BL itself rather than "../blacklist", which cmp resolves
# physically and which can differ when $BDIR is reached via a symlink
if test -z "$FORCE" && cmp $NEW_BL $TGT_BL 1>/dev/null 2>&1; then
    exit 0
fi
# copy it in case the target is a symbolic link
cp $NEW_BL $TGT_BL
