This information may be out of date!

Squid has a handy feature called a redirector,
and using it you can do things like URL filtering.

Most URL filtering products are commercial, but
here we will use squidGuard, which is free.
I have also merged in a script that uses viralator to virus-scan downloads with certain file extensions.

viralator homepage: http://viralator.sourceforge.net/
SquidGuard homepage: http://www.squidguard.org/

It seems that Shalla has now acquired the squidguard.org domain
and is maintaining squidGuard.
Version 1.4 has been released, so you should probably use that.

Installation

cd /usr/local/src
wget http://www.squidguard.org/Downloads/squidGuard-1.4.tar.gz
tar zxvf squidGuard-1.4.tar.gz
cd squidGuard-1.4
./configure --prefix=/usr/local/squid \
--with-sg-config=/usr/local/squid/etc/squidGuard.conf \
--with-sg-logdir=/usr/local/squid/var/logs \
--with-sg-dbhome=/usr/local/squid/var/blacklists \
--with-db=/usr/local/BerkeleyDB
make
make install
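
If you want a quick sanity check of the build (just a sketch; the paths follow the configure options above), try:

# Confirm the binary runs and prints its version
/usr/local/squid/bin/squidGuard -v
# Should show the BerkeleyDB shared library it was linked against
ldd /usr/local/squid/bin/squidGuard | grep -i libdb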

The CGI block page appears to still be English-only, so if you need it,
use the Japanese-localized version available further down this page.
It should probably work with 1.4 as well.

Editing squid.conf

redirect_program /usr/local/squid/bin/squidGuard -c /usr/local/squid/etc/squidGuard.conf
redirect_children 5
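
After saving the change, reload squid so the redirector children get started. A minimal sketch, assuming squid itself was also installed under the /usr/local/squid prefix:

# Re-read squid.conf without a full restart
/usr/local/squid/sbin/squid -k reconfigure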

Next, configure squidGuard.conf.
This part is actually still (and forever) under construction, so I will just post my current configuration.

dbhome /usr/local/squid/var/blacklists/blacklists
logdir /usr/local/squid/var/logs

dest ads {
log block.log
domainlist ads/domains
urllist ads/urls
}

dest adult {
log block.log
domainlist adult/domains
urllist adult/urls
}

dest aggressive {
log block.log
domainlist aggressive/domains
urllist aggressive/urls
}

dest agressif {
log block.log
domainlist agressif/domains
urllist agressif/urls
}

dest audio-video {
log block.log
domainlist audio-video/domains
urllist audio-video/urls
}

dest cleaning {
log block.log
domainlist cleaning/domains
urllist cleaning/urls
}

dest dangerous_material {
log block.log
domainlist dangerous_material/domains
urllist dangerous_material/urls
}

dest drogue {
log block.log
domainlist drogue/domains
urllist drogue/urls
}

dest drugs {
log block.log
domainlist drugs/domains
urllist drugs/urls
}

dest forums {
log block.log
domainlist forums/domains
urllist forums/urls
}

dest gambling {
log block.log
domainlist gambling/domains
urllist gambling/urls
}

dest games {
log block.log
domainlist games/domains
urllist games/urls
}

dest hacking {
log block.log
domainlist hacking/domains
urllist hacking/urls
}

dest liste_bu {
log block.log
domainlist liste_bu/domains
urllist liste_bu/urls
}

dest mail {
log block.log
domainlist mail/domains
urllist mail/urls
}

dest mixed_adult {
log block.log
domainlist mixed_adult/domains
# urllist mixed_adult/urls
}

dest mobile-phone {
log block.log
domainlist mobile-phone/domains
# urllist mobile-phone/urls
}

dest phishing {
log block.log
domainlist phishing/domains
# urllist phishing/urls
}

dest porn {
log block.log
domainlist porn/domains
urllist porn/urls
}

dest proxy {
log block.log
domainlist proxy/domains
urllist proxy/urls
}

dest publicite {
log block.log
domainlist publicite/domains
urllist publicite/urls
}

dest radio {
log block.log
domainlist radio/domains
urllist radio/urls
}

dest reaffected {
log block.log
domainlist reaffected/domains
# urllist reaffected/urls
}

dest redirector {
log block.log
domainlist redirector/domains
urllist redirector/urls
}

dest sexual_education {
log block.log
domainlist sexual_education/domains
urllist sexual_education/urls
}

dest shopping {
log block.log
domainlist shopping/domains
# urllist shopping/urls
}

dest strict_redirector {
log block.log
domainlist strict_redirector/domains
urllist strict_redirector/urls
}

dest strong_redirector {
log block.log
domainlist strong_redirector/domains
urllist strong_redirector/urls
}

dest tricheur {
log block.log
domainlist tricheur/domains
urllist tricheur/urls
}

dest violence {
log block.log
domainlist violence/domains
urllist violence/urls
}

dest warez {
log block.log
domainlist warez/domains
urllist warez/urls
}

dest webmail {
log block.log
domainlist webmail/domains
urllist webmail/urls
}

dest local-ok {
domainlist local-ok/domains
urllist local-ok/urls
}

dest local-block {
log block.log
domainlist local-block/domains
urllist local-block/urls
}

rewrite google {
s@(google.com/search.*q=.*)@\1\&safe=active@i
s@(google.com/images.*q=.*)@\1\&safe=active@i
s@(google.com/groups.*q=.*)@\1\&safe=active@i
s@(google.com/news.*q=.*)@\1\&safe=active@i
# log google
}

acl {
default {
# for google to be in "safe mode"
rewrite google

# the default categories are conservative, please add any additional
# categories listed above or simply comment out this line and uncomment
# out the line below it.

pass local-ok !local-block !aggressive !drugs !gambling !hacking !porn !proxy !violence !warez all

redirect 302:http://www.example.com/cgi-bin/squidGuard.cgi?clientaddr=%a&clientname=%n&clientident=%i&srcclass=%s&targetgroup=%t&url=%u
}
}

It seems the indentation in the config file has to be all tabs(?).
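
Before wiring this into squid, it is worth compiling the lists into Berkeley DB files and feeding squidGuard one line of the redirector protocol by hand. This is only a sketch: the test URL is made up, and squid:squid as the owner is an assumption borrowed from the update script further down.

# Build .db files for every domainlist/urllist referenced in squidGuard.conf
/usr/local/squid/bin/squidGuard -c /usr/local/squid/etc/squidGuard.conf -C all
# The squid user must be able to read the databases and write the logs
chown -R squid:squid /usr/local/squid/var/blacklists /usr/local/squid/var/logs

# Manual test: one redirector-protocol line (URL client-ip/fqdn ident method).
# A blocked URL should come back rewritten to the squidGuard.cgi page;
# an allowed URL comes back as an empty line.
echo "http://www.blocked-example.com/ 192.168.0.1/- - GET" | \
/usr/local/squid/bin/squidGuard -c /usr/local/squid/etc/squidGuard.conf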

Installing viralator

We will assume the files are placed in /usr/local/apache2/cgi-bin.

cd /usr/local/src
wget http://jaist.dl.sourceforge.net/sourceforge/viralator/viralator-0.9.7.tar.gz
tar zxvf viralator-0.9.7.tar.gz
cd viralator-0.9.7
# If you want the Japanese-localized version adapted for squidGuard, get it like this instead:
wget http://ftp.momo-i.org/pub/proxy/viralator-0.9.7jp.tar.bz2
tar jxvf viralator-0.9.7jp.tar.bz2
cd viralator-0.9.7jp
mkdir /usr/local/squid/etc/viralator

cp progress.png style.css viralator.cgi bar.png /usr/local/apache2/cgi-bin
cp -r etc/viralator/* /usr/local/squid/etc/viralator/
cd /usr/local/apache2/cgi-bin
chown -R apache. progress.png style.css viralator.cgi bar.png
chmod 755 viralator.cgi
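
viralator only downloads the file and hands it to an external command-line virus scanner, so one has to be installed as well. The sketch below assumes ClamAV's clamscan; if you use a different scanner, point viralator.conf at that instead.

# Make sure the scanner viralator is going to call actually exists
which clamscan && clamscan --version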

Configuration

/usr/local/squid/etc/viralator/viralator.conf

charset -> UTF-8
# Change this to your server's character encoding

progress_unit -> cgi-bin/bar.png
progress_indicator -> cgi-bin/progress.png
css_file -> cgi-bin/style.css
# If you placed the files somewhere other than cgi-bin, change the cgi-bin part accordingly
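
Because viralator.conf refers to progress.png, bar.png and style.css by URL, it is worth checking that Apache really serves them and can execute the CGI. A rough check, assuming Apache is listening on localhost with the /cgi-bin/ alias used above:

# Each request should return 200; a 403 or 500 usually means permissions
# or the CGI setup for that directory needs another look.
for f in viralator.cgi style.css progress.png bar.png; do
  curl -s -o /dev/null -w "%{http_code}  $f\n" "http://localhost/cgi-bin/$f"
done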

Next, changes on the squidGuard side

cd /usr/local/squid/var/blacklists/blacklists
mkdir files
vi files/files

# Add the following (it is fine to add or remove extensions to taste)
(\.exe$|\.com$|\.bat$|\.zip$|\.bin$|\.pdf$|\.bz2$|\.bz$|\.tgz$|\.tar$|\.doc$|\.xls$|\.ppt$|\.lzh$|\.lha$|\.dll$|\.gz$)
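
The expression list is just a regular expression that squidGuard matches against the whole URL, so you can dry-run it with grep from the blacklists directory. A tiny sketch with a made-up URL:

# Should print the URL (a match); a .html URL should print nothing
echo "http://downloads.example.com/setup.exe" | grep -E -f files/files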

cd /usr/local/squid/etc
vi squidGuard.conf

# Add the following
dest files {
expressionlist files/files
}
# Add !files to the pass line
pass ~~ !files all

# The default acl keeps its block-page redirect to squidGuard.cgi:
redirect 302:http://~~/cgi-bin/squidGuard.cgi?~~

# while the files dest gets its own redirect so matching downloads go through viralator
# (in squidGuard, a redirect inside a dest block takes precedence over the acl's redirect):
redirect http://~~/cgi-bin/viralator.cgi?~~

Finally, add the following to squid.conf so that requests to your own server (momo-i.org here; substitute your own domain) are not passed back through the redirector:

acl momoi url_regex .*\.momo-i\.org.*
redirector_access deny momoi

Now let's have the blacklist update itself daily.

http://urlblacklist.com/

/etc/cron.daily/updatebl

#!/bin/bash
set -u

export BL_URL=${BL_URL:="http://urlblacklist.com/cgi-bin/commercialdownload.pl?type=download&file=bigblacklist"}
export DB_PATH=${DB_PATH:="/usr/local/squid/var/blacklists/blacklists"}
export HOME_DIR="/tmp"
export SG_UGID=${SG_UGID:="squid:squid"}
export DG_PATH=${DG_PATH:="/usr/local/squid/bin"}
export UNCOMP_CMD="gunzip"
export UNTAR_DIR="blacklists"
export VERS="0.9.2"

export BL_TAR_BASE="bigblacklist.tar.gz"
export BL_TAR_FULL="${HOME_DIR}/${BL_TAR_BASE}"
export TMP_DIR="/tmp/blacklists"

echo "==="
echo "Starting Blacklist update v${VERS}: `date '+%Y/%m/%d %T'`"

if [ ! -d "${TMP_DIR}" ]
then
if [ -e "${TMP_DIR}" ]
then
echo "ERROR: ${TMP_DIR} already exists, but isn't a directory;"
echo " aborting Blacklist refresh."
exit 1
fi

mkdir "${TMP_DIR}"
fi

cd "${TMP_DIR}"
if [ "$?" != "0" ]
then
echo "ERROR: unable to cd into working directory,"
echo " ${TMP_DIR}"
exit 1
else
if [ -f "${BL_TAR_FULL}" ]
then
mv -f "${BL_TAR_FULL}" "${BL_TAR_FULL}.1"
fi

if [ -f "./${BL_TAR_BASE}" ]
then
echo "Moving old ${BL_TAR_BASE} out of the way."
mv -f "./${BL_TAR_BASE}" "${BL_TAR_FULL}"
fi

echo "Running wget to retrieve new lists."
wget -nv "${BL_URL}"
if [ "$?" != "0" ]
then
echo "ERROR: unable to retrieve new lists,"
echo " aborting blacklist refresh."
exit 1
else
echo "Succesfully retrieved new lists."

echo "Untaring Blacklist archive, ${BL_TAR_BASE}"
"${UNCOMP_CMD}" <"${bl_tar_base}" tar="" -xf="" br="" /> if [ "$?" != "0" ]
then
echo "ERROR: unable to extract new lists,"
echo " aborting blacklist refresh."
exit 1
else
echo "Moving new lists into place."
for i in "${UNTAR_DIR}"/*
do
export ib="`basename ${i}`"
if [ -d "${DB_PATH}/${ib}" ]
then
rm -rf "${DB_PATH}/${ib}"
fi

mv "${UNTAR_DIR}/${ib}" "${DB_PATH}"

done

echo "Remove temporary files and folders."
rm -rf /tmp/blacklists

echo "DansGuardian Exports"
cd ${DB_PATH}
for i in "${DB_PATH}"/*
do
export ib2="`basename ${i}`"
if [ -f ${DB_PATH}/${ib2}/domains ]
then
cd ${DB_PATH}/${ib2}
cat domains >> domains.tmp
mv domains.tmp domains
fi
if [ -f ${DB_PATH}/${ib2}/urls ]
then
cd ${DB_PATH}/${ib2}
cat urls >> urls.tmp
mv urls.tmp urls
fi
done
echo "Change owner and permissions."
chown -R "${SG_UGID}" "${DB_PATH}"
chmod -R 755 "${DB_PATH}"
echo "Reloading BlackLists"
/usr/local/squid/bin/squidGuard -C all
/usr/local/squid/bin/squidGuard -u
echo "Restarting Squid."
/etc/init.d/squid restart
echo "Finished Blacklist update: `date '+%Y/%m/%d %T'`"
echo "==="

exit $?
fi
fi
fi

With this, the latest DB will be downloaded from urlblacklist.com every day.
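
One last detail: the script has to be executable, or cron.daily will never run it. You can also run it once by hand to confirm the whole chain works:

# cron.daily only runs executable files
chmod 755 /etc/cron.daily/updatebl
# Optional: run it once manually to check the download/extract/reload steps
/etc/cron.daily/updatebl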
