jobs - avoid reload when not necessary

bunkerity 2021-07-21 17:09:42 +02:00
parent 5f845680ff
commit 280d189864
No known key found for this signature in database
GPG Key ID: 3D80806F12602A7C
6 changed files with 58 additions and 36 deletions

View File

@@ -70,30 +70,30 @@ fi
 # GeoIP
 if [ "$(has_value BLACKLIST_COUNTRY ".\+")" != "" ] || [ "$(has_value WHITELIST_COUNTRY ".\+")" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name geoip
+/opt/bunkerized-nginx/jobs/main.py --name geoip --cache
 fi
 # User-Agents
 if [ "$(has_value BLOCK_USER_AGENT yes)" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name user-agents
+/opt/bunkerized-nginx/jobs/main.py --name user-agents --cache
 fi
 # Referrers
 if [ "$(has_value BLOCK_REFERRER yes)" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name referrers
+/opt/bunkerized-nginx/jobs/main.py --name referrers --cache
 fi
 # exit nodes
 if [ "$(has_value BLOCK_TOR_EXIT_NODE yes)" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name exit-nodes
+/opt/bunkerized-nginx/jobs/main.py --name exit-nodes --cache
 fi
 # proxies
 if [ "$(has_value BLOCK_PROXIES yes)" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name proxies
+/opt/bunkerized-nginx/jobs/main.py --name proxies --cache
 fi
 # abusers
 if [ "$(has_value BLOCK_ABUSERS yes)" != "" ] ; then
-/opt/bunkerized-nginx/jobs/main.py --name abusers
+/opt/bunkerized-nginx/jobs/main.py --name abusers --cache
 fi
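
A minimal sketch (not taken from this commit) of how a --cache flag like the one added above can end up as copy_cache on a job instance, mirroring the copy_cache=args.cache call visible in the jobs/main.py hunk at the bottom of this page; DummyJob is a hypothetical stand-in for the real job classes:

import argparse

class DummyJob :
	# hypothetical stand-in for the real job classes (Abusers, GeoIP, ...)
	def __init__(self, redis_host=None, copy_cache=False) :
		self.copy_cache = copy_cache
	def run(self) :
		# a real job would try its local cache first when copy_cache is True
		return "cache tried first" if self.copy_cache else "direct download"

if __name__ == "__main__" :
	parser = argparse.ArgumentParser()
	parser.add_argument("--name", required=True)
	parser.add_argument("--cache", action="store_true")
	parser.add_argument("--redis", default=None)
	args = parser.parse_args()
	job = DummyJob(redis_host=args.redis, copy_cache=args.cache)
	print("[*] " + args.name + " : " + job.run())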

View File

@@ -36,8 +36,8 @@ echo "[*] Generate configuration files"
 do_and_check_cmd "/opt/bunkerized-nginx/gen/main.py --settings /opt/bunkerized-nginx/settings.json --templates /opt/bunkerized-nginx/confs --output /etc/nginx --variables /opt/bunkerized-nginx/variables.env"
 # Run pre-jobs
-echo "[*] Run pre-jobs"
-do_and_check_cmd "/opt/bunkerized-nginx/entrypoint/pre-jobs.sh"
+echo "[*] Run jobs"
+do_and_check_cmd "/opt/bunkerized-nginx/entrypoint/jobs.sh"
 # Reload nginx if it's running
 if [ -f "/tmp/nginx.pid" ] ; then
@@ -49,6 +49,5 @@ else
AS_ROOT="yes" do_and_check_cmd nginx -g 'daemon on; user nginx;'
fi
# Run post-jobs
echo "[*] Run post-jobs"
do_and_check_cmd /opt/bunkerized-nginx/entrypoint/post-jobs.sh
# Done
echo "[*] bunkerized-nginx successfully executed"

View File

@@ -886,27 +886,27 @@ fi
 # Download abusers list
 echo "[*] Download abusers list"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name abusers
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name abusers" nginx
 # Download TOR exit nodes list
 echo "[*] Download TOR exit nodes list"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name exit-nodes
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name exit-nodes" nginx
 # Download proxies list
 echo "[*] Download proxies list"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name proxies
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name proxies" nginx
 # Download referrers list
 echo "[*] Download referrers list"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name referrers
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name referrers" nginx
 # Download user agents list
 echo "[*] Download user agents list"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name user-agents
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name user-agents" nginx
 # Download geoip database
 echo "[*] Download geoip DB"
-do_and_check_cmd /opt/bunkerized-nginx/jobs/main.py --name geoip
+do_and_check_cmd su -s "/bin/bash" -c "/opt/bunkerized-nginx/jobs/main.py --name geoip" nginx
 # We're done
 cd "$old_dir"

View File

@@ -1,6 +1,6 @@
 from Job import Job
-import datetime, gzip
+import datetime, gzip, shutil, os
 class GeoIP(Job) :
@@ -9,13 +9,13 @@ class GeoIP(Job) :
 		data = ["https://download.db-ip.com/free/dbip-country-lite-" + datetime.datetime.today().strftime("%Y-%m") + ".mmdb.gz"]
 		filename = "geoip.mmdb.gz"
 		type = "file"
-		super().__init__(name, data, filename, redis_host=redis_host, type=type, regex=regex, copy_cache=copy_cache)
+		super().__init__(name, data, filename, redis_host=redis_host, type=type, copy_cache=copy_cache)
 	def run(self) :
 		super().run()
 		count = 0
 		with gzip.open("/etc/nginx/geoip.mmdb.gz", "rb") as f :
-			with open("/tmp/geoip.mmdb", "w") as f2 :
+			with open("/tmp/geoip.mmdb", "wb") as f2 :
 				while True :
 					chunk = f.read(8192)
 					if not chunk :
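
The GeoIP hunk fixes the decompression target to be opened in binary mode ("wb"), since gzip.open(..., "rb") yields bytes, and pulls in shutil and os. A standalone sketch of the same decompression using shutil.copyfileobj instead of the manual read loop (paths are illustrative):

import gzip, shutil

def decompress_mmdb(src="/etc/nginx/geoip.mmdb.gz", dst="/tmp/geoip.mmdb") :
	with gzip.open(src, "rb") as f_in :
		# the destination must be opened in binary mode because f_in yields bytes
		with open(dst, "wb") as f_out :
			shutil.copyfileobj(f_in, f_out, length=8192)

# usage : decompress_mmdb()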

View File

@@ -1,4 +1,9 @@
-import abc, requests, redis, os, datetime, traceback, re, shutil
+import abc, requests, redis, os, datetime, traceback, re, shutil, enum, filecmp
+class JobRet(enum.Enum) :
+	KO = 0
+	OK_RELOAD = 1
+	OK_NO_RELOAD = 2
 class Job(abc.ABC) :
@@ -24,18 +29,21 @@ class Job(abc.ABC) :
 			f.write(when + " " + what)
 	def run(self) :
+		ret = JobRet.KO
 		try :
 			if self.__type == "line" or self.__type == "file" :
-				if self.__copy_cache and self.__from_cache() :
-					return True
-				self.__external()
+				if self.__copy_cache :
+					ret = self.__from_cache()
+					if ret != JobRet.KO :
+						return ret
+				ret = self.__external()
 				self.__to_cache()
 			elif self.__type == "exec" :
-				self.__exec()
+				return self.__exec()
 		except Exception as e :
 			self.__log("exception while running job : " + traceback.format_exc())
-			return False
-		return True
+			return JobRet.KO
+		return ret
 	def __external(self) :
 		if self.__redis == None :
@@ -67,10 +75,14 @@ class Job(abc.ABC) :
 			if count > 0 :
 				shutil.copyfile("/tmp/" + self.__filename, "/etc/nginx/" + self.__filename)
 			os.remove("/tmp/" + self.__filename)
+			return JobRet.OK_RELOAD
 		elif self.__redis != None and count > 0 :
 			self.__redis.delete(self.__redis.keys(self.__name + "_*"))
 			pipe.execute()
+			return JobRet.OK_RELOAD
+		return JobRet.KO
 	def __download_data(self, url) :
 		r = requests.get(url, stream=True)
@@ -89,17 +101,24 @@
 		if len(stderr) > 1 :
 			self.__log("stderr = " + stderr)
 		if proc.returncode != 0 :
-			raise Exception("error code " + str(proc.returncode))
+			return JobRet.KO
+		# TODO : check if reload is needed ?
+		return JobRet.OK_RELOAD
 	def __edit(self, chunk) :
 		return chunk
 	def __from_cache(self) :
 		if not os.path.isfile("/opt/bunkerized-nginx/cache/" + self.__filename) :
-			return False
+			return JobRet.KO
 		if self.__redis == None or self.__type == "file" :
-			shutil.copyfile("/opt/bunkerized-nginx/cache/" + self.__filename, "/etc/nginx/" + self.__filename)
-		elif self.__redis != None and self.__type == "line" :
+			if not os.path.isfile("/etc/nginx/" + self.__filename) or not filecmp.cmp("/opt/bunkerized-nginx/cache/" + self.__filename, "/etc/nginx/" + self.__filename, shallow=False) :
+				shutil.copyfile("/opt/bunkerized-nginx/cache/" + self.__filename, "/etc/nginx/" + self.__filename)
+				return JobRet.OK_RELOAD
+			return JobRet.OK_NO_RELOAD
+		if self.__redis != None and self.__type == "line" :
 			self.__redis.delete(self.__redis.keys(self.__name + "_*"))
 			with open("/opt/bunkerized-nginx/cache/" + self.__filename) as f :
 				pipe = self.__redis.pipeline()
@@ -110,7 +129,9 @@
 					line = line.strip()
 					pipe.set(self.__name + "_" + line, "1")
 				pipe.execute()
-		return True
+			return JobRet.OK_NO_RELOAD
+		return JobRet.KO
 	def __to_cache(self) :
 		if self.__redis == None or self.__type == "file" :
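
The Job.py changes are the core of this commit: run() now reports a tri-state JobRet instead of a boolean, and the cache path uses filecmp.cmp to answer OK_NO_RELOAD when the cached file already matches the deployed one. A condensed, self-contained sketch of that decision (the real class also handles redis, downloads and logging; paths are examples):

import enum, filecmp, os, shutil

class JobRet(enum.Enum) :
	KO = 0
	OK_RELOAD = 1
	OK_NO_RELOAD = 2

def copy_from_cache(cache_file, deployed_file) :
	# mirrors __from_cache for type == "file" : copy only when the deployed
	# file is missing or differs, and report whether nginx must be reloaded
	if not os.path.isfile(cache_file) :
		return JobRet.KO
	if not os.path.isfile(deployed_file) or not filecmp.cmp(cache_file, deployed_file, shallow=False) :
		shutil.copyfile(cache_file, deployed_file)
		return JobRet.OK_RELOAD
	return JobRet.OK_NO_RELOAD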

View File

@@ -5,6 +5,7 @@ import argparse, sys
 sys.path.append("/opt/bunkerized-nginx/jobs")
 import Abusers, CertbotNew, CertbotRenew, ExitNodes, GeoIP, Proxies, Referrers, SelfSignedCert, UserAgents
+from Job import JobRet
 from reload import reload
@@ -50,15 +51,14 @@ if __name__ == "__main__" :
 		instance = JOBS[job](redis_host=args.redis, copy_cache=args.cache, dst_cert=args.dst_cert, dst_key=args.dst_key, expiry=args.expiry, subj=args.subj)
 	else :
 		instance = JOBS[job](redis_host=args.redis, copy_cache=args.cache)
-	if not instance.run() :
+	ret = instance.run()
+	if ret == JobRet.KO :
 		print("[!] Error while running job " + job)
 		sys.exit(1)
 	print("[*] Job " + job + " successfully executed")
 	# Reload
-	# TODO : only reload if needed
-	do_reload = True
-	if do_reload :
+	if ret == JobRet.OK_RELOAD :
 		ret = reload()
 		if ret == 0 :
 			print("[*] Reload operation successfully executed")
@@ -67,6 +67,8 @@ if __name__ == "__main__" :
 			sys.exit(1)
 		elif ret == 2 :
 			print("[*] Skipped reload operation because nginx is not running")
+	else :
+		print("[*] Skipped reload operation because it's not needed")
 	# Done
 	sys.exit(0)
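
jobs/main.py now reloads nginx only when the job reports OK_RELOAD. The reload() helper it imports is not part of this diff; judging from the return codes handled above (0 = reloaded, 2 = nginx not running, anything else = failure), a plausible sketch could look like this (hypothetical, not the project's reload module):

import os, subprocess

def reload() :
	# hypothetical implementation consistent with the codes checked above
	if not os.path.isfile("/tmp/nginx.pid") :
		return 2
	proc = subprocess.run(["nginx", "-s", "reload"])
	return 0 if proc.returncode == 0 else 1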