bunkerweb 1.4.0

This commit is contained in:
bunkerity
2022-06-03 17:24:14 +02:00
parent 3a078326c5
commit a9f886804a
5245 changed files with 1432051 additions and 27894 deletions

162
job/JobScheduler.py Normal file
View File

@@ -0,0 +1,162 @@
import schedule, glob, traceback, os, sys, json, subprocess
sys.path.append("/opt/bunkerweb/utils")
from logger import log
from ApiCaller import ApiCaller
class JobScheduler(ApiCaller):
    """Runs the jobs declared by core and external plugins, either once or on
    a recurring schedule, and triggers a configuration regeneration plus an
    nginx reload when a job reports that new data is available.

    Job exit-code convention (encoded by the callers of __job_wrapper):
    0 = success, 1 = success and a reload is needed, >= 2 = failure.
    """

    def __init__(self, env=None, lock=None, apis=None):
        """
        :param env: environment variables handed to jobs and the generator
        :param lock: optional lock serializing run_pending() calls
        :param apis: API endpoints of remote bunkerweb instances
        """
        # Bugfix : mutable defaults ({} / []) are shared across calls, use
        # None sentinels instead
        super().__init__(apis if apis is not None else [])
        self.__env = env if env is not None else {}
        # Dump the raw env for the config generator BEFORE merging os.environ,
        # so the file only contains the caller-provided variables
        with open("/tmp/autoconf.env", "w") as f:
            for k, v in self.__env.items():
                f.write(k + "=" + v + "\n")
        self.__env.update(os.environ)
        self.__jobs = self.__get_jobs()
        self.__lock = lock

    def __get_jobs(self):
        """Parse every plugin.json and return {plugin_name: [job, ...]}."""
        jobs = {}
        plugins = glob.glob("/opt/bunkerweb/core/*/") + glob.glob("/opt/bunkerweb/plugins/*/")
        for plugin in plugins:
            plugin_name = plugin.split("/")[-2]
            jobs[plugin_name] = []
            try:
                with open(plugin + "/plugin.json") as f:
                    plugin_data = json.loads(f.read())
                if "jobs" not in plugin_data:
                    continue
                # Remember where the job scripts live
                for job in plugin_data["jobs"]:
                    job["path"] = plugin
                jobs[plugin_name] = plugin_data["jobs"]
            except Exception:
                log("SCHEDULER", "⚠️", "Exception while getting jobs for plugin " + plugin_name + " : " + traceback.format_exc())
        return jobs

    def __str_to_schedule(self, every):
        """Convert an "every" keyword into a schedule.Job.

        :raises Exception: when *every* is not one of minute/hour/day/week
        """
        # NOTE : schedule.every() registers a job on each call, so this must
        # stay lazy (no dict of pre-built schedules)
        if every == "minute":
            return schedule.every().minute
        if every == "hour":
            return schedule.every().hour
        if every == "day":
            return schedule.every().day
        if every == "week":
            return schedule.every().week
        raise Exception("can't convert every string \"" + every + "\" to schedule")

    def __reload(self):
        """Reload nginx, locally when running alongside it, otherwise via the APIs."""
        if os.path.isfile("/usr/sbin/nginx") and os.path.isfile("/opt/bunkerweb/tmp/nginx.pid"):
            log("SCHEDULER", "", "Reloading nginx ...")
            proc = subprocess.run(["/usr/sbin/nginx", "-s", "reload"], stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, env=self.__env)
            # Bugfix : a 0 exit code means the reload succeeded (test was inverted)
            reload = proc.returncode == 0
        else:
            log("SCHEDULER", "", "Reloading nginx ...")
            reload = self._send_to_apis("POST", "/reload")
        if reload:
            log("SCHEDULER", "", "Successfuly reloaded nginx")
        else:
            log("SCHEDULER", "", "Error while reloading nginx")
        return reload

    def __gen_conf(self):
        """Regenerate the nginx configuration ; return True on success."""
        cmd = "/opt/bunkerweb/gen/main.py --settings /opt/bunkerweb/settings.json --templates /opt/bunkerweb/confs --output /etc/nginx --variables /tmp/autoconf.env"
        proc = subprocess.run(cmd.split(" "), stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT)
        return proc.returncode == 0

    def __job_wrapper(self, path, plugin, name, file):
        """Run one job script and return its exit code.

        Returns 0 on success, 1 when the job succeeded and requests a reload,
        >= 2 on failure (2 is also returned when the script can't be started).
        """
        log("SCHEDULER", "", "Executing job " + name + " from plugin " + plugin + " ...")
        try:
            proc = subprocess.run(path + "/jobs/" + file, stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, env=self.__env)
        except Exception:
            log("SCHEDULER", "", "Exception while executing job " + name + " from plugin " + plugin + " : " + traceback.format_exc())
            return 2
        # Bugfix : callers (run_pending / run_once) test the return value with
        # == 1 and >= 2, so the exit code must be returned, not a boolean
        if proc.returncode >= 2:
            log("SCHEDULER", "", "Error while executing job " + name + " from plugin " + plugin)
        else:
            log("SCHEDULER", "", "Successfuly executed job " + name + " from plugin " + plugin)
        return proc.returncode

    def setup(self):
        """Register every recurring job (every != "once") with the schedule module."""
        for plugin, jobs in self.__jobs.items():
            for job in jobs:
                try:
                    path = job["path"]
                    name = job["name"]
                    file = job["file"]
                    every = job["every"]
                    if every != "once":
                        self.__str_to_schedule(every).do(self.__job_wrapper, path, plugin, name, file)
                except Exception:
                    log("SCHEDULER", "⚠️", "Exception while scheduling jobs for plugin " + plugin + " : " + traceback.format_exc())

    def run_pending(self):
        """Run the due jobs and, if one requested it, regenerate the config,
        push the data files and reload nginx. Returns True when everything
        succeeded."""
        if self.__lock is not None:
            self.__lock.acquire()
        try:
            jobs = [job for job in schedule.jobs if job.should_run]
            success = True
            reload = False
            for job in jobs:
                ret = job.run()
                if ret == 1:
                    # Exit code 1 : job succeeded and produced new data
                    reload = True
                elif ret >= 2:
                    success = False
            if reload:
                if not self.__gen_conf():
                    success = False
                if not self._send_files("/data", "/data"):
                    success = False
                if not self.__reload():
                    success = False
        finally:
            # Always release so an unexpected error can't deadlock the scheduler
            if self.__lock is not None:
                self.__lock.release()
        return success

    def run_once(self):
        """Run every known job once, immediately ; return False if any failed."""
        ret = True
        for plugin, jobs in self.__jobs.items():
            for job in jobs:
                try:
                    path = job["path"]
                    name = job["name"]
                    file = job["file"]
                    # Exit codes >= 2 mean failure (see __job_wrapper)
                    if self.__job_wrapper(path, plugin, name, file) >= 2:
                        ret = False
                except Exception:
                    log("SCHEDULER", "⚠️", "Exception while running once jobs for plugin " + plugin + " : " + traceback.format_exc())
        return ret

    def clear(self):
        """Unregister every scheduled job."""
        schedule.clear()

    def reload(self, env, apis=None):
        """Replace the environment and APIs, rerun every job once and reschedule.

        Returns True on success, False when any step failed or raised.
        """
        ret = True
        try:
            self.__env = env
            super().__init__(apis if apis is not None else [])
            with open("/tmp/autoconf.env", "w") as f:
                for k, v in self.__env.items():
                    f.write(k + "=" + v + "\n")
            self.clear()
            self.__jobs = self.__get_jobs()
            if not self.run_once():
                ret = False
            self.setup()
        except Exception:
            log("SCHEDULER", "⚠️", "Exception while reloading scheduler " + traceback.format_exc())
            return False
        return ret

6
job/logger.py Normal file
View File

@@ -0,0 +1,6 @@
import datetime
def log(title, severity, msg):
    """Print a timestamped "title - severity - msg" line, flushing stdout."""
    now = datetime.datetime.today()
    stamp = now.strftime("[%Y-%m-%d %H:%M:%S]")
    line = stamp + " " + title + " - " + severity + " - " + msg
    print(line, flush=True)

100
job/main.py Normal file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/python3
import argparse, sys, re, signal, time, traceback, os
import sys
sys.path.append("/opt/bunkerweb/deps/python")
sys.path.append("/opt/bunkerweb/jobs")
sys.path.append("/opt/bunkerweb/utils")
from dotenv import dotenv_values
from threading import Lock
from logger import log
from JobScheduler import JobScheduler
# Global state shared with the signal handlers below
run = True  # main loop keeps iterating while True; cleared by handle_stop
scheduler = None  # JobScheduler instance, created in the __main__ block
reloading = False  # NOTE(review): declared global in handle_reload but never read or written in this file
def handle_stop(signum, frame):
    """SIGINT/SIGTERM handler : stop the main loop, drop all scheduled jobs
    and exit with status 0."""
    global run, scheduler
    run = False
    current = scheduler
    if current is not None:
        current.clear()
    stop(0)


signal.signal(signal.SIGINT, handle_stop)
signal.signal(signal.SIGTERM, handle_stop)
def handle_reload(env):
    """Reload the scheduler from the env file at *env* (best effort, logged)."""
    global run, scheduler, reloading
    try:
        # Guard clause : nothing to do when the scheduler isn't running
        if scheduler is None or not run:
            log("SCHEDULER", "⚠️", "Ignored reload operation because scheduler is not running ...")
            return
        outcome = scheduler.reload(dotenv_values(env))
        if outcome:
            log("SCHEDULER", "", "Reload successful")
        else:
            log("SCHEDULER", "", "Reload failed")
    except:
        log("SCHEDULER", "", "Exception while reloading scheduler : " + traceback.format_exc())
def handle_reload_bw(signum, frame):
    """SIGUSR1 handler : reload using the bunkerweb variables file."""
    handle_reload("/etc/nginx/variables.env")


signal.signal(signal.SIGUSR1, handle_reload_bw)
def handle_reload_api(signum, frame):
    """SIGUSR2 handler : reload using the env file written through the API."""
    handle_reload("/opt/bunkerweb/tmp/jobs.env")


signal.signal(signal.SIGUSR2, handle_reload_api)
def stop(status):
    """Remove the scheduler pid file and terminate immediately with *status*.

    Uses os._exit so the process ends without running cleanup handlers.
    """
    # Robustness : the pid file may already be gone (e.g. stop() called from
    # an early failure path) ; don't let stop() itself raise and mask the exit
    try:
        os.remove("/opt/bunkerweb/tmp/scheduler.pid")
    except FileNotFoundError:
        pass
    os._exit(status)
if __name__ == "__main__":
    try:
        # Don't execute if pid file exists (another scheduler owns it)
        if os.path.isfile("/opt/bunkerweb/tmp/scheduler.pid"):
            log("SCHEDULER", "", "Scheduler is already running, skipping execution ...")
            os._exit(1)
        # Write pid to file
        with open("/opt/bunkerweb/tmp/scheduler.pid", "w") as f:
            f.write(str(os.getpid()))
        # Parse arguments
        parser = argparse.ArgumentParser(description="Job scheduler for BunkerWeb")
        parser.add_argument("--run", action="store_true", help="only run jobs one time in foreground")
        parser.add_argument("--variables", default="/etc/nginx/variables.env", type=str, help="path to the variables")
        args = parser.parse_args()
        # Read env file
        env = dotenv_values(args.variables)
        # Instantiate scheduler
        scheduler = JobScheduler(env=env)
        # Only run jobs once
        log("SCHEDULER", "", "Executing job scheduler ...")
        if args.run:
            ret = scheduler.run_once()
            if not ret:
                log("SCHEDULER", "", "At least one job in run_once() failed")
                stop(1)
            else:
                log("SCHEDULER", "", "All jobs in run_once() were successful")
        # Or infinite schedule
        else:
            scheduler.setup()
            # Poll once per second ; the signal handlers above flip `run`
            # (stop) or call scheduler.reload() (SIGUSR1/SIGUSR2)
            while run:
                scheduler.run_pending()
                time.sleep(1)
    except:
        log("SCHEDULER", "", "Exception while executing scheduler : " + traceback.format_exc())
        stop(1)
    log("SCHEDULER", "", "Job scheduler stopped")
    stop(0)