bunkerweb 1.4.0
45
utils/ApiCaller.py
Normal file
@@ -0,0 +1,45 @@
from io import BytesIO
import tarfile

from logger import log


class ApiCaller:

    def __init__(self, apis=None):
        # Use None as the default to avoid sharing one mutable list
        # across instances
        self.__apis = apis if apis is not None else []

    def _set_apis(self, apis):
        self.__apis = apis

    def _get_apis(self):
        return self.__apis

    def _send_to_apis(self, method, url, files=None, data=None):
        ret = True
        for api in self.__apis:
            # Rewind every file buffer so each API receives the full content
            if files is not None:
                for file, buffer in files.items():
                    buffer.seek(0, 0)
            sent, err, status, resp = api.request(method, url, files=files, data=data)
            if not sent:
                ret = False
                log("API", "❌", "Can't send API request to " + api.get_endpoint() + url + " : " + err)
            elif status != 200:
                ret = False
                log("API", "❌", "Error while sending API request to " + api.get_endpoint() + url + " : status = " + resp["status"] + ", msg = " + resp["msg"])
            else:
                log("API", "ℹ️", "Successfully sent API request to " + api.get_endpoint() + url)
        return ret

    def _send_files(self, path, url):
        # Pack the given directory into an in-memory gzipped tarball
        # and POST it to every configured API
        ret = True
        tgz = BytesIO()
        with tarfile.open(mode="w:gz", fileobj=tgz) as tf:
            tf.add(path, arcname=".")
        tgz.seek(0, 0)
        files = {"archive.tar.gz": tgz}
        if not self._send_to_apis("POST", url, files=files):
            ret = False
        tgz.close()
        return ret
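For context, a minimal usage sketch, not part of this commit: ApiCaller only assumes objects exposing the request() and get_endpoint() methods it calls above, so a stub client (StubAPI here is hypothetical) is enough to exercise it. The (sent, err, status, resp) tuple shape is taken from how _send_to_apis unpacks the return value.

# Hypothetical sketch: a stub implementing the two methods ApiCaller
# relies on, api.request() and api.get_endpoint()
from ApiCaller import ApiCaller

class StubAPI:
    def get_endpoint(self):
        return "http://bunkerweb:5000"
    def request(self, method, url, files=None, data=None):
        # Pretend the request succeeded: (sent, err, status, response)
        return True, "", 200, {"status": "success", "msg": "ok"}

caller = ApiCaller(apis=[StubAPI()])
caller._send_to_apis("POST", "/reload")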
49
utils/ConfigCaller.py
Normal file
@@ -0,0 +1,49 @@
from json import loads
from glob import glob
from re import match


class ConfigCaller:

    def __init__(self):
        # Load the core settings, then merge in the settings declared
        # by every core and external plugin
        with open("/opt/bunkerweb/settings.json") as f:
            self._settings = loads(f.read())
        for plugin in glob("/opt/bunkerweb/core/*/plugin.json") + glob("/opt/bunkerweb/plugins/*/plugin.json"):
            with open(plugin) as f:
                self._settings.update(loads(f.read())["settings"])

    def _is_setting(self, setting):
        return setting in self._settings

    def _is_global_setting(self, setting):
        if setting in self._settings:
            return self._settings[setting]["context"] == "global"
        # A numbered suffix (e.g. SETTING_1) may refer to a "multiple" setting
        if match(r"^.+_\d+$", setting):
            multiple_setting = "_".join(setting.split("_")[0:-1])
            return multiple_setting in self._settings and self._settings[multiple_setting]["context"] == "global" and "multiple" in self._settings[multiple_setting]
        return False

    def _is_multisite_setting(self, setting):
        if setting in self._settings:
            return self._settings[setting]["context"] == "multisite"
        if match(r"^.+_\d+$", setting):
            multiple_setting = "_".join(setting.split("_")[0:-1])
            return multiple_setting in self._settings and self._settings[multiple_setting]["context"] == "multisite" and "multiple" in self._settings[multiple_setting]
        return False

    def _full_env(self, env_instances, env_services):
        full_env = {}
        # Fill with default values
        for k, v in self._settings.items():
            full_env[k] = v["default"]
        # Replace with instance values; in multisite mode, propagate
        # non-global settings to every server name
        for k, v in env_instances.items():
            full_env[k] = v
            if (not self._is_global_setting(k) and
                env_instances.get("MULTISITE", "") == "yes" and
                env_instances.get("SERVER_NAME", "") != ""):
                for server_name in env_instances["SERVER_NAME"].split(" "):
                    full_env[server_name + "_" + k] = v
        # Replace with service values
        for k, v in env_services.items():
            full_env[k] = v
        return full_env
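To illustrate the multisite propagation in _full_env, a hypothetical run; it assumes USE_GZIP is a multisite-context setting in settings.json (an assumption for this sketch) and that settings.json is readable at its hardcoded path.

# Hypothetical example: an instance-level multisite setting is prefixed
# onto every server name, then service-level values win
caller = ConfigCaller()
env = caller._full_env(
    {"MULTISITE": "yes", "SERVER_NAME": "app1.example.com app2.example.com", "USE_GZIP": "yes"},
    {"app1.example.com_USE_GZIP": "no"},
)
# env["app1.example.com_USE_GZIP"] == "no"   (service value overrides)
# env["app2.example.com_USE_GZIP"] == "yes"  (propagated instance value)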
0
utils/__init__.py
Normal file
69
utils/jobs.py
Normal file
@@ -0,0 +1,69 @@
import traceback, json, hashlib
from os import path, remove
from shutil import copy
from datetime import datetime


# Each cached file is paired with a <file>.md metadata file of the form:
# {
#     "date": timestamp,
#     "checksum": sha512
# }

def is_cached_file(file, expire):
    # A file is considered cached if it exists, has metadata, and its
    # timestamp is younger than the given expiration window
    is_cached = False
    try:
        if not path.isfile(file):
            return False
        if not path.isfile(file + ".md"):
            return False
        cached_time = 0
        with open(file + ".md", "r") as f:
            cached_time = json.loads(f.read())["date"]
        current_time = datetime.timestamp(datetime.now())
        if current_time < cached_time:
            return False
        diff_time = current_time - cached_time
        if expire == "hour":
            is_cached = diff_time < 3600
        elif expire == "day":
            is_cached = diff_time < 86400
        elif expire == "month":
            is_cached = diff_time < 2592000
    except Exception:
        is_cached = False
    return is_cached

def file_hash(file):
    # Stream the file in 1 KiB chunks to hash it without loading it whole
    sha512 = hashlib.sha512()
    with open(file, "rb") as f:
        while True:
            data = f.read(1024)
            if not data:
                break
            sha512.update(data)
    return sha512.hexdigest()

def cache_hash(cache):
    try:
        with open(cache + ".md", "r") as f:
            return json.loads(f.read())["checksum"]
    except Exception:
        pass
    return None

def cache_file(file, cache, _hash):
    # Move the file into the cache and write its metadata alongside it
    ret, err = True, "success"
    try:
        copy(file, cache)
        remove(file)
        with open(cache + ".md", "w") as f:
            md = {
                "date": datetime.timestamp(datetime.now()),
                "checksum": _hash
            }
            f.write(json.dumps(md))
    except Exception:
        return False, "exception : " + traceback.format_exc()
    return ret, err
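A sketch of how a job might combine these helpers; the paths and the "downloaded beforehand" step are illustrative assumptions, not part of this commit.

# Hypothetical job flow: reuse the cached copy while it is fresh,
# otherwise hash the newly downloaded file and re-cache it
from jobs import is_cached_file, file_hash, cache_hash, cache_file

cache = "/opt/bunkerweb/cache/blacklist.txt"
if not is_cached_file(cache, "day"):
    new_file = "/tmp/blacklist.txt"  # assumed downloaded by the job beforehand
    _hash = file_hash(new_file)
    if _hash != cache_hash(cache):   # only re-cache when the content changed
        ok, err = cache_file(new_file, cache, _hash)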
6
utils/logger.py
Normal file
@@ -0,0 +1,6 @@
import datetime

def log(title, severity, msg):
    # Print a timestamped, flushed log line: "[date time] TITLE - severity - msg"
    when = datetime.datetime.today().strftime("[%Y-%m-%d %H:%M:%S]")
    what = title + " - " + severity + " - " + msg
    print(when + " " + what, flush=True)
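And a quick usage sketch of the resulting format (the timestamp shown is illustrative):

from logger import log

log("JOBS", "ℹ️", "Cache is still valid")
# prints: [2022-05-01 10:00:00] JOBS - ℹ️ - Cache is still valid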