Initial YakPanel commit
This commit is contained in:
36
mod/base/__init__.py
Normal file
36
mod/base/__init__.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import time
|
||||
from typing import Dict, List, Tuple, Union
|
||||
|
||||
from .process import RealProcess, Process
|
||||
from .process import RealUser, User
|
||||
from .process import RealServer, Server
|
||||
|
||||
|
||||
def json_response(
        status: bool,
        msg: str = None,
        data: Union[Dict, List, Tuple, bool, str, int, float] = None,
        code: int = 0,
        args: Union[List[str], Tuple[str]] = None,
):
    """Build the panel's uniform JSON-serializable response dict.

    Args:
        status: True for success, False for failure.
        msg: human-readable message; 1-based placeholders ``{1}``..``{n}``
            are substituted with the corresponding entries of ``args``.
        data: payload; used as the message when ``msg`` is None.
        code: numeric code (accepted for compatibility; not included in the
            returned dict).
        args: positional substitutions for the ``{n}`` placeholders.

    Returns:
        dict with ``status`` (0 on success, -1 on failure), ``timestamp``
        (epoch seconds) and ``message``.
    """
    if isinstance(msg, str) and args is not None:
        # "{1}" is replaced by args[0], "{2}" by args[1], and so on.
        for idx, arg in enumerate(args, start=1):
            msg = msg.replace('{' + str(idx) + '}', arg)
    if msg is None:
        # Fall back to the raw payload when no message was supplied.
        msg = data
    return {
        "status": 0 if status else -1,
        "timestamp": int(time.time()),
        "message": msg,
    }
|
||||
135
mod/base/backup_tool/__init__.py
Normal file
135
mod/base/backup_tool/__init__.py
Normal file
@@ -0,0 +1,135 @@
|
||||
import os
|
||||
import time
|
||||
from hashlib import md5
|
||||
from typing import Optional, List, Union, Dict, Any
|
||||
|
||||
from .util import DB, ExecShell, write_file, write_log
|
||||
from .versions_tool import VersionTool
|
||||
|
||||
|
||||
class BackupTool:
    """Create tar.gz backups of a directory and record them in the panel DB."""

    def __init__(self):
        self._backup_path: Optional[str] = None  # lazily read from the config table
        self._sub_dir_name: str = ""             # sub-directory under backup_path
        self.exec_log_file = "/tmp/mod_backup_exec.log"

    @staticmethod
    def _hash_src_name(name: Union[str, bytes]) -> str:
        """Hex md5 of *name*; keys the per-source "in progress" tip file."""
        if isinstance(name, str):
            name = name.encode('utf-8')
        md5_obj = md5()
        md5_obj.update(name)
        return md5_obj.hexdigest()

    @property
    def backup_path(self) -> str:
        """Backup root from the panel config; falls back to /www/backup."""
        if self._backup_path is None:
            config_data = DB("config").where("id=?", (1,)).select()
            if isinstance(config_data, dict):
                path = config_data["backup_path"]
            else:  # query failed
                path = "/www/backup"
            self._backup_path = path
        return self._backup_path

    def set_sub_dir(self, sub_dir: Union[str, List[str]]) -> Optional[str]:
        """Set the sub-directory, e.g. "site/aaa" or ["site", "aaa"].

        Returns an error message for unsupported types, None on success.
        """
        if isinstance(sub_dir, str):
            self._sub_dir_name = sub_dir.strip("./")
        elif isinstance(sub_dir, list):
            self._sub_dir_name = "/".join(filter(None, [i.strip("./") for i in sub_dir]))
        else:
            return "Unsupported type settings"
        return None

    def backup(self,
               src: str,                               # source path to archive
               backup_path: Optional[str] = None,      # backup root (default: panel config)
               sub_dir: Union[str, List[str]] = None,  # sub-directory under the root
               site_info: Dict[str, Any] = None,       # related site info; must contain id and name
               sync=False                              # run synchronously; default: background thread
               ) -> Optional[str]:
        """Archive *src* into the backup directory as a tar.gz.

        Returns an error message string, or None when the job was started
        (async) / completed (sync) successfully.
        """
        if not os.path.exists(src):
            return "The source path does not exist"
        if backup_path is None:
            backup_path = self.backup_path

        if not os.path.exists(backup_path):
            return "The backup directory does not exist"
        if sub_dir is not None:
            set_res = self.set_sub_dir(sub_dir)
            if set_res is not None:
                return set_res

        target_path = os.path.join(backup_path, self._sub_dir_name)
        if not os.path.isdir(target_path):
            os.makedirs(target_path)
        zip_name = "{}_{}.tar.gz".format(os.path.basename(src), time.strftime('%Y%m%d_%H%M%S', time.localtime()))
        if sync:
            return self._sync_backup(src, target_path, zip_name, site_info)
        return self._async_backup(src, target_path, zip_name, site_info)

    def _run_backup(self, src: str, target_path: str, zip_name: str, site_info: dict):
        """Run the tar command and record the backup; shared by sync/async paths."""
        write_file(self.exec_log_file, "")
        exec_str = ("cd {} && "
                    "tar -zcvf '{}' --exclude=.user.ini ./ 2>&1 > {} \n"
                    "echo '---Backup execution completed---' >> {}"
                    ).format(src, os.path.join(target_path, zip_name), self.exec_log_file, self.exec_log_file)
        ExecShell(exec_str)
        if site_info is not None and "id" in site_info and "name" in site_info:
            DB('backup').add(
                'type,name,pid,filename,size,addtime',
                (0, zip_name, site_info["id"], os.path.join(target_path, zip_name), 0, self.get_date())
            )
            write_log('TYPE_SITE', 'SITE_BACKUP_SUCCESS', (site_info["name"],))

    def _sync_backup(self, src: str, target_path: str, zip_name: str, site_info: dict):
        """Run the backup in the calling thread; error message or None."""
        try:
            self._run_backup(src, target_path, zip_name, site_info)
        except Exception:
            return "The backup execution failed"

    def _async_backup(self, src: str, target_path: str, zip_name: str, site_info: dict):
        """Run the backup in a daemon-less background thread, guarded by a tip file."""
        import threading

        hash_name = self._hash_src_name(src)
        backup_tip_path = "/tmp/mod_backup_tip"
        # BUG FIX: the original called makedirs only when the directory
        # already existed, which both crashed and never created it.
        if not os.path.exists(backup_tip_path):
            os.makedirs(backup_tip_path)

        tip_file = os.path.join(backup_tip_path, hash_name)
        if os.path.isfile(tip_file):
            mtime = os.stat(tip_file).st_mtime
            if time.time() - mtime > 60 * 20:
                # Stale for 20 minutes: assume the previous run died and allow a retry.
                os.remove(tip_file)
            else:
                return "The backup is in progress, do not proceed"

        write_file(tip_file, "")

        def _back_p():
            try:
                self._run_backup(src, target_path, zip_name, site_info)
            except Exception:
                pass  # best-effort: failures remain visible via the exec log
            finally:
                if os.path.exists(tip_file):
                    os.remove(tip_file)

        threading.Thread(target=_back_p).start()

    @staticmethod
    def get_date():
        """Current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
        return time.strftime('%Y-%m-%d %X', time.localtime())
||||
65
mod/base/backup_tool/util.py
Normal file
65
mod/base/backup_tool/util.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import sys
|
||||
from typing import Optional, Callable, Tuple, Union
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
|
||||
# Re-export panel helpers under local snake_case names.
ExecShell: Callable = public.ExecShell
# public.WriteLog(log_type, message_key, args) — return value defined by the panel.
write_log: Callable[[str, str, Tuple], Union[int, str, type(None)]] = public.WriteLog
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """Write *s_body* to *filename*, creating the file if needed.

    Tries the platform default encoding first, then retries with UTF-8.
    Best-effort helper: returns True on success, False on any failure.
    """
    try:
        # "with" guarantees the handle is closed even if write() raises;
        # the original leaked the handle on a failed write.
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except Exception:
        try:
            with open(filename, mode=mode, encoding="utf-8") as fp:
                fp.write(s_body)
            return True
        except Exception:
            return False
|
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """Read and return the whole content of *filename*.

    Returns None when the file does not exist or cannot be read.
    """
    import os
    if not os.path.exists(filename):
        return None
    try:
        # Context manager replaces the manual open/close + finally dance.
        with open(filename, mode=mode) as fp:
            return fp.read()
    except Exception:
        return None
|
||||
|
||||
|
||||
class _DB:
    """Callable facade: ``_DB()(table)`` yields a panel Sql query bound to *table*."""

    def __call__(self, table: str):
        # Imported lazily so this module can be loaded outside the panel tree.
        import db
        with db.Sql() as query:
            query.table(table)
            return query


# Module-level singleton; use as DB("table_name").
DB = _DB()
||||
293
mod/base/backup_tool/versions_tool.py
Normal file
293
mod/base/backup_tool/versions_tool.py
Normal file
@@ -0,0 +1,293 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import time
|
||||
from hashlib import md5
|
||||
from typing import Optional, List, Union, Dict, Any
|
||||
|
||||
from .util import DB, ExecShell, write_file, write_log, read_file
|
||||
|
||||
|
||||
class VersionTool:
    """Publish, restore and manage versioned tar.gz snapshots of a project."""

    _config_file = "/www/server/panel/data/version_config.json"

    def __init__(self):
        # project_name -> list of version entries (see publish_* for the shape)
        self._config: Optional[Dict[str, List[Dict[str, Any]]]] = None
        self._pack_class = BasePack
        self.pack_path = "/www/backup/versions"
        if not os.path.isdir(self.pack_path):
            os.makedirs(self.pack_path)

    @property
    def config(self) -> Dict[str, List[Dict[str, Any]]]:
        """Version registry, loaded lazily from the JSON config file."""
        if self._config is not None:
            return self._config

        data = {}
        try:
            res = read_file(self._config_file)
            if isinstance(res, str):
                data = json.loads(res)
        # BUG FIX: the original caught json.JSONDecoder — a class, not an
        # exception — which would raise TypeError instead of handling errors.
        except (json.JSONDecodeError, TypeError, ValueError):
            pass
        self._config = data
        return self._config

    def save_config(self):
        """Persist the in-memory registry to disk (no-op when never loaded)."""
        if self._config is not None:
            write_file(self._config_file, json.dumps(self._config))

    def add_to_config(self, data: dict):
        """Append a version entry and persist it, after re-reading from disk."""
        project_name = data.get("project_name")
        self._config = None  # force a fresh load so on-disk edits are kept
        if project_name not in self.config:
            self.config[project_name] = []
        self.config[project_name].append(data)
        self.save_config()

    def set_pack_class(self, pack_cls):
        """Override the class used to package a source tree (default BasePack)."""
        self._pack_class = pack_cls

    def version_list(self, project_name: str):
        """All version entries for *project_name* ([] when unknown)."""
        if project_name in self.config:
            return self.config[project_name]
        return []

    def get_version_info(self, project_name: str, version: str) -> Optional[dict]:
        """The entry matching *version* for *project_name*, or None."""
        for entry in self.config.get(project_name, []):
            if entry.get("version") == version:
                return entry
        return None

    # Package the files under a path and publish them as one version.
    def publish_by_src_path(self,
                            project_name: str,               # project name
                            src_path: str,                   # source directory
                            version: str,                    # version string
                            ps: Optional[str] = None,        # remark
                            other: Optional[dict] = None,    # extra info, forwarded to the packer
                            sync: bool = False,              # run synchronously
                            ):
        """Publish *src_path* as *version*; error string or packer result."""
        if self.get_version_info(project_name, version) is not None:
            return "The current version already exists"
        if not os.path.isdir(src_path):
            return "The source path does not exist"

        if ps is None:
            ps = ''
        if other is None:
            other = {}

        zip_name = "{}_{}.tar.gz".format(
            os.path.basename(src_path), time.strftime('%Y%m%d_%H%M%S', time.localtime())
        )

        data = {
            "project_name": project_name,
            "version": version,
            "ps": ps,
            "other": other,
            "zip_name": zip_name,
            "backup_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
        }
        return self._pack_class(src_path, self.pack_path, zip_name, sync=sync, vt=self, data=data)(**other)

    def recover(self,
                project_name: str,   # project name
                version: str,        # version to restore
                target_path: str,    # directory to restore into
                run_path=None        # site run dir holding .user.ini; defaults to target_path
                ):
        """Restore *version* into *target_path*, preserving .user.ini.

        Returns an error string on failure, True on success.
        """
        if not run_path:
            run_path = target_path
        if project_name not in self.config:
            return 'The project does not exist'

        target = self.get_version_info(project_name, version)
        if target is None:
            return 'Version does not exist'

        file = os.path.join(self.pack_path, target["zip_name"])
        if not os.path.exists(file):
            return 'Version file missing'

        tmp_path = '/tmp/version_{}'.format(int(time.time()))
        # Close the archive deterministically; the original leaked the handle.
        # NOTE(review): archives are produced by this tool itself, so
        # extractall() is not hardened against hostile member paths.
        with tarfile.open(file, mode='r') as tar:
            tar.extractall(tmp_path)
        user_data = None
        if os.path.exists(target_path):
            ExecShell("chattr -i -R {}/".format(target_path))
            user_data = read_file(run_path + "/.user.ini")
            ExecShell("rm -rf {}".format(target_path))
        if not os.path.exists(target_path):
            os.makedirs(target_path)
        ExecShell(r"\cp -rf {}/* {}".format(tmp_path, target_path))
        if user_data:
            # BUG FIX: the original wrote the run_path string into .user.ini
            # instead of the content that was just read to be preserved.
            write_file(target_path + "/.user.ini", user_data)
            ExecShell("chattr +i {}/.user.ini".format(run_path))
        ExecShell("rm -rf {}".format(tmp_path))
        return True

    def publish_by_file(self,
                        project_name: str,              # project name
                        src_file: str,                  # pre-built archive to register
                        version: str,                   # version string
                        ps: Optional[str] = None,       # remark
                        other: Optional[dict] = None,   # extra info
                        ):
        """Register an existing archive file as a version.

        Returns an error message string, or None on success.
        """
        if self.get_version_info(project_name, version) is not None:
            return "The current version already exists"

        if not os.path.isfile(src_file):
            return "The source path does not exist"

        if ps is None:
            ps = ''
        if other is None:
            other = {}

        zip_name = os.path.basename(src_file)

        data = {
            "project_name": project_name,
            "version": version,
            "ps": ps,
            "other": other,
            "zip_name": zip_name,
            "backup_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
        }
        try:
            shutil.copy(src_file, self.pack_path + "/" + zip_name)
        except Exception:
            return "File save failed"
        self.add_to_config(data)
        return None

    def remove(self,
               project_name: str,   # project name
               version: str,        # version to delete
               ) -> Optional[str]:
        """Delete a version's archive and registry entry; error string or None."""
        if project_name not in self.config:
            return 'The project does not exist'

        target = self.get_version_info(project_name, version)
        if target is None:
            return 'Version does not exist'

        file = os.path.join(self.pack_path, target["zip_name"])
        if os.path.isfile(file):
            os.remove(file)

        self.config[project_name].remove(target)
        self.save_config()
        return None

    def set_ps(self, name: str, version: str, ps: str):
        """Update the remark of the matching version entry; always returns True."""
        # Plain loop instead of the original side-effecting list comprehension.
        for entry in self.config[name]:
            if entry["version"] == version:
                entry['ps'] = ps
        self.save_config()
        return True
||||
|
||||
|
||||
class BasePack:
    """Default packer: tars a source tree and registers it via a VersionTool."""

    exec_log_file = "/tmp/project_pack.log"

    # String annotation avoids evaluating the VersionTool name at def time.
    def __init__(self, src_path, target_path, zip_name, sync=False, vt: 'VersionTool' = None, data: dict = None):
        self.src_path = src_path
        self.target_path = target_path
        self.zip_name = zip_name
        self.sync = sync
        self.v = vt            # owning VersionTool; receives the new entry
        self._add_data = data  # version entry to register on success

    def save_config(self):
        """Register the packaged version with the owning VersionTool."""
        self.v.add_to_config(self._add_data)

    def __call__(self, *args, **kwargs) -> Optional[str]:
        """Run the packaging; returns an error string, or None/job result."""
        if not os.path.exists(self.src_path):
            return "The source path does not exist"
        target_path = "/www/backup/versions"

        if not os.path.isdir(target_path):
            os.makedirs(target_path)
        if self.sync:
            return self._sync_backup(self.src_path, target_path, self.zip_name)
        return self._async_backup(self.src_path, target_path, self.zip_name)

    def _run_pack(self, src: str, target_path: str, zip_name: str, done_msg: str):
        """Shared tar + registry step used by both sync and async paths."""
        write_file(self.exec_log_file, "")
        exec_str = ("cd {} && "
                    "tar -zcvf '{}' --exclude=.user.ini ./ 2>&1 > {} \n"
                    "echo '{}' >> {}"
                    ).format(src, os.path.join(target_path, zip_name), self.exec_log_file,
                             done_msg, self.exec_log_file)
        ExecShell(exec_str)
        self.save_config()

    def _sync_backup(self, src: str, target_path: str, zip_name: str) -> Optional[str]:
        """Pack in the calling thread; error message or None."""
        try:
            self._run_pack(src, target_path, zip_name, '---The packaging execution is complete---')
        except Exception:
            return "The packaging execution failed"

    def _async_backup(self, src: str, target_path: str, zip_name: str):
        """Pack in a background thread, guarded by a per-source tip file."""
        import threading
        hash_name = self._hash_src_name(src)
        backup_tip_path = "/tmp/mod_version_tip"
        # BUG FIX: the original called makedirs only when the directory
        # already existed, which both crashed and never created it.
        if not os.path.exists(backup_tip_path):
            os.makedirs(backup_tip_path)

        tip_file = os.path.join(backup_tip_path, hash_name)
        if os.path.isfile(tip_file):
            mtime = os.stat(tip_file).st_mtime
            if time.time() - mtime > 60 * 20:
                # Stale for 20 minutes: assume the previous run died; allow a retry.
                os.remove(tip_file)
            else:
                return "Packing is in progress, please do not proceed"

        write_file(tip_file, "")

        def _back_p():
            try:
                self._run_pack(src, target_path, zip_name, '---Backup execution completed---')
            except Exception:
                pass  # best-effort; progress remains visible in the exec log
            finally:
                if os.path.exists(tip_file):
                    os.remove(tip_file)

        threading.Thread(target=_back_p).start()

    @staticmethod
    def _hash_src_name(name: Union[str, bytes]) -> str:
        """Hex md5 of *name*; keys the per-source tip file."""
        if isinstance(name, str):
            name = name.encode('utf-8')
        md5_obj = md5()
        md5_obj.update(name)
        return md5_obj.hexdigest()
|
||||
53
mod/base/database_tool/__init__.py
Normal file
53
mod/base/database_tool/__init__.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from .pgsql import PgsqlTool
|
||||
from .mongodb import MongodbTool
|
||||
from .mysql import MysqlTool
|
||||
from .sql_server import SQLServerTool
|
||||
|
||||
from typing import Optional
|
||||
|
||||
# Database engines the panel can manage.
DB_TYPE = (
    "pgsql",
    "mongodb",
    "mysql",
    "sqlserver"
)


def add_database(db_type: str, data: dict) -> Optional[str]:
    """Create a database via the tool matching *db_type*.

    Recognized keys in *data*:
        database_name: database name
        server_id: database server id
        db_user: database user name
        password: password for the user
        dataAccess: access-restriction mode, e.g. ip
        address: allowed ip(s), used together with dataAccess
        codeing: character encoding
        ps: remark
        listen_ip: pgsql only, listen address

    Returns an error message string, or None on success.
    """
    if db_type not in DB_TYPE:
        return "Wrong database type"

    # Dispatch table instead of the original if/elif chain.
    tool_classes = {
        "pgsql": PgsqlTool,
        "mongodb": MongodbTool,
        "mysql": MysqlTool,
        "sqlserver": SQLServerTool,
    }
    tool = tool_classes[db_type]()

    # Work on a copy so the caller's dict is not mutated by the pops below.
    data = dict(data)
    f, msg = tool.add_database(data.pop("server_id"), data.pop("database_name"), **data)
    if not f:
        return msg
    return None
||||
|
||||
|
||||
# Public API of the database_tool package (star-import surface).
__all__ = [
    "PgsqlTool",
    "MongodbTool",
    "MysqlTool",
    "SQLServerTool",
    "add_database",
]
|
||||
34
mod/base/database_tool/base.py
Normal file
34
mod/base/database_tool/base.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import sys
|
||||
from typing import List, Dict, Optional
|
||||
from .util import DB
|
||||
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
from db_mysql import panelMysql
|
||||
from database import database
|
||||
from databaseModel.mongodbModel import main as mongodb
|
||||
from databaseModel.pgsqlModel import main as pgsql
|
||||
from databaseModel.sqlserverModel import main as sqlserver
|
||||
|
||||
|
||||
class BaseDatabaseTool:
    """Common base for the per-engine database tools."""

    # Engine name as stored in database_servers.db_type (set by subclasses).
    _type_name = ""

    def local_server_info(self) -> Optional[Dict]:
        """Info dict for the locally installed server, or None when absent."""
        raise NotImplementedError()

    # All manageable servers: remote entries from the DB plus the local one.
    def server_list(self) -> List[Dict]:
        # BUG FIX: the placeholder was quoted ('?'), so sqlite compared
        # db_type against the literal string "?" instead of binding
        # self._type_name as a parameter.
        data = DB('database_servers').where("LOWER(db_type)=LOWER(?)", (self._type_name, )).select()
        if not isinstance(data, list):
            data = []
        local_server = self.local_server_info()
        if local_server is not None:
            data.insert(0, local_server)
        return data

    # Create a database; implemented by subclasses.
    # NOTE(review): concrete implementations return (bool, str); the
    # List[Dict] annotation looks wrong but is kept unchanged — confirm.
    def add_database(self, server_id: int, database_name: str, **kwargs) -> List[Dict]:
        raise NotImplementedError()
51
mod/base/database_tool/mongodb.py
Normal file
51
mod/base/database_tool/mongodb.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from typing import Optional, Dict, List, Union, Tuple
|
||||
|
||||
from .base import BaseDatabaseTool, mongodb
|
||||
from .util import read_file, GET_CLASS
|
||||
|
||||
|
||||
class MongodbTool(BaseDatabaseTool):
    """Database tool for a MongoDB server managed by the panel."""

    _type_name = "mongodb"

    def local_server_info(self) -> Optional[Dict]:
        """Connection info for the local mongod install, or None when absent."""
        if not os.path.isfile("/www/server/mongodb/bin/mongod"):
            return None

        port = 27017  # default when the config is missing or has no port line
        conf_text = read_file('/www/server/mongodb/config.conf')
        if isinstance(conf_text, str):
            match = re.search(r"\s*port\s*:\s*(?P<port>\d+)", conf_text, re.M)
            if match:
                port = int(match.group("port"))

        return {
            'id': 0,
            'db_host': '127.0.0.1',
            'db_port': port,
            'db_user': 'root',
            'db_password': '',
            'ps': 'local server',
            'addtime': 0
        }

    def add_database(self, server_id: int, database_name: str, **kwargs) -> Tuple[bool, str]:
        """Create a database on the given server; returns (ok, message)."""
        request = GET_CLASS()
        request.name = database_name
        request.sid = server_id
        request.ps = kwargs.get("ps", "")
        result = mongodb().AddDatabase(request)
        if result["status"] is True:
            return True, "Successfully added"
        return False, result['msg']
||||
98
mod/base/database_tool/mysql.py
Normal file
98
mod/base/database_tool/mysql.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from typing import Optional, Dict, List, Union, Tuple
|
||||
|
||||
from .base import BaseDatabaseTool, panelMysql, database
|
||||
from .util import read_file, write_file, DB, GET_CLASS
|
||||
|
||||
|
||||
class MysqlTool(BaseDatabaseTool):
    """Database tool for the MySQL server managed by the panel."""

    _type_name = "mysql"

    def local_server_info(self) -> Optional[Dict]:
        """Connection info for the local MySQL install, or None when absent."""
        bin_path = "/www/server/mysql/bin/mysql"
        if not os.path.isfile(bin_path):
            return None

        # Parse the listen port from /etc/my.cnf; default to 3306.
        conf = read_file('/etc/my.cnf')
        default_port = 3306
        if not isinstance(conf, str):
            port = default_port
        else:
            port_res = re.search(r"\s*port\s*=\s*(?P<port>\d+)", conf, re.M)
            port = int(port_res.group("port")) if port_res else default_port

        return {
            'id': 0,
            'db_host': '127.0.0.1',
            'db_port': port,
            'db_user': 'root',
            'db_password': '',
            'ps': 'local server',
            'addtime': 0
        }

    # Create a database; returns (ok, message).
    # (A ~50-line commented-out server_status() draft was removed here.)
    def add_database(self, server_id: int, database_name: str, **kwargs) -> Tuple[bool, str]:
        get_obj = GET_CLASS()
        get_obj.name = database_name
        get_obj.sid = server_id
        get_obj.db_user = kwargs.get("db_user", "")
        get_obj.password = kwargs.get("password", "")
        get_obj.dataAccess = kwargs.get("dataAccess", "")
        get_obj.address = kwargs.get("address", "")
        get_obj.codeing = kwargs.get("codeing", "")
        get_obj.dtype = "MySQL"
        get_obj.ps = kwargs.get("ps", "")
        get_obj.host = kwargs.get("host", "")
        get_obj.pid = str(kwargs.get("pid", '0'))
        res = database().AddDatabase(get_obj)
        if res["status"] is True:
            return True, "Successfully added"
        else:
            return False, res['msg']
||||
54
mod/base/database_tool/pgsql.py
Normal file
54
mod/base/database_tool/pgsql.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from typing import Optional, Dict, List, Union, Tuple
|
||||
|
||||
from .base import BaseDatabaseTool, pgsql
|
||||
from .util import read_file, GET_CLASS
|
||||
|
||||
|
||||
class PgsqlTool(BaseDatabaseTool):
    """Database tool for a PostgreSQL server managed by the panel."""

    _type_name = "pgsql"

    def local_server_info(self) -> Optional[Dict]:
        """Connection info for the local pgsql install, or None when absent."""
        if not os.path.isfile("/www/server/pgsql/bin/postgres"):
            return None

        port = 5432  # default when the config is missing or has no port line
        conf_text = read_file('/www/server/pgsql/data/postgresql.conf')
        if isinstance(conf_text, str):
            match = re.search(r"\s*port\s*=\s*(?P<port>\d+)", conf_text, re.M)
            if match:
                port = int(match.group("port"))

        return {
            'id': 0,
            'db_host': '127.0.0.1',
            'db_port': port,
            'db_user': 'root',
            'db_password': '',
            'ps': 'local server',
            'addtime': 0
        }

    def add_database(self, server_id: int, database_name: str, **kwargs) -> Tuple[bool, str]:
        """Create a database on the given server; returns (ok, message)."""
        request = GET_CLASS()
        request.name = database_name
        request.sid = server_id
        request.ps = kwargs.get("ps", "")
        request.db_user = kwargs.get("db_user", "")
        request.password = kwargs.get("password", "")
        request.listen_ip = kwargs.get("listen_ip", "")
        result = pgsql().AddDatabase(request)
        if result["status"] is True:
            return True, "Successfully added"
        return False, result['msg']
||||
28
mod/base/database_tool/sql_server.py
Normal file
28
mod/base/database_tool/sql_server.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from typing import Optional, Dict, List, Union, Tuple
|
||||
|
||||
from .base import BaseDatabaseTool, sqlserver
|
||||
from .util import read_file, GET_CLASS
|
||||
|
||||
|
||||
class SQLServerTool(BaseDatabaseTool):
    """Database tool for a SQL Server instance managed by the panel."""

    _type_name = "sqlserver"

    def local_server_info(self) -> Optional[Dict]:
        """No local SQL Server install is supported; always None."""
        return None

    def add_database(self, server_id: int, database_name: str, **kwargs) -> Tuple[bool, str]:
        """Create a database on the given server; returns (ok, message)."""
        request = GET_CLASS()
        request.name = database_name
        request.sid = server_id
        request.ps = kwargs.get("ps", "")
        request.db_user = kwargs.get("db_user", "")
        request.password = kwargs.get("password", "")
        result = sqlserver().AddDatabase(request)
        if result["status"] is True:
            return True, "Successfully added"
        return False, result['msg']
||||
68
mod/base/database_tool/util.py
Normal file
68
mod/base/database_tool/util.py
Normal file
@@ -0,0 +1,68 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional, Tuple, Callable
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
|
||||
import public
|
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """Write *s_body* to *filename*, creating the file if needed.

    Tries the platform default encoding first, then retries with UTF-8.
    Best-effort helper: returns True on success, False on any failure.
    """
    try:
        # "with" guarantees the handle is closed even if write() raises;
        # the original leaked the handle on a failed write.
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except Exception:
        try:
            with open(filename, mode=mode, encoding="utf-8") as fp:
                fp.write(s_body)
            return True
        except Exception:
            return False
|
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """Read and return the whole content of *filename*.

    Returns None when the file does not exist or cannot be read.
    """
    import os
    if not os.path.exists(filename):
        return None
    try:
        # Context manager replaces the manual open/close + finally dance.
        with open(filename, mode=mode) as fp:
            return fp.read()
    except Exception:
        return None
|
||||
|
||||
|
||||
class _DB:
    """Callable facade: ``_DB()(table)`` yields a panel Sql query bound to *table*."""

    def __call__(self, table: str):
        # Imported lazily so this module can be loaded outside the panel tree.
        import db
        with db.Sql() as query:
            query.table(table)
            return query


# Module-level singleton; use as DB("table_name").
DB = _DB()

# Aliases for panel helpers.
GET_CLASS = public.dict_obj

ExecShell: Callable = public.ExecShell
||||
1
mod/base/git_tool/__init__.py
Normal file
1
mod/base/git_tool/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .tool import GitTool, GitMager, RealGitMager
|
||||
40
mod/base/git_tool/install.py
Normal file
40
mod/base/git_tool/install.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import re
|
||||
from .util import ExecShell
|
||||
|
||||
|
||||
def installed():
    """Return True when a `git` binary answers `git --version`."""
    out, _err = ExecShell("git --version")
    return bool(re.search(r"git\s+version\s+(\d+\.){1,4}\d+", out))
||||
|
||||
|
||||
def version_1_5_3() -> bool:
    """Return True when the installed git version is at least 1.5.3.

    Returns False when git is missing or its version cannot be parsed.
    """
    sh_str = "git --version"
    out, error = ExecShell(sh_str)
    res = re.search(r"git\s+version\s+(?P<v>(\d+\.){1,4}\d+)", out)
    if not res:
        return False
    ver = [int(i) for i in res.group('v').split(".")]
    if len(ver) < 3:
        # Pad short versions like "1.6" to (major, minor, patch).
        ver.extend([0] * (3 - len(ver)))
    # BUG FIX: the patch component is ver[2]; the original indexed ver[3],
    # which raised IndexError for three-part versions such as 1.5.3.
    return tuple(ver[:3]) >= (1, 5, 3)
|
||||
|
||||
|
||||
def install_git():
    """Ensure git is installed, running the bundled install script when missing.

    Returns True when git is (now) available, False when installation failed.
    """
    check_str = "Git installed successfully"
    if installed():
        return True
    script_path = "/www/server/panel/mod/base/git_tool/install.sh"
    out, _err = ExecShell("bash {}".format(script_path))
    return out.find(check_str) != -1
||||
26
mod/base/git_tool/install.sh
Normal file
26
mod/base/git_tool/install.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
|
||||
# Detect the available package manager (Debian/Ubuntu or RHEL/CentOS).
if command -v apt-get &> /dev/null; then
    package_manager="apt-get"
elif command -v yum &> /dev/null; then
    package_manager="yum"
else
    echo "Git installation failed."
    exit 1
fi

# Install Git
if [ "$package_manager" = "apt-get" ]; then
    apt-get update
    apt-get install git -y
elif [ "$package_manager" = "yum" ]; then
    yum install git -y
fi

# Verify the Git installation
git_version=$(git --version)
# shellcheck disable=SC2181
if [ $? -eq 0 ]; then
    echo "Git installed successfully. Version: $git_version"
else
    echo "Git installation failed."
    exit 1  # BUG FIX: propagate failure via the exit code instead of exiting 0
fi
|
||||
543
mod/base/git_tool/tool.py
Normal file
543
mod/base/git_tool/tool.py
Normal file
@@ -0,0 +1,543 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import time
|
||||
|
||||
from .install import installed, install_git, version_1_5_3
|
||||
from .util import read_file, write_file, ExecShell, set_ownership
|
||||
from typing import Optional, Dict, Union, List
|
||||
from urllib3.util import parse_url, Url
|
||||
from mod.base import json_response
|
||||
|
||||
GIT_TMP_PATH = "/www/server/git_tmp"
|
||||
|
||||
|
||||
class GitTool:
    """Wrapper around the git CLI for syncing a remote repository into a
    project directory.

    All git work happens in a per-project temp dir under GIT_TMP_PATH (keyed
    by the project directory's inode), so the project tree itself never
    becomes a git checkout; pulled files are copied over afterwards.
    """

    def __init__(self, project_path: str, git_url: str, user_config: Optional[dict] = None, git_id: str = ""):
        # user_config: {"name": ..., "password": ..., "email": ...} used for
        # HTTP(S) authentication; falls back to the project's .git/config below.
        self.git_url = git_url
        # NOTE(review): attribute is named get_id but stores git_id — likely a
        # typo; pull() reads self.get_id, so the class is internally consistent.
        self.get_id = git_id
        self.project_path = project_path
        if self.project_path[-1] == '/':
            # normalize away a trailing slash
            self.project_path = self.project_path[:-1]
        if not os.path.isdir(GIT_TMP_PATH):
            os.makedirs(GIT_TMP_PATH)

        self._tmp_path: Optional[str] = None
        self._askpass_path: Optional[str] = None

        self.user_config = user_config
        _conf = self.get_user_config_by_project_path()
        if _conf and not self.user_config:
            self.user_config = _conf

        # becomes True once _setup_tmp_path() has created the temp dir in this
        # instance's lifetime (prevents re-wiping it on later calls)
        self._init_tmp_path = False

    @property
    def tmp_path(self) -> Optional[str]:
        """Per-project temp working dir (GIT_TMP_PATH/<inode>), or None when
        the project directory does not exist."""
        if self._tmp_path is not None:
            return self._tmp_path
        if not os.path.isdir(self.project_path):
            return None
        ino = os.stat(self.project_path).st_ino
        self._tmp_path = "{}/{}".format(GIT_TMP_PATH, str(ino))
        self._askpass_path = "{}/help_{}.sh".format(GIT_TMP_PATH, str(ino))
        return self._tmp_path

    def get_user_config_by_project_path(self) -> Optional[dict]:
        """Read the [user] section from the project's .git/config, if any."""
        if os.path.isfile(self.project_path + "/.git/config"):
            git_config = read_file(self.project_path + "/.git/config")
            if isinstance(git_config, str):
                return self._read_user_conf_by_config(git_config)
        return None

    @property
    def askpass_path(self) -> Optional[str]:
        """Path of the GIT_ASKPASS helper script for this project (same
        inode-keyed scheme as tmp_path), or None when the project dir is gone."""
        if self._askpass_path is not None:
            return self._askpass_path
        if not os.path.isdir(self.project_path):
            return None
        ino = os.stat(self.project_path).st_ino
        self._tmp_path = "{}/{}".format(GIT_TMP_PATH, str(ino))
        self._askpass_path = "{}/help_{}.sh".format(GIT_TMP_PATH, str(ino))
        return self._askpass_path

    def _setup_tmp_path(self) -> Optional[str]:
        """Create a clean temp checkout dir wired to the remote.

        Returns None on success, otherwise an error message string
        (callers treat any truthy return as failure).
        """
        if not os.path.isdir(self.project_path):
            return "目标目录:【{}】不存在,无法进行git操作".format(self.project_path)

        if not os.path.exists(self.tmp_path):
            os.makedirs(self.tmp_path)
            self._init_tmp_path = True
        else:
            if not self._init_tmp_path:
                # stale dir from a previous run — start from scratch
                shutil.rmtree(self.tmp_path)
                os.makedirs(self.tmp_path)
        # (re)create the executable askpass helper, empty for now
        write_file(self.askpass_path, "")
        ExecShell("chmod +x " + self.askpass_path)

        if os.path.isdir(self.tmp_path + "/.git"):
            return None
        sh_str = """cd {tmp_path}
{git_bin} init .
{git_bin} remote add origin {git_url}
""".format(tmp_path=self.tmp_path, git_bin=self.git_bin(), git_url=self.git_url)

        ExecShell(sh_str)
        if not os.path.isfile(self.tmp_path + "/.git/config"):
            return "git Initialization failed"

        git_conf = read_file(self.tmp_path + "/.git/config")
        if not (isinstance(git_conf, str) and git_conf.find("origin") != -1 and git_conf.find(self.git_url) != -1):
            return "git Failed to set up a remote route"

        if self.git_url.find("ssh://") != -1:  # nothing more to do for ssh URLs
            return

        if isinstance(self.user_config, dict):
            # write user.* settings into the temp repo's local git config
            sh_str_list = ["cd {}".format(self.tmp_path)]
            config_sh = self.git_bin() + " config user.{} {}"
            for k, v in self.user_config.items():
                if isinstance(k, str) and isinstance(v, str) and k.strip() and v.strip():
                    sh_str_list.append(config_sh.format(k, v))
            ExecShell("\n".join(sh_str_list))
            # GIT_ASKPASS helper answers git's interactive Username/Password prompts
            askpass_str = """#!/bin/sh
case "$1" in
Username*) exec echo "{}" ;;
Password*) exec echo "{}" ;;
esac
""".format(self.user_config.get('name', "--"), self.user_config.get('password', "--"))
            write_file(self.askpass_path, askpass_str)

    def remote_branch(self) -> Union[str, List[str]]:
        """List remote branch names via `git ls-remote origin`.

        Returns the branch-name list on success, otherwise an error string
        (either the setup error or git's stderr output).
        """
        error = self._setup_tmp_path()
        if error:
            return error
        out, err = ExecShell("cd {} && export GIT_ASKPASS='{}' && git ls-remote origin".format(
            self.tmp_path, self.askpass_path))
        rep_branch = re.compile(r"refs/heads/(?P<b>[^\n]*)\n")
        branch_list = []
        for tmp_res in rep_branch.finditer(out):
            branch_list.append(tmp_res.group("b"))

        if not branch_list:
            return err
        return branch_list

    @staticmethod
    def _read_user_conf_by_config(git_config_data: str) -> Optional[dict]:
        """Parse the key/value pairs of the [user] section out of a
        .git/config text; None when there is no [user] section."""
        rep_user = re.compile(r"\[user][^\n]*\n(?P<target>(\s*\w+\s*=\s*[^\n]*\n)*(\s*\w+\s*=\s*[^\n]*)?)(\s*\[)?")
        res = rep_user.search(git_config_data)
        if not res:
            return None
        res_data = dict()
        k_v_str = res.group("target")
        for line in k_v_str.split("\n"):
            if not line.strip():
                continue
            k, v = line.split("=", 1)
            res_data[k.strip()] = v.strip()
        return res_data

    @classmethod
    def global_user_conf(cls) -> dict:
        """Read name/password/email from root's global ~/.gitconfig; keys
        missing from the file stay None."""
        res_dict = {
            "name": None,
            "password": None,
            "email": None,
        }
        global_file = "/root/.gitconfig"
        if not os.path.isfile(global_file):
            return res_dict
        data = read_file(global_file)
        if not isinstance(data, str):
            return res_dict
        res_data = cls._read_user_conf_by_config(data)
        # NOTE(review): res_data is None when the file has no [user] section,
        # and dict.update(None) raises TypeError — confirm callers never hit this.
        res_dict.update(res_data)
        return res_dict

    @classmethod
    def set_global_user_conf(cls, data) -> None:
        """Write user.* keys into the global git config (blank values skipped)."""
        sh_str = cls.git_bin() + " config --global user.{} {}"
        for k, v in data.items():
            if isinstance(k, str) and isinstance(v, str) and k.strip() and v.strip():
                ExecShell(sh_str.format(k, v))

    @classmethod
    def ssh_pub_key(cls):
        """Return root's ssh public key text, generating a key when none exists."""
        key_files = ('id_ed25519', 'id_rsa', 'id_ecdsa', 'id_rsa_bt')
        for key_file in key_files:
            key_file = "/root/.ssh/{}".format(key_file)
            # NOTE(review): key_file was just reassigned to a full path, so this
            # builds "/root/.ssh//root/.ssh/<name>.pub" — existing keys can never
            # be found and a new key may be generated every call; pub_file was
            # presumably meant to be key_file + ".pub". Confirm and fix.
            pub_file = "/root/.ssh/{}.pub".format(key_file)
            if os.path.isfile(pub_file) and os.path.isfile(key_file):
                data = read_file(pub_file)
                if isinstance(data, str):
                    return data
        return cls._create_ssh_key()

    @staticmethod
    def _create_ssh_key() -> str:
        """Generate an ed25519 keypair for root, self-authorize it, record the
        key type, and return the public key text."""
        key_type = "ed25519"
        ExecShell("ssh-keygen -t {s_type} -P '' -f /root/.ssh/id_{s_type} |echo y".format(s_type=key_type))
        authorized_keys = '/root/.ssh/authorized_keys'
        pub_file = "/root/.ssh/id_{s_type}.pub".format(s_type=key_type)
        ExecShell('cat %s >> %s && chmod 600 %s' % (pub_file, authorized_keys, authorized_keys))
        key_type_file = '/www/server/panel/data/ssh_key_type.pl'
        write_file(key_type_file, key_type)
        return read_file(pub_file)

    @staticmethod
    def git_bin() -> str:
        """Absolute path of the git executable, installing git on demand.

        Raises ValueError when git is missing and installation fails.
        """
        if not installed():
            if not install_git():
                raise ValueError("There is no git tool and the installation fails, so this feature cannot be used")
        default = "/usr/bin/git"
        git_path = shutil.which("git")
        if git_path is None:
            return default
        return git_path

    def pull(self, branch, set_own: Optional[str] = None) -> Optional[str]:
        """Clone *branch* into the temp dir, then copy its files over the
        project directory; optionally chown the result to *set_own*.

        Returns "Pull error" when the clone failed, otherwise None.
        """
        if self.git_url.startswith("https://") or self.git_url.startswith("http://"):
            # inject stored credentials into the URL when it carries none
            res = parse_url(self.git_url)
            if isinstance(res, Url) and not res.auth:
                if self.user_config and "name" in self.user_config and "password" in self.user_config:
                    # NOTE(review): urllib3's Url is a namedtuple; attribute
                    # assignment here presumably raises AttributeError on
                    # current urllib3 versions — confirm.
                    res.auth = "{}:{}".format(self.user_config["name"], self.user_config["password"])
                    self.git_url = res.url
        git_name = self.git_name()
        if git_name is None:
            git_name = 'None'
        if os.path.isdir(self.tmp_path + "/" + git_name):
            shutil.rmtree(self.tmp_path + "/" + git_name)

        log_file = "/tmp/git_{}_log.log".format(self.get_id)

        # clone progress is appended to log_file so callers can poll it
        shell_command_str = "cd {0} && {1} clone --progress -b {2} {3} &>> {4}".format(
            self.tmp_path, self.git_bin(), branch, self.git_url, log_file
        )

        ExecShell(shell_command_str)
        if not os.path.isdir(self.tmp_path + "/" + git_name):
            return "Pull error"

        # \cp bypasses any shell alias (e.g. cp -i) so the overwrite is silent
        ExecShell(r"\cp -rf {}/{}/* {}/".format(self.tmp_path, git_name, self.project_path))
        if isinstance(set_own, str):
            set_ownership(self.project_path, set_own)

        if os.path.isdir(self.tmp_path + "/" + git_name):
            shutil.rmtree(self.tmp_path + "/" + git_name)

    def git_name(self) -> Optional[str]:
        """Repository name derived from the URL's last path segment, with a
        trailing ".git" stripped; None when git_url is not a string."""
        if isinstance(self.git_url, str):
            name = self.git_url.rsplit("/", 1)[1]
            if name.endswith(".git"):
                name = name[:-4]
            return name
        return None

    @classmethod
    def new_id(cls) -> str:
        """Short random id: every other hex digit of a uuid4 (16 chars)."""
        from uuid import uuid4
        return uuid4().hex[::2]
|
||||
|
||||
|
||||
class RealGitMager:
    """Persistent per-site git binding store backed by a JSON config file."""

    _git_config_file = "/www/server/panel/data/site_git_config.json"

    def __init__(self):
        # Lazily loaded config: {site_name: [binding dict, ...]}.
        # Sample binding layout:
        # c = {
        #     "site_name": [{
        #         "id": "",
        #         "site_name": "aaaa",
        #         "url": "https://example.com/git/panel-plugin.git",
        #         "path_ino": 4564524,
        #         "git_path": "/www/wwwroot/site",
        #         "config": {
        #             "name": "",
        #             "password": "",
        #             "email": "",
        #         },
        #     }
        #     ]
        # }
        self._config: Optional[Dict[str, List[Dict[str, Union[int, str, dict]]]]] = None

    @property
    def configure(self) -> Dict[str, List[Dict[str, Union[int, str, dict]]]]:
        """Load (once) and return the site->bindings mapping; an empty dict
        when the config file is missing or unreadable."""
        if self._config is None:
            try:
                res = read_file(self._git_config_file)
                if res is None:
                    data = {}
                else:
                    data = json.loads(res)
            # NOTE(review): json.JSONDecoder is a decoder class, not an
            # exception — this was presumably meant to be json.JSONDecodeError.
            # As written, any exception reaching this handler raises
            # "catching classes that do not inherit from BaseException".
            except (json.JSONDecoder, TypeError, ValueError):
                data = {}

            self._config = data
        return self._config

    def save_configure(self):
        """Persist the in-memory config (no-op when nothing is loaded or it is empty)."""
        if self._config:
            write_file(self._git_config_file, json.dumps(self._config))

    def add_git(self, git_url: str, site_name: str, git_path: str, user_config: Optional[dict]) -> Union[str, list]:
        """Validate and register a new repository binding for *site_name*.

        Returns the remote branch list on success, otherwise an error string.
        """
        url = parse_url(git_url)
        if not (isinstance(url, Url) and url.scheme and url.host and url.path):
            return "The URL format is incorrect"

        if user_config and not (isinstance(user_config, dict) and "name" in user_config and "password" in user_config):
            return "User information is entered incorrectly"

        if not os.path.exists(git_path):
            return "The git target directory does not exist"
        else:
            path_ino = os.stat(git_path).st_ino

        if site_name not in self.configure:
            self.configure[site_name] = []

        # duplicates are detected by inode or by literal path string
        for c in self.configure[site_name]:
            if c["path_ino"] == path_ino or git_path == c["git_path"]:
                return "The path already exists, so please do not add it repeatedly"

        try:
            # make sure git is available before registering anything
            GitTool.git_bin()
        except ValueError as e:
            return str(e)

        git_id = GitTool.new_id()
        git = GitTool(project_path=git_path, git_url=git_url, user_config=user_config, git_id=git_id)
        res = git.remote_branch()
        if isinstance(res, str):  # a string result is an error message
            return res

        self.configure[site_name].append(
            {
                "id": git_id,
                "site_name": site_name,
                "url": git_url,
                "path_ino": path_ino,
                "git_path": git_path,
                "remote_branch": res,
                "remote_branch_time": int(time.time()),  # branch-cache timestamp
                "config": user_config,
            }
        )
        self.save_configure()
        return res

    def modify_git(
            self,
            git_id: str,
            site_name: str,
            git_url: Optional[str],
            git_path: Optional[str],
            user_config: Optional[dict]
    ) -> Optional[str]:
        """Update fields of an existing binding; None on success, else an
        error string.

        NOTE(review): *target* is mutated field-by-field before later fields
        are validated and before remote_branch() is re-checked, so a failed
        modify leaves partially-updated values in memory (they are only
        persisted via save_configure() on success, though).
        """
        target = None
        for i in self.configure.get(site_name, []):
            if i["id"] == git_id:
                target = i
                break

        if target is None:
            return 'The specified git configuration does not exist'
        if git_url:
            url = parse_url(git_url)
            if not (isinstance(url, Url) and url.scheme and url.host and url.path):
                return "The URL format is incorrect"
            target["url"] = git_url

        if git_path:
            if not os.path.exists(git_path):
                return "The git target directory does not exist"
            else:
                path_ino = os.stat(git_path).st_ino
                target["path_ino"] = path_ino
                target["git_path"] = git_path

        if user_config:
            if not (isinstance(user_config, dict) and "name" in user_config and "password" in user_config):
                return "User information is entered incorrectly"
            target["config"] = user_config

        # re-verify the remote is reachable with the new settings
        git = GitTool(project_path=target['git_path'],
                      git_url=target['url'],
                      user_config=target['config'],
                      git_id=target['id'])
        res = git.remote_branch()
        if isinstance(res, str):
            return res

        self.save_configure()
        return None

    def remove_git(self, git_id: str, site_name: str) -> Optional[str]:
        """Delete a binding identified by id; None on success, else an error string."""
        target = None
        for i in self.configure.get(site_name, []):
            if i["id"] == git_id:
                target = i
                break

        if target is None:
            return 'The specified git configuration does not exist'

        self.configure[site_name].remove(target)
        self.save_configure()

    def site_git_configure(self, site_name, refresh: bool = False) -> List[dict]:
        """Bindings for *site_name*, refreshing each cached remote-branch list
        when it is older than one hour or when *refresh* is True."""
        if site_name not in self.configure:
            return []
        res_list = []
        for i in self.configure[site_name]:
            if time.time() - i.get("remote_branch_time", 0) > 60 * 60 or refresh:
                g = GitTool(project_path=i['git_path'], git_url=i['url'], user_config=i['config'], git_id=i['id'])
                res = g.remote_branch()
                if isinstance(res, str):  # error: record it and clear the list
                    i.update(remote_branch_error=res, remote_branch=[], remote_branch_time=int(time.time()))
                else:
                    i.update(remote_branch=res, remote_branch_time=int(time.time()))
            res_list.append(i)

        # NOTE(review): refreshed branch data is only updated in memory here —
        # save_configure() is not called, so the refreshed cache is not persisted.
        return res_list

    @staticmethod
    def set_global_user(name: Optional[str], password: Optional[str], email: Optional[str] = None) -> None:
        """Write the given non-empty fields into the global git user config."""
        data = {}
        if name:
            data['name'] = name
        if password:
            data['password'] = password
        if email:
            data['email'] = email
        GitTool.set_global_user_conf(data)

    def git_pull(self, git_id: str, site_name: str, branch: str) -> Optional[str]:
        """Pull *branch* for the identified binding; None on success, else an
        error string."""
        target = None
        for i in self.configure.get(site_name, []):
            if i["id"] == git_id:
                target = i
                break

        if target is None:
            return 'The specified git configuration does not exist'

        g = GitTool(
            project_path=target['git_path'],
            git_url=target['url'],
            user_config=target['config'],
            git_id=target["id"])
        return g.pull(branch)
|
||||
|
||||
|
||||
class GitMager:
    """Panel-facing endpoints for site git management.

    Each static method receives the panel request object *get*, extracts and
    validates its fields, delegates to RealGitMager, and wraps the result in
    a json_response payload.
    """

    # Add git information
    @staticmethod
    def add_git(get):
        """Bind a repository to a site (fields: url, site_name, git_path[, config])."""
        user_config = None
        try:
            git_url = get.url.strip()
            site_name = get.site_name.strip()
            git_path = get.git_path.strip()
            if hasattr(get, "config") and get.config.strip():
                user_config = json.loads(get.config.strip())
        # Fix: was `json.JSONDecoder`, which is not an exception class and
        # itself raises TypeError when the handler is evaluated.
        except (json.JSONDecodeError, AttributeError, TypeError):
            return json_response(status=False, msg="The parameter is incorrect")
        res = RealGitMager().add_git(git_url, site_name, git_path, user_config)
        if isinstance(res, str):
            return json_response(status=False, msg=res)
        return json_response(status=True, data=res)

    # Modify git information
    @staticmethod
    def modify_git(get):
        """Modify an existing binding; url/git_path/user_config are optional."""
        git_url = None
        git_path = None
        user_config = None
        try:
            git_id = get.git_id.strip()
            site_name = get.site_name.strip()
            if "url" in get:
                git_url = get.url.strip()
            if 'git_path' in get:
                git_path = get.git_path.strip()
            if hasattr(get, "user_config") and get.user_config.strip():
                user_config = json.loads(get.user_config.strip())
        # Fix: json.JSONDecoder -> json.JSONDecodeError (see add_git).
        except (json.JSONDecodeError, AttributeError, TypeError):
            return json_response(status=False, msg="parameter error")
        res = RealGitMager().modify_git(git_id, site_name, git_url, git_path, user_config)
        if isinstance(res, str):
            return json_response(status=False, msg=res)
        return json_response(status=True, data=res)

    # Remove git information
    @staticmethod
    def remove_git(get):
        """Remove the binding identified by git_id + site_name."""
        try:
            git_id = get.git_id.strip()
            site_name = get.site_name.strip()
        # Fix: json.JSONDecoder -> json.JSONDecodeError (see add_git).
        except (json.JSONDecodeError, AttributeError, TypeError):
            return json_response(status=False, msg="parameter error")
        res = RealGitMager().remove_git(git_id, site_name)
        if isinstance(res, str):
            return json_response(status=False, msg=res)
        return json_response(status=True, data=res)

    @staticmethod
    def site_git_configure(get):
        """List a site's bindings; "refresh" in ("true", "1") forces a
        remote-branch refresh.  Requires git >= 1.5.3."""
        if not version_1_5_3():
            return json_response(status=False, msg="Git versions earlier than 1.5.3 are not available")
        refresh = ''
        try:
            site_name = get.site_name.strip()
            if "refresh" in get:
                refresh = get.refresh.strip()
        except (AttributeError, TypeError):
            return json_response(status=False, msg="parameter error")
        refresh = refresh in ("true", "1")
        res = RealGitMager().site_git_configure(site_name, refresh=refresh)
        if isinstance(res, str):
            return json_response(status=False, msg=res)
        return json_response(status=True, data=res)

    @staticmethod
    def set_global_user(get):
        """Set the global git user name/password/email (all fields optional)."""
        name = password = email = None
        try:
            if "name" in get:
                name = get.name.strip()
            if "password" in get:
                password = get.password.strip()
            if "email" in get:
                email = get.email.strip()
        except (AttributeError, TypeError):
            return json_response(status=False, msg="parameter error")

        RealGitMager().set_global_user(name, password, email)
        return json_response(status=True, msg="The setup was successful")

    @staticmethod
    def git_pull(get):
        """Trigger a pull of *branch* for the given binding."""
        try:
            site_name = get.site_name.strip()
            git_id = get.git_id.strip()
            branch = get.branch.strip()
        except (AttributeError, TypeError):
            return json_response(status=False, msg="parameter error")
        res = RealGitMager().git_pull(git_id, site_name, branch)
        if isinstance(res, str):
            return json_response(status=False, msg=res)
        return json_response(status=True, data=res)

    @staticmethod
    def git_global_user_conf(get=None):
        """Raw global git user config (returned as-is, not json_response-wrapped)."""
        return GitTool.global_user_conf()

    @staticmethod
    def git_ssh_pub_key(get=None):
        """Root's ssh public key, generated on demand (raw return)."""
        return GitTool.ssh_pub_key()
|
||||
56
mod/base/git_tool/util.py
Normal file
56
mod/base/git_tool/util.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional, Tuple, Callable
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
|
||||
import public
|
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """Write *s_body* to *filename*, creating the file when needed.

    First tries the platform default encoding, then retries once with
    UTF-8.  Returns True on success, False on any failure.
    """
    # Fix: the original used bare `except:` (swallowing KeyboardInterrupt etc.)
    # and leaked the file handle when fp.write() raised; context managers
    # guarantee the handle is closed on both paths.
    try:
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except Exception:
        try:
            with open(filename, mode=mode, encoding="utf-8") as fp:
                fp.write(s_body)
            return True
        except Exception:
            return False
|
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """Read and return the content of *filename*.

    Returns None when the file does not exist or cannot be read
    (matching the original best-effort contract).
    """
    # Fix: removed the redundant function-local `import os` (os is imported
    # at module level) and replaced the manual try/finally close with a
    # context manager; the bare `except:` is narrowed to Exception.
    if not os.path.exists(filename):
        return None
    try:
        with open(filename, mode=mode) as fp:
            return fp.read()
    except Exception:
        return None
|
||||
|
||||
|
||||
# Re-exported helpers from the panel's shared ``public`` module:
# ExecShell runs a shell command; set_ownership presumably adjusts the
# owner of a path (see public module for the exact contract — confirm).
ExecShell: Callable = public.ExecShell
set_ownership: Callable = public.set_ownership
|
||||
168
mod/base/msg/__init__.py
Normal file
168
mod/base/msg/__init__.py
Normal file
@@ -0,0 +1,168 @@
|
||||
import json
|
||||
import os.path
|
||||
|
||||
from .weixin_msg import WeiXinMsg
|
||||
from .mail_msg import MailMsg
|
||||
from .web_hook_msg import WebHookMsg
|
||||
from .feishu_msg import FeiShuMsg
|
||||
from .dingding_msg import DingDingMsg
|
||||
from .sms_msg import SMSMsg
|
||||
# from .wx_account_msg import WeChatAccountMsg
|
||||
from .tg_msg import TgMsg
|
||||
from .manager import SenderManager
|
||||
from .util import read_file,write_file
|
||||
|
||||
from mod.base.push_mod import SenderConfig, PUSH_DATA_PATH
|
||||
|
||||
|
||||
# Migrate message channels from the legacy alert system into the new sender config
def update_mod_push_msg():
    """One-shot migration of legacy alert channels into the new SenderConfig.

    Reads the old per-channel JSON files under /www/server/panel/data
    (weixin, mail, webhook, feishu, dingding, telegram) and appends a
    matching entry to the new sender configuration.  A marker file
    (update_sender.pl) under PUSH_DATA_PATH ensures the migration runs once.
    """
    # Already migrated: the marker file is written at the end of a full pass.
    if os.path.exists(PUSH_DATA_PATH + "/update_sender.pl"):
        return

    def _load_json(path):
        # Best-effort JSON load; None when the file is missing or malformed
        # (read_file returns None for a missing file, json.loads then raises).
        try:
            return json.loads(read_file(path))
        except Exception:
            return None

    sc = SenderConfig()
    panel_data_path = "/www/server/panel/data"

    # NOTE: sms channels are intentionally no longer auto-added during migration.

    # weixin
    weixin_data = _load_json(panel_data_path + "/weixin.json")
    if isinstance(weixin_data, dict) and "weixin_url" in weixin_data:
        sc.config.append({
            "id": sc.nwe_id(),
            "used": True,
            "sender_type": "weixin",
            "data": {
                "url": weixin_data["weixin_url"],
                "title": "weixin" if "title" not in weixin_data else weixin_data["title"]
            },
            # "original" marks the legacy/default channel of this type
            "original": True
        })

    # mail: needs both the SMTP account file and the receiver list file
    stmp_file = panel_data_path + "/stmp_mail.json"
    mail_list_file = panel_data_path + "/mail_list.json"
    if os.path.exists(stmp_file) and os.path.exists(mail_list_file):
        stmp_data = _load_json(stmp_file)
        mail_list_data = _load_json(mail_list_file)
        if isinstance(stmp_data, dict):
            if 'qq_mail' in stmp_data or 'qq_stmp_pwd' in stmp_data or 'hosts' in stmp_data:
                sc.config.append({
                    "id": sc.nwe_id(),
                    "used": True,
                    "sender_type": "mail",
                    "data": {
                        "send": stmp_data,
                        "title": "mail",
                        "receive": [] if not mail_list_data else mail_list_data,
                    },
                    "original": True
                })

    # webhook
    webhook_file = panel_data_path + "/hooks_msg.json"
    # Bug fix: the original tested the mail files (stmp_file/mail_list_file)
    # here instead of webhook_file, so webhook migration depended on
    # unrelated files existing.
    if os.path.exists(webhook_file):
        webhook_data = _load_json(webhook_file)
        if isinstance(webhook_data, list):
            for i in webhook_data:
                i["title"] = i["name"]
                sc.config.append({
                    "id": sc.nwe_id(),
                    "used": True,
                    "sender_type": "webhook",
                    "data": i,
                })

    # feishu
    feishu_data = _load_json(panel_data_path + "/feishu.json")
    if isinstance(feishu_data, dict) and "feishu_url" in feishu_data:
        sc.config.append({
            "id": sc.nwe_id(),
            "used": True,
            "sender_type": "feishu",
            "data": {
                "url": feishu_data["feishu_url"],
                "title": "feishu" if "title" not in feishu_data else feishu_data["title"]
            },
            "original": True
        })

    # dingding
    dingding_data = _load_json(panel_data_path + "/dingding.json")
    if isinstance(dingding_data, dict) and "dingding_url" in dingding_data:
        sc.config.append({
            "id": sc.nwe_id(),
            "used": True,
            "sender_type": "dingding",
            "data": {
                "url": dingding_data["dingding_url"],
                "title": "dingding" if "title" not in dingding_data else dingding_data["title"]
            },
            "original": True
        })

    # telegram
    tg_data = _load_json(panel_data_path + "/tg_bot.json")
    if isinstance(tg_data, dict) and "bot_token" in tg_data:
        sc.config.append({
            "id": sc.nwe_id(),
            "used": True,
            "sender_type": "tg",
            "data": {
                "my_id": tg_data["my_id"],
                "bot_token": tg_data["bot_token"],
                "title": "tg" if "title" not in tg_data else tg_data["title"],
            },
            "original": True
        })

    sc.save_config()
    # write the marker so this migration never runs again
    write_file(PUSH_DATA_PATH + "/update_sender.pl", "")
|
||||
BIN
mod/base/msg/__pycache__/sms_msg.cpython-314.pyc
Normal file
BIN
mod/base/msg/__pycache__/sms_msg.cpython-314.pyc
Normal file
Binary file not shown.
BIN
mod/base/msg/__pycache__/wx_account_msg.cpython-314.pyc
Normal file
BIN
mod/base/msg/__pycache__/wx_account_msg.cpython-314.pyc
Normal file
Binary file not shown.
156
mod/base/msg/dingding_msg.py
Normal file
156
mod/base/msg/dingding_msg.py
Normal file
@@ -0,0 +1,156 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(https://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: baozi <
|
||||
# | Message channel DingTalk notification module (new)
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
import json
|
||||
import requests
|
||||
import traceback
|
||||
import socket
|
||||
|
||||
import requests.packages.urllib3.util.connection as urllib3_cn
|
||||
from requests.packages import urllib3
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import write_push_log, get_test_msg
|
||||
import public
|
||||
|
||||
# 关闭警告
|
||||
urllib3.disable_warnings()
|
||||
|
||||
|
||||
class DingDingMsg:
    """Message channel: DingTalk group robot (markdown webhook push)."""

    def __init__(self, dingding_data):
        # dingding_data: one sender-config entry, {"id": ..., "data": {...}}.
        self.id = dingding_data["id"]
        self.config = dingding_data["data"]

    def send_msg(self, msg: str, title) -> Optional[str]:
        """Send *msg* (markdown) to the configured DingTalk webhook.

        Returns None on success, otherwise an error message/traceback string.
        A push-log entry is written either way.
        """
        if not self.config:
            return public.lang('DingTalk information is not correctly configured')

        # "user" defaults to an empty @-list when absent
        if "user" not in self.config:
            self.config['user'] = []

        # NOTE(review): elsewhere (check_args) isAtAll is a bool; the empty-list
        # default here is falsy so it behaves like False, but the types differ.
        if "isAtAll" not in self.config:
            self.config['isAtAll'] = []

        if not isinstance(self.config['url'], str):
            return public.lang('The DingTalk configuration is incorrect, please reconfigure the DingTalk robot')

        # build the visible @-mentions; only 11-digit mobile numbers qualify
        at_info = ''
        for user in self.config['user']:
            if re.match(r"^[0-9]{11}$", str(user)):
                at_info += '@' + user + ' '

        if at_info:
            msg = msg + '\n\n>' + at_info

        headers = {'Content-Type': 'application/json'}
        data = {
            "msgtype": "markdown",
            "markdown": {
                "title": "Server notifications",
                "text": msg
            },
            "at": {
                "atMobiles": self.config['user'],
                "isAtAll": self.config['isAtAll']
            }
        }
        status = False
        error = None
        try:
            # Temporarily force IPv4 resolution for the webhook request.
            def allowed_gai_family():
                family = socket.AF_INET
                return family

            allowed_gai_family_lib = urllib3_cn.allowed_gai_family
            urllib3_cn.allowed_gai_family = allowed_gai_family

            response = requests.post(
                url=self.config["url"],
                data=json.dumps(data),
                verify=False,
                headers=headers,
                timeout=10
            )

            # NOTE(review): if requests.post raises, this restore line is
            # skipped and the process-wide IPv4-only resolver leaks —
            # consider moving the restore into a finally block.
            urllib3_cn.allowed_gai_family = allowed_gai_family_lib

            # DingTalk reports success via errcode == 0 in the JSON body
            if response.json()["errcode"] == 0:
                status = True
        except:
            error = traceback.format_exc()
            status = False

        write_push_log("dingding", status, title)
        return error

    @classmethod
    def check_args(cls, args: dict) -> Union[dict, str]:
        """Validate channel settings and verify them with a test push.

        Returns the normalized config dict on success, otherwise an error
        string (validation failure or the send error).
        """
        if "url" not in args or "title" not in args:
            return public.lang('Incomplete information')

        title = args["title"]
        if len(title) > 15:
            return public.lang('Note names cannot be longer than 15 characters')

        if "user" in args and isinstance(args["user"], list):
            user = args["user"]
        else:
            user = []

        if "atall" in args and isinstance(args["atall"], bool):
            atall = args["atall"]
        else:
            atall = True

        data = {
            "url": args["url"],
            "user": user,
            "title": title,
            "isAtAll": atall,
        }

        # verify the settings actually work by pushing a test notification
        test_obj = cls({"data": data, "id": None})
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }

        test_task = get_test_msg("Message channel configuration reminders")

        res = test_obj.send_msg(
            test_task.to_dingding_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return data

        return res

    def test_send_msg(self) -> Optional[str]:
        """Push a test notification through this channel.

        Returns None on success, otherwise the send error string.
        """
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }
        test_task = get_test_msg("Message channel configuration reminders")
        res = self.send_msg(
            test_task.to_dingding_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return None
        return res
|
||||
140
mod/base/msg/feishu_msg.py
Normal file
140
mod/base/msg/feishu_msg.py
Normal file
@@ -0,0 +1,140 @@
|
||||
#coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: lx
|
||||
# | 消息通道飞书通知模块
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
import json
|
||||
import requests
|
||||
import traceback
|
||||
import socket
|
||||
import public
|
||||
|
||||
import requests.packages.urllib3.util.connection as urllib3_cn
|
||||
from requests.packages import urllib3
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import write_push_log, get_test_msg
|
||||
|
||||
# 关闭警告
|
||||
urllib3.disable_warnings()
|
||||
|
||||
|
||||
class FeiShuMsg:
    """Feishu (Lark) webhook notification channel.

    ``feishu_data`` is one sender-config entry ``{"id": ..., "data": {...}}``
    where ``data`` holds the webhook ``url``, remark ``title``, ``user`` list
    and the ``isAtAll`` flag.
    """

    def __init__(self, feishu_data):
        self.id = feishu_data["id"]
        self.config = feishu_data["data"]

    @classmethod
    def check_args(cls, args: dict) -> Union[dict, str]:
        """Validate channel settings and fire a test push through them.

        Returns the normalized config dict on success, otherwise an error
        string (either a validation message or the send failure text).
        """
        if "url" not in args or "title" not in args:
            return public.lang('Incomplete information')

        title = args["title"]
        if len(title) > 15:
            return public.lang('Note names cannot be longer than 15 characters')

        if "user" in args and isinstance(args["user"], list):
            user = args["user"]
        else:
            user = []

        if "atall" in args and isinstance(args["atall"], bool):
            atall = args["atall"]
        else:
            atall = True

        data = {
            "url": args["url"],
            "user": user,
            "title": title,
            "isAtAll": atall,
        }

        # Prove the new config works by sending a canned "Success" message.
        test_obj = cls({"data": data, "id": None})
        test_msg = {
            "msg_list": ['>configuration state: Success\n\n']
        }

        test_task = get_test_msg("Message channel configuration reminders")

        res = test_obj.send_msg(
            test_task.to_feishu_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return data

        return res

    def send_msg(self, msg: str, title: str) -> Optional[str]:
        """Send *msg* to the configured Feishu webhook.

        @msg   message body
        @title used only for the push log entry
        Returns None on success, otherwise a traceback string.
        """
        if not self.config:
            return public.lang('Feishu information is not configured correctly.')

        # Strip the <font ...>...</font> wrapper (not rendered by Feishu).
        reg = '<font.+>(.+)</font>'
        tmp = re.search(reg, msg)
        if tmp:
            tmp = tmp.groups()[0]
            msg = re.sub(reg, tmp, msg)

        if "isAtAll" not in self.config:
            self.config["isAtAll"] = True

        if self.config["isAtAll"]:
            msg += "<at userid='all'>All</at>"

        headers = {'Content-Type': 'application/json'}
        data = {
            "msg_type": "text",
            "content": {
                "text": msg
            }
        }
        status = False
        error = None
        try:
            # Force IPv4 name resolution for this one request.
            def allowed_gai_family():
                family = socket.AF_INET
                return family
            allowed_gai_family_lib = urllib3_cn.allowed_gai_family
            urllib3_cn.allowed_gai_family = allowed_gai_family
            try:
                rdata = requests.post(
                    url=self.config['url'],
                    data=json.dumps(data),
                    verify=False,
                    headers=headers,
                    timeout=10
                ).json()
            finally:
                # BUGFIX: always restore the patched resolver. Previously the
                # restore ran only after a successful request, so any network
                # error left urllib3 patched to IPv4-only process-wide.
                urllib3_cn.allowed_gai_family = allowed_gai_family_lib

            if "StatusCode" in rdata and rdata["StatusCode"] == 0:
                status = True
        except Exception:
            # BUGFIX: was a bare ``except:`` which also swallowed
            # SystemExit / KeyboardInterrupt.
            error = traceback.format_exc()

        write_push_log("feishu", status, title)

        return error

    def test_send_msg(self) -> Optional[str]:
        """Send the standard configuration-test message; None means success."""
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }
        test_task = get_test_msg("Message channel configuration reminders")
        res = self.send_msg(
            test_task.to_feishu_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return None
        return res
|
||||
158
mod/base/msg/mail_msg.py
Normal file
158
mod/base/msg/mail_msg.py
Normal file
@@ -0,0 +1,158 @@
|
||||
#coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: 沐落 <cjx@yakpanel.com>
|
||||
# | Author: lx
|
||||
# | 消息通道邮箱模块
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import smtplib
|
||||
import traceback
|
||||
from email.mime.text import MIMEText
|
||||
from email.utils import formataddr
|
||||
from typing import Tuple, Union, Optional
|
||||
import public
|
||||
|
||||
from mod.base.msg.util import write_push_log, write_mail_push_log, get_test_msg
|
||||
|
||||
|
||||
class MailMsg:
    """SMTP e-mail notification channel: one sending account, many receivers."""

    def __init__(self, mail_data):
        # mail_data: one sender-config entry {"id": ..., "data": {...}};
        # data holds "send" (SMTP account), "title" and "receive" (address list).
        self.id = mail_data["id"]
        self.config = mail_data["data"]

    @classmethod
    def check_args(cls, args: dict) -> Tuple[bool, Union[dict, str]]:
        """Validate mail channel settings and send a test message through them.

        Returns (True, normalized_data) on success, (False, error_message)
        otherwise.
        """
        if "send" not in args or "receive" not in args or len(args["receive"]) < 1:
            return False, "Incomplete information, there must be a sender and at least one receiver"

        if "title" not in args:
            return False, "There is no necessary remark information"

        title = args["title"]
        if len(title) > 15:
            return False, 'Note names cannot be longer than 15 characters'

        send_data = args["send"]
        send = {}
        # Required SMTP account fields ("stmp" spelling is a historical key).
        for i in ("qq_mail", "qq_stmp_pwd", "hosts", "port"):
            if i not in send_data:
                return False, "The sender configuration information is incomplete"
            send[i] = send_data[i].strip()

        receive_data = args["receive"]
        # Accept either a newline-separated string or a list of addresses.
        if isinstance(receive_data, str):
            receive_list = [i.strip() for i in receive_data.split("\n") if i.strip()]
        else:
            receive_list = [i.strip() for i in receive_data if i.strip()]

        data = {
            "send": send,
            "title": title,
            "receive": receive_list,
        }

        # Prove the config works by sending a canned "Success" mail.
        test_obj = cls({"data": data, "id": None})
        test_msg = {
            "msg_list": ['>configuration state: Success<br>']
        }

        test_task = get_test_msg("Message channel configuration reminders")

        res = test_obj.send_msg(
            test_task.to_mail_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        # Partial delivery still counts as a working configuration.
        if res is None or res.find("Failed to send mail to some recipients") != -1:
            return True, data

        return False, res

    def send_msg(self, msg: str, title: str) -> Optional[str]:
        """Send an HTML mail to every configured receiver, one SMTP session each.

        @msg   message body (HTML)
        @title mail subject
        Returns None when all receivers succeeded, otherwise an error string
        describing the (possibly partial) failure.
        """
        if not self.config:
            return public.lang('Mailbox information is not configured correctly')

        if 'port' not in self.config['send']:
            # Default to SMTPS.
            self.config['send']['port'] = 465

        receive_list = self.config['receive']

        error_list, success_list = [], []
        error_msg_dict = {}
        for email in receive_list:
            if not email.strip():
                continue
            server = None
            try:
                data = MIMEText(msg, 'html', 'utf-8')
                data['From'] = formataddr((self.config['send']['qq_mail'], self.config['send']['qq_mail']))
                data['To'] = formataddr((self.config['send']['qq_mail'], email.strip()))
                data['Subject'] = title

                port = int(self.config['send']['port'])
                host = str(self.config['send']['hosts'])
                user = self.config['send']['qq_mail']
                pwd = self.config['send']['qq_stmp_pwd']

                if port == 465:
                    # SSL direct connection
                    server = smtplib.SMTP_SSL(host, port, timeout=10)
                else:
                    # Standard connection, possibly with STARTTLS
                    server = smtplib.SMTP(host, port, timeout=10)
                    try:
                        # Attempt to upgrade to a secure connection
                        server.starttls()
                    except smtplib.SMTPNotSupportedError:
                        # The server does not support STARTTLS, proceed with an insecure connection
                        pass

                server.login(user, pwd)
                server.sendmail(user, [email.strip(), ], data.as_string())
                success_list.append(email)
            except:
                # Record per-recipient failures; keep trying the rest.
                err_msg = traceback.format_exc()
                error_list.append(email)
                error_msg_dict[email] = err_msg
            finally:
                if server:
                    server.quit()

        if not error_list and not success_list:
            return public.lang('The receiving mailbox is not configured')
        if not error_list:
            write_push_log("mail", True, title, success_list)
            return None
        if not success_list:
            write_push_log("mail", False, title, error_list)
            # error_list is non-empty here, so error_msg_dict has at least one
            # entry and indexing [0] is safe (fixes a former IndexError).
            first_error_msg = list(error_msg_dict.values())[0]
            return public.lang('Failed to send message, Recipient of failed to send:{}, Error: {}', error_list, first_error_msg)

        # Mixed outcome: log both sides and report the failing recipients.
        write_mail_push_log(title, error_list, success_list)

        return public.lang('Failed to send mail to some recipients, including:{}',error_list)

    def test_send_msg(self) -> Optional[str]:
        """Send the standard configuration-test mail; None means success."""
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }
        test_task = get_test_msg("Message channel configuration reminders")
        res = self.send_msg(
            test_task.to_mail_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return None
        return res
|
||||
|
||||
362
mod/base/msg/manager.py
Normal file
362
mod/base/msg/manager.py
Normal file
@@ -0,0 +1,362 @@
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from mod.base.push_mod import SenderConfig
|
||||
from .weixin_msg import WeiXinMsg
|
||||
from .mail_msg import MailMsg
|
||||
from .tg_msg import TgMsg
|
||||
from .web_hook_msg import WebHookMsg
|
||||
from .feishu_msg import FeiShuMsg
|
||||
from .dingding_msg import DingDingMsg
|
||||
from .sms_msg import SMSMsg
|
||||
# from .wx_account_msg import WeChatAccountMsg
|
||||
import json
|
||||
from mod.base import json_response
|
||||
from .util import write_file, read_file
|
||||
import sys,os
|
||||
sys.path.insert(0, "/www/server/panel/class/")
|
||||
import public
|
||||
|
||||
# 短信会自动添加到 sender 库中的第一个 且通过官方接口更新
|
||||
# 微信公众号信息通过官网接口更新, 不写入数据库,需要时由文件中读取并序列化
|
||||
# 其他告警通道本质都类似于web hook 在确认完数据信息无误后,都可以自行添加或启用
|
||||
class SenderManager:
    """Manage message-channel ("sender") configurations.

    Design notes carried over from the original:
    - SMS is auto-added as the first sender and refreshed via the official API.
    - WeChat official-account info is refreshed via the official API, kept in
      a file (not the DB) and deserialized on demand.
    - Every other alert channel is essentially a web hook: once its data is
      validated it can be added or enabled freely.
    """

    def __init__(self):
        # Temp file used to hand webhook custom parameters through check_args.
        self.custom_parameter_filename = "/www/server/panel/data/mod_push_data/custom_parameter.pl"
        self.init_default_sender()

    def set_sender_conf(self, get):
        """Create or update one sender configuration and persist it.

        @get.sender_type channel type (weixin/mail/tg/webhook/feishu/dingding)
        @get.sender_data JSON-encoded channel settings
        @get.sender_id   optional: id of an existing entry to update
        Returns a json_response dict.
        """
        try:
            sender_id = None
            try:
                if hasattr(get, "sender_id"):
                    sender_id = get.sender_id.strip()
                    if not sender_id:
                        sender_id = None
                sender_type = get.sender_type.strip()
                args = json.loads(get.sender_data.strip())
            except (json.JSONDecodeError, AttributeError, TypeError):
                # BUGFIX: the original caught ``json.JSONDecoder`` (a decoder
                # class, not an exception), so malformed JSON escaped this
                # handler. An unguarded duplicate ``json.loads`` that ran
                # before this try block was removed for the same reason.
                return json_response(status=False, msg=public.lang('The parameter is incorrect'))
            sender_config = SenderConfig()
            if sender_id is not None:
                tmp = sender_config.get_by_id(sender_id)
                if tmp is None:
                    sender_id = None

            # Per-type validation; each check_args also fires a test push.
            if sender_type == "weixin":
                data = WeiXinMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=public.lang('Test send failed'))

            elif sender_type == "mail":
                _, data = MailMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=public.lang('Test send failed'))

            elif sender_type == "tg":
                _, data = TgMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=data)
            elif sender_type == "webhook":
                # Stash the custom parameters in a file so check_args can
                # pick them up, then read them back afterwards.
                custom_parameter = args.get("custom_parameter", {})
                if custom_parameter:
                    try:
                        public.writeFile(self.custom_parameter_filename, json.dumps(custom_parameter))
                    except:
                        pass

                data = WebHookMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=public.lang('Test send failed'))

                # Read the parameters back and delete the temp file.
                try:
                    if os.path.exists(self.custom_parameter_filename):
                        custom_parameter = json.loads(public.readFile(self.custom_parameter_filename))
                        data['custom_parameter'] = custom_parameter
                        os.remove(self.custom_parameter_filename)
                except:
                    pass

            elif sender_type == "feishu":
                data = FeiShuMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=public.lang('Test send failed'))

            elif sender_type == "dingding":
                data = DingDingMsg.check_args(args)
                if isinstance(data, str):
                    return json_response(status=False, data=data, msg=public.lang('Test send failed'))
            else:
                return json_response(status=False, msg=public.lang('A type that is not supported by the current interface'))

            # Refuse duplicates: same type + same title on a *different* id.
            existing_sender = any(
                conf for conf in sender_config.config
                if conf['sender_type'] == sender_type and 'title' in conf['data'] and conf['data']['title'] == data['title'] and conf['id'] != sender_id
            )

            if existing_sender:
                return json_response(status=False, msg=public.lang('The same send configuration already exists and cannot be added repeatedly'))
            now_sender_id = None
            if not sender_id:
                # New entry: allocate an id ("nwe_id" is SenderConfig's API name).
                now_sender_id = sender_config.nwe_id()
                sender_config.config.append(
                    {
                        "id": now_sender_id,
                        "sender_type": sender_type,
                        "data": data,
                        "used": True,
                    })

            else:
                # Update in place, keeping keys not present in the new data.
                now_sender_id = sender_id
                tmp = sender_config.get_by_id(sender_id)
                tmp["data"].update(data)

            sender_config.save_config()
            if sender_type == "webhook":
                # Mirror webhook config into the legacy data file.
                self.set_default_for_compatible(sender_config.get_by_id(now_sender_id))

            return json_response(status=True, msg=public.lang('Saved successfully'))
        except:
            public.print_log('Error:{}'.format(str(public.get_error_info())))
            # BUGFIX: the original fell through here and implicitly returned
            # None, leaving the caller without a response object.
            return json_response(status=False, msg='Error:{}'.format(str(public.get_error_info())))

    @staticmethod
    def change_sendr_used(get):
        """Toggle a sender's enabled flag.

        (Method name typo "sendr" kept — it is the public API callers use.)
        """
        try:
            sender_id = get.sender_id.strip()
        except (AttributeError, TypeError):
            return json_response(status=False, msg=public.lang('The parameter is incorrect'))

        sender_config = SenderConfig()
        tmp = sender_config.get_by_id(sender_id)
        if tmp is None:
            return json_response(status=False, msg=public.lang('Corresponding sender not found'))
        tmp["used"] = not tmp["used"]

        sender_config.save_config()

        return json_response(status=True, msg=public.lang('Saved successfully'))

    @staticmethod
    def remove_sender(get):
        """Delete a sender configuration by id."""
        try:
            sender_id = get.sender_id.strip()
        except (AttributeError, TypeError):
            return json_response(status=False, msg=public.lang('The parameter is incorrect'))

        sender_config = SenderConfig()
        tmp = sender_config.get_by_id(sender_id)
        if tmp is None:
            return json_response(status=False, msg=public.lang('Corresponding sender not found'))
        sender_config.config.remove(tmp)
        sender_config.save_config()

        return json_response(status=True, msg=public.lang('Successfully delete'))

    @staticmethod
    def get_sender_list(get):
        """List all configured senders (weixin/mail/webhook/feishu/dingding/tg
        and wx_account), sorted by type.

        @get.refresh optional "1"/"true" — kept for API compatibility; the
        remote-refresh code paths it used to drive are currently disabled.
        """
        refresh = False
        try:
            if hasattr(get, 'refresh'):
                refresh = get.refresh.strip()
                if refresh in ("1", "true"):
                    refresh = True
        except (AttributeError, TypeError):
            return json_response(status=False, msg=public.lang('The parameter is incorrect'))

        res = []
        simple = ("weixin", "mail", "webhook", "feishu", "dingding", "tg")

        for conf in SenderConfig().config:
            if conf["sender_type"] in simple or conf["sender_type"] == "wx_account":
                res.append(conf)
        res.sort(key=lambda x: x["sender_type"])
        return json_response(status=True, data=res)

    @staticmethod
    def test_send_msg(get):
        """Send a test message through the sender identified by get.sender_id."""
        try:
            sender_id = get.sender_id.strip()
        except (AttributeError, TypeError):
            # BUGFIX: dropped ``json.JSONDecoder`` from the tuple — it is a
            # decoder class, not an exception, and nothing here parses JSON.
            return json_response(status=False, msg=public.lang('The parameter is incorrect'))

        sender_config = SenderConfig()
        tmp = sender_config.get_by_id(sender_id)
        if tmp is None:
            return json_response(status=False, msg=public.lang('Corresponding sender not found'))

        sender_type = tmp["sender_type"]

        # Dispatch table replaces the former if/elif chain.
        sender_classes = {
            "weixin": WeiXinMsg,
            "mail": MailMsg,
            "webhook": WebHookMsg,
            "feishu": FeiShuMsg,
            "dingding": DingDingMsg,
            "tg": TgMsg,
        }
        sender_cls = sender_classes.get(sender_type)
        if sender_cls is None:
            return json_response(status=False, msg=public.lang('A type that is not supported by the current interface'))
        sender_obj = sender_cls(tmp)

        res = sender_obj.test_send_msg()
        if isinstance(res, str):
            return json_response(status=False, data=res, msg=public.lang('Test send failed'))
        return json_response(status=True, msg=public.lang('The sending was successful'))

    @staticmethod
    def set_default_for_compatible(sender_data: dict):
        """Mirror a sender's config into the legacy per-type data files so
        older panel code keeps working."""
        if sender_data["sender_type"] in ("sms", "wx_account"):
            return

        panel_data = "/www/server/panel/data"
        if sender_data["sender_type"] == "weixin":
            weixin_file = "{}/weixin.json".format(panel_data)
            write_file(weixin_file, json.dumps({
                "state": 1,
                "weixin_url": sender_data["data"]["url"],
                "title": sender_data["data"]["title"],
                "list": {
                    "default": {
                        "data": sender_data["data"]["url"],
                        "title": sender_data["data"]["title"],
                        "status": 1,
                        "addtime": int(time.time())
                    }
                }
            }))

        elif sender_data["sender_type"] == "mail":
            stmp_mail_file = "{}/stmp_mail.json".format(panel_data)
            mail_list_file = "{}/mail_list.json".format(panel_data)
            write_file(stmp_mail_file, json.dumps(sender_data["data"]["send"]))
            write_file(mail_list_file, json.dumps(sender_data["data"]["receive"]))

        elif sender_data["sender_type"] == "feishu":
            feishu_file = "{}/feishu.json".format(panel_data)
            write_file(feishu_file, json.dumps({
                "feishu_url": sender_data["data"]["url"],
                "title": sender_data["data"]["title"],
                "isAtAll": True,
                "user": []
            }))

        elif sender_data["sender_type"] == "dingding":
            dingding_file = "{}/dingding.json".format(panel_data)
            write_file(dingding_file, json.dumps({
                "dingding_url": sender_data["data"]["url"],
                "title": sender_data["data"]["title"],
                "isAtAll": True,
                "user": []
            }))
        elif sender_data["sender_type"] == "tg":
            tg_file = "{}/tg_bot.json".format(panel_data)
            write_file(tg_file, json.dumps({
                "my_id": sender_data["data"]["my_id"],
                "bot_token": sender_data["data"]["bot_token"],
                "title": sender_data["data"]["title"]
            }))
        elif sender_data["sender_type"] == "webhook":
            webhook_file = "{}/hooks_msg.json".format(panel_data)
            try:
                webhook_data = json.loads(read_file(webhook_file))
            except:
                webhook_data = []
            # Update an existing entry with the same name, else append.
            target_idx = -1
            for idx, i in enumerate(webhook_data):
                if i["name"] == sender_data["data"]["title"]:
                    target_idx = idx
                    break
            else:
                sender_data["data"]["name"] = sender_data["data"]["title"]
                webhook_data.append(sender_data["data"])
            if target_idx != -1:
                sender_data["data"]["name"] = sender_data["data"]["title"]
                webhook_data[target_idx] = sender_data["data"]
            write_file(webhook_file, json.dumps(webhook_data))

    def init_default_sender(self):
        """Ensure every configured sender type has a default channel, and
        remove stale legacy data files for types with no channel at all."""
        import os, sys
        sys.path.insert(0, "/www/server/panel/mod/project/push")
        import msgconfMod  # side-effect import kept from the original
        sender_config = SenderConfig()
        sender_types = set(conf['sender_type'] for conf in sender_config.config)
        all_types = {"feishu", "dingding", "weixin", "mail", "webhook"}  # every known type

        for sender_type in sender_types:
            type_senders = [conf for conf in sender_config.config if conf['sender_type'] == sender_type]

            # Skip types that already have a default channel.
            has_default = any(conf.get('original', False) for conf in type_senders)
            if has_default:
                continue

            # NOTE(review): set_default_sender is not defined in this class as
            # shown here — presumably provided elsewhere (mixin/monkey-patch);
            # confirm before relying on this path.
            if len(type_senders) == 1:
                # Single channel: make it the default.
                for conf in type_senders:
                    get = public.dict_obj()
                    get['sender_id'] = conf['id']
                    get['sender_type'] = conf['sender_type']
                    self.set_default_sender(get)
            else:
                # Multiple channels: default to the earliest-created one.
                sorted_senders = sorted(type_senders, key=lambda x: x['data'].get('create_time', ''))
                if sorted_senders:
                    get = public.dict_obj()
                    get['sender_id'] = sorted_senders[0]['id']
                    get['sender_type'] = sorted_senders[0]['sender_type']
                    self.set_default_sender(get)

        # Types with no channel at all: drop their legacy data files.
        missing_types = all_types - sender_types
        for missing_type in missing_types:
            file_path = f"/www/server/panel/data/{missing_type}.json"
            if os.path.exists(file_path):
                os.remove(file_path)
|
||||
123
mod/base/msg/sms_msg.py
Normal file
123
mod/base/msg/sms_msg.py
Normal file
@@ -0,0 +1,123 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: baozi
|
||||
# | 消息通道 短信模块(新)
|
||||
# +-------------------------------------------------------------------
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
from typing import Union, Optional
|
||||
from mod.base.push_mod import SenderConfig
|
||||
from .util import write_push_log, PANEL_PATH, write_file, read_file, public_http_post
|
||||
|
||||
|
||||
class SMSMsg:
    """SMS notification channel, proxied through the official yakpanel API."""

    API_URL = 'https://www.yakpanel.com/api/wmsg'
    USER_PATH = '{}/data/userInfo.json'.format(PANEL_PATH)

    def __init__(self, msm_data: dict):
        # msm_data: one sender-config entry {"id": ..., "data": {...}}.
        self.id = msm_data["id"]
        self.data = msm_data["data"]
        try:
            self.user_info = json.loads(read_file(self.USER_PATH))
        except Exception:
            # No bound official account (file missing, unreadable or bad JSON).
            # BUGFIX: removed the redundant pre-assignment of user_info.
            self.user_info = None

        self._PDATA = {
            # Placeholder key; empty when no account is bound.
            "access_key": "" if self.user_info is None else 'B' * 32,
            "data": {}
        }

    def refresh_config(self, force=False):
        """Refresh the SMS quota from the official API, at most once per day
        unless *force* is set. Returns the (possibly cached) quota dict."""
        if "last_refresh_time" not in self.data:
            self.data["last_refresh_time"] = 0
        if self.data.get("last_refresh_time") + 60 * 60 * 24 < time.time() or force:
            result = self._request('get_user_sms')
            if not isinstance(result, dict) or ("status" in result and not result["status"]):
                return {
                    "count": 0,
                    "total": 0
                }
            # Persist the refreshed quota back into the sender config.
            sc = SenderConfig()
            tmp = sc.get_by_id(self.id)
            if tmp is not None:
                result["last_refresh_time"] = time.time()
                tmp["data"] = result
                sc.save_config()
        else:
            result = self.data
        return result

    def send_msg(self, sm_type: str, sm_args: dict) -> Optional[str]:
        """Send an SMS alert.

        @sm_type alert type, optionally "type|title" (e.g.
                 ``ssl_end|YakPanel SSL expiry reminder``); a default title is
                 used when no "|" is present
        @sm_args alert template parameters
        Returns None on success, otherwise an error string.
        """
        if not self.user_info:
            return "未成功绑定官网账号,无法发送信息,请尝试重新绑定"
        tmp = sm_type.split('|')
        if "|" in sm_type and len(tmp) >= 2:
            s_type = tmp[0]
            title = tmp[1]
        else:
            s_type = sm_type
            title = 'YakPanel 告警提醒'

        sm_args = self.canonical_data(sm_args)
        self._PDATA['data']['sm_type'] = s_type
        self._PDATA['data']['sm_args'] = sm_args
        # BUGFIX: removed leftover debug print() calls that leaked message
        # contents and API responses to stdout.
        result = self._request('send_msg')
        # Mask the account name in the push log.
        u_key = '{}****{}'.format(self.user_info['username'][:3], self.user_info['username'][-3:])
        if isinstance(result, str):
            write_push_log("短信", False, title, [u_key])
            return result

        if result['status']:
            write_push_log("短信", True, title, [u_key])
            return None
        write_push_log("短信", False, title, [u_key])
        return result.get("msg", "发送错误")

    @staticmethod
    def canonical_data(args):
        """Normalize template values: stringify non-strings and replace '.'
        with '_' in strings. Non-dict input is returned unchanged."""
        if not isinstance(args, dict):
            return args
        new_args = {}
        for param, value in args.items():
            if not isinstance(value, str):
                new_args[param] = str(value)
            else:
                # NOTE: the original also did value.replace("+", "+"),
                # a no-op; dropped.
                new_args[param] = value.replace(".", "_")
        return new_args

    def push_data(self, data):
        """Adapter used by the push scheduler."""
        return self.send_msg(data['sm_type'], data['sm_args'])

    def _request(self, d_name: str) -> Union[dict, str]:
        """POST the pending payload to the official API endpoint *d_name*.

        Returns the decoded JSON dict, or a traceback string on any failure.
        """
        pdata = {
            'access_key': self._PDATA['access_key'],
            'data': json.dumps(self._PDATA['data'])
        }
        try:
            import public
            api_root = public.GetConfigValue('home').rstrip('/') + '/api/wmsg'
            result = public_http_post(api_root + '/' + d_name, pdata)
            result = json.loads(result)
            return result
        except Exception:
            return traceback.format_exc()
|
||||
105
mod/base/msg/test.json
Normal file
105
mod/base/msg/test.json
Normal file
@@ -0,0 +1,105 @@
|
||||
[
|
||||
{
|
||||
"id": "f4e98e478b85e876",
|
||||
"used": true,
|
||||
"sender_type": "sms",
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "fb3e9e409b9d7c27",
|
||||
"sender_type": "mail",
|
||||
"data": {
|
||||
"send": {
|
||||
"qq_mail": "1191604998@qq.com",
|
||||
"qq_stmp_pwd": "alvonbfcwhlahbcg",
|
||||
"hosts": "smtp.qq.com",
|
||||
"port": "465"
|
||||
},
|
||||
"title": "test_mail",
|
||||
"receive": [
|
||||
"1191604998@qq.com",
|
||||
"225326944@qq.com"
|
||||
]
|
||||
},
|
||||
"used": true
|
||||
},
|
||||
{
|
||||
"id": "79900d4fb37fa83d",
|
||||
"sender_type": "feishu",
|
||||
"data": {
|
||||
"url": "https://open.feishu.cn/open-apis/bot/v2/hook/ba6a3f77-0349-4492-a8ad-0b4c99435bf1",
|
||||
"user": [],
|
||||
"title": "test_feishu",
|
||||
"isAtAll": true
|
||||
},
|
||||
"used": true
|
||||
},
|
||||
{
|
||||
"id": "8f70de4baa89133e",
|
||||
"sender_type": "webhook",
|
||||
"data": {
|
||||
"title": "webhook",
|
||||
"url": "http://192.168.69.172:11211",
|
||||
"query": {},
|
||||
"headers": {},
|
||||
"body_type": "json",
|
||||
"custom_parameter": {},
|
||||
"method": "POST",
|
||||
"ssl_verify": null,
|
||||
"status": true
|
||||
},
|
||||
"used": true
|
||||
},
|
||||
{
|
||||
"id": "10bcf5439299d9dd",
|
||||
"used": true,
|
||||
"sender_type": "wx_account",
|
||||
"data": {
|
||||
"id": "jsbRCBBinMmFjYjczNTQyYmUzxNDiWQw",
|
||||
"uid": 1228262,
|
||||
"is_subscribe": 1,
|
||||
"head_img": "https://thirdwx.qlogo.cn/mmopen/vi_32/DYAIOgq83epBUaqBcCkkxtKwuaOHLy1qjGeDvmf1hZsrkFGNrldyRgSuA3sYB1xlgKv1Z98PUciaxju71PUKchA/132",
|
||||
"nickname": "沈涛",
|
||||
"status": 1,
|
||||
"create_time": "2023-12-27 11:30:15",
|
||||
"update_time": "2023-12-27 11:30:15",
|
||||
"remaining": 98,
|
||||
"title": "沈涛"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "2c7c094eb23ddaae",
|
||||
"used": true,
|
||||
"sender_type": "webhook",
|
||||
"data": {
|
||||
"url": "http://192.168.69.159:8888/hook?access_key=IUSEViIMMhQio1WyP0ztCyoa8sIBjaWulihhcJX4rRJ4sW79",
|
||||
"query": {},
|
||||
"headers": {},
|
||||
"body_type": "json",
|
||||
"custom_parameter": {},
|
||||
"method": "GET",
|
||||
"ssl_verify": 1,
|
||||
"status": true,
|
||||
"name": "aaa",
|
||||
"title": "aaa"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "63c30845916fa722",
|
||||
"used": true,
|
||||
"sender_type": "feishu",
|
||||
"data": {
|
||||
"url": "https://open.feishu.cn/open-apis/bot/v2/hook/c6906d9f-01c5-4a74-80bd-3ccda33bf4ec",
|
||||
"title": "amber"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "6ccf834a95010bed",
|
||||
"used": true,
|
||||
"sender_type": "dingding",
|
||||
"data": {
|
||||
"url": "https://oapi.dingtalk.com/robot/send?access_token=00732dec605edc1c07f441eb9d470c8bdfa301c4ce89959916fe535d08c09043",
|
||||
"title": "dd"
|
||||
}
|
||||
}
|
||||
]
|
||||
252
mod/base/msg/tg_msg.py
Normal file
252
mod/base/msg/tg_msg.py
Normal file
@@ -0,0 +1,252 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | YakPanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 YakPanel(www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: jose <zhw@yakpanel.com>
|
||||
# | 消息通道电报模块
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import sys, os, re, public, json, requests
|
||||
|
||||
try:
|
||||
import telegram
|
||||
except:
|
||||
public.ExecShell("btpip install -I python-telegram-bot")
|
||||
import telegram
|
||||
|
||||
panelPath = "/www/server/panel"
|
||||
os.chdir(panelPath)
|
||||
sys.path.insert(0, panelPath + "/class/")
|
||||
from requests.packages import urllib3
|
||||
|
||||
# 关闭警告
|
||||
urllib3.disable_warnings()
|
||||
|
||||
from typing import Union, Optional
|
||||
|
||||
from mod.base.msg.util import write_push_log, get_test_msg
|
||||
|
||||
|
||||
class TgMsg:
|
||||
conf_path = "{}/data/tg_bot.json".format(panelPath)
|
||||
__tg_info = None
|
||||
__module_name = None
|
||||
__default_pl = "{}/data/default_msg_channel.pl".format(panelPath)
|
||||
|
||||
def __init__(self, conf):
|
||||
self.conf = conf
|
||||
self.bot_token = self.conf['data']['bot_token']
|
||||
self.my_id = self.conf['data']['my_id']
|
||||
|
||||
def get_version_info(self, get):
    """Return static module metadata (description, version, author, links)."""
    return {
        'ps': 'Use telegram bots to send receive panel notifications',
        'version': '1.0',
        'date': '2022-08-10',
        'author': 'YakPanel',
        'title': 'Telegram',
        'help': 'http://www.yakpanel.com',
    }
|
||||
|
||||
def get_config(self, get):
    """Return the stored Telegram bot config plus the default-channel flag."""
    data = {}
    if self.__tg_info:
        data = self.__tg_info

    # NOTE(review): __get_default_channel is defined elsewhere in this class
    # (not visible here) — presumably reads __default_pl; confirm.
    data['default'] = self.__get_default_channel()

    return data
|
||||
|
||||
def set_config(self, get):
    """Configure the Telegram bot and verify it with a test message.

    @get.my_id     Telegram chat/user id
    @get.bot_token bot token
    @get.title     optional remark name (max 7 characters)
    @get.default   optional flag: make this the default message channel
    """
    if not hasattr(get, 'my_id') or not hasattr(get, 'bot_token'):
        return public.returnMsg(False, public.lang("Please fill in the complete information"))

    title = 'Default'
    if hasattr(get, 'title'):
        title = get.title
        if len(title) > 7:
            return public.returnMsg(False, public.lang("Note name cannot exceed 7 characters"))

    self.__tg_info = {"my_id": get.my_id.strip(), "bot_token": get.bot_token, "title": title}

    # Verify the credentials by sending a formatted (or plain fallback) test
    # message before persisting anything.
    # NOTE(review): send_msg is called with three arguments here, but the
    # send_msg signature visible in this file takes (self, msg, title) —
    # confirm which overload is actually in effect.
    try:
        info = public.get_push_info('Notification Configuration Reminder',
                                    ['>Configuration status:<font color=#20a53a>successfully</font>\n\n'])
        ret = self.send_msg(info['msg'], get.my_id.strip(), get.bot_token)
    except:
        ret = self.send_msg('YakPanel alarm test', get.my_id.strip(), get.bot_token)
    if ret:
        # NOTE(review): ``'default' in get`` assumes dict-like membership on
        # the request object — confirm public.dict_obj supports it.
        if 'default' in get and get['default']:
            public.writeFile(self.__default_pl, self.__module_name)

        public.writeFile(self.conf_path, json.dumps(self.__tg_info))
        return public.returnMsg(True, public.lang("successfully set"))
    else:
        return ret
|
||||
|
||||
def get_send_msg(self, msg):
|
||||
"""
|
||||
@name 处理md格式
|
||||
"""
|
||||
try:
|
||||
title = 'YakPanel notifications'
|
||||
if msg.find("####") >= 0:
|
||||
try:
|
||||
title = re.search(r"####(.+)", msg).groups()[0]
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
info = public.get_push_info('Notification Configuration Reminder', ['>Send Content: ' + msg])
|
||||
msg = info['msg']
|
||||
except:
|
||||
pass
|
||||
return msg, title
|
||||
|
||||
async def send_msg_async(self, bot_token, chat_id, msg):
|
||||
"""
|
||||
tg发送信息
|
||||
@msg 消息正文
|
||||
"""
|
||||
|
||||
bot = telegram.Bot(token=bot_token)
|
||||
|
||||
await bot.send_message(chat_id=chat_id, text=msg, parse_mode='MarkdownV2')
|
||||
|
||||
# 外部也调用
|
||||
def send_msg(self, msg, title):
|
||||
"""
|
||||
tg发送信息
|
||||
@msg 消息正文
|
||||
"""
|
||||
|
||||
bot_token = self.bot_token
|
||||
chat_id = self.my_id
|
||||
|
||||
msg = msg.strip()
|
||||
msg = self.escape_markdown_v2(msg)
|
||||
import asyncio
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
try:
|
||||
loop.run_until_complete(self.send_msg_async(bot_token, chat_id, msg))
|
||||
write_push_log("Telegram", True, title)
|
||||
|
||||
public.print_log('message sent successfully!')
|
||||
loop.close()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
except Exception as e:
|
||||
public.print_log('tg sent error:{}'.format(str(public.get_error_info())))
|
||||
write_push_log("Telegram", False, title)
|
||||
|
||||
return public.lang("Telegram Failed to send {}",e)
|
||||
|
||||
def escape_markdown_v2(self, text):
|
||||
"""
|
||||
Escape special characters for Telegram's MarkdownV2 mode.
|
||||
"""
|
||||
# 所有需要转义的 MarkdownV2 字符
|
||||
escape_chars = r'\_*[]()~`>#+-=|{}.!'
|
||||
for ch in escape_chars:
|
||||
text = text.replace(ch, '\\' + ch)
|
||||
return text
|
||||
|
||||
@classmethod
|
||||
def check_args(cls, args: dict) -> Union[dict, str]:
|
||||
|
||||
my_id = args.get('my_id', None).strip()
|
||||
bot_token = args.get('bot_token', None)
|
||||
if not my_id or not bot_token:
|
||||
return public.lang('Incomplete information')
|
||||
|
||||
title = args.get('title', 'Default')
|
||||
if len(title) > 15:
|
||||
return public.lang('Note name cannot exceed 15 characters')
|
||||
|
||||
data = {
|
||||
"my_id": my_id,
|
||||
"bot_token": bot_token,
|
||||
"title": title
|
||||
}
|
||||
conf = {
|
||||
"data": data
|
||||
}
|
||||
|
||||
# 调用TgMsg的方法
|
||||
tg = TgMsg(conf)
|
||||
try:
|
||||
|
||||
test_msg = {
|
||||
"msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
|
||||
}
|
||||
test_task = get_test_msg("Message channel configuration reminders")
|
||||
ret = tg.send_msg(
|
||||
test_task.to_tg_msg(test_msg, test_task.the_push_public_data()),
|
||||
"Message channel configuration reminders"
|
||||
)
|
||||
|
||||
|
||||
except:
|
||||
ret = tg.send_msg('YakPanel alarm test', "Message channel configuration reminders")
|
||||
|
||||
# 测试失败也添加
|
||||
if ret:
|
||||
return False, ret
|
||||
else:
|
||||
return True, data
|
||||
|
||||
def test_send_msg(self) -> Optional[str]:
|
||||
test_msg = {
|
||||
"msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
|
||||
}
|
||||
test_task = get_test_msg("Message channel configuration reminders")
|
||||
res = self.send_msg(
|
||||
test_task.to_tg_msg(test_msg, test_task.the_push_public_data()),
|
||||
"Message channel configuration reminders"
|
||||
)
|
||||
if res is None:
|
||||
return None
|
||||
return res
|
||||
|
||||
def push_data(self, data):
|
||||
"""
|
||||
@name 统一发送接口
|
||||
@data 消息内容
|
||||
{"module":"mail","title":"标题","msg":"内容","to_email":"xx@qq.com","sm_type":"","sm_args":{}}
|
||||
"""
|
||||
|
||||
return self.send_msg(data['msg'])
|
||||
|
||||
def __get_default_channel(self):
|
||||
"""
|
||||
@获取默认消息通道
|
||||
"""
|
||||
try:
|
||||
if public.readFile(self.__default_pl) == self.__module_name:
|
||||
return True
|
||||
except:
|
||||
pass
|
||||
return False
|
||||
|
||||
def uninstall(self):
|
||||
if os.path.exists(self.conf_path):
|
||||
os.remove(self.conf_path)
|
||||
139
mod/base/msg/util.py
Normal file
139
mod/base/msg/util.py
Normal file
@@ -0,0 +1,139 @@
|
||||
import sys
from typing import Optional, List, Tuple
from mod.base.push_mod import BaseTask, WxAccountMsgBase, WxAccountMsg, get_push_public_data

# Ensure the panel's class directory is importable before importing `public`.
if "/www/server/panel/class" not in sys.path:
    sys.path.insert(0, "/www/server/panel/class")

import public


# Panel installation root.
PANEL_PATH = "/www/server/panel"
# Shorthand alias for the panel's HTTP POST helper.
public_http_post = public.httpPost
|
||||
|
||||
|
||||
def write_push_log(
        module_name: str,
        status: bool,
        title: str,
        user: Optional[List[str]] = None):
    """
    Record the outcome of an alarm push in the panel log.
    @param module_name: channel name (e.g. "Telegram", "weixin")
    @param status: whether the push succeeded
    @param title: message title
    @param user: recipients; may be empty (e.g. DingTalk needs none)
    @return: True
    """
    status_str = (
        '<span style="color:#20a53a;"> Success </span>'
        if status
        else '<span style="color:red;">Fail</span>'
    )

    user_str = '[ {} ]'.format(",".join(user)) if user else '[ default ]'

    log = 'Title: [{}], Notification: [{}], Result: [{}], Addressee: {}'.format(
        title, module_name, status_str, user_str)
    public.WriteLog('Alarm notification', log)
    return True
|
||||
|
||||
|
||||
def write_mail_push_log(
        title: str,
        error_user: List[str],
        success_user: List[str],
):
    """
    Record the outcome of a mail alarm push in the panel log.
    @param title: message title
    @param error_user: recipients the send failed for
    @param success_user: recipients the send succeeded for
    @return: True
    """
    # Red marks failures, green marks successes — the original had the two
    # colour templates applied to the wrong lists.
    fail_fmt = '<span style="color:red;">{}</span>'
    ok_fmt = '<span style="color:#20a53a;">{}</span>'
    error_user_msg = ",".join([fail_fmt.format(i) for i in error_user])
    # Use a new name instead of shadowing the `success_user` parameter.
    success_user_msg = ",".join([ok_fmt.format(i) for i in success_user])
    log = 'Title: [{}], notification method: [Email], send failed recipients: {}, send successful recipients: {}'.format(
        title, error_user_msg, success_user_msg
    )
    public.WriteLog('Alarm notification', log)
    return True
|
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """
    Write s_body to filename (the file is created if missing).
    @filename file path
    @s_body   content to write
    @mode     open() mode, defaults to 'w+'
    @return True on success, False on failure
    """
    try:
        # Context manager guarantees the handle is closed even if write fails.
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except Exception:
        # Retry once with an explicit UTF-8 encoding: the locale default
        # encoding can reject non-ASCII content on some systems.
        try:
            with open(filename, mode=mode, encoding="utf-8") as fp:
                fp.write(s_body)
            return True
        except Exception:
            return False
|
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """
    Read and return the full content of a file.
    @filename file path
    @mode     open() mode, defaults to 'r'
    return the content, or None when the file is missing or unreadable
    """
    import os
    if not os.path.exists(filename):
        return None
    try:
        # `with` closes the handle on every path, matching the original
        # try/finally behavior.
        with open(filename, mode=mode) as fp:
            return fp.read()
    except:
        # Any read/open failure is reported as "no content", as before.
        return None
|
||||
|
||||
|
||||
class _TestMsgTask(BaseTask):
    """
    Test message task used to verify a message-channel configuration.
    """

    @staticmethod
    def the_push_public_data():
        # Shared panel data used when rendering a message body.
        return get_push_public_data()

    def get_keywords(self, task_data: dict) -> str:
        # Not needed for a test push; intentionally returns None.
        pass

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS test pushes are not supported by this task.
        raise NotImplementedError()

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        # Build a fixed "configuration succeeded" official-account message.
        # NOTE(review): relies on self.title being set by get_test_msg() —
        # confirm no other construction path exists.
        msg = WxAccountMsg.new_msg()
        msg.thing_type = self.title
        msg.msg = "The message channel was configured successfully"
        return msg
|
||||
|
||||
|
||||
def get_test_msg(title: str, task_name="Message channel configuration reminders") -> _TestMsgTask:
    """
    Build a test-push task carrying the given title and template name.
    """
    task = _TestMsgTask()
    task.title = title
    task.template_name = task_name
    return task
|
||||
227
mod/base/msg/web_hook_msg.py
Normal file
227
mod/base/msg/web_hook_msg.py
Normal file
@@ -0,0 +1,227 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: baozi <baozi@yakpanel.com>
|
||||
# | 消息通道HOOK模块
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
|
||||
import requests
|
||||
from typing import Optional, Union
|
||||
from urllib3.util import parse_url
|
||||
|
||||
from .util import write_push_log, get_test_msg
|
||||
import json
|
||||
import public
|
||||
# config = {
|
||||
# "name": "default",
|
||||
# "url": "https://www.yakpanel.com",
|
||||
# "query": {
|
||||
# "aaa": "111"
|
||||
# },
|
||||
# "header": {
|
||||
# "AAA": "BBBB",
|
||||
# },
|
||||
# "body_type": ["json", "form_data", "null"],
|
||||
# "custom_parameter": {
|
||||
# "rrr": "qqqq"
|
||||
# },
|
||||
# "method": ["GET", "POST", "PUT", "PATCH"],
|
||||
# "ssl_verify": [True, False]
|
||||
# }
|
||||
# #
|
||||
# # 1.自动解析Query参数,拼接并展示给用户 # 可不做
|
||||
# # 2.自定义Header头 # 必做
|
||||
# # 3.Body中的内容是: type:str="首页磁盘告警", time:int=168955427, data:str="xxxxxx" # ?
|
||||
# # 4.自定义参数: key=value 添加在Body中 # 可不做
|
||||
# # 5.请求类型自定义 # 必做
|
||||
# # 以上内容需要让用户可测试--!
|
||||
|
||||
|
||||
class WebHookMsg(object):
    """
    Web-hook message channel: delivers panel notifications to a
    user-configured URL with configurable method, headers and body format.
    """

    # Headers applied to every request; user-configured headers may
    # override or extend these.
    DEFAULT_HEADERS = {
        "User-Agent": "Yak-Panel",
    }

    def __init__(self, hook_data: dict):
        # hook_data: {"id": <channel id>, "data": <hook config dict>}
        self.id = hook_data["id"]
        self.config = hook_data["data"]

    def _replace_and_parse(self, value, real_data):
        """Replace the $1 placeholder with the JSON-encoded payload, recursing into dicts."""
        if isinstance(value, str):
            value = value.replace("$1", json.dumps(real_data, ensure_ascii=False))
        elif isinstance(value, dict):
            for k, v in value.items():
                value[k] = self._replace_and_parse(v, real_data)
        return value

    def send_msg(self, msg: str, title:str, push_type:str) -> Optional[str]:
        """
        Send one notification to the configured hook URL.
        @param msg: message body
        @param title: message title (also used in the push log)
        @param push_type: notification type tag included in the payload
        @return: None on success, an error string / response text on failure

        NOTE(review): on a non-200 response this returns before
        write_push_log(), so such failures are never logged; the configured
        "query" parameters are also never appended to the URL — confirm
        whether both are intentional.
        """
        the_url = parse_url(self.config['url'])

        # SSL verification defaults to "on for https" when not configured.
        ssl_verify = self.config.get("ssl_verify", None)
        if ssl_verify is None:
            ssl_verify = the_url.scheme == "https"
        else:
            ssl_verify = bool(int(ssl_verify)) # convert to a boolean

        # Default payload sent to the hook.
        real_data = {
            "title": title,
            "msg": msg,
            "type": push_type,
        }
        custom_parameter = self.config.get("custom_parameter", {})
        if not isinstance(custom_parameter, dict):
            custom_parameter = {} # fall back to an empty dict when malformed
        # Substitute $1 in custom parameters with the real payload, recursively.
        custom_data = {}
        for k, v in custom_parameter.items():
            custom_data[k] = self._replace_and_parse(v, real_data)

        # Custom parameters, when present, replace the default payload.
        if custom_data:
            real_data = custom_data

        data = None
        json_data = None
        headers = self.DEFAULT_HEADERS.copy()
        if self.config["body_type"] == "json":
            json_data = real_data
        elif self.config["body_type"] == "form_data":
            data = real_data

        # Merge user headers; values are coerced to strings.
        for k, v in self.config.get("headers", {}).items():
            if not isinstance(v, str):
                v = str(v)
            headers[k] = v

        status = False
        error = None
        timeout = 10
        # Form-data values must be flat strings: JSON-encode nested values.
        if data:
            for k, v in data.items():
                if isinstance(v, str):
                    continue
                else:
                    data[k]=json.dumps(v)

        # Up to 3 attempts; timeouts/connection errors back off by +5s each.
        for i in range(3):
            try:
                if json_data is not None:
                    res = requests.request(
                        method=self.config["method"],
                        url=str(the_url),
                        json=json_data,
                        headers=headers,
                        timeout=timeout,
                        verify=ssl_verify,
                    )
                else:
                    res = requests.request(
                        method=self.config["method"],
                        url=str(the_url),
                        data=data,
                        headers=headers,
                        timeout=timeout,
                        verify=ssl_verify,
                    )

                if res.status_code == 200:
                    status = True
                    break
                else:
                    status = False
                    return res.text
            except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
                timeout += 5
                continue
            except requests.exceptions.RequestException as e:
                error = str(e)
                break

        write_push_log("Web Hook", status, title)
        return error

    @classmethod
    def check_args(cls, args) -> Union[str, dict]:
        """
        Validate a hook configuration and run a test push.
        @return: the validated config dict on success, an error string otherwise.
        """
        try:
            title = args['title']
            url = args["url"]
            query = args.get("query", {})
            headers = args.get("headers", {})
            body_type = args.get("body_type", "json")
            custom_parameter = args.get("custom_parameter", {})
            method = args.get("method", "POST")
            ssl_verify = args.get("ssl_verify", None) # None means auto (https only)
        except (ValueError, KeyError):
            return public.lang('The parameter is incorrect')

        the_url = parse_url(url)
        if the_url.scheme is None or the_url.host is None:
            return"URL parsing error, which may not be a legitimate URL"

        for i in (query, headers, custom_parameter):
            if not isinstance(i, dict):
                return public.lang('Parameter format error')

        if body_type not in ('json', 'form_data', 'null'):
            return public.lang('The body type must be json,form data, or null')

        if method not in ('GET', 'POST', 'PUT', 'PATCH'):
            return public.lang('The sending method is incorrect')

        if ssl_verify not in (True, False, None):
            return public.lang('Verify if the SSL option is wrong')

        title = title.strip()
        if title == "":
            return"The name cannot be empty"

        data = {
            "title": title,
            "url": url,
            "query": query,
            "headers": headers,
            "body_type": body_type,
            "custom_parameter": custom_parameter,
            "method": method,
            "ssl_verify": ssl_verify,
            "status": True
        }

        # Send a test notification before accepting the configuration.
        test_obj = cls({"data": data, "id": None})
        test_msg = {
            "msg_list": ['>configuration state: Success\n\n']
        }

        test_task = get_test_msg("Message channel configuration reminders")

        res = test_obj.send_msg(
            test_task.to_web_hook_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders",
            "Message channel configuration reminders"
        )
        if res is None:
            return data

        return res

    def test_send_msg(self) -> Optional[str]:
        """Send the standard configuration-test message; None means success."""
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }
        test_task = get_test_msg("Message channel configuration reminders")
        res = self.send_msg(
            test_task.to_web_hook_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders",
            "Message channel configuration reminders"
        )
        if res is None:
            return None
        return res
|
||||
|
||||
131
mod/base/msg/weixin_msg.py
Normal file
131
mod/base/msg/weixin_msg.py
Normal file
@@ -0,0 +1,131 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: baozi <baozi@yakpanel.com>
|
||||
# | WeChat message-channel module
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
import json
|
||||
import requests
|
||||
import traceback
|
||||
import socket
|
||||
import public
|
||||
|
||||
import requests.packages.urllib3.util.connection as urllib3_cn
|
||||
from requests.packages import urllib3
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import write_push_log, get_test_msg
|
||||
|
||||
# 关闭警告
|
||||
urllib3.disable_warnings()
|
||||
|
||||
|
||||
class WeiXinMsg:
    """
    WeChat webhook robot channel: posts markdown notifications to the
    configured robot URL.
    """

    def __init__(self, weixin_data):
        # weixin_data: {"id": <channel id>, "data": {"url": ..., "title": ...}}
        self.id = weixin_data["id"]
        self.config = weixin_data["data"]

    @classmethod
    def check_args(cls, args: dict) -> Union[dict, str]:
        """
        Validate channel args and run a test push.
        @return: the validated config dict on success, an error string otherwise.
        """
        if "url" not in args or "title" not in args:
            return public.lang('Incomplete information')

        title = args["title"]
        if len(title) > 15:
            return public.lang('Note names cannot be longer than 15 characters')

        data = {
            "url": args["url"],
            "title": title,
        }

        # Exercise the configuration with a real test push before accepting it.
        test_obj = cls({"data": data, "id": None})
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n']
        }

        test_task = get_test_msg("Message channel configuration reminders")

        res = test_obj.send_msg(
            test_task.to_weixin_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders"
        )
        if res is None:
            return data

        return res

    def send_msg(self, msg: str, title: str) -> Optional[str]:
        """
        Post a markdown message to the WeChat webhook.
        @msg   message body (should contain server name, IP and send time)
        @title title used for the push log
        @return None on success, a traceback string on failure
        """
        if not self.config:
            return public.lang('WeChat information is not configured correctly')

        # The webhook's markdown does not render <font> tags: keep only the
        # inner text.
        reg = '<font.+>(.+)</font>'
        tmp = re.search(reg, msg)
        if tmp:
            tmp = tmp.groups()[0]
            msg = re.sub(reg, tmp, msg)

        data = {
            "msgtype": "markdown",
            "markdown": {
                "content": msg
            }
        }
        headers = {'Content-Type': 'application/json'}

        status = False
        error = None

        # Force IPv4 resolution for this request only. The restore now lives
        # in a finally block: the original left the global urllib3 hook
        # patched whenever requests.post() raised.
        def allowed_gai_family():
            return socket.AF_INET

        allowed_gai_family_lib = urllib3_cn.allowed_gai_family
        urllib3_cn.allowed_gai_family = allowed_gai_family
        try:
            response = requests.post(
                url=self.config["url"],
                data=json.dumps(data),
                verify=False,
                headers=headers,
                timeout=10
            )
            if response.json()["errcode"] == 0:
                status = True
        except Exception:
            error = traceback.format_exc()
        finally:
            urllib3_cn.allowed_gai_family = allowed_gai_family_lib

        write_push_log("weixin", status, title)
        return error

    def test_send_msg(self) -> Optional[str]:
        """Send the standard configuration-test message; None means success."""
        test_msg = {
            "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
        }
        test_task = get_test_msg("Message channel configuration reminders")
        res = self.send_msg(
            test_task.to_weixin_msg(test_msg, test_task.the_push_public_data()),
            "Message channel configuration reminders",
        )
        if res is None:
            return None
        return res
|
||||
|
||||
|
||||
|
||||
|
||||
565
mod/base/msg/wx_account_msg.py
Normal file
565
mod/base/msg/wx_account_msg.py
Normal file
@@ -0,0 +1,565 @@
|
||||
# coding: utf-8
|
||||
# +-------------------------------------------------------------------
|
||||
# | yakpanel
|
||||
# +-------------------------------------------------------------------
|
||||
# | Copyright (c) 2015-2020 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# +-------------------------------------------------------------------
|
||||
# | Author: baozi <baozi@yakpanel.com>
|
||||
# | 消息通道微信公众号模块
|
||||
# +-------------------------------------------------------------------
|
||||
|
||||
import os, sys
|
||||
import time, base64
|
||||
|
||||
import re
|
||||
import json
|
||||
import requests
|
||||
import traceback
|
||||
import socket
|
||||
import public
|
||||
|
||||
import requests.packages.urllib3.util.connection as urllib3_cn
|
||||
from requests.packages import urllib3
|
||||
from typing import Optional, Union, List, Dict, Any
|
||||
|
||||
from .util import write_push_log, get_test_msg, read_file, public_http_post
|
||||
from mod.base.push_mod import WxAccountMsg, SenderConfig
|
||||
from mod.base import json_response
|
||||
|
||||
# 关闭警告
|
||||
urllib3.disable_warnings()
|
||||
|
||||
|
||||
class WeChatAccountMsg:
|
||||
USER_PATH = '/www/server/panel/data/userInfo.json'
|
||||
need_refresh_file = '/www/server/panel/data/mod_push_data/refresh_wechat_account.tip'
|
||||
refresh_time = '/www/server/panel/data/mod_push_data/refresh_wechat_account_time.pl'
|
||||
|
||||
def __init__(self, *config_data):
|
||||
if len(config_data) == 0:
|
||||
self.config = None
|
||||
elif len(config_data) == 1:
|
||||
self.config = config_data[0]["data"]
|
||||
else:
|
||||
self.config = config_data[0]["data"]
|
||||
self.config["users"] = [i["data"]['id'] for i in config_data]
|
||||
self.config["users_nickname"] = [i["data"]['nickname'] for i in config_data]
|
||||
try:
|
||||
self.user_info = json.loads(read_file(self.USER_PATH))
|
||||
except:
|
||||
self.user_info = None
|
||||
|
||||
@classmethod
|
||||
def get_user_info(cls) -> Optional[dict]:
|
||||
try:
|
||||
return json.loads(read_file(cls.USER_PATH))
|
||||
except:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def last_refresh(cls):
|
||||
tmp = read_file(cls.refresh_time)
|
||||
if not tmp:
|
||||
last_refresh_time = 0
|
||||
else:
|
||||
try:
|
||||
last_refresh_time = int(tmp)
|
||||
except:
|
||||
last_refresh_time = 0
|
||||
return last_refresh_time
|
||||
|
||||
@staticmethod
|
||||
def get_local_ip() -> str:
|
||||
"""获取内网IP"""
|
||||
import socket
|
||||
s = None
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.connect(('8.8.8.8', 80))
|
||||
ip = s.getsockname()[0]
|
||||
return ip
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
if s is not None:
|
||||
s.close()
|
||||
return '127.0.0.1'
|
||||
|
||||
def send_msg(self, msg: WxAccountMsg) -> Optional[str]:
|
||||
if self.user_info is None:
|
||||
return public.lang('No user information was obtained')
|
||||
if public.is_self_hosted():
|
||||
return public.lang('WeChat official account cloud features are not available in self-hosted mode.')
|
||||
|
||||
msg.set_ip_address(self.user_info["address"], self.get_local_ip())
|
||||
template_id, msg_data = msg.to_send_data()
|
||||
url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/send_template_msg_v3"
|
||||
wx_account_ids = self.config["users"] if "users" in self.config else [self.config["id"], ]
|
||||
data = {
|
||||
"uid": self.user_info["uid"],
|
||||
"access_key": 'B' * 32,
|
||||
"data": base64.b64encode(json.dumps(msg_data).encode('utf-8')).decode('utf-8'),
|
||||
"wx_account_ids": base64.b64encode(json.dumps(wx_account_ids).encode('utf-8')).decode('utf-8'),
|
||||
}
|
||||
if template_id != "":
|
||||
data["template_id"] = template_id
|
||||
|
||||
status = False
|
||||
error = None
|
||||
user_name = self.config["users_nickname"] if "users_nickname" in self.config else [self.config["nickname"], ]
|
||||
try:
|
||||
|
||||
resp = public_http_post(url, data)
|
||||
x = json.loads(resp)
|
||||
if x["success"]:
|
||||
status = True
|
||||
else:
|
||||
status = False
|
||||
error = x["res"]
|
||||
except:
|
||||
error = traceback.format_exc()
|
||||
|
||||
write_push_log("wx_account", status, msg.thing_type, user_name)
|
||||
|
||||
return error
|
||||
|
||||
@classmethod
|
||||
def refresh_config(cls, force: bool = False):
|
||||
if os.path.exists(cls.need_refresh_file):
|
||||
force = True
|
||||
os.remove(cls.need_refresh_file)
|
||||
if force or cls.last_refresh() + 60 * 10 < time.time():
|
||||
cls._get_by_web()
|
||||
|
||||
@classmethod
|
||||
def _get_by_web(cls) -> Optional[List]:
|
||||
user_info = cls.get_user_info()
|
||||
if user_info is None:
|
||||
return None
|
||||
if public.is_self_hosted():
|
||||
return None
|
||||
url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/bound_wx_accounts"
|
||||
data = {
|
||||
"uid": user_info["uid"],
|
||||
"access_key": 'B' * 32,
|
||||
"serverid": user_info["server_id"]
|
||||
}
|
||||
try:
|
||||
data = json.loads(public_http_post(url, data))
|
||||
if not data["success"]:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
cls._save_user_info(data["res"])
|
||||
return data["res"]
|
||||
|
||||
@staticmethod
|
||||
def _save_user_info(user_config_list: List[Dict[str, Any]]):
|
||||
print(user_config_list)
|
||||
user_config_dict = {i["hex"]: i for i in user_config_list}
|
||||
|
||||
remove_list = []
|
||||
sc = SenderConfig()
|
||||
for i in sc.config:
|
||||
if i['sender_type'] != "wx_account":
|
||||
continue
|
||||
if i['data'].get("hex", None) in user_config_dict:
|
||||
i['data'].update(user_config_dict[i['data']["hex"]])
|
||||
user_config_dict.pop(i['data']["hex"])
|
||||
else:
|
||||
remove_list.append(i)
|
||||
|
||||
for r in remove_list:
|
||||
sc.config.remove(r)
|
||||
|
||||
if user_config_dict: # 还有多的
|
||||
for v in user_config_dict.values():
|
||||
v["title"] = v["nickname"]
|
||||
sc.config.append({
|
||||
"id": sc.nwe_id(),
|
||||
"used": True,
|
||||
"sender_type": "wx_account",
|
||||
"data": v
|
||||
})
|
||||
sc.save_config()
|
||||
|
||||
@classmethod
|
||||
def unbind(cls, wx_account_uid: str):
|
||||
user_info = cls.get_user_info()
|
||||
if user_info is None:
|
||||
return json_response(status=True, msg=public.lang('The user binding information was not obtained'))
|
||||
url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/unbind_wx_accounts"
|
||||
data = {
|
||||
"uid": user_info["uid"],
|
||||
"access_key": 'B' * 32,
|
||||
"serverid": user_info["server_id"],
|
||||
"ids": str(wx_account_uid)
|
||||
}
|
||||
try:
|
||||
datas = json.loads(public_http_post(url, data))
|
||||
if datas["success"]:
|
||||
return json_response(status=True, data=datas, msg=public.lang('The unbinding is successful'))
|
||||
else:
|
||||
return json_response(status=False, data=datas, msg=datas["res"])
|
||||
except:
|
||||
return json_response(status=True, msg=public.lang('Failed to link to the cloud'))
|
||||
|
||||
@classmethod
|
||||
def get_auth_url(cls):
|
||||
user_info = cls.get_user_info()
|
||||
if user_info is None:
|
||||
return json_response(status=True, msg=public.lang('The user binding information was not obtained'))
|
||||
if public.is_self_hosted():
|
||||
return json_response(status=False, msg=public.lang('WeChat official account cloud features are not available in self-hosted mode.'))
|
||||
url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/get_auth_url"
|
||||
data = {
|
||||
"uid": user_info["uid"],
|
||||
"access_key": 'B' * 32,
|
||||
"serverid": user_info["server_id"],
|
||||
}
|
||||
try:
|
||||
datas = json.loads(public_http_post(url, data))
|
||||
if datas["success"]:
|
||||
return json_response(status=True, data=datas)
|
||||
else:
|
||||
return json_response(status=False, data=datas, msg=datas["res"])
|
||||
except:
|
||||
return json_response(status=True, msg=public.lang('Failed to link to the cloud'))
|
||||
|
||||
def test_send_msg(self) -> Optional[str]:
    """Push a canned test message through the wx_account channel.

    :return: whatever send_msg returns -- None on success, otherwise
        an error description string.
    """
    payload = {
        "msg_list": ['>configuration state: <font color=#20a53a> Success </font>\n\n']
    }
    task = get_test_msg("Message channel configuration reminders")
    # send_msg already yields None on success, so its result can be
    # returned directly (the old `if res is None: return None` was redundant).
    return self.send_msg(
        task.to_wx_account_msg(payload, task.the_push_public_data()),
    )
|
||||
|
||||
|
||||
# class wx_account_msg:
|
||||
# __module_name = None
|
||||
# __default_pl = "{}/data/default_msg_channel.pl".format(panelPath)
|
||||
# conf_path = '{}/data/wx_account_msg.json'.format(panelPath)
|
||||
# user_info = None
|
||||
#
|
||||
# def __init__(self):
|
||||
# try:
|
||||
# self.user_info = json.loads(public.ReadFile("{}/data/userInfo.json".format(public.get_panel_path())))
|
||||
# except:
|
||||
# self.user_info = None
|
||||
# self.__module_name = self.__class__.__name__.replace('_msg', '')
|
||||
#
|
||||
# def get_version_info(self, get):
|
||||
# """
|
||||
# 获取版本信息
|
||||
# """
|
||||
# data = {}
|
||||
# data['ps'] = 'YakPanel 微信公众号,用于接收面板消息推送'
|
||||
# data['version'] = '1.0'
|
||||
# data['date'] = '2022-08-15'
|
||||
# data['author'] = 'YakPanel'
|
||||
# data['title'] = '微信公众号'
|
||||
# data['help'] = 'http://www.yakpanel.com'
|
||||
# return data
|
||||
#
|
||||
# def get_local_ip(self):
|
||||
# '''获取内网IP'''
|
||||
# import socket
|
||||
# try:
|
||||
# s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
# s.connect(('8.8.8.8', 80))
|
||||
# ip = s.getsockname()[0]
|
||||
# return ip
|
||||
# finally:
|
||||
# s.close()
|
||||
# return '127.0.0.1'
|
||||
#
|
||||
# def get_config(self, get):
|
||||
# """
|
||||
# 微信公众号配置
|
||||
# """
|
||||
# if os.path.exists(self.conf_path):
|
||||
# # 60S内不重复加载
|
||||
# start_time = int(time.time())
|
||||
# if os.path.exists("data/wx_account_msg.lock"):
|
||||
# lock_time = 0
|
||||
# try:
|
||||
# lock_time = int(public.ReadFile("data/wx_account_msg.lock"))
|
||||
# except:
|
||||
# pass
|
||||
# # 大于60S重新加载
|
||||
# if start_time - lock_time > 60:
|
||||
# public.run_thread(self.get_web_info2)
|
||||
# public.WriteFile("data/wx_account_msg.lock", str(start_time))
|
||||
# else:
|
||||
# public.WriteFile("data/wx_account_msg.lock", str(start_time))
|
||||
# public.run_thread(self.get_web_info2)
|
||||
# data = json.loads(public.ReadFile(self.conf_path))
|
||||
#
|
||||
# if not 'list' in data: data['list'] = {}
|
||||
#
|
||||
# title = '默认'
|
||||
# if 'res' in data and 'nickname' in data['res']: title = data['res']['nickname']
|
||||
#
|
||||
# data['list']['default'] = {'title': title, 'data': ''}
|
||||
#
|
||||
# data['default'] = self.__get_default_channel()
|
||||
# return data
|
||||
# else:
|
||||
# public.run_thread(self.get_web_info2)
|
||||
# return {"success": False, "res": "未获取到配置信息"}
|
||||
#
|
||||
# def set_config(self, get):
|
||||
# """
|
||||
# @设置默认值
|
||||
# """
|
||||
# if 'default' in get and get['default']:
|
||||
# public.writeFile(self.__default_pl, self.__module_name)
|
||||
#
|
||||
# return public.returnMsg(True, '设置成功')
|
||||
#
|
||||
# def get_web_info(self, get):
|
||||
# if self.user_info is None: return public.returnMsg(False, 'The user binding information was not obtained')
|
||||
# url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/info"
|
||||
# data = {
|
||||
# "uid": self.user_info["uid"],
|
||||
# "access_key": self.user_info["access_key"],
|
||||
# "serverid": self.user_info["server_id"]
|
||||
# }
|
||||
# try:
|
||||
#
|
||||
# datas = json.loads(public.httpPost(url, data))
|
||||
#
|
||||
# if datas["success"]:
|
||||
# public.WriteFile(self.conf_path, json.dumps(datas))
|
||||
# return public.returnMsg(True, datas)
|
||||
# else:
|
||||
# public.WriteFile(self.conf_path, json.dumps(datas))
|
||||
# return public.returnMsg(False, datas)
|
||||
# except:
|
||||
# public.WriteFile(self.conf_path, json.dumps({"success": False, "res": "链接云端失败,请检查网络"}))
|
||||
# return public.returnMsg(False, "链接云端失败,请检查网络")
|
||||
#
|
||||
# def unbind(self):
|
||||
# if self.user_info is None:
|
||||
# return public.returnMsg(False, 'The user binding information was not obtained')
|
||||
# url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/unbind"
|
||||
# data = {
|
||||
# "uid": self.user_info["uid"],
|
||||
# "access_key": self.user_info["access_key"],
|
||||
# "serverid": self.user_info["server_id"]
|
||||
# }
|
||||
# try:
|
||||
#
|
||||
# datas = json.loads(public.httpPost(url, data))
|
||||
#
|
||||
# if os.path.exists(self.conf_path):
|
||||
# os.remove(self.conf_path)
|
||||
#
|
||||
# if datas["success"]:
|
||||
# return public.returnMsg(True, datas)
|
||||
# else:
|
||||
# return public.returnMsg(False, datas)
|
||||
# except:
|
||||
# public.WriteFile(self.conf_path, json.dumps({"success": False, "res": "链接云端失败,请检查网络"}))
|
||||
# return public.returnMsg(False, "链接云端失败,请检查网络")
|
||||
#
|
||||
# def get_web_info2(self):
|
||||
# if self.user_info is None:
|
||||
# return public.returnMsg(False, 'The user binding information was not obtained')
|
||||
# url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/info"
|
||||
# data = {
|
||||
# "uid": self.user_info["uid"],
|
||||
# "access_key": self.user_info["access_key"],
|
||||
# "serverid": self.user_info["server_id"]
|
||||
# }
|
||||
# try:
|
||||
# datas = json.loads(public.httpPost(url, data))
|
||||
# if datas["success"]:
|
||||
# public.WriteFile(self.conf_path, json.dumps(datas))
|
||||
# return public.returnMsg(True, datas)
|
||||
# else:
|
||||
# public.WriteFile(self.conf_path, json.dumps(datas))
|
||||
# return public.returnMsg(False, datas)
|
||||
# except:
|
||||
# public.WriteFile(self.conf_path, json.dumps({"success": False, "res": "链接云端失败"}))
|
||||
# return public.returnMsg(False, "链接云端失败")
|
||||
#
|
||||
# def get_send_msg(self, msg):
|
||||
# """
|
||||
# @name 处理md格式
|
||||
# """
|
||||
# try:
|
||||
# import re
|
||||
# title = 'YakPanel 告警通知'
|
||||
# if msg.find("####") >= 0:
|
||||
# try:
|
||||
# title = re.search(r"####(.+)", msg).groups()[0]
|
||||
# except:
|
||||
# pass
|
||||
#
|
||||
# msg = msg.replace("####", ">").replace("\n\n", "\n").strip()
|
||||
# s_list = msg.split('\n')
|
||||
#
|
||||
# if len(s_list) > 3:
|
||||
# s_title = s_list[0].replace(" ", "")
|
||||
# s_list = s_list[3:]
|
||||
# s_list.insert(0, s_title)
|
||||
# msg=public.lang('\n').join(s_list)
|
||||
#
|
||||
# s_list = []
|
||||
# for msg_info in msg.split('\n'):
|
||||
# reg = '<font.+>(.+)</font>'
|
||||
# tmp = re.search(reg, msg_info)
|
||||
# if tmp:
|
||||
# tmp = tmp.groups()[0]
|
||||
# msg_info = re.sub(reg, tmp, msg_info)
|
||||
# s_list.append(msg_info)
|
||||
# msg=public.lang('\n').join(s_list)
|
||||
# except:
|
||||
# pass
|
||||
# return msg, title
|
||||
#
|
||||
# def send_msg(self, msg):
|
||||
# """
|
||||
# 微信发送信息
|
||||
# @msg 消息正文
|
||||
# """
|
||||
#
|
||||
# if self.user_info is None:
|
||||
# return public.returnMsg(False, '未获取到用户信息')
|
||||
#
|
||||
# if not isinstance(msg, str):
|
||||
# return self.send_msg_v2(msg)
|
||||
#
|
||||
# msg, title = self.get_send_msg(msg)
|
||||
# url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/send_template_msg_v2"
|
||||
# datassss = {
|
||||
# "first": {
|
||||
# "value": "堡塔主机告警",
|
||||
# },
|
||||
# "keyword1": {
|
||||
# "value": "内网IP " + self.get_local_ip() + "\n外网IP " + self.user_info[
|
||||
# "address"] + " \n服务器别名 " + public.GetConfigValue("title"),
|
||||
# },
|
||||
# "keyword2": {
|
||||
# "value": "堡塔主机告警",
|
||||
# },
|
||||
# "keyword3": {
|
||||
# "value": msg,
|
||||
# },
|
||||
# "remark": {
|
||||
# "value": "如有疑问,请联系YakPanel 支持",
|
||||
# },
|
||||
# }
|
||||
# data = {
|
||||
# "uid": self.user_info["uid"],
|
||||
# "access_key": self.user_info["access_key"],
|
||||
# "data": base64.b64encode(json.dumps(datassss).encode('utf-8')).decode('utf-8')
|
||||
# }
|
||||
#
|
||||
# try:
|
||||
# res = {}
|
||||
# error, success = 0, 0
|
||||
#
|
||||
# x = json.loads(public.httpPost(url, data))
|
||||
# conf = self.get_config(None)['list']
|
||||
#
|
||||
# # 立即刷新剩余次数
|
||||
# public.run_thread(self.get_web_info2)
|
||||
#
|
||||
# res[conf['default']['title']] = 0
|
||||
# if x['success']:
|
||||
# res[conf['default']['title']] = 1
|
||||
# success += 1
|
||||
# else:
|
||||
# error += 1
|
||||
#
|
||||
# try:
|
||||
# public.write_push_log(self.__module_name, title, res)
|
||||
# except:
|
||||
# pass
|
||||
#
|
||||
# result = public.returnMsg(True, '发送完成,发送成功{},发送失败{}.'.format(success, error))
|
||||
# result['success'] = success
|
||||
# result['error'] = error
|
||||
# return result
|
||||
#
|
||||
# except:
|
||||
# print(public.get_error_info())
|
||||
# return public.returnMsg(False, '微信消息发送失败。 --> {}'.format(public.get_error_info()))
|
||||
#
|
||||
# def push_data(self, data):
|
||||
# if isinstance(data, dict):
|
||||
# return self.send_msg(data['msg'])
|
||||
# else:
|
||||
# return self.send_msg_v2(data)
|
||||
#
|
||||
# def uninstall(self):
|
||||
# if os.path.exists(self.conf_path):
|
||||
# os.remove(self.conf_path)
|
||||
#
|
||||
# def send_msg_v2(self, msg):
|
||||
# from push.base_push import WxAccountMsgBase, WxAccountMsg
|
||||
# if self.user_info is None:
|
||||
# return public.returnMsg(False, '未获取到用户信息')
|
||||
#
|
||||
# if isinstance(msg, public.dict_obj):
|
||||
# msg = getattr(msg, "msg", "测试信息")
|
||||
# if len(msg) >= 20:
|
||||
# return self.send_msg(msg)
|
||||
#
|
||||
# if isinstance(msg, str):
|
||||
# the_msg = WxAccountMsg.new_msg()
|
||||
# the_msg.thing_type = msg
|
||||
# the_msg.msg = msg
|
||||
# msg = the_msg
|
||||
#
|
||||
# if not isinstance(msg, WxAccountMsgBase):
|
||||
# return public.returnMsg(False, '消息类型错误')
|
||||
#
|
||||
# msg.set_ip_address(self.user_info["address"], self.get_local_ip())
|
||||
#
|
||||
# template_id, msg_data = msg.to_send_data()
|
||||
# url = "https://wafapi2.yakpanel.com/api/v2/user/wx_web/send_template_msg_v2"
|
||||
# data = {
|
||||
# "uid": self.user_info["uid"],
|
||||
# "access_key": self.user_info["access_key"],
|
||||
# "data": base64.b64encode(json.dumps(msg_data).encode('utf-8')).decode('utf-8'),
|
||||
# }
|
||||
# if template_id != "":
|
||||
# data["template_id"] = template_id
|
||||
#
|
||||
# try:
|
||||
# error, success = 0, 0
|
||||
# resp = public.httpPost(url, data)
|
||||
# x = json.loads(resp)
|
||||
# conf = self.get_config(None)['list']
|
||||
#
|
||||
# # 立即刷新剩余次数
|
||||
# public.run_thread(self.get_web_info2)
|
||||
#
|
||||
# res = {
|
||||
# conf['default']['title']: 0
|
||||
# }
|
||||
# if x['success']:
|
||||
# res[conf['default']['title']] = 1
|
||||
# success += 1
|
||||
# else:
|
||||
# error += 1
|
||||
#
|
||||
# try:
|
||||
# public.write_push_log(self.__module_name, msg.thing_type, res)
|
||||
# except:
|
||||
# pass
|
||||
# result = public.returnMsg(True, '发送完成,发送成功{},发送失败{}.'.format(success, error))
|
||||
# result['success'] = success
|
||||
# result['error'] = error
|
||||
# return result
|
||||
#
|
||||
# except:
|
||||
# return public.returnMsg(False, '微信消息发送失败。 --> {}'.format(public.get_error_info()))
|
||||
7
mod/base/process/__init__.py
Normal file
7
mod/base/process/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from .process import RealProcess
|
||||
from .user import RealUser
|
||||
from .server import RealServer
|
||||
from .process import Process
|
||||
from .user import User
|
||||
from .server import Server
|
||||
__all__ = ['RealProcess', 'Process', 'RealUser', 'User', 'RealServer', 'Server']
|
||||
889
mod/base/process/process.py
Normal file
889
mod/base/process/process.py
Normal file
@@ -0,0 +1,889 @@
|
||||
# coding: utf-8
|
||||
# -------------------------------------------------------------------
|
||||
# yakpanel
|
||||
# -------------------------------------------------------------------
|
||||
# Copyright (c) 2015-2099 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# -------------------------------------------------------------------
|
||||
# Author: sww <sww@yakpanel.com>
|
||||
# -------------------------------------------------------------------
|
||||
import json
|
||||
import os
|
||||
# ------------------------------
|
||||
# 进程模型
|
||||
# ------------------------------
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
os.chdir("/www/server/panel")
|
||||
import public
|
||||
import psutil
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
from YakPanel import cache
|
||||
except:
|
||||
import cachelib
|
||||
|
||||
cache = cachelib.SimpleCache()
|
||||
|
||||
|
||||
class RealProcess:
|
||||
# Root of the procfs mount used for per-process lookups.
process_path = '/proc'
# name -> human-readable description, loaded once at class creation.
ps = json.loads(public.readFile('/www/server/panel/mod/base/process/process_ps.json'))
# Firewall frontend availability flags (probed per-instance in __init__).
__isUfw = False
__isFirewalld = False
# Previous / current sampling snapshots used for cpu% delta computation.
old_info = {}
new_info = {}
# Cache key under which the cpu sampling snapshot is stored.
old_path = '/tmp/bt_task_old1.json'
# Memoised total system cpu time for the current sampling round.
__cpu_time = None
# pid -> latest network counters parsed from /dev/shm/bt_net_process.
__process_net_list = {}
# Previous network sample and its timestamp (restored from cache).
last_net_process = None
last_net_process_time = 0
# NOTE(review): the three *_net_* fields below appear unused by the
# methods visible in this class -- confirm before relying on them.
old_net_path = '/tmp/bt_network_old1.json'
old_net_info = {}
new_net_info = {}
||||
|
||||
def __init__(self):
    """Probe the host for available firewall frontends."""
    # Presence of the binary decides which commands the black-ip /
    # reload helpers will run later.
    if os.path.exists('/usr/sbin/firewalld'):
        self.__isFirewalld = True
    if os.path.exists('/usr/sbin/ufw'):
        self.__isUfw = True
|
||||
|
||||
def object_to_dict(self, obj):
    """Collect an object's public, non-callable attributes into a dict."""
    return {
        attr: getattr(obj, attr)
        for attr in dir(obj)
        if not attr.startswith('_') and not callable(getattr(obj, attr))
    }
|
||||
|
||||
def get_computers_use(self):
    """Snapshot overall host utilisation.

    :return: dict with average cpu %, memory %, disk % for '/' and the
        cumulative network byte counters.
    """
    result = {}
    # Average per-core usage sampled over a one-second interval.
    cpu_usage = psutil.cpu_percent(interval=1, percpu=True)
    result['cpu'] = round(sum(cpu_usage) / len(cpu_usage), 2)
    memory = psutil.virtual_memory()
    # Bug fix: removed leftover debug statement `print(memory.total)`.
    result['memory_usage'] = memory.percent
    disk = psutil.disk_usage('/')
    result['disk_usage'] = round(((disk.used / disk.total) * 100), 0)
    network_io = psutil.net_io_counters()
    result['network_io_bytes_sent'] = network_io.bytes_sent
    result['network_io_bytes_recv'] = network_io.bytes_recv

    return result
|
||||
|
||||
# ------------------------------ 获取进程列表 start ------------------------------
|
||||
def get_process_list(self):
    """
    获取进程列表 (enumerate all processes with cpu/mem/io/net details).

    :return: public.returnResult; data is a list of per-process dicts.
    """
    try:
        process_list = []
        if type(self.new_info) != dict: self.new_info = {}
        # Start a sampling round: snapshot the totals get_cpu_percent needs.
        self.new_info['cpu_time'] = self.get_cpu_time()
        self.new_info['time'] = time.time()
        self.get_process_net_list()
        attrs = ['pid', 'ppid', 'name', 'username', 'create_time', 'memory_info', 'io_counters',
                 'num_threads', 'connections', 'open_files', 'status', 'cmdline']
        for proc in psutil.process_iter(attrs):
            try:
                proc_info = proc.as_dict(attrs=attrs)
                p_cpus = proc.cpu_times()
                process_list.append({
                    'pid': proc_info['pid'],
                    'ppid': proc_info['ppid'],
                    'name': proc_info['name'],
                    'username': proc_info['username'],
                    'cpu_percent': self.get_cpu_percent(str(proc_info['pid']), p_cpus, self.new_info['cpu_time']),
                    'running_time': time.time() - proc_info['create_time'],
                    'memory_info': proc_info['memory_info'],
                    'io_info': proc_info['io_counters'],
                    'num_threads': proc_info['num_threads'],
                    'create_time': proc_info['create_time'],
                    'connections_info': proc_info['connections'],
                    'open_files': proc_info['open_files'],
                    'ps': self.get_process_ps(proc.name())['data'],
                    'status': proc_info['status'],
                    'cmdline': proc_info['cmdline'],
                    'net_info': self.get_process_network(proc_info['pid'])
                })
            except:
                # A process may vanish between iteration and inspection.
                pass
        # Bug fix: persist the sampling snapshot ONCE after the loop -- it was
        # previously re-written to the cache for every single process.
        cache.set(self.old_path, self.new_info, 600)
        return public.returnResult(code=1, msg='success', status=True, data=process_list)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程列表失败' + str(e), status=False)
|
||||
|
||||
# ------------------------------ 获取进程列表 end ------------------------------
|
||||
|
||||
# ------------------------------ 获取进程信息 start ------------------------------
|
||||
|
||||
@staticmethod
def _format_connections(connects):
    """Normalise psutil connection records into plain dicts.

    A missing local or remote endpoint is rendered as ('', 0).
    """
    formatted = []
    for conn in connects:
        local = conn.laddr or ('', 0)
        remote = conn.raddr or ('', 0)
        formatted.append({
            "fd": conn.fd,
            "family": conn.family,
            "local_addr": local[0],
            "local_port": local[1],
            "client_addr": remote[0],
            "client_rport": remote[1],
            "status": conn.status
        })
    return formatted
|
||||
|
||||
@staticmethod
def get_connects(pid: str):
    '''
    @name 获取进程连接信息 (count the socket fds a process holds open)
    @author hwliang<2021-08-09>
    @param pid<int|str> process id
    @return int number of file descriptors pointing at sockets
    '''
    connects = 0
    try:
        # Bug fix: callers pass the pid as a string, so the old `pid == 1`
        # comparison never matched "1"; normalise before comparing.
        if int(pid) == 1:
            return connects
        fd_dir = '/proc/' + str(pid) + '/fd/'
        if not os.path.exists(fd_dir):
            return connects
        for entry in os.listdir(fd_dir):
            link = fd_dir + entry
            # Socket fds show up as symlinks like "socket:[12345]".
            if os.path.islink(link) and os.readlink(link).find('socket:') != -1:
                connects += 1
    except:
        # Process may exit mid-scan or fds may be unreadable; best effort.
        pass
    return connects
|
||||
|
||||
def get_process_info_by_pid(self, pid: int) -> dict:
    """
    获取进程信息 (detailed information for a single process).
    :param pid: process id.
    :return: public.returnResult; data is a dict of process details.
    """
    try:
        # Chinese labels for the two most common states; others pass through.
        status_ps = {'sleeping': '睡眠', 'running': '活动'}
        process = psutil.Process(int(pid))
        if type(self.new_info) != dict: self.new_info = {}
        # Start a sampling round: snapshot the totals get_cpu_percent needs.
        self.new_info['cpu_time'] = self.get_cpu_time()
        self.new_info['time'] = time.time()
        self.get_process_net_list()
        p_cpus = process.cpu_times()
        # Fetch connection information.
        connections = process.connections()
        p_mem = process.memory_full_info()
        io_info = process.io_counters()
        info = {
            'pid': process.pid,
            'ppid': process.ppid(),
            'name': process.name(),
            'threads': process.num_threads(),
            'user': process.username(),
            'username': process.username(),
            'cpu_percent': self.get_cpu_percent(process.pid, p_cpus, self.new_info['cpu_time']),
            'memory_info': self.object_to_dict(p_mem),
            'memory_used': p_mem.uss,
            'io_info': self.object_to_dict(io_info),
            "io_write_bytes": io_info.write_bytes,
            "io_read_bytes": io_info.read_bytes,
            'connections': self._format_connections(connections),
            "connects": self.get_connects(str(process.pid)),
            'status': status_ps[process.status()] if process.status() in status_ps else process.status(),
            'create_time': process.create_time(),
            # NOTE(review): 'running_time' here is accumulated CPU time, not
            # wall-clock uptime (get_process_list uses now - create_time) -- confirm.
            'running_time': process.cpu_times().user + process.cpu_times().system,
            'cmdline': process.cmdline(),
            'open_files': [self.object_to_dict(i) for i in process.open_files()],
            'ps': self.get_process_ps(process.name())['data'],
            'net_info': self.get_process_network(process.pid),
            "exe": ' '.join(process.cmdline()),
        }
        # Persist this round's snapshot as the baseline for the next call.
        cache.set(self.old_path, self.new_info, 600)
        return public.returnResult(code=1, msg='success', status=True, data=info)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程信息失败' + str(e), status=False)
|
||||
|
||||
# 通过name获取进程信息
|
||||
def get_process_info_by_name(self, name: str) -> dict:
    """
    通过name获取进程信息 (details for every process with this exact name).
    :param name: process name to match.
    :return: public.returnResult; data is a list of detail dicts.
    """
    try:
        matched_pids = [p.pid for p in psutil.process_iter(['pid', 'name', 'cmdline']) if p.name() == name]
        infos = []
        for matched in matched_pids:
            try:
                detail = self.get_process_info_by_pid(matched)
                if detail['status']:
                    infos.append(detail['data'])
            except:
                pass
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程信息失败' + str(e), status=False)
|
||||
|
||||
# 通过启动命令获取进程信息
|
||||
def get_process_info_by_exec(self, cli: str) -> dict:
    """
    通过启动命令获取进程信息 (details for processes whose command line
    contains the given substring).
    :param cli: substring to look for in the joined command line.
    :return: public.returnResult; data is a list of detail dicts.
    """
    try:
        matched_pids = [p.pid for p in psutil.process_iter(['pid', 'cmdline']) if cli in ' '.join(p.cmdline())]
        infos = []
        for matched in matched_pids:
            try:
                detail = self.get_process_info_by_pid(matched)
                if detail['status']:
                    infos.append(detail['data'])
            except:
                pass
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程信息失败' + str(e), status=False)
|
||||
|
||||
def get_process_info_by_port(self, port: int) -> dict:
    """
    通过端口获取进程信息 (details for processes bound to a local port).
    :param port: local port number to match.
    :return: public.returnResult; data is a list of detail dicts.
    """
    try:
        infos = []
        for proc in psutil.process_iter(['pid', 'connections']):
            for conn in proc.connections():
                try:
                    if conn.laddr.port != int(port):
                        continue
                    detail = self.get_process_info_by_pid(proc.pid)
                    if detail['status']:
                        infos.append(detail['data'])
                except:
                    pass
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程信息失败' + str(e), status=False)
|
||||
|
||||
def get_process_info_by_ip(self, ip: str) -> dict:
    """
    通过远程ip获取进程信息 (details for processes with a connection to
    the given remote address).
    :param ip: remote IP to match.
    :return: public.returnResult; data is a list of detail dicts.
    """
    infos = []
    try:
        for proc in psutil.process_iter(['pid', 'connections']):
            for conn in proc.connections():
                try:
                    if conn.raddr and conn.raddr.ip == ip:
                        detail = self.get_process_info_by_pid(proc.pid)['data']
                        if detail:
                            infos.append(detail)
                except:
                    pass
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except:
        return public.returnResult(code=0, msg='获取进程信息失败', status=False, data=infos)
|
||||
|
||||
def get_process_info_by_openfile(self, file_path: str) -> dict:
    """
    通过打开文件获取进程信息 (details for processes holding a file open).
    :param file_path: absolute path to match against open files.
    :return: public.returnResult; data is a list of detail dicts.
    """
    infos = []
    try:
        for proc in psutil.process_iter(['pid', 'open_files']):
            try:
                for handle in proc.open_files():
                    if handle.path == file_path:
                        detail = self.get_process_info_by_pid(proc.pid)['data']
                        if detail:
                            infos.append(detail)
            except:
                pass
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except:
        return public.returnResult(code=0, msg='获取进程信息失败', status=False, data=infos)
|
||||
|
||||
# ------------------------------ 获取进程信息 end ------------------------------
|
||||
|
||||
# ------------------------------ 获取进程ps start ------------------------------
|
||||
|
||||
def get_process_ps(self, name: str) -> dict:
    """
    获取进程ps (human-readable description for a known process name).
    :param name: process name to look up.
    :return: public.returnResult; data is the description string.
    """
    description = self.ps.get(name, '未知进程')
    return public.returnResult(code=1, msg='success', status=True, data=description)
|
||||
|
||||
# ------------------------------ 获取进程ps end ------------------------------
|
||||
|
||||
# ------------------------------ 获取进程树 start ------------------------------
|
||||
|
||||
def get_process_tree(self, pid: int) -> dict:
    """
    获取进程树 (details for a process and all of its descendants).
    :param pid: root process id.
    :return: public.returnResult; data is a list of detail dicts,
        root first, then descendants.
    """
    try:
        pid = int(pid)
        root = psutil.Process(pid)
        descendants = root.children(recursive=True)
        infos = []
        # Root first, then every descendant pid in psutil's order.
        for target_pid in [pid] + [child.pid for child in descendants]:
            detail = self.get_process_info_by_pid(target_pid)
            if detail['status']:
                infos.append(detail['data'])
        return public.returnResult(code=1, msg='success', status=True, data=infos)
    except Exception as e:
        return public.returnResult(code=0, msg='获取进程树失败' + str(e), status=False)
|
||||
|
||||
# ------------------------------ 获取进程树 end ------------------------------
|
||||
|
||||
# ------------------------------ 结束进程 start ------------------------------
|
||||
# 结束进程pid
|
||||
def kill_pid(self, pid: int) -> dict:
    """
    通过关闭进程 (force-kill one process by pid).
    :param pid: process id to kill with SIGKILL.
    :return: public.returnResult dict.
    """
    try:
        os.kill(pid, 9)
        return public.returnResult(code=1, msg='success', status=True, data='')
    except Exception as e:
        # Best-effort fallback via the shell when os.kill is refused;
        # deliberately still reports status=True.
        public.ExecShell('kill -9 ' + str(pid))
        return public.returnResult(code=1, msg='结束进程失败' + str(e), status=True)
|
||||
|
||||
# 结束进程名
|
||||
def kill_name(self, name: str) -> dict:
    """
    通过name关闭进程 (kill every process matching the name via killall).
    :param name: process name passed to killall.
    :return: public.returnResult dict.
    """
    try:
        # NOTE(review): `name` is interpolated into a shell command
        # unescaped -- confirm callers only pass trusted process names.
        os.system('killall ' + name)
        return public.returnResult(code=1, msg='success', status=True, data='')
    except Exception as e:
        return public.returnResult(code=0, msg='结束进程失败' + str(e), status=False)
|
||||
|
||||
# 结束进程树
|
||||
def kill_tree(self, pid: int) -> dict:
    """
    通过关闭进程树 (kill a process and all of its descendants).
    :param pid: root process id.
    :return: public.returnResult dict (always success; shell fallback on error).
    """
    try:
        parent = psutil.Process(pid)
        # Bug fix: enumerate descendants BEFORE killing the parent. The old
        # code called p.kill() first; once the parent dies its children are
        # reparented to init, so children(recursive=True) found nothing and
        # the tree was never actually killed.
        descendants = parent.children(recursive=True)
        for child in descendants:
            try:
                child.kill()
            except psutil.NoSuchProcess:
                # Child exited on its own between enumeration and kill.
                pass
        parent.kill()
        return public.returnResult(code=1, msg='success', status=True, data='')
    except Exception as e:
        # Best-effort fallback; deliberately still reports success.
        public.ExecShell('kill -9 ' + str(pid))
        return public.returnResult(code=1, msg='success', status=True)
|
||||
|
||||
# 结束所有进程 pid,进程名,进程树
|
||||
def kill_proc_all(self, pid: int) -> dict:
    """
    结束所有进程 (kill by pid, by name, and the whole tree for one process).
    :param pid: process id whose name/tree will also be targeted.
    :return: public.returnResult dict.
    """
    try:
        target = psutil.Process(pid)
        proc_name = target.name()
        # Chain all three strategies so stubborn processes are caught.
        self.kill_pid(pid)
        self.kill_name(proc_name)
        self.kill_tree(pid)
        return public.returnResult(code=1, msg='success', status=True, data='')
    except Exception as e:
        return public.returnResult(code=0, msg='结束进程失败' + str(e), status=False)
|
||||
|
||||
def kill_port(self, port: str) -> dict:
    """
    结束端口进程 (kill every process bound to the given local port).
    :param port: local port number (string or int).
    :return: public.returnResult dict.
    """
    for proc in psutil.process_iter(['pid', 'name', 'connections']):
        try:
            for conn in proc.connections():
                if conn.laddr.port == int(port):
                    self.kill_pid(proc.pid)
        except:
            # Process gone or connections unreadable -- skip it.
            pass
    return public.returnResult(code=1, msg='success', status=True, data='')
|
||||
|
||||
# ------------------------------ 结束进程 end ------------------------------
|
||||
|
||||
# ------------------------------ 拉黑ip start ------------------------------
|
||||
def add_black_ip(self, ips: list) -> dict:
    """
    拉黑ip (blacklist the given IPs in whichever firewall is active).
    :param ips: list of IP strings to block.
    :return: public.returnResult / public.returnMsg dict.
    """
    try:
        if not public.get_firewall_status() == 1: return public.returnMsg(False, '当前系统防火墙未开启')
        # Refuse to blacklist local addresses.
        # NOTE(review): the list blocks 127.0.0.0 but not 127.0.0.1 -- confirm intent.
        if [ip for ip in ips if ip in ['0.0.0.0', '127.0.0.0', "::1"]]: return {'status': False, 'msg': '禁止拉黑本机ip', 'data': ''}
        for ip in ips:
            if not public.check_ip(ip): continue
            # Bug fix: dedupe on the address column. The old query used
            # "port=?" with an IP value, so it never matched and duplicate
            # rows/rules were added on every call (del_black_ip deletes by
            # address, confirming the column name).
            if public.M('firewall_ip').where("address=?", (ip,)).count() > 0: continue
            if self.__isUfw:
                if public.is_ipv6(ip):
                    public.ExecShell('ufw deny from ' + ip + ' to any')
                else:
                    public.ExecShell('ufw insert 1 deny from ' + ip + ' to any')
            else:
                if self.__isFirewalld:
                    if public.is_ipv6(ip):
                        public.ExecShell('firewall-cmd --permanent --add-rich-rule=\'rule family=ipv6 source address="' + ip + '" drop\'')
                    else:
                        public.ExecShell('firewall-cmd --permanent --add-rich-rule=\'rule family=ipv4 source address="' + ip + '" drop\'')
                else:
                    # Plain iptables has no ipv6 handling here.
                    if public.is_ipv6(ip): return public.returnMsg(False, 'FIREWALL_IP_FORMAT')
                    public.ExecShell('iptables -I INPUT -s ' + ip + ' -j DROP')
            addtime = time.strftime('%Y-%m-%d %X', time.localtime())
            public.M('firewall_ip').add('address,addtime,types', (ip, addtime, 'drop'))
        self.firewall_reload()
        return public.returnResult(code=1, msg='success', status=True, data='')
    except Exception as e:
        return public.returnResult(code=0, msg='拉黑失败' + str(e), status=False)
|
||||
|
||||
# ------------------------------ 拉黑ip end ------------------------------
|
||||
|
||||
# ------------------------------ 取消拉黑ip start ------------------------------
|
||||
# 删除IP屏蔽
|
||||
def del_black_ip(self, ips: list) -> dict:
    """Remove previously blacklisted IPs and reload the firewall.

    :param ips: list of IP strings to unblock.
    :return: public.returnResult / public.returnMsg dict.
    """
    try:
        if not public.get_firewall_status() == 1: return public.returnMsg(False, '当前系统防火墙未开启')
        for ip in ips:
            if not public.check_ip(ip): continue
            if self.__isUfw:
                public.ExecShell('ufw delete deny from ' + ip + ' to any')
            else:
                if self.__isFirewalld:
                    # Remove the exact rich rule add_black_ip installed.
                    if public.is_ipv6(ip):
                        public.ExecShell('firewall-cmd --permanent --remove-rich-rule=\'rule family=ipv6 source address="' + ip + '" drop\'')
                    else:
                        public.ExecShell('firewall-cmd --permanent --remove-rich-rule=\'rule family=ipv4 source address="' + ip + '" drop\'')
                else:
                    public.ExecShell('iptables -D INPUT -s ' + ip + ' -j DROP')

            # Log the unblock and drop the bookkeeping row.
            public.WriteLog("TYPE_FIREWALL", 'FIREWALL_ACCEPT_IP', (ip,))
            public.M('firewall_ip').where("address=?", (ip,)).delete()

        self.firewall_reload()
        return public.returnResult(code=1, msg='success', status=True)
    except Exception as e:
        return public.returnResult(code=0, msg='删除失败' + str(e), status=False)
|
||||
|
||||
# 重载防火墙配置
|
||||
def firewall_reload(self):
    """Reload whichever firewall frontend is active (ufw / firewalld /
    plain iptables), running the command in the background.

    :return: public.returnResult dict.
    """
    try:
        if self.__isUfw:
            public.ExecShell('/usr/sbin/ufw reload &')
            return public.returnResult(code=1, msg='success', status=True)
        if self.__isFirewalld:
            public.ExecShell('firewall-cmd --reload &')
        else:
            # Plain iptables: persist current rules, then restart the service.
            public.ExecShell('/etc/init.d/iptables save &')
            public.ExecShell('/etc/init.d/iptables restart &')
        return public.returnResult(code=1, msg='success', status=True)
    except:
        return public.returnResult(code=0, msg='重载防火墙失败', status=False)
|
||||
|
||||
# ------------------------------ 取消拉黑ip end ------------------------------
|
||||
|
||||
# ------------------------------ 获取进程cpu start ------------------------------
|
||||
|
||||
# 获取cpu使用率
|
||||
def get_cpu_percent(self, pid, cpu_times, cpu_time):
    """Compute a process's CPU usage percentage between two sampling rounds.

    :param pid: process id, used as the snapshot dict key (callers pass it
        as str or int -- the key type must stay consistent between rounds).
    :param cpu_times: psutil cpu_times tuple for the process.
    :param cpu_time: total system cpu time for the current round.
    :return: rounded percentage; 0 on the first sample for a pid or on error.
    """
    self.get_old()
    percent = 0.00
    process_cpu_time = self.get_process_cpu_time(cpu_times)
    if not self.old_info: self.old_info = {}
    if not pid in self.old_info:
        # First time this pid is seen: record a baseline, report 0.
        self.new_info[pid] = {}
        self.new_info[pid]['cpu_time'] = process_cpu_time
        return percent
    try:
        # delta(process cpu time) / delta(total cpu time) since last round.
        percent = round(
            100.00 * (process_cpu_time - self.old_info[pid]['cpu_time']) / (cpu_time - self.old_info['cpu_time']), 2)
    except:
        # Missing/zero baseline (e.g. division by zero) -> report 0.
        return 0
    self.new_info[pid] = {}
    self.new_info[pid]['cpu_time'] = process_cpu_time
    if percent > 0: return percent
    return 0.00
|
||||
|
||||
def get_process_cpu_time(self, cpu_times):
    """Sum all components of a psutil cpu_times tuple into one float."""
    return sum(cpu_times, 0.00)
|
||||
|
||||
def get_old(self):
    """Load the previous sampling snapshot from cache into old_info.

    :return: True when a snapshot is available, False otherwise.
    """
    if self.old_info:
        return True
    snapshot = cache.get(self.old_path)
    if not snapshot:
        return False
    self.old_info = snapshot
    return True
|
||||
|
||||
def get_cpu_time(self):
    """Total system cpu time (user+system+nice+idle), memoised per instance."""
    if self.__cpu_time:
        return self.__cpu_time
    times = psutil.cpu_times()
    self.__cpu_time = times.user + times.system + times.nice + times.idle
    return self.__cpu_time
|
||||
|
||||
# ------------------------------ 获取进程cpu end ------------------------------
|
||||
|
||||
# ------------------------------ 获取进程net start ------------------------------
|
||||
|
||||
def get_process_network(self, pid):
    '''
    @name 获取进程网络流量 (per-process network rates since last sample)
    @author hwliang<2021-09-13>
    @param pid<int> 进程ID
    @return tuple (up_bytes, up_packets, down_bytes, down_packets) --
        rates per second when a previous sample exists, raw counters otherwise.
    '''
    if not self.__process_net_list:
        self.get_process_net_list()
    # Bug fix: also guard last_net_process itself -- cache entries can
    # expire independently, and `pid in None` raises TypeError.
    if not self.last_net_process_time or not self.last_net_process:
        return 0, 0, 0, 0
    if pid not in self.__process_net_list:
        return 0, 0, 0, 0

    current = self.__process_net_list[pid]
    if pid not in self.last_net_process:
        # No baseline for this pid yet: return the raw counters.
        return current['up'], current['up_package'], current['down'], current['down_package']

    previous = self.last_net_process[pid]
    # Bug fix: compute the elapsed interval ONCE. The old code called
    # time.time() separately for each of the four divisions, giving each
    # rate a slightly different denominator.
    elapsed = time.time() - self.last_net_process_time
    if elapsed <= 0:
        return 0, 0, 0, 0
    up = int((current['up'] - previous['up']) / elapsed)
    down = int((current['down'] - previous['down']) / elapsed)
    up_package = int((current['up_package'] - previous['up_package']) / elapsed)
    down_package = int((current['down_package'] - previous['down_package']) / elapsed)
    return up, up_package, down, down_package
|
||||
|
||||
def get_process_net_list(self):
    """Parse /dev/shm/bt_net_process into self.__process_net_list and cache it.

    Each input line is "pid down up down_package up_package"; malformed or
    empty lines are skipped. Also loads the previous sample from cache into
    self.last_net_process / self.last_net_process_time.
    """
    stat_file = '/dev/shm/bt_net_process'
    if not os.path.exists(stat_file):
        return
    self.last_net_process = cache.get('net_process')
    self.last_net_process_time = cache.get('last_net_process')
    body = public.readFile(stat_file)
    if not body:
        return
    for line in body.split('\n'):
        if not line:
            continue
        fields = line.split()
        if len(fields) < 5:
            continue
        entry = {
            'pid': int(fields[0]),
            'down': int(fields[1]),
            'up': int(fields[2]),
            'down_package': int(fields[3]),
            'up_package': int(fields[4]),
        }
        self.__process_net_list[entry['pid']] = entry
    cache.set('net_process', self.__process_net_list, 600)
    cache.set('last_net_process', time.time(), 600)
||||
def get_network(self):
    """Return system-wide network throughput since the previous snapshot.

    :return: dict with totals, per-second byte rates ('up'/'down') and
             per-second packet rates, or None on any failure.
    """
    try:
        self.get_net_old()
        networkIo = psutil.net_io_counters()[:4]
        self.new_net_info['upTotal'] = networkIo[0]
        self.new_net_info['downTotal'] = networkIo[1]
        self.new_net_info['upPackets'] = networkIo[2]
        self.new_net_info['downPackets'] = networkIo[3]
        self.new_net_info['time'] = time.time()

        if not self.old_net_info:
            self.old_net_info = {}
        if 'upTotal' not in self.old_net_info:
            # No baseline yet: sample again shortly after to build one.
            time.sleep(0.1)
            networkIo = psutil.net_io_counters()[:4]
            self.old_net_info['upTotal'] = networkIo[0]
            self.old_net_info['downTotal'] = networkIo[1]
            self.old_net_info['upPackets'] = networkIo[2]
            self.old_net_info['downPackets'] = networkIo[3]
            self.old_net_info['time'] = time.time()

        s = self.new_net_info['time'] - self.old_net_info['time']
        # BUG FIX: on the first call the baseline above is sampled AFTER
        # new_net_info, so the interval came out non-positive and every
        # rate below was negative/zero garbage. Clamp to the sleep window.
        if s <= 0:
            s = 0.1

        networkInfo = {}
        networkInfo['upTotal'] = networkIo[0]
        networkInfo['downTotal'] = networkIo[1]
        networkInfo['up'] = round((float(networkIo[0]) - self.old_net_info['upTotal']) / s, 2)
        networkInfo['down'] = round((float(networkIo[1]) - self.old_net_info['downTotal']) / s, 2)
        networkInfo['downPackets'] = networkIo[3]
        networkInfo['upPackets'] = networkIo[2]
        networkInfo['downPackets_s'] = int((networkIo[3] - self.old_net_info['downPackets']) / s)
        networkInfo['upPackets_s'] = int((networkIo[2] - self.old_net_info['upPackets']) / s)
        cache.set(self.old_net_path, self.new_net_info, 600)
        return networkInfo
    except Exception:
        # Best-effort monitor endpoint: callers treat None as "no data".
        return None
|
||||
|
||||
def get_net_old(self):
    """Load the previous network snapshot from cache into self.old_net_info.

    :return: True when a snapshot is available, False otherwise.
    """
    if self.old_net_info:
        return True
    data = cache.get(self.old_net_path)
    # FIX: the original repeated `if not data: return False` twice in a row;
    # the duplicate statement was dead code and has been removed.
    if not data:
        return False
    self.old_net_info = data
    return True
|
||||
|
||||
# ------------------------------ 获取进程net end ------------------------------
|
||||
|
||||
# ------------------------------ 获取启动项列表 start ------------------------------
|
||||
def get_run_list(self, search: str = ''):
    """Collect the system's startup items (boot scripts and init services).

    Scans well-known boot files plus /etc/init.d and the current runlevel's
    rc<N>.d directory, skipping entries with plain 644 permissions (not
    executable, so not actually started at boot).

    :param search: optional substring filter applied to name/srcfile/ps
    :return: public.returnResult with {'run_list': [...], 'run_level': str}
    """
    runFile = ['/etc/rc.local', '/etc/profile', '/etc/inittab', '/etc/rc.sysinit']
    runList = []
    for rfile in runFile:
        if not os.path.exists(rfile): continue
        # Skip files that contain nothing but comments/blank lines.
        bodyR = self.clear_comments(public.readFile(rfile))
        if not bodyR: continue
        stat = os.stat(rfile)
        # Last three octal digits of the mode, e.g. '755'.
        accept = str(oct(stat.st_mode)[-3:])
        if accept == '644': continue
        tmp = {}
        tmp['name'] = rfile
        tmp['srcfile'] = rfile
        tmp['size'] = os.path.getsize(rfile)
        tmp['access'] = accept
        tmp['ps'] = self.get_run_ps(rfile)
        runList.append(tmp)
    runlevel = self.get_my_runlevel()
    runPath = ['/etc/init.d', '/etc/rc' + runlevel + '.d']
    tmpAll = []  # names already collected, to dedupe init.d vs rcN.d links
    islevel = False
    for rpath in runPath:
        if not os.path.exists(rpath): continue
        if runPath[1] == rpath: islevel = True
        for f in os.listdir(rpath):
            # NOTE(review): only 'S*' entries are taken in BOTH directories;
            # that matches rcN.d start links but also filters /etc/init.d to
            # names starting with a capital S — confirm this is intentional.
            if f[:1] != 'S': continue
            filename = rpath + '/' + f
            if not os.path.exists(filename): continue
            if os.path.isdir(filename): continue
            if os.path.islink(filename):
                # rcN.d entries are symlinks like ../init.d/foo; resolve them.
                flink = os.readlink(filename).replace('../', '/etc/')
                if not os.path.exists(flink): continue
                filename = flink
            tmp = {}
            tmp['name'] = f
            # rcN.d names are SNNname; strip the SNN priority prefix.
            if islevel: tmp['name'] = f[3:]
            if tmp['name'] in tmpAll: continue
            stat = os.stat(filename)
            accept = str(oct(stat.st_mode)[-3:])
            if accept == '644': continue
            tmp['srcfile'] = filename
            tmp['access'] = accept
            tmp['size'] = os.path.getsize(filename)
            tmp['ps'] = self.get_run_ps(tmp['name'])
            runList.append(tmp)
            tmpAll.append(tmp['name'])
    data = {}
    data['run_list'] = runList
    data['run_level'] = runlevel
    if search:
        data['run_list'] = self.search_run(data['run_list'], search)
    return public.returnResult(code=1, msg='success', status=True, data=data)
|
||||
|
||||
# 启动项查询
|
||||
def search_run(self, data, search):
    """Filter startup items whose name, source file or description contains *search*.

    Falls back to returning *data* unchanged when any entry is malformed
    (missing keys or non-string fields).
    """
    try:
        return [item for item in data
                if search in item['name'] or search in item['srcfile'] or search in item['ps']]
    except:
        return data
|
||||
|
||||
# 清除注释
|
||||
def clear_comments(self, body):
    """Drop comment lines ('#'-prefixed) and blank lines from *body*.

    The surviving lines are concatenated WITHOUT separators — callers only
    use the result as an emptiness check, not as reconstructed text.
    """
    kept = [line for line in body.split("\n")
            if not line.startswith('#') and line.strip() != '']
    return "".join(kept)
|
||||
|
||||
# 服务注释
|
||||
def get_run_ps(self, name):
    """Return a human-readable description for a startup item.

    Unknown names are echoed back unchanged.
    """
    descriptions = {
        'netconsole': '网络控制台日志',
        'network': '网络服务',
        'jexec': 'JAVA',
        'tomcat8': 'Apache Tomcat',
        'tomcat7': 'Apache Tomcat',
        'mariadb': 'Mariadb',
        'tomcat9': 'Apache Tomcat',
        'tomcat': 'Apache Tomcat',
        'memcached': 'Memcached缓存器',
        'php-fpm-53': 'PHP-5.3',
        'php-fpm-52': 'PHP-5.2',
        'php-fpm-54': 'PHP-5.4',
        'php-fpm-55': 'PHP-5.5',
        'php-fpm-56': 'PHP-5.6',
        'php-fpm-70': 'PHP-7.0',
        'php-fpm-71': 'PHP-7.1',
        'php-fpm-72': 'PHP-7.2',
        'rsync_inotify': 'rsync实时同步',
        'pure-ftpd': 'FTP服务',
        'mongodb': 'MongoDB',
        'nginx': 'Web服务器(Nginx)',
        'httpd': 'Web服务器(Apache)',
        'bt': 'YakPanel',
        'mysqld': 'MySQL数据库',
        'rsynd': 'rsync主服务',
        'php-fpm': 'PHP服务',
        'systemd': '系统核心服务',
        '/etc/rc.local': '用户自定义启动脚本',
        '/etc/profile': '全局用户环境变量',
        '/etc/inittab': '用于自定义系统运行级别',
        '/etc/rc.sysinit': '系统初始化时调用的脚本',
        'sshd': 'SSH服务',
        'crond': '计划任务服务',
        'udev-post': '设备管理系统',
        'auditd': '审核守护进程',
        'rsyslog': 'rsyslog服务',
        'sendmail': '邮件发送服务',
        'blk-availability': 'lvm2相关',
        'local': '用户自定义启动脚本',
        'netfs': '网络文件系统',
        'lvm2-monitor': 'lvm2相关',
        'xensystem': 'xen云平台相关',
        'iptables': 'iptables防火墙',
        'ip6tables': 'iptables防火墙 for IPv6',
        'firewalld': 'firewall防火墙',
    }
    return descriptions.get(name, name)
|
||||
|
||||
# 获取当前运行级别
|
||||
def get_my_runlevel(self):
    """Return the current runlevel as a string.

    Tries the `runlevel` command first; on systemd-only hosts falls back to
    mapping `systemctl get-default` targets, defaulting to '3'.
    """
    try:
        return public.ExecShell('runlevel')[0].split()[1]
    except:
        target_levels = {"multi-user.target": '3', 'rescue.target': '1', 'poweroff.target': '0',
                         'graphical.target': '5', "reboot.target": '6'}
        default_target = public.ExecShell('systemctl get-default')[0].strip()
        return target_levels.get(default_target, '3')
|
||||
|
||||
# ------------------------------ 获取启动项列表 end ------------------------------
|
||||
|
||||
|
||||
class Process(object):
    """Facade over RealProcess: validates request objects, then delegates.

    Every handler receives a request object ``get``; when the required
    attribute is missing, a uniform error payload is returned instead of
    delegating to the underlying RealProcess instance.
    """

    process = RealProcess()

    @staticmethod
    def _bad_request():
        # Uniform payload returned for a missing request attribute.
        return {'status': False, 'msg': '参数错误', 'data': {}}

    def get_process_list(self):
        """Return the full process list."""
        return self.process.get_process_list()

    def get_process_info_by_pid(self, get: Any) -> dict:
        """Process details looked up by get.pid."""
        if not hasattr(get, 'pid'):
            return self._bad_request()
        return self.process.get_process_info_by_pid(get.pid)

    def get_process_info_by_name(self, get: Any) -> dict:
        """Process details looked up by get.name."""
        if not hasattr(get, 'name'):
            return self._bad_request()
        return self.process.get_process_info_by_name(get.name)

    def get_process_info_by_exec(self, get: Any) -> dict:
        """Process details looked up by executable get.cli."""
        if not hasattr(get, 'cli'):
            return self._bad_request()
        return self.process.get_process_info_by_exec(get.cli)

    def get_process_info_by_port(self, get: Any) -> dict:
        """Process details looked up by listening port get.port."""
        if not hasattr(get, 'port'):
            return self._bad_request()
        return self.process.get_process_info_by_port(get.port)

    def get_process_info_by_ip(self, get: Any) -> dict:
        """Process details looked up by peer address get.ip."""
        if not hasattr(get, 'ip'):
            return self._bad_request()
        return self.process.get_process_info_by_ip(get.ip)

    def get_process_info_by_openfile(self, get: Any) -> dict:
        """Process details looked up by an open file path get.file_path."""
        if not hasattr(get, 'file_path'):
            return self._bad_request()
        return self.process.get_process_info_by_openfile(get.file_path)

    def get_process_tree(self, get: Any) -> dict:
        """Process tree rooted at get.pid."""
        if not hasattr(get, 'pid'):
            return self._bad_request()
        return self.process.get_process_tree(get.pid)

    def kill_pid(self, get: Any) -> dict:
        """Terminate the process get.pid."""
        if not hasattr(get, 'pid'):
            return self._bad_request()
        return self.process.kill_pid(get.pid)

    def kill_name(self, get: Any) -> dict:
        """Terminate all processes named get.name."""
        if not hasattr(get, 'name'):
            return self._bad_request()
        return self.process.kill_name(get.name)

    def kill_tree(self, get: Any) -> dict:
        """Terminate the process tree rooted at get.pid."""
        if not hasattr(get, 'pid'):
            return self._bad_request()
        return self.process.kill_tree(get.pid)

    def kill_proc_all(self, get: Any) -> dict:
        """Terminate by pid, name and tree at once for get.pid."""
        if not hasattr(get, 'pid'):
            return self._bad_request()
        return self.process.kill_proc_all(get.pid)

    def kill_port(self, get: Any) -> dict:
        """Terminate the process bound to get.port."""
        if not hasattr(get, 'port'):
            return self._bad_request()
        return self.process.kill_port(get.port)

    def add_black_ip(self, get: Any) -> dict:
        """Blacklist the addresses listed in get.ips."""
        if not hasattr(get, 'ips'):
            return self._bad_request()
        return self.process.add_black_ip(get.ips)

    def del_black_ip(self, get: Any) -> dict:
        """Remove the addresses in get.ips from the blacklist."""
        if not hasattr(get, 'ips'):
            return self._bad_request()
        return self.process.del_black_ip(get.ips)

    def get_process_ps(self, get: Any) -> dict:
        """Description text for the process named get.name."""
        if not hasattr(get, 'name'):
            return self._bad_request()
        return self.process.get_process_ps(get.name)

    def get_run_list(self, get: Any) -> dict:
        """Startup-item list, optionally filtered by get.search."""
        if not hasattr(get, 'search'):
            return self._bad_request()
        return self.process.get_run_list(get.search)
|
||||
|
||||
|
||||
# Ad-hoc manual test entry point: exercises RealProcess directly when the
# module is run as a script. The commented-out calls below are kept as a
# scratchpad covering the other RealProcess APIs.
if __name__ == "__main__":
    p = RealProcess()
    print(p.get_computers_use())
    # print('========================')
    # print(p.get_process_list()['data'])
    # print('========================')
    # print(p.get_process_info_by_pid(1)['data'])
    # print('========================')
    # print(p.get_process_info_by_name('systemd'))
    # print('========================')
    # res = p.get_process_tree(1)
    # print(res['data'][1])
    # print('========================')
    # print(p.kill_pid(1))
    # print('========================')
    # print(p.kill_name('systemd'))
    # print('========================')
    # print(p.kill_tree(1))
    # print('========================')
    # print(p.kill_proc_all(1))
    # print('========================')
    # print(p.get_process_info_by_exec('nginx'))
    # print('========================')
    # print(p.get_process_info_by_port(8888))
    # print('========================')
    # print(p.get_process_ps('nginx'))
    # print('========================')
    # print(p.get_process_info_by_ip('192.168.168.66'))
    # print('========================')
    # print(p.add_black_ip(['1.1.1.1']))
    # print('========================')
    # print(p.del_black_ip(['1.1.1.1']))
    # print('========================')
||||
133
mod/base/process/process_ps.json
Normal file
133
mod/base/process/process_ps.json
Normal file
@@ -0,0 +1,133 @@
|
||||
{
|
||||
"bioset": "用于处理块设备上的I/O请求的进程",
|
||||
"BT-MonitorAgent": "YakPanel 监控相关进程 (BT-MonitorAgent)",
|
||||
"rngd": "一个熵守护的进程",
|
||||
"master": "用于管理和协调子进程的活动的进程",
|
||||
"irqbalance": "一个IRQ平衡守护的进程",
|
||||
"rhsmcertd": "主要用于管理Red Hat订阅证书,并维护系统的订阅状态的进程",
|
||||
"auditd": "是Linux审计系统中用户空间的一个组的进程",
|
||||
"chronyd": "调整内核中运行的系统时钟和时钟服务器同步的进程",
|
||||
"qmgr": "PBS管理器的进程",
|
||||
"oneavd": "YakPanel 微步木马检测 (oneavd)",
|
||||
"postgres": "PostgreSQL数据库的进程",
|
||||
"grep": "一个命令行工具的进程",
|
||||
"lsof": "一个命令行工具的进程",
|
||||
"containerd-shim-runc-v2": "Docker容器的一个组件的进程",
|
||||
"pickup": "用于监听Unix域套接字的进程",
|
||||
"cleanup": "邮件传输代理(MTA)中的一个组件的进程",
|
||||
"trivial-rewrite": "邮件传输代理(MTA)中的一个组件的进程",
|
||||
"containerd": "docker依赖服务的进程",
|
||||
"redis-server": "redis服务的进程",
|
||||
"rcu_sched": "linux系统rcu机制服务的进程",
|
||||
"jsvc": "YakPanel Tomcat 相关 (jsvc)",
|
||||
"oneav": "YakPanel 微步木马检测 (oneav)",
|
||||
"mysqld": "MySQL服务的进程",
|
||||
"php-fpm": "PHP的子进程",
|
||||
"php-cgi": "PHP-CGI的进程",
|
||||
"nginx": "Nginx服务的进程",
|
||||
"httpd": "Apache服务的进程",
|
||||
"sshd": "SSH服务的进程",
|
||||
"pure-ftpd": "FTP服务的进程",
|
||||
"sftp-server": "SFTP服务的进程",
|
||||
"mysqld_safe": "MySQL服务的进程",
|
||||
"firewalld": "防火墙服务的进程",
|
||||
"Yak-Panel": "YakPanel 主面板进程",
|
||||
"Yak-Task": "YakPanel 后台任务进程",
|
||||
"NetworkManager": "网络管理服务的进程",
|
||||
"svlogd": "日志守护的进程",
|
||||
"memcached": "Memcached缓存器的进程",
|
||||
"gunicorn": "YakPanel 相关进程",
|
||||
"YakPanel": "YakPanel 面板进程",
|
||||
"baota_coll": "YakPanel 云控主控端进程 (进程名: baota_coll)",
|
||||
"baota_client": "YakPanel 云控被控端进程 (进程名: baota_client)",
|
||||
"node": "Node.js程序的进程",
|
||||
"supervisord": "Supervisor的进程",
|
||||
"rsyslogd": "rsyslog日志服务的进程",
|
||||
"crond": "计划任务服务的进程",
|
||||
"cron": "计划任务服务的进程",
|
||||
"rsync": "rsync文件同步的进程",
|
||||
"ntpd": "网络时间同步服务的进程",
|
||||
"rpc.mountd": "NFS网络文件系统挂载服务的进程",
|
||||
"sendmail": "sendmail邮件服务的进程",
|
||||
"postfix": "postfix邮件服务的进程",
|
||||
"npm": "Node.js NPM管理器的进程",
|
||||
"PM2": "Node.js PM2进程管理器的进程",
|
||||
"htop": "htop进程监控软件的进程",
|
||||
"btpython": "YakPanel 独立 Python 环境 (btpython)",
|
||||
"btappmanagerd": "应用管理器插件进程 (btappmanagerd)",
|
||||
"dockerd": "Docker容器管理器的进程",
|
||||
"docker-proxy": "Docker容器管理器的进程",
|
||||
"docker-registry": "Docker容器管理器的进程",
|
||||
"docker-distribution": "Docker容器管理器的进程",
|
||||
"docker-network": "Docker容器管理器的进程",
|
||||
"docker-volume": "Docker容器管理器的进程",
|
||||
"docker-swarm": "Docker容器管理器的进程",
|
||||
"docker-systemd": "Docker容器管理器的进程",
|
||||
"docker-containerd": "Docker容器管理器的进程",
|
||||
"docker-containerd-shim": "Docker容器管理器的进程",
|
||||
"docker-runc": "Docker容器管理器的进程",
|
||||
"docker-init": "Docker容器管理器的进程",
|
||||
"docker-init-systemd": "Docker容器管理器的进程",
|
||||
"docker-init-upstart": "Docker容器管理器的进程",
|
||||
"docker-init-sysvinit": "Docker容器管理器的进程",
|
||||
"docker-init-openrc": "Docker容器管理器的进程",
|
||||
"docker-init-runit": "Docker容器管理器的进程",
|
||||
"docker-init-systemd-resolved": "Docker容器管理器的进程",
|
||||
"rpcbind": "NFS网络文件系统服务的进程",
|
||||
"dbus-daemon": "D-Bus消息总线守护的进程",
|
||||
"systemd-logind": "登录管理器的进程",
|
||||
"systemd-journald": "Systemd日志管理服务的进程",
|
||||
"systemd-udevd": "系统设备管理服务的进程",
|
||||
"systemd-timedated": "系统时间日期服务的进程",
|
||||
"systemd-timesyncd": "系统时间同步服务的进程",
|
||||
"systemd-resolved": "系统DNS解析服务的进程",
|
||||
"systemd-hostnamed": "系统主机名服务的进程",
|
||||
"systemd-networkd": "系统网络管理服务的进程",
|
||||
"systemd-resolvconf": "系统DNS解析服务的进程",
|
||||
"systemd-local-resolv": "系统DNS解析服务的进程",
|
||||
"systemd-sysctl": "系统系统参数服务的进程",
|
||||
"systemd-modules-load": "系统模块加载服务的进程",
|
||||
"systemd-modules-restore": "系统模块恢复服务的进程",
|
||||
"agetty": "TTY登陆验证程序的进程",
|
||||
"sendmail-mta": "MTA邮件传送代理的进程",
|
||||
"(sd-pam)": "可插入认证模块的进程",
|
||||
"polkitd": "授权管理服务的进程",
|
||||
"mongod": "MongoDB数据库服务的进程",
|
||||
"mongodb": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-monitor": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-backup": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-restore": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-agent": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-analytics": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-tools": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-backup-agent": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-backup-tools": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-restore-agent": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-restore-tools": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-analytics-agent": "MongoDB数据库服务的进程",
|
||||
"mongodb-mms-analytics-tools": "MongoDB数据库服务的进程",
|
||||
"dhclient": "DHCP协议客户端的进程",
|
||||
"dhcpcd": "DHCP协议客户端的进程",
|
||||
"dhcpd": "DHCP服务器的进程",
|
||||
"isc-dhcp-server": "DHCP服务器的进程",
|
||||
"isc-dhcp-server6": "DHCP服务器的进程",
|
||||
"dhcp6c": "DHCP服务器的进程",
||||
"avahi-daemon": "Zeroconf守护的进程",
|
||||
"login": "登录的进程",
|
||||
"systemd": "系统管理服务的进程",
|
||||
"systemd-sysv": "系统管理服务的进程",
|
||||
"systemd-journal-gateway": "系统管理服务的进程",
|
||||
"systemd-journal-remote": "系统管理服务的进程",
|
||||
"systemd-journal-upload": "系统管理服务的进程",
|
||||
"systemd-networkd": "系统网络管理服务的进程",
|
||||
"rpc.idmapd": "NFS网络文件系统相关服务的进程",
|
||||
"cupsd": "打印服务的进程",
|
||||
"cups-browsed": "打印服务的进程",
|
||||
"sh": "shell的进程",
|
||||
"php": "PHP CLI模式的进程",
|
||||
"blkmapd": "NFS映射服务的进程",
|
||||
"lsyncd": "文件同步服务的进程",
|
||||
"sleep": "延迟的进程"
|
||||
}
|
||||
755
mod/base/process/server.py
Normal file
755
mod/base/process/server.py
Normal file
@@ -0,0 +1,755 @@
|
||||
# coding: utf-8
|
||||
# -------------------------------------------------------------------
|
||||
# yakpanel
|
||||
# -------------------------------------------------------------------
|
||||
# Copyright (c) 2015-2099 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# -------------------------------------------------------------------
|
||||
# Author: sww <sww@yakpanel.com>
|
||||
# -------------------------------------------------------------------
|
||||
import json
|
||||
import os
|
||||
# ------------------------------
|
||||
# 服务模型
|
||||
# ------------------------------
|
||||
import sys, re
|
||||
import time
|
||||
import traceback
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
os.chdir("/www/server/panel")
|
||||
import public
|
||||
import glob
|
||||
|
||||
# 关闭系统加固执行函数后打开
|
||||
def syssafe_admin(func):
    """Decorator: temporarily disable the `syssafe` hardening plugin around *func*.

    If system hardening is running it is stopped before the wrapped call and
    restored afterwards (even when the call raises — the exception is
    re-raised after restoration). When the init script fails, the plugin's
    own `set_open` API is used as a fallback.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        syssafe_flag = 0
        # Detect whether system hardening is currently running; stop it if so.
        if os.path.exists('/www/server/panel/plugin/syssafe/init.sh'):
            res = public.ExecShell('/www/server/panel/plugin/syssafe/init.sh status')
            if 'already running' in res[0]:
                try:
                    syssafe_flag = 1
                    public.ExecShell('/www/server/panel/plugin/syssafe/init.sh stop')
                    res = public.ExecShell('/www/server/panel/plugin/syssafe/init.sh status')
                    if 'already running' in res[0]:
                        # init script failed to stop it — fall back to plugin API.
                        import PluginLoader
                        PluginLoader.plugin_run('syssafe', 'set_open', public.to_dict_obj({'status': 0}))
                    print('已关闭系统加固!')
                except:
                    pass
        err = None
        result = None
        try:
            result = func(*args, **kwargs)
        except Exception as ex:
            err = ex
        try:
            if syssafe_flag:
                # BUG FIX: the original ran 'stop' here a second time, so
                # hardening was never shell-restored; it must be 'start'.
                public.ExecShell('/www/server/panel/plugin/syssafe/init.sh start')
                res = public.ExecShell('/www/server/panel/plugin/syssafe/init.sh status')
                if 'already running' not in res[0]:
                    # init script did not bring it back — fall back to plugin API.
                    import PluginLoader
                    PluginLoader.plugin_run('syssafe', 'set_open', public.to_dict_obj({'status': 1}))
                print('已开启系统加固!')
        except:
            pass
        if err is not None:
            raise err
        return result

    return wrapper
|
||||
|
||||
|
||||
|
||||
class RealServer:
|
||||
|
||||
server_list = ['mysqld_safe', 'redis-server', 'mongod', 'postgres', 'nginx', 'memcached', 'httpd', 'pure-ftpd', 'jsvc', 'dockerd']
|
||||
system_info = None
|
||||
# --------------------- 常用服务管理 start----------------------
|
||||
def server_admin(self, server_name: str, option: str) -> dict:
    """
    Manage a common service.
    :param server_name: one of 'mysqld_safe', 'redis-server', 'mongod', 'postgres', 'nginx', 'memcached', 'httpd', 'pure-ftpd', 'jsvc', 'dockerd'
    :param option: start, stop or restart
    :return: public.returnResult dict
    """
    handlers = {
        "mongod": self.__mongod_admin,
        "redis-server": self.__redis_admin,
        "memcached": self.__memcached_admin,
        "dockerd": self.__docker_admin,
        "jsvc": self.__tomcat_admin,
        "pure-ftpd": self.__ftp_admin,
        "httpd": self.__apache_admin,
        "mysqld_safe": self.__mysqld_admin,
        "nginx": self.__nginx_admin,
        "postgres": self.__pgsql_admin,
    }
    from system import system
    self.syst = system()
    if server_name not in self.server_list:
        return public.returnResult(code=0, msg='operation failure Parameter does not exist', status=False)
    res = handlers[server_name](option)
    return public.returnResult(code=1, msg=res['msg'], status=res['status'])
|
||||
|
||||
def __mongod_admin(self, option: str) -> dict:
    """Start/stop/restart MongoDB via its init script; restart is stop+start."""
    try:
        commands = {"start": "/etc/init.d/mongodb start",
                    "stop": "/etc/init.d/mongodb stop", }
        if option == 'restart':
            public.ExecShell(commands.get('stop'))
            public.ExecShell(commands.get('start'))
        else:
            public.ExecShell(commands.get(option))
        return public.returnMsg(True, 'operate successfully!')
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __redis_admin(self, option: str) -> dict:
    """Delegate redis service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'redis'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __memcached_admin(self, option: str) -> dict:
    """Delegate memcached service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'memcached'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __docker_admin(self, option: str) -> dict:
    """Manage the docker socket unit through systemctl."""
    try:
        public.ExecShell('systemctl {} docker.socket'.format(option))
        return public.returnMsg(True, "operate successfully")
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __tomcat_admin(self, option: str) -> dict:
    """Delegate tomcat management to the system module.

    The delegate's result is intentionally discarded (matching the original
    behavior): success is reported unless an exception escapes.
    """
    try:
        request = public.dict_obj()
        request.name = 'tomcat'
        request.type = option
        self.syst.serverAdmin(request)
        return public.returnMsg(True, 'operate successfully!')
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __ftp_admin(self, option: str) -> dict:
    """Delegate pure-ftpd service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'pure-ftpd'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __apache_admin(self, option: str) -> dict:
    """Delegate apache service management to the system module.

    Sleeps one second after the operation so the service state settles
    before the result is reported.
    """
    try:
        request = public.dict_obj()
        request.name = 'apache'
        request.type = option
        res = self.syst.serverAdmin(request)
        # FIX: removed the redundant local `import time` — the module
        # already imports time at the top of the file.
        time.sleep(1)
        return res
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __mysqld_admin(self, option: str) -> dict:
    """Delegate mysqld service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'mysqld'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __nginx_admin(self, option: str) -> dict:
    """Delegate nginx service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'nginx'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
def __pgsql_admin(self, option: str) -> dict:
    """Delegate PostgreSQL service management to the system module."""
    try:
        request = public.dict_obj()
        request.name = 'pgsql'
        request.type = option
        return self.syst.serverAdmin(request)
    except:
        return public.returnMsg(False, 'operation failure')
|
||||
|
||||
# ----------------------常用服务管理 end----------------------
|
||||
|
||||
# ----------------------常用服务状态 start----------------------
|
||||
def server_status(self, server_name: str) -> dict:
    """
    Service status
    :param server_name: 'mysqld_safe', 'redis-server', 'mongod', 'postgres', 'nginx', 'memcached', 'httpd', 'pure-ftpd', 'jsvc', 'dockerd'
    :return: public.returnResult dict with install/run state in `data`
    """
    try:
        if server_name not in self.server_list:
            return public.returnResult(code=0, msg='operation failure Parameter does not exist', status=False)
        res = self.__get_status(server_name)
        return public.returnResult(code=1, msg=res['msg'], data=res['data'], status=res['status'])
    except Exception:
        return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
def __is_installation(self, name: str):
    """Check whether the service behind *name* is installed.

    Returns a bool for most services. For 'postgres' it returns a dict
    {'pgsql': bool, 'Manager': bool} — kept for backward compatibility,
    but note a non-empty dict is always truthy to callers doing a plain
    truth test. (FIX: the original's `-> bool` annotation was wrong, the
    local dict shadowed the builtin `map`, and `glob` was re-imported
    locally although the module already imports it.)
    """
    service_map = {
        "mysqld_safe": "mysqld",
        "redis-server": "redis",
        "mongod": "mongodb",
        "postgres": "pgsql",
        "nginx": "nginx",
        "memcached": "memcached",
        "httpd": "httpd",
        "pure-ftpd": "pure-ftpd",
        "jsvc": "tomcat",
        "dockerd": "docker",
        "php": "php",
        "tamper_proof": "tamper_proof",
        "bt_security": "bt_security",
        "syssafe": "syssafe",
    }
    dir_path = '/etc/init.d/'
    files = [os.path.basename(f) for f in glob.glob(dir_path + "*")]
    if name == "dockerd":
        # docker ships no init script — probe the CLI instead.
        res = public.ExecShell('docker -v')[0]
        return 'version' in res
    if name == "postgres":
        res = public.ExecShell('/www/server/pgsql/bin/psql --version')[0]
        pgsql = 'PostgreSQL' in res
        Manager = os.path.exists('/www/server/panel/plugin/pgsql_manager')
        return {'pgsql': pgsql, 'Manager': Manager}
    if name == "php":
        # Any php-* init script counts; otherwise fall through to the
        # exact-name lookup below.
        if any(name in f.lower() for f in files):
            return True
    if name == "tamper_proof":
        return os.path.exists('/www/server/panel/plugin/tamper_proof')
    if name == "bt_security":
        return os.path.exists('/www/server/panel/plugin/bt_security')
    if name == "syssafe":
        return os.path.exists('/www/server/panel/plugin/syssafe')
    return service_map[name] in files
|
||||
|
||||
def __get_status(self, server_name: str) -> dict:
    """Return install/run status for a service.

    :return: {'status': bool, 'msg': str,
              'data': {'install': bool, 'status': bool}}
    """
    try:
        if not self.__is_installation(server_name):
            return {'status': True, 'msg': '', 'data': {'install': False, 'status': False}}
        res = public.ExecShell('ps -ef|grep {}|grep -v grep'.format(server_name))[0]
        # BUG FIX: the original tested `'mongod' in res` for EVERY service
        # (copy-paste from the mongod case), so e.g. nginx was reported
        # running only if a mongod process happened to exist. Any surviving
        # ps output means the requested service has at least one live process.
        if res.strip():
            return {'status': True, 'msg': '', 'data': {'install': True, 'status': True}}
        return {'status': True, 'msg': '', 'data': {'install': True, 'status': False}}
    except:
        return {'status': False, 'msg': '获取失败!', 'data': {'install': False, 'status': False}}
|
||||
|
||||
# ----------------------常用服务状态 end----------------------
|
||||
|
||||
# ---------------------- 通用服务管理 start----------------------
|
||||
def universal_server_admin(self, server_name: str, option: str) -> dict:
    """
    Generic service management: the service must ship an init script of the
    same name under /etc/init.d/ supporting start/stop/restart/status.
    :param server_name: service name
    :param option: start, stop or restart
    :return: public.returnResult dict
    """
    try:
        # FIX: removed the unused `get = public.dict_obj()` request object
        # the original built but never read.
        dir_path = '/etc/init.d/'
        files = [os.path.basename(f) for f in glob.glob(dir_path + "*")]
        if server_name not in files:
            return public.returnResult(code=0, msg='operation failure The service was not found in /etc/init.d/', status=False)
        res = public.ExecShell('/etc/init.d/{} {}'.format(server_name, option))
        output = res[0].lower()
        if 'is running' in output or 'is active' in output or 'already running' in output:
            return public.returnResult(code=1, msg='operate successfully!', status=True)
        if 'is stopped' in output or 'is not running' in output:
            return public.returnResult(code=1, msg='operate successfully!', status=True)
        # BUG FIX: the original fell through and implicitly returned None
        # when the script output matched none of the known patterns.
        return public.returnResult(code=0, msg='operation failure', status=False)
    except:
        return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 通用服务管理 end----------------------
|
||||
|
||||
# ---------------------- 通用服务状态 start----------------------
|
||||
def universal_server_status(self, server_name: str) -> dict:
    """
    Generic service status: /etc/init.d/<name> status must print
    'is running' / 'is active' / 'already running' when the service is up.
    :param server_name: service name
    :return: public.returnResult dict; `data` is True when running
    """
    try:
        # FIX: removed the unused `get = public.dict_obj()` request object
        # the original built but never read.
        dir_path = '/etc/init.d/'
        files = [os.path.basename(f) for f in glob.glob(dir_path + "*")]
        if server_name not in files:
            return public.returnResult(code=0, msg='服务不存在!', status=False)
        res = public.ExecShell('/etc/init.d/{} status'.format(server_name))
        output = res[0].lower()
        if 'is running' in output or 'is active' in output or 'already running' in output:
            return public.returnResult(code=1, msg='运行中', data=True)
        return public.returnResult(code=1, msg='未运行', data=False)
    except:
        return public.returnResult(code=0, msg='获取失败!', data=False)
|
||||
|
||||
# ---------------------- 通用服务状态 end----------------------
|
||||
|
||||
# ---------------------- 添加开机自启 启动脚本 start----------------------
|
||||
|
||||
# 添加开机自启
|
||||
@syssafe_admin
|
||||
def add_boot(self, server_name: str, pid_file: str, start_exec: str, stop_exec: str, default_start: str = '2 3 4 5') -> dict:
|
||||
"""
|
||||
添加开机自启
|
||||
:param server_name: 服务名称
|
||||
:param pid_file: 启动pid记录文件
|
||||
:param start_exec: 启动命令
|
||||
:param stop_exec: 停止命令
|
||||
:param default_start: 默认启动级别
|
||||
:return:
|
||||
"""
|
||||
|
||||
content = """
|
||||
#! /bin/sh
|
||||
# chkconfig: 2345 55 25
|
||||
|
||||
### BEGIN INIT INFO
|
||||
# Provides: {name}
|
||||
# Required-Start: $all
|
||||
# Required-Stop: $all
|
||||
# Default-Start: {default_start}
|
||||
# Default-Stop: 0 1 6
|
||||
# Short-Description: {name}
|
||||
# Description: {name}
|
||||
### END INIT INFO
|
||||
|
||||
# Author: licess
|
||||
# website: http://www.yakpanel.com
|
||||
|
||||
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
|
||||
|
||||
case "$1" in
|
||||
start)
|
||||
echo -n "Starting {name}... "
|
||||
if [ -f {pid_file} ];then
|
||||
mPID=$(cat {pid_file})
|
||||
isStart=`ps ax | awk '{{ print $1 }}' | grep -e "^${{mPID}}$"`
|
||||
if [ "$isStart" != "" ];then
|
||||
echo "{name} (pid $mPID) already running."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
nohup {start_exec} &
|
||||
if [ $? != 0 ]; then
|
||||
echo " failed"
|
||||
exit 1
|
||||
else
|
||||
pid=`ps -ef|grep "{start_exec}" |grep -v grep|awk '{{print $2}}'`
|
||||
echo $! > {pid_file}
|
||||
echo " done"
|
||||
fi
|
||||
;;
|
||||
stop)
|
||||
echo -n "Stopping {name}... "
|
||||
if [ -f {pid_file} ];then
|
||||
mPID=$(cat {pid_file})
|
||||
isStart = `ps ax | awk '{{ print $1 }}' | grep -e "^${{mPID}}$"`
|
||||
if [ "$isStart" = "" ];then
|
||||
echo "{name} is stopped"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "{name} is stopped"
|
||||
exit 1
|
||||
fi
|
||||
nohup {stop_exec} &
|
||||
if [ $? != 0 ]; then
|
||||
echo " failed. Use force-quit"
|
||||
exit 1
|
||||
else
|
||||
echo " done"
|
||||
fi
|
||||
;;
|
||||
status)
|
||||
if [ -f {pid_file} ];then
|
||||
mPID=`cat {pid_file}`
|
||||
isStart=`ps ax | awk '{{ print $1 }}' | grep -e "^${{mPID}}$"`
|
||||
if [ "$isStart" != '' ];then
|
||||
echo "{name} (pid `pidof {name}`) is running."
|
||||
exit 1
|
||||
else
|
||||
echo "{name} is stopped"
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
echo "{name} is stopped"
|
||||
exit 0
|
||||
fi
|
||||
;;
|
||||
restart)
|
||||
$0 stop
|
||||
sleep 1
|
||||
$0 start
|
||||
;;
|
||||
esac
|
||||
""".format(name=server_name, pid_file=pid_file, start_exec=start_exec, stop_exec=stop_exec, default_start=default_start)
|
||||
|
||||
|
||||
if os.path.exists(os.path.join('/etc/init.d/', server_name)):
|
||||
return public.returnResult(code=1, msg='operation failure Service already exists', status=False)
|
||||
try:
|
||||
public.writeFile(os.path.join('/etc/init.d/', server_name), content)
|
||||
os.chmod(os.path.join('/etc/init.d/', server_name), 0o777)
|
||||
if os.path.exists('/usr/sbin/update-rc.d'):
|
||||
public.ExecShell('update-rc.d -f {} defaults'.format(server_name))
|
||||
else:
|
||||
public.ExecShell('systemctl enable {}'.format(server_name))
|
||||
return public.returnResult(code=1, msg='operate successfully!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 添加开机自启 启动脚本 end----------------------
|
||||
|
||||
# ---------------------- 删除开机自启 启动脚本 start----------------------
|
||||
def del_boot(self, server_name: str) -> dict:
|
||||
"""
|
||||
删除启动脚本
|
||||
:param server_name: 服务名称
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
if os.path.exists(os.path.join('/etc/init.d/', server_name)):
|
||||
if os.path.exists('/usr/sbin/update-rc.d'):
|
||||
public.ExecShell('update-rc.d -f {} remove'.format(server_name))
|
||||
else:
|
||||
public.ExecShell('systemctl disable {}'.format(server_name))
|
||||
os.remove(os.path.join('/etc/init.d/', server_name))
|
||||
return public.returnResult(code=1, msg='operate successfully!', status=True)
|
||||
return public.returnResult(code=0, msg='operation failure Service does not exist', status=False)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 删除开机自启 启动脚本 end----------------------
|
||||
|
||||
# ---------------------- 创建服务守护进程 start----------------------
|
||||
|
||||
    @syssafe_admin
    def create_daemon(self, server_name: str,
                      pid_file: str,
                      start_exec: str,
                      workingdirectory: str,
                      stop_exec: str = None,
                      user: str = 'root',
                      is_power_on: int = 1,
                      logs_file: str = '',
                      environments: str = '',
                      is_fork=None,
                      restart_type='always',
                      fork_time_out=20) -> dict:
        """
        Create a systemd unit that daemonises a command.

        Writes /usr/lib/systemd/system/<server_name>.service, wires the
        service's stdout into a log file via an rsyslog rule, then starts it.
        When is_fork is None the method probes whether Type=forking works and
        falls back to Type=simple on timeout.

        :param server_name: service name
        :param pid_file: pid file recorded in the unit's PIDFile=
        :param start_exec: start command (ExecStart)
        :param workingdirectory: WorkingDirectory for the unit
        :param stop_exec: stop command; defaults to pkill -9 on start_exec
        :param user: unix user the unit runs as
        :param is_power_on: truthy -> systemctl enable at boot
        :param logs_file: log destination; defaults to /www/wwwlogs/project_<name>.log
        :param environments: raw extra lines inserted into [Service]
        :param is_fork: True/None -> Type=forking, False -> Type=simple;
                        None additionally triggers the fork probe below
        :param restart_type: systemd Restart= policy
        :param fork_time_out: seconds to wait in the fork probe
        :return: result dict from public.returnResult
        """
        # NOTE(review): stale comment from the original said "check whether the
        # system hardening plugin exists" -- nothing here does that.
        try:
            if not stop_exec:
                stop_exec = '/usr/bin/pkill -9 "{}"'.format(start_exec)
            content = '''
[Unit]
Description={server_name}
After=network.target

[Service]
{environments}
ExecStart={start_exec}
ExecStop={stop_exec}
WorkingDirectory={workingdirectory}
Restart={restart_type}
SyslogIdentifier={server_name}
User={user}
Type=simple
PrivateTmp=false
PIDFile={pid_file}

[Install]
WantedBy=multi-user.target
'''.format(
                start_exec=start_exec,
                workingdirectory=workingdirectory,
                user=user,
                pid_file=pid_file,
                server_name=server_name,
                environments=environments,
                restart_type=restart_type,
                stop_exec=stop_exec
            )
            exe_shell = ''
            # Default (is_fork None) and explicit True both start as forking.
            if is_fork or is_fork is None:
                content = content.replace('Type=simple', 'Type=forking')
            if not os.path.exists('/usr/lib/systemd/system/'):
                os.makedirs('/usr/lib/systemd/system/')
            public.writeFile('/usr/lib/systemd/system/{}.service'.format(server_name), content)
            if is_power_on:
                exe_shell += 'systemctl enable {}\n'.format(server_name) + " && "
            exe_shell += 'systemctl daemon-reload' + " && "

            if not logs_file:
                logs_file = '/www/wwwlogs/project_{}.log'.format(server_name)

            # Route this unit's syslog output to logs_file: insert (or replace)
            # a per-program rule in /etc/rsyslog.conf.
            rsyslog_conf = public.readFile('/etc/rsyslog.conf')
            add_conf = "if $programname == '{}' then {}\n".format(server_name, logs_file)
            if rsyslog_conf:
                idx = rsyslog_conf.find("if $programname == '{}' then".format(server_name))
                if idx == -1:
                    rsyslog_conf += "\n" + add_conf
                else:
                    # Replace the existing rule line in place.
                    line_idx = rsyslog_conf.find('\n', idx)
                    rsyslog_conf = rsyslog_conf[:idx] + add_conf + rsyslog_conf[line_idx:]
                public.writeFile('/etc/rsyslog.conf', rsyslog_conf)

            exe_shell += 'systemctl restart rsyslog' + " && "
            if not os.path.exists(logs_file):
                exe_shell += 'touch {}'.format(logs_file) + ' && '
            # NOTE(review): when is_fork is None this leaves exe_shell ending
            # in " && " before it is executed below -- likely a latent bug.
            exe_shell += 'chown -R {user}:{user} {logs_file}'.format(user=user, logs_file=logs_file) + ' && '
            if is_fork is not None:
                # Caller chose the type explicitly: restart and return.
                exe_shell += 'systemctl restart {}'.format(server_name)
                public.ExecShell(exe_shell)
                return public.returnResult(code=1, msg='operate successfully!', status=True)
            public.ExecShell(exe_shell)
            import subprocess,psutil
            # Probe whether Type=forking works: restart via subprocess and wait
            # for "systemctl restart" to exit (or go zombie). If it does not
            # finish within fork_time_out seconds, assume the command does not
            # fork and rewrite the unit as Type=simple.
            try:
                start_time = time.time()
                process = subprocess.Popen(["systemctl", "restart", server_name], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                while True:
                    try:
                        p = psutil.Process(process.pid)
                        # NOTE(review): debug print left in production code.
                        print(p.status())
                        # Check the child's process state.
                        if p.status() == psutil.STATUS_ZOMBIE:
                            break
                    except:
                        pass
                    if process.poll() is not None:
                        break
                    if time.time() - start_time > fork_time_out:
                        raise
                    time.sleep(0.1)
            except:
                # Fallback: non-forking service -- rewrite unit and restart.
                content = content.replace('Type=forking','Type=simple')
                public.writeFile('/usr/lib/systemd/system/{}.service'.format(server_name), content)
                public.ExecShell('systemctl daemon-reload && systemctl restart {}'.format(server_name))
            return public.returnResult(code=1, msg='operate successfully!', status=True)
        except:
            return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 创建服务守护进程 end----------------------
|
||||
|
||||
# ---------------------- 删除服务守护进程 start----------------------
|
||||
@syssafe_admin
|
||||
def del_daemon(self, server_name: str) -> dict:
|
||||
"""
|
||||
删除服务守护进程
|
||||
:param server_name: 服务名称
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
public.ExecShell('systemctl stop {}'.format(server_name))
|
||||
if os.path.exists('/usr/lib/systemd/system/{}.service'.format(server_name)):
|
||||
public.ExecShell('systemctl disable {}'.format(server_name))
|
||||
os.remove('/usr/lib/systemd/system/{}.service'.format(server_name))
|
||||
public.ExecShell('systemctl daemon-reload')
|
||||
public.ExecShell(r'sed -i "/if \$programname == {}/d" /etc/rsyslog.conf'.format(server_name))
|
||||
public.ExecShell('systemctl restart rsyslog')
|
||||
return public.returnResult(code=1, msg='operate successfully!', status=True)
|
||||
return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 删除服务守护进程 end----------------------
|
||||
|
||||
# ---------------------- 服务守护进程状态 start----------------------
|
||||
    # ---------------------- daemon status start----------------------
    def daemon_status(self, server_name: str) -> dict:
        """
        Report whether a systemd-managed service appears to be running.

        :param server_name: service name
        :return: returnResult dict; status True when the unit shows as active
        """
        try:
            if not os.path.exists('/usr/lib/systemd/system/{}.service'.format(server_name)):
                return public.returnResult(code=0, msg='服务不存在!', status=False)
            # self.system_info caches the systemctl listing across calls.
            # NOTE(review): assumes the attribute was initialised elsewhere
            # (e.g. in __init__, outside this view) -- if it was never set, the
            # AttributeError is swallowed by the bare except below. TODO confirm.
            if not self.system_info:
                self.system_info = public.ExecShell("systemctl |grep service|grep -E 'active|deactivating'|awk '{print $1}'")[0]
            if server_name+'.service' in self.system_info:
                return public.returnResult(code=1, msg='运行中', status=True)
            return public.returnResult(code=1, msg='未运行', status=False)
        except:
            return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 服务守护进程状态 end----------------------
|
||||
|
||||
def daemon_admin(self, server_name: str,action:str) -> dict:
|
||||
"""
|
||||
|
||||
:param server_name: 项目名称
|
||||
:param action: 操作
|
||||
"""
|
||||
public.ExecShell('systemctl {} {}'.format(action,server_name))
|
||||
return public.returnResult(code=1, msg='操作指令已执行', status=True)
|
||||
# if action == 'start' or action == 'restart':
|
||||
# num = 0
|
||||
# for i in range(5):
|
||||
# time.sleep(0.01)
|
||||
# if self.daemon_status(server_name)['status']:
|
||||
# num += 1
|
||||
# if num > 3:
|
||||
# return public.returnResult(code=1, msg='启动成功!', status=True)
|
||||
# return public.returnResult(code=0, msg='启动失败!', status=False)
|
||||
# return public.returnResult(code=1, msg='关闭成功!' + res[0] + res[1], status=True)
|
||||
|
||||
def get_daemon_pid(self, server_name: str) -> dict:
|
||||
"""
|
||||
获取守护进程pid
|
||||
:param server_name: 项目名称
|
||||
"""
|
||||
res = public.ExecShell("systemctl show --property=MainPID {}".format(server_name))[0] # type: str
|
||||
if not res.startswith('MainPID='):
|
||||
return public.returnResult(code=0, msg='获取失败!', status=False)
|
||||
|
||||
try:
|
||||
pid = int(res.split("=", 1)[1])
|
||||
return public.returnResult(code=1, msg='获取成功!', data=pid, status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='获取失败', status=False)
|
||||
|
||||
|
||||
# ---------------------- 延时定时启动 start----------------------
|
||||
def add_task(self, shell: str, time: int) -> dict:
|
||||
"""
|
||||
服务定时启动
|
||||
:param server_name: 服务名称
|
||||
:param start_exec: 启动命令
|
||||
:param minute: 定时启动时间
|
||||
:return:
|
||||
"""
|
||||
data = {
|
||||
'type': 3,
|
||||
'time': time,
|
||||
'name': shell,
|
||||
'title': '',
|
||||
'fun': '',
|
||||
'args': ''
|
||||
}
|
||||
|
||||
res = public.set_tasks_run(data)
|
||||
if res['status']:
|
||||
return public.returnResult(code=1, msg='operate successfully!', status=True)
|
||||
return public.returnResult(code=0, msg='operation failure', status=False)
|
||||
|
||||
# ---------------------- 服务定时启动 end----------------------
|
||||
|
||||
|
||||
class Server:
    """Request-facing facade over RealServer: each method unpacks the panel's
    request object ``get`` and delegates, converting any exception into a
    generic failure message."""

    server = RealServer()

    def server_admin(self, get):
        """Dispatch a named admin action (get.name) with mode get.type."""
        try:
            if hasattr(self.server, get.name):
                return getattr(self.server, get.name)(get.type)
            return public.returnMsg(False, 'operation failure Parameter does not exist')
        except:
            return public.returnMsg(False, 'operation failure')

    def server_status(self, get):
        """Dispatch a named status query (get.name) with no arguments."""
        try:
            if hasattr(self.server, get.name):
                return getattr(self.server, get.name)()
            return public.returnMsg(False, 'operation failure Parameter does not exist')
        except:
            return public.returnMsg(False, 'operation failure')

    def universal_server_admin(self, get):
        """Manage an /etc/init.d service: get.name + action get.type."""
        try:
            return self.server.universal_server_admin(get.name, get.type)
        except:
            return public.returnMsg(False, 'operation failure')

    def universal_server_status(self, get):
        """Status of an /etc/init.d service named get.name."""
        try:
            return self.server.universal_server_status(get.name)
        except:
            return public.returnMsg(False, 'operation failure')

    def add_boot(self, get):
        """Install a boot init script for get.name."""
        try:
            return self.server.add_boot(get.name, get.pid_file, get.start_exec, get.stop_exec)
        except:
            return public.returnMsg(False, 'operation failure')

    def del_boot(self, get):
        """Remove the boot init script for get.name."""
        try:
            return self.server.del_boot(get.name)
        except:
            return public.returnMsg(False, 'operation failure')

    def create_daemon(self, get):
        """Create a systemd daemon unit for get.name."""
        try:
            # BUGFIX: RealServer.create_daemon's 4th positional parameter is
            # workingdirectory, not user -- the old call passed get.user there
            # and never supplied a working directory. get.user now goes through
            # the keyword it belongs to.
            # NOTE(review): assumes the request carries get.workingdirectory;
            # if absent, the except below reports failure (as before). TODO confirm.
            return self.server.create_daemon(get.name, get.pid_file, get.start_exec,
                                             get.workingdirectory, user=get.user)
        except:
            return public.returnMsg(False, 'operation failure')

    def del_daemon(self, get):
        """Delete the systemd daemon unit for get.name."""
        try:
            return self.server.del_daemon(get.name)
        except:
            return public.returnMsg(False, 'operation failure')

    def daemon_status(self, get):
        """Running/stopped status of the daemon unit for get.name."""
        try:
            return self.server.daemon_status(get.name)
        except:
            return public.returnMsg(False, 'operation failure')

    def add_task(self, get):
        """Schedule a delayed shell task (get.shell at get.time)."""
        try:
            return self.server.add_task(get.shell, get.time)
        except:
            return public.returnMsg(False, 'operation failure')
|
||||
630
mod/base/process/user.py
Normal file
630
mod/base/process/user.py
Normal file
@@ -0,0 +1,630 @@
|
||||
# coding: utf-8
|
||||
# -------------------------------------------------------------------
|
||||
# yakpanel
|
||||
# -------------------------------------------------------------------
|
||||
# Copyright (c) 2015-2099 yakpanel(http://www.yakpanel.com) All rights reserved.
|
||||
# -------------------------------------------------------------------
|
||||
# Author: sww <sww@yakpanel.com>
|
||||
# -------------------------------------------------------------------
|
||||
import json
|
||||
import os
|
||||
# ------------------------------
|
||||
# 用户模型
|
||||
# ------------------------------
|
||||
import sys
|
||||
import traceback
|
||||
import psutil
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
os.chdir("/www/server/panel")
|
||||
import public
|
||||
from typing import List, Dict, Any, Union
|
||||
|
||||
|
||||
class RealUser:
|
||||
def __init__(self):
|
||||
self.groupList = self.get_group_list()['data']
|
||||
print(self.groupList)
|
||||
|
||||
# --------------------获取用户列表 Start--------------------
|
||||
def get_user_list(self, search: str = '') -> Dict[str, Union[bool, str, List[Dict[str, Any]]]]:
|
||||
"""
|
||||
获取用户列表
|
||||
:param search: 搜索关键词 可搜索 用户名、备注、shell、home、用户组
|
||||
:return: list
|
||||
"""
|
||||
try:
|
||||
tmpList = public.readFile('/etc/passwd').split("\n")
|
||||
userList = []
|
||||
for ul in tmpList:
|
||||
tmp = ul.split(':')
|
||||
if len(tmp) < 6: continue
|
||||
userInfo = {}
|
||||
userInfo['username'] = tmp[0]
|
||||
userInfo['uid'] = tmp[2]
|
||||
userInfo['gid'] = tmp[3]
|
||||
userInfo['group'] = self._get_group_name(tmp[3])
|
||||
userInfo['ps'] = self._get_user_ps(tmp[0], tmp[4])
|
||||
userInfo['home'] = tmp[5]
|
||||
userInfo['login_shell'] = tmp[6]
|
||||
userList.append(userInfo)
|
||||
if search != '':
|
||||
userList = self._search_user(userList, search)
|
||||
return public.returnResult(code=1, data=userList, msg='获取用户列表成功!', status=True)
|
||||
except Exception as e:
|
||||
print(traceback.format_exc())
|
||||
return public.returnResult(code=0, data=[], msg='获取用户列表失败!错误:' + str(e), status=False)
|
||||
|
||||
def _get_user_ps(self, name: str, ps: str) -> str:
|
||||
"""
|
||||
获取用户备注
|
||||
:param name: 用户名
|
||||
:param ps: 备注
|
||||
:return: str
|
||||
"""
|
||||
userPs = {'www': 'YakPanel', 'root': '超级管理员', 'mysql': '用于运行MySQL的用户',
|
||||
'mongo': '用于运行MongoDB的用户',
|
||||
'git': 'git用户', 'mail': 'mail', 'nginx': '第三方nginx用户', 'postfix': 'postfix邮局用户',
|
||||
'lp': '打印服务帐号',
|
||||
'daemon': '控制后台进程的系统帐号', 'nobody': '匿名帐户', 'bin': '管理大部分命令的帐号',
|
||||
'adm': '管理某些管理文件的帐号', 'smtp': 'smtp邮件'}
|
||||
if name in userPs: return userPs[name]
|
||||
if not ps: return name
|
||||
return ps
|
||||
|
||||
def _get_group_name(self, gid: str) -> str:
|
||||
"""
|
||||
获取用户组名称
|
||||
:param gid: 用户组ID
|
||||
:return: str
|
||||
"""
|
||||
for g in self.groupList:
|
||||
if g['gid'] == gid: return g['group']
|
||||
return ''
|
||||
|
||||
def _search_user(self, data: List[Dict[str, Any]], search: str) -> List[Dict[str, Union[str, Any]]]:
|
||||
"""
|
||||
搜索用户
|
||||
:param data: 用户列表
|
||||
:param search: 搜索关键词
|
||||
:return: list
|
||||
"""
|
||||
try:
|
||||
ldata = []
|
||||
for i in data:
|
||||
if search in i['username'] or search in i['ps'] or search in i['login_shell'] or search in i['home'] or search in i['group']:
|
||||
ldata.append(i)
|
||||
return ldata
|
||||
except:
|
||||
return data
|
||||
|
||||
def get_group_list(self):
|
||||
"""
|
||||
获取用户组列表
|
||||
:return:list
|
||||
|
||||
"""
|
||||
tmpList = public.readFile('/etc/group').split("\n")
|
||||
groupList = []
|
||||
for gl in tmpList:
|
||||
tmp = gl.split(':')
|
||||
if len(tmp) < 3: continue
|
||||
groupInfo = {}
|
||||
groupInfo['group'] = tmp[0]
|
||||
groupInfo['gid'] = tmp[2]
|
||||
groupList.append(groupInfo)
|
||||
return public.returnResult(code=1, data=groupList, msg='获取用户组列表成功!', status=True)
|
||||
|
||||
# --------------------获取用户列表 End----------------------
|
||||
|
||||
# --------------------删除用户 Start------------------------
|
||||
    # --------------------remove user Start------------------------
    def remove_user(self, user: str) -> Dict[str, Any]:
        """
        Delete a system user via userdel, force-killing its processes when
        userdel refuses because the account is still in use.

        :param user: username to delete
        :return: returnResult / returnMsg dict
        """
        # Protected system/critical accounts that must never be removed.
        users = ['www', 'root', 'mysql', 'shutdown', 'postfix', 'smmsp', 'sshd', 'systemd-network', 'systemd-bus-proxy',
                 'avahi-autoipd', 'mail', 'sync', 'lp', 'adm', 'bin', 'mailnull', 'ntp', 'daemon', 'sys']
        if user in users: return public.returnResult(code=0, msg='系统用户或关键用户不能删除!', status=False)
        r = public.ExecShell("userdel " + user)
        # userdel refuses while the user owns running processes; its stderr
        # then mentions "process" and ends with the offending pid. Kill that
        # pid (and any same-named processes) best-effort, then retry.
        if r[1].find('process') != -1:
            try:
                pid = r[1].split()[-1]
                p = psutil.Process(int(pid))
                pname = p.name()
                p.kill()
                public.ExecShell("pkill -9 " + pname)
                r = public.ExecShell("userdel " + user)
                # Also remove the home directory after the forced retry.
                public.ExecShell("rm -rf /home/" + user)
            except:
                pass
        # Any remaining "userdel:" diagnostic on stderr means the delete failed.
        if r[1].find('userdel:') != -1: return public.returnMsg(False, r[1])
        return public.returnResult(code=1, msg='删除成功!', status=True)
|
||||
|
||||
# --------------------删除用户 End------------------------
|
||||
|
||||
# --------------------添加用户 Start------------------------
|
||||
def add_user(self, user: str, pwd: str, group: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if not pwd: return public.returnResult(code=0, msg='密码不能为空!', status=False)
|
||||
if not self._check_user(user): return public.returnResult(code=1, msg='用户已存在!', status=True)
|
||||
if not self._check_group(group): self.add_group(group)
|
||||
r = public.ExecShell("useradd -g " + group + " -m " + user + ' -p' + pwd)
|
||||
if r[1].find('useradd:') != -1 and r[1].find('already exists') == 1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
# public.ExecShell("echo \"" + user + ":" + pwd + "\" | chpasswd")
|
||||
return public.returnResult(code=1, msg='Successfully added!', status=True)
|
||||
except:
|
||||
print(traceback.format_exc())
|
||||
return public.returnResult(code=0, msg='添加失败!', status=False)
|
||||
|
||||
def _check_user(self, user: str) -> bool:
|
||||
"""
|
||||
检查用户是否存在
|
||||
:param user: 用户名
|
||||
:return: bool
|
||||
"""
|
||||
tmpList = public.readFile('/etc/passwd').split("\n")
|
||||
for ul in tmpList:
|
||||
tmp = ul.split(':')
|
||||
if len(tmp) < 6: continue
|
||||
if tmp[0] == user: return False
|
||||
return True
|
||||
|
||||
def _check_group(self, group: str) -> bool:
|
||||
"""
|
||||
检查用户组是否存在
|
||||
:param group: 用户组
|
||||
:return: bool
|
||||
"""
|
||||
tmpList = public.readFile('/etc/group').split("\n")
|
||||
for gl in tmpList:
|
||||
tmp = gl.split(':')
|
||||
if len(tmp) < 3: continue
|
||||
if tmp[0] == group: return True
|
||||
return False
|
||||
|
||||
# --------------------添加用户 End------------------------
|
||||
|
||||
# --------------------修改用户密码 Start------------------------
|
||||
def edit_user_pwd(self, user: str, pwd: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if not pwd: return public.returnResult(code=0, msg='密码不能为空!', status=False)
|
||||
if self._check_user(user): return public.returnResult(code=0, msg='用户不存在!', status=False)
|
||||
public.ExecShell("echo \"" + user + ":" + pwd + "\" | chpasswd")
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户密码 End------------------------
|
||||
|
||||
# --------------------修改用户组 Start------------------------
|
||||
def edit_user_group(self, user: str, group: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnMsg(False, '用户名不能为空!')
|
||||
if not group: return public.returnMsg(False, '用户组不能为空!')
|
||||
if self._check_user(user): return public.returnMsg(False, '用户不存在!')
|
||||
if not self._check_group(group): self.add_group(group)
|
||||
r = public.ExecShell("usermod -g " + group + " " + user)
|
||||
if r[1].find('usermod:') != -1: return public.returnMsg(False, r[1])
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户组 End------------------------
|
||||
|
||||
# --------------------修改用户备注 Start------------------------
|
||||
def edit_user_ps(self, user: str, ps: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if self._check_user(user): return public.returnResult(code=0, msg='用户不存在!', status=False)
|
||||
r = public.ExecShell("usermod -c " + ps + " " + user)
|
||||
if r[1].find('usermod:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户备注 End------------------------
|
||||
|
||||
# --------------------修改用户备注 Start------------------------
|
||||
def edit_user_status(self, user: str, status: int):
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if self._check_user(user): return public.returnResult(code=0, msg='用户不存在!', status=False)
|
||||
if int(status) == 1:
|
||||
r = public.ExecShell("usermod -L " + user)
|
||||
else:
|
||||
r = public.ExecShell("usermod -U " + user)
|
||||
if r[1].find('usermod:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户备注 End------------------------
|
||||
|
||||
# --------------------修改用户登录Shell Start------------------------
|
||||
def edit_user_login_shell(self, user: str, login_shell: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if self._check_user(user): return public.returnResult(code=0, msg='用户不存在!', status=False)
|
||||
r = public.ExecShell("usermod -s " + login_shell + " " + user)
|
||||
if r[1].find('usermod:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户登录Shell End------------------------
|
||||
|
||||
# --------------------修改用户家目录 Start------------------------
|
||||
def edit_user_home(self, user: str, home: str) -> Dict[str, Any]:
|
||||
try:
|
||||
if not user: return public.returnResult(code=0, msg='用户名不能为空!', status=False)
|
||||
if self._check_user(user): return public.returnResult(code=0, msg='用户不存在!', status=False)
|
||||
r = public.ExecShell("usermod -d " + home + " " + user)
|
||||
if r[1].find('usermod:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户家目录 End------------------------
|
||||
|
||||
# --------------------获取用户信息 Start------------------------
|
||||
def get_user_info(self, user: str) -> Dict[str, Any]:
|
||||
try:
|
||||
user = user.strip()
|
||||
tmpList = public.readFile('/etc/passwd').split("\n")
|
||||
userInfo = {}
|
||||
for ul in tmpList:
|
||||
tmp = ul.split(':')
|
||||
if len(tmp) < 6: continue
|
||||
if tmp[0] == user:
|
||||
userInfo['username'] = tmp[0]
|
||||
userInfo['uid'] = tmp[2]
|
||||
userInfo['gid'] = tmp[3]
|
||||
userInfo['group'] = self._get_group_name(tmp[3])
|
||||
userInfo['ps'] = self._get_user_ps(tmp[0], tmp[4])
|
||||
userInfo['home'] = tmp[5]
|
||||
userInfo['login_shell'] = tmp[6]
|
||||
break
|
||||
return public.returnResult(code=1, data=userInfo, msg='获取用户信息成功!', status=True)
|
||||
except:
|
||||
print(traceback.format_exc())
|
||||
return public.returnResult(code=0, msg='获取用户信息失败!', status=False)
|
||||
|
||||
# --------------------添加用户组 Start------------------------
|
||||
def add_group(self, group: str) -> Dict[str, Any]:
|
||||
"""
|
||||
添加用户组
|
||||
:param group: 用户组
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
if not group: return public.returnResult(code=0, msg='用户组不能为空!', status=False)
|
||||
if self._check_group(group): return public.returnResult(code=0, msg='用户组已存在!', status=False)
|
||||
r = public.ExecShell("groupadd " + group)
|
||||
if r[1].find('groupadd:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='Successfully added!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='添加失败!', status=False)
|
||||
|
||||
# --------------------添加用户组 End------------------------
|
||||
|
||||
# --------------------删除用户组 Start------------------------
|
||||
def remove_group(self, group: str) -> Dict[str, Any]:
|
||||
"""
|
||||
删除用户组
|
||||
:param group: 用户组
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
if not group: return public.returnResult(code=0, msg='用户组不能为空!', status=False)
|
||||
if not self._check_group(group): return public.returnResult(code=0, msg='用户组不存在!', status=False)
|
||||
r = public.ExecShell("groupdel " + group)
|
||||
if r[1].find('groupdel:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='删除成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='删除失败!', status=False)
|
||||
|
||||
# --------------------删除用户组 End------------------------
|
||||
|
||||
# --------------------修改用户组名称 Start------------------------
|
||||
def edit_group_name(self, group: str, new_group: str) -> Dict[str, Any]:
|
||||
"""
|
||||
修改用户组名称
|
||||
:param group: 用户组
|
||||
:param new_group: 新用户组
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
if not group: return public.returnResult(code=0, msg='用户组不能为空!', status=False)
|
||||
if not new_group: return public.returnResult(code=0, msg='新用户组不能为空!', status=False)
|
||||
if not self._check_group(group): return public.returnResult(code=0, msg='用户组不存在!', status=False)
|
||||
if self._check_group(new_group): return public.returnResult(code=0, msg='新用户组已存在!', status=False)
|
||||
r = public.ExecShell("groupmod -n " + new_group + " " + group)
|
||||
if r[1].find('groupmod:') != -1: return public.returnResult(code=0, msg=r[1], status=False)
|
||||
return public.returnResult(code=1, msg='修改成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------获取用户组列表 End------------------------
|
||||
|
||||
# --------------------获取用户组信息 Start------------------------
|
||||
def get_group_info(self, group) -> Dict[str, Any]:
|
||||
"""
|
||||
获取用户组信息
|
||||
:param group: 用户组
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
group = group.strip()
|
||||
tmpList = public.readFile('/etc/group').split("\n")
|
||||
groupInfo = {}
|
||||
for gl in tmpList:
|
||||
tmp = gl.split(':')
|
||||
if len(tmp) < 3: continue
|
||||
if tmp[0] == group:
|
||||
groupInfo['group'] = tmp[0]
|
||||
groupInfo['gid'] = tmp[2]
|
||||
break
|
||||
return public.returnResult(code=1, data=groupInfo, msg='获取用户组信息成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='获取用户组信息失败!', status=False)
|
||||
|
||||
# --------------------获取用户组信息 End------------------------
|
||||
|
||||
# --------------------获取用户组信息 Start------------------------
|
||||
def get_group_user(self, group: str) -> Dict[str, Any]:
|
||||
"""
|
||||
获取用户组用户
|
||||
:param group: 用户组
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
group = group.strip()
|
||||
tmpList = self.get_user_list()['data']
|
||||
userList = []
|
||||
for ul in tmpList:
|
||||
if ul['group'] == group:
|
||||
userList.append(ul['username'])
|
||||
return public.returnResult(code=1, data=userList, msg='获取用户组用户成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='获取用户组用户失败!', status=False)
|
||||
|
||||
# --------------------获取用户组信息 End------------------------
|
||||
|
||||
# --------------------获取用户组信息 Start------------------------
|
||||
def get_user_group(self, user: str) -> Dict[str, Any]:
|
||||
"""
|
||||
获取用户组用户
|
||||
:param user: 用户
|
||||
:return: dict
|
||||
"""
|
||||
try:
|
||||
user = user.strip()
|
||||
tmpList = self.get_user_list()['data']
|
||||
groupList = []
|
||||
for gl in tmpList:
|
||||
if gl['username'] == user:
|
||||
groupList.append(gl['group'])
|
||||
return public.returnResult(code=1, data=groupList, msg='获取用户组用户成功!', status=True)
|
||||
except:
|
||||
return public.returnResult(code=0, msg='获取用户组用户失败!', status=False)
|
||||
|
||||
# --------------------获取用户组信息 End------------------------
|
||||
|
||||
# --------------------修改用户权限 Start------------------------
|
||||
def edit_user_permission(self, user: str, permission: str) -> Dict[str, Any]:
    """
    Recursively chmod ``/home/<user>`` to the given mode string.

    :param user: username whose home directory is changed
    :param permission: mode accepted by chmod (e.g. "755")
    :return: result dict produced by ``public.returnResult``
    """
    import shlex  # local import: quote shell arguments built from user input
    try:
        if not user:
            return public.returnResult(code=0, msg='用户名不能为空!', status=False)
        # NOTE(review): the sibling edit_group_permission guards with
        # ``if not self._check_group(...)`` while this uses
        # ``if self._check_user(...)`` — one of the two is likely inverted;
        # confirm _check_user's return convention before changing it.
        if self._check_user(user):
            return public.returnResult(code=0, msg='用户不存在!', status=False)
        # Quote both values so crafted input cannot inject extra shell commands.
        r = public.ExecShell("chmod -R " + shlex.quote(permission) + " /home/" + shlex.quote(user))
        if r[1].find('chmod:') != -1:
            return public.returnResult(code=0, msg=r[1], status=False)
        return public.returnResult(code=1, msg='修改成功!', status=True)
    except Exception:
        return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
# --------------------修改用户权限 End------------------------
|
||||
|
||||
# --------------------修改用户组权限 Start------------------------
|
||||
def edit_group_permission(self, group: str, permission: str) -> Dict[str, Any]:
    """
    Recursively chmod ``/home/<group>`` to the given mode string.

    :param group: group name whose home directory is changed
    :param permission: mode accepted by chmod (e.g. "755")
    :return: result dict produced by ``public.returnResult``
    """
    import shlex  # local import: quote shell arguments built from user input
    try:
        if not group:
            return public.returnResult(code=0, msg='用户组不能为空!', status=False)
        if not self._check_group(group):
            return public.returnResult(code=0, msg='用户组不存在!', status=False)
        # Quote both values so crafted input cannot inject extra shell commands.
        r = public.ExecShell("chmod -R " + shlex.quote(permission) + " /home/" + shlex.quote(group))
        if r[1].find('chmod:') != -1:
            return public.returnResult(code=0, msg=r[1], status=False)
        return public.returnResult(code=1, msg='修改成功!', status=True)
    except Exception:
        return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
def edit_user_name(self, user: str, new_user: str) -> Dict[str, Any]:
    """
    Rename a system user via ``usermod -l``.

    :param user: current username
    :param new_user: new username
    :return: result dict produced by ``public.returnResult``
    """
    import shlex  # local import: quote shell arguments built from user input
    try:
        user = user.strip()
        new_user = new_user.strip()
        # Quote both names so crafted input cannot inject extra shell commands.
        r = public.ExecShell("usermod -l " + shlex.quote(new_user) + " " + shlex.quote(user))
        if r[1].find('usermod:') != -1:
            return public.returnResult(code=0, msg=r[1], status=False)
        return public.returnResult(code=1, msg='修改成功!', status=True)
    except Exception:
        return public.returnResult(code=0, msg='修改失败!', status=False)
|
||||
|
||||
|
||||
class User(object):
    """Request-facing wrapper around RealUser.

    Each handler validates that the expected attributes exist on the request
    object ``get`` (user-facing messages stay in Chinese — API contract) and
    then delegates to the matching RealUser method.
    """

    def __init__(self):
        self.real_user = RealUser()

    # Get the user list (optional 'search' attribute filters it)
    def get_user_list(self, get):
        search = ''
        if hasattr(get, 'search'):
            search = get.search
        return self.real_user.get_user_list(search)

    # Delete a user
    def remove_user(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户不存在!')
        user = get.user.strip()
        return self.real_user.remove_user(user)

    # Add a user
    def add_user(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'pwd'):
            return public.returnMsg(False, '密码不能为空!')
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        user = get.user.strip()
        pwd = get.pwd.strip()
        group = get.group.strip()
        return self.real_user.add_user(user, pwd, group)

    # Change a user's password
    def edit_user_pwd(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'pwd'):
            return public.returnMsg(False, '密码不能为空!')
        user = get.user.strip()
        pwd = get.pwd.strip()
        return self.real_user.edit_user(user, pwd)

    # Change the group a user belongs to
    def edit_user_group(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        user = get.user.strip()
        group = get.group.strip()
        return self.real_user.edit_group(user, group)

    # Edit a user's remark.
    # NOTE(review): unlike edit_ps below this passes no 'ps' value —
    # confirm RealUser.edit_user_ps's signature actually takes one argument.
    def edit_user_ps(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        user = get.user.strip()
        return self.real_user.edit_user_ps(user)

    # Add a group
    def add_group(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        group = get.group.strip()
        return self.real_user.add_group(group)

    # Delete a group
    def remove_group(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        group = get.group.strip()
        return self.real_user.remove_group(group)

    # Rename a group
    def edit_group_name(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        if not hasattr(get, 'new_group'):
            return public.returnMsg(False, '新用户组不能为空!')
        group = get.group.strip()
        new_group = get.new_group.strip()
        return self.real_user.edit_group_name(group, new_group)

    # Get the group list
    def get_group_list(self, get):
        return self.real_user.get_group_list()

    # Get info about one group
    def get_group_info(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        group = get.group.strip()
        return self.real_user.get_group_info(group)

    # Get the users belonging to a group
    def get_group_user(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        group = get.group.strip()
        return self.real_user.get_group_user(group)

    # Get the groups a user belongs to
    # (the original comment wrongly duplicated the one above)
    def get_user_group(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户不能为空!')
        user = get.user.strip()
        return self.real_user.get_user_group(user)

    # Edit a user's remark
    def edit_ps(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'ps'):
            return public.returnMsg(False, '备注不能为空!')
        user = get.user.strip()
        ps = get.ps.strip()
        return self.real_user.edit_ps(user, ps)

    # Change a user's login shell
    def edit_user_login_shell(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'login_shell'):
            return public.returnMsg(False, '登录Shell不能为空!')
        user = get.user.strip()
        login_shell = get.login_shell.strip()
        return self.real_user.edit_login_shell(user, login_shell)

    # Change a user's home directory
    def edit_user_home(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'home'):
            return public.returnMsg(False, '家目录不能为空!')
        user = get.user.strip()
        home = get.home.strip()
        return self.real_user.edit_home(user, home)

    # Change permissions on a user's home directory
    def edit_user_permission(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'permission'):
            return public.returnMsg(False, '权限不能为空!')
        user = get.user.strip()
        permission = get.permission.strip()
        return self.real_user.edit_user_permission(user, permission)

    # Change permissions on a group's home directory
    def edit_group_permission(self, get):
        if not hasattr(get, 'group'):
            return public.returnMsg(False, '用户组不能为空!')
        if not hasattr(get, 'permission'):
            return public.returnMsg(False, '权限不能为空!')
        group = get.group.strip()
        permission = get.permission.strip()
        return self.real_user.edit_group_permission(group, permission)

    # Rename a user
    def edit_user_name(self, get):
        if not hasattr(get, 'user'):
            return public.returnMsg(False, '用户名不能为空!')
        if not hasattr(get, 'new_user'):
            return public.returnMsg(False, '新用户名不能为空!')
        user = get.user.strip()
        new_user = get.new_user.strip()
        return self.real_user.edit_user_name(user, new_user)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: list users using an empty request object.
    user = User()
    print(user.get_user_list(public.to_dict_obj({})))
|
||||
86
mod/base/process/user_readme.text
Normal file
86
mod/base/process/user_readme.text
Normal file
@@ -0,0 +1,86 @@
|
||||
User类:
|
||||
返回值默认类型:
|
||||
Dict[str, Any]
|
||||
{
|
||||
'status': bool,
|
||||
'msg': str,
|
||||
'data': Any
|
||||
}
|
||||
def get_user_list(self, search: str = '') -> Dict[str, Union[bool, str, List[Dict[str, Any]]]]:
|
||||
# 获取用户列表
|
||||
# 传参:search(可选参数,搜索关键词)
|
||||
|
||||
def _get_user_ps(self, name: str, ps: str) -> str:
|
||||
# 获取用户备注
|
||||
# 传参:name(用户名),ps(备注)
|
||||
|
||||
def _get_group_name(self, gid: str) -> str:
|
||||
# 获取用户组名称
|
||||
# 传参:gid(用户组ID)
|
||||
|
||||
def _search_user(self, data: List[Dict[str, Any]], search: str) -> List[Dict[str, Union[str, Any]]]:
|
||||
# 搜索用户
|
||||
# 传参:data(用户列表),search(搜索关键词)
|
||||
|
||||
def _get_group_list(self) -> List[Dict[str, str]]:
|
||||
# 获取用户组列表
|
||||
|
||||
def remove_user(self, user: str) -> Dict[str, Any]:
|
||||
# 删除用户
|
||||
# 传参:user(用户名)
|
||||
|
||||
def add_user(self, user: str, pwd: str, group: str) -> Dict[str, Any]:
|
||||
# 添加用户
|
||||
# 传参:user(用户名),pwd(密码),group(用户组)
|
||||
|
||||
def edit_user(self, user: str, pwd: str) -> Dict[str, Any]:
|
||||
# 修改用户密码
|
||||
# 传参:user(用户名),pwd(新密码)
|
||||
|
||||
def edit_group(self, user: str, group: str) -> Dict[str, Any]:
|
||||
# 修改用户组
|
||||
# 传参:user(用户名),group(新用户组)
|
||||
|
||||
def edit_ps(self, user: str, ps: str) -> Dict[str, Any]:
|
||||
# 修改用户备注
|
||||
# 传参:user(用户名),ps(新备注)
|
||||
|
||||
def edit_login_shell(self, user: str, login_shell: str) -> Dict[str, Any]:
|
||||
# 修改用户登录Shell
|
||||
# 传参:user(用户名),login_shell(新Shell)
|
||||
|
||||
def edit_home(self, user: str, home: str) -> Dict[str, Any]:
|
||||
# 修改用户家目录
|
||||
# 传参:user(用户名),home(新家目录)
|
||||
|
||||
def get_user_info(self, user: str) -> Dict[str, Any]:
|
||||
# 获取用户信息
|
||||
# 传参:user(用户名)
|
||||
|
||||
def add_group(self, group: str) -> Dict[str, Any]:
|
||||
# 添加用户组
|
||||
# 传参:group(用户组)
|
||||
|
||||
def remove_group(self, group: str) -> Dict[str, Any]:
|
||||
# 删除用户组
|
||||
# 传参:group(用户组)
|
||||
|
||||
def edit_group_name(self, group: str, new_group: str) -> Dict[str, Any]:
|
||||
# 修改用户组名称
|
||||
# 传参:group(用户组),new_group(新用户组)
|
||||
|
||||
def get_group_list(self) -> Dict[str, Union[bool, str, List[Dict[str, Any]]]]:
|
||||
# 获取用户组列表
|
||||
|
||||
def get_group_info(self, group) -> Dict[str, Any]:
|
||||
# 获取用户组信息
|
||||
# 传参:group(用户组)
|
||||
|
||||
def get_group_user(self, group: str) -> Dict[str, Any]:
|
||||
# 获取用户组用户
|
||||
# 传参:group(用户组)
|
||||
|
||||
def get_user_group(self, user: str) -> Dict[str, Any]:
# 获取用户所属的用户组
# 传参:user(用户)
|
||||
|
||||
495
mod/base/push_mod/__init__.py
Normal file
495
mod/base/push_mod/__init__.py
Normal file
@@ -0,0 +1,495 @@
|
||||
import json
|
||||
import os
|
||||
from typing import Dict, Union
|
||||
|
||||
from .mods import TaskConfig, TaskTemplateConfig, TaskRecordConfig, SenderConfig, load_task_template_by_config, \
|
||||
load_task_template_by_file, UPDATE_MOD_PUSH_FILE, UPDATE_VERSION_FILE, PUSH_DATA_PATH
|
||||
from .base_task import BaseTask
|
||||
from .send_tool import WxAccountMsg, WxAccountLoginMsg, WxAccountMsgBase
|
||||
from .system import PushSystem, get_push_public_data, push_by_task_keyword, push_by_task_id
|
||||
from .manager import PushManager
|
||||
from .util import read_file, write_file
|
||||
|
||||
|
||||
__all__ = [
|
||||
"TaskConfig",
|
||||
"TaskTemplateConfig",
|
||||
"TaskRecordConfig",
|
||||
"SenderConfig",
|
||||
"load_task_template_by_config",
|
||||
"load_task_template_by_file",
|
||||
"BaseTask",
|
||||
"WxAccountMsg",
|
||||
"WxAccountLoginMsg",
|
||||
"WxAccountMsgBase",
|
||||
"PushSystem",
|
||||
"get_push_public_data",
|
||||
"PushManager",
|
||||
"push_by_task_keyword",
|
||||
"push_by_task_id",
|
||||
"UPDATE_MOD_PUSH_FILE",
|
||||
"update_mod_push_system",
|
||||
"UPDATE_VERSION_FILE",
|
||||
"PUSH_DATA_PATH",
|
||||
"get_default_module_dict",
|
||||
]
|
||||
|
||||
|
||||
def update_mod_push_system():
    """Migrate legacy ``push.json`` alert tasks into the new push system.

    Runs at most once: the marker file UPDATE_MOD_PUSH_FILE is written on
    completion and its presence short-circuits later calls. Only the known
    legacy sections ("site_push", "system_push", "database_push", ...) are
    migrated; anything unparseable is silently skipped (best effort).
    """
    if os.path.exists(UPDATE_MOD_PUSH_FILE):
        return

    try:
        legacy = json.loads(read_file("/www/server/panel/class/push/push.json"))
    except:
        return
    if not isinstance(legacy, dict):
        return

    manager = PushManager()
    defaults = get_default_module_dict()
    # Dispatch each known legacy section to its migration helper.
    handlers = {
        "site_push": _update_site_push,
        "system_push": _update_system_push,
        "database_push": _update_database_push,
        "rsync_push": _update_rsync_push,
        "load_balance_push": _update_load_push,
        "task_manager_push": _update_task_manager_push,
    }
    for section, payload in legacy.items():
        handler = handlers.get(section)
        if handler is not None:
            handler(payload, manager, defaults)

    # Mark the migration as done so it never runs again.
    write_file(UPDATE_MOD_PUSH_FILE, "")
|
||||
|
||||
|
||||
def get_default_module_dict():
    """Map each enabled default sender to its sender id.

    Senders flagged ``original`` are keyed by their sender_type; webhook
    senders are additionally keyed by the webhook's title.
    """
    mapping = {}
    for sender in SenderConfig().config:
        if not sender["used"]:
            continue
        if sender.get("original", False):
            mapping[sender["sender_type"]] = sender["id"]
        if sender["sender_type"] == "webhook":
            mapping[sender["data"].get("title")] = sender["id"]
    return mapping
|
||||
|
||||
|
||||
def _update_site_push(old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
                      pmgr: PushManager,
                      df_mdl: Dict[str, str]):
    # Migrate the legacy "site_push" section. Each legacy entry is mapped by
    # its "type" onto a fixed template id of the new push system and stored
    # via pmgr.set_task_conf_data. df_mdl translates legacy module names into
    # new sender ids; unknown module names are dropped.

    for k, v in old_data.items():
        sender_list = [df_mdl[i.strip()] for i in v.get("module", "").split(",") if i.strip() in df_mdl]
        if v["type"] == "ssl":
            # SSL certificate expiry -> template 1
            push_data = {
                "template_id": "1",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", "all"),
                        "cycle": v.get("cycle", 15)
                    },
                    "number_rule": {
                        "total": v.get("push_count", 1)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "site_endtime":
            # Site expiry -> template 2
            push_data = {
                "template_id": "2",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "cycle": v.get("cycle", 7)
                    },
                    "number_rule": {
                        "total": v.get("push_count", 1)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "panel_pwd_endtime":
            # Panel password expiry -> template 3
            push_data = {
                "template_id": "3",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "cycle": v.get("cycle", 15),
                        "interval": 600
                    },
                    "number_rule": {
                        "total": v.get("push_count", 1)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "ssh_login_error":
            # Failed SSH logins -> template 4
            push_data = {
                "template_id": "4",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "cycle": v.get("cycle", 30),
                        "count": v.get("count", 3),
                        "interval": v.get("interval", 600)
                    },
                    "number_rule": {
                        "day_num": v.get("day_limit", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "services":
            # Service-down alerts -> template 5
            push_data = {
                "template_id": "5",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", "nginx"),
                        "count": v.get("count", 3),
                        "interval": v.get("interval", 600)
                    },
                    "number_rule": {
                        "day_num": v.get("day_limit", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "panel_safe_push":
            # Panel security alerts -> template 6
            push_data = {
                "template_id": "6",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {},
                    "number_rule": {
                        "day_num": v.get("day_limit", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "ssh_login":
            # Successful SSH logins -> template 7
            push_data = {
                "template_id": "7",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {},
                    "number_rule": {}
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "panel_login":
            # Panel logins -> template 8
            push_data = {
                "template_id": "8",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {},
                    "number_rule": {}
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "project_status":
            # Project status -> template 9
            push_data = {
                "template_id": "9",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "cycle": v.get("cycle", 1),
                        "project": v.get("project", 0),
                        # NOTE(review): this coerces legacy counts 1 and 2 to 2
                        # and passes all other values through — confirm the
                        # intended minimum/normalization.
                        "count": v.get("count", 2) if v.get("count", 2) not in (1, 2) else 2,
                        "interval": v.get("interval", 600)
                    },
                    "number_rule": {
                        "day_num": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "panel_update":
            # Panel update notices -> template 10
            push_data = {
                "template_id": "10",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {},
                    "number_rule": {
                        "day_num": 1
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

    # Migrate the legacy panel-login sender selection (stored in standalone
    # marker files over several product generations) into template 8.
    send_type = None
    login_send_type_conf = "/www/server/panel/data/panel_login_send.pl"
    if os.path.exists(login_send_type_conf):
        send_type = read_file(login_send_type_conf).strip()
    else:
        # Settings from before user_info["server_id"] existed.
        if os.path.exists("/www/server/panel/data/login_send_type.pl"):
            send_type = read_file("/www/server/panel/data/login_send_type.pl")
        else:
            if os.path.exists('/www/server/panel/data/login_send_mail.pl'):
                send_type = "mail"
            if os.path.exists('/www/server/panel/data/login_send_dingding.pl'):
                send_type = "dingding"

    if isinstance(send_type, str):
        sender_list = [df_mdl[i.strip()] for i in send_type.split(",") if i.strip() in df_mdl]
        push_data = {
            "template_id": "8",
            "task_data": {
                "status": True,
                "sender": sender_list,
                "task_data": {},
                "number_rule": {}
            }
        }
        pmgr.set_task_conf_data(push_data)

    # Likewise migrate the legacy SSH-login sender selection into template 7.
    login_send_type_conf = "/www/server/panel/data/ssh_send_type.pl"
    if os.path.exists(login_send_type_conf):
        ssh_send_type = read_file(login_send_type_conf).strip()
        if isinstance(ssh_send_type, str):
            sender_list = [df_mdl[i.strip()] for i in ssh_send_type.split(",") if i.strip() in df_mdl]
            push_data = {
                "template_id": "7",
                "task_data": {
                    "status": True,
                    "sender": sender_list,
                    "task_data": {},
                    "number_rule": {}
                }
            }
            pmgr.set_task_conf_data(push_data)
    return
|
||||
|
||||
|
||||
def _update_system_push(old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
                        pmgr: PushManager,
                        df_mdl: Dict[str, str]):
    # Migrate the legacy "system_push" section (disk/cpu/load/mem alerts)
    # onto templates 20-23. df_mdl maps legacy module names to sender ids.

    for k, v in old_data.items():
        sender_list = [df_mdl[i.strip()] for i in v.get("module", "").split(",") if i.strip() in df_mdl]
        if v["type"] == "disk":
            # Disk usage -> template 20
            push_data = {
                "template_id": "20",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", "/"),
                        # NOTE(review): this collapses cycle values 1 and 2 to 2
                        # and passes any other value through — confirm intent.
                        "cycle": v.get("cycle", 2) if v.get("cycle", 2) not in (1, 2) else 2,
                        "count": v.get("count", 80),
                    },
                    "number_rule": {
                        "total": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        # NOTE(review): duplicate check — this second branch (template 21)
        # also fires for "disk"; given the sequence 20=disk, 22=load, 23=mem
        # it was probably meant for another type (e.g. CPU). Confirm the
        # intended legacy type before changing it.
        if v["type"] == "disk":
            push_data = {
                "template_id": "21",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        # NOTE(review): condition looks inverted — cycle values
                        # inside the set (3, 5, 15) collapse to 5 while
                        # arbitrary values pass through; likely meant "in".
                        "cycle": v.get("cycle", 5) if v.get("cycle", 5) not in (3, 5, 15) else 5,
                        "count": v.get("count", 80),
                    },
                    "number_rule": {
                        "total": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        if v["type"] == "load":
            # System load -> template 22
            push_data = {
                "template_id": "22",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        # NOTE(review): same inverted-looking membership test.
                        "cycle": v.get("cycle", 5) if v.get("cycle", 5) not in (1, 5, 15) else 5,
                        "count": v.get("count", 80),
                    },
                    "number_rule": {
                        "total": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

        if v["type"] == "mem":
            # Memory usage -> template 23
            push_data = {
                "template_id": "23",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        # NOTE(review): same inverted-looking membership test.
                        "cycle": v.get("cycle", 5) if v.get("cycle", 5) not in (3, 5, 15) else 5,
                        "count": v.get("count", 80),
                    },
                    "number_rule": {
                        "total": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)

    return
|
||||
|
||||
|
||||
def _update_database_push(old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
                          pmgr: PushManager,
                          df_mdl: Dict[str, str]):
    # Migrate the legacy "database_push" section onto templates 30-31.
    # df_mdl maps legacy module names to sender ids.

    for k, v in old_data.items():
        sender_list = [df_mdl[i.strip()] for i in v.get("module", "").split(",") if i.strip() in df_mdl]
        if v["type"] == "mysql_pwd_endtime":
            # MySQL password expiry -> template 30
            push_data = {
                "template_id": "30",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", []),
                        "cycle": v.get("cycle", 15),
                    },
                    "number_rule": {}
                }
            }
            pmgr.set_task_conf_data(push_data)

        elif v["type"] == "mysql_replicate_status":
            # MySQL replication status -> template 31
            push_data = {
                "template_id": "31",
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", []),
                        # NOTE(review): "count" is populated from the legacy
                        # "cycle" field (siblings read "count") — confirm this
                        # mapping is intentional and not a copy-paste slip.
                        "count": v.get("cycle", 15),
                        "interval": v.get("interval", 600)
                    },
                    "number_rule": {}
                }
            }
            pmgr.set_task_conf_data(push_data)

    return None
|
||||
|
||||
|
||||
def _update_rsync_push(
        old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
        pmgr: PushManager,
        df_mdl: Dict[str, str]):
    """Migrate every legacy rsync alert entry onto template 40."""
    for _, legacy in old_data.items():
        # Translate legacy module names into new sender ids.
        senders = [df_mdl[m.strip()] for m in legacy.get("module", "").split(",") if m.strip() in df_mdl]
        pmgr.set_task_conf_data({
            "template_id": "40",
            "task_data": {
                "status": bool(legacy.get("status", True)),
                "sender": senders,
                "task_data": {
                    "interval": legacy.get("interval", 600)
                },
                "number_rule": {
                    "day_num": legacy.get("push_count", 3)
                }
            }
        })
|
||||
|
||||
|
||||
def _update_load_push(
        old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
        pmgr: PushManager,
        df_mdl: Dict[str, str]):
    """Migrate every legacy load-balance alert entry onto template 50."""
    for _, legacy in old_data.items():
        # Translate legacy module names into new sender ids.
        senders = [df_mdl[m.strip()] for m in legacy.get("module", "").split(",") if m.strip() in df_mdl]
        pmgr.set_task_conf_data({
            "template_id": "50",
            "task_data": {
                "status": bool(legacy.get("status", True)),
                "sender": senders,
                "task_data": {
                    "project": legacy.get("project", ""),
                    "cycle": legacy.get("cycle", "200|301|302|403|404")
                },
                "number_rule": {
                    "day_num": legacy.get("push_count", 2)
                }
            }
        })
|
||||
|
||||
|
||||
def _update_task_manager_push(
        old_data: Dict[str, Dict[str, Union[str, int, float, list]]],
        pmgr: PushManager,
        df_mdl: Dict[str, str]):
    """Migrate legacy task-manager alerts (cpu/mem/process) onto templates 60-62.

    @param old_data: legacy "task_manager_push" section of push.json
    @param pmgr: target push manager the migrated tasks are written to
    @param df_mdl: maps legacy module names to new sender ids
    """
    # Loop-invariant mapping hoisted out of the loop.
    template_id_dict = {
        "task_manager_cpu": "60",
        "task_manager_mem": "61",
        "task_manager_process": "62"
    }
    for k, v in old_data.items():
        sender_list = [df_mdl[i.strip()] for i in v.get("module", "").split(",") if i.strip() in df_mdl]
        if v["type"] in template_id_dict:
            push_data = {
                "template_id": template_id_dict[v["type"]],
                "task_data": {
                    "status": bool(v.get("status", True)),
                    "sender": sender_list,
                    "task_data": {
                        "project": v.get("project", ""),
                        "count": v.get("count", 80),
                        # BUG FIX: the original read the "count" field here
                        # ('"interval": v.get("count", 600)'); every sibling
                        # migration helper reads "interval" with default 600.
                        "interval": v.get("interval", 600),
                    },
                    "number_rule": {
                        "day_num": v.get("push_count", 3)
                    }
                }
            }
            pmgr.set_task_conf_data(push_data)
|
||||
|
||||
BIN
mod/base/push_mod/__pycache__/site_push.cpython-314.pyc
Normal file
BIN
mod/base/push_mod/__pycache__/site_push.cpython-314.pyc
Normal file
Binary file not shown.
215
mod/base/push_mod/base_task.py
Normal file
215
mod/base/push_mod/base_task.py
Normal file
@@ -0,0 +1,215 @@
|
||||
from typing import Union, Optional, List, Tuple
|
||||
from .send_tool import WxAccountMsg
|
||||
|
||||
|
||||
# 告警系统在处理每个任务时,都会重新建立有一个Task的对象,(请勿在__init__的初始化函数中添加任何参数)
|
||||
# 故每个对象中都可以大胆存放本任务所有数据,不会影响同类型的其他任务
|
||||
class BaseTask:
    """Base class for alert (push) tasks.

    The push system builds a fresh instance of the task class every time it
    processes a task (do not add parameters to ``__init__``), so each
    instance may freely cache data for its own task without affecting other
    tasks of the same type.
    """

    def __init__(self):
        self.source_name: str = ''
        self.title: str = ''          # title of this concrete alert task (varies with task data)
        self.template_name: str = ''  # title of the alert template (fixed)

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the configured alert parameters.

        @param task_data: incoming parameters, pre-filled with defaults
        @return: a dict to store for later add/update on success, or an
                 error-message string on failure
        """
        raise NotImplementedError()

    def get_keyword(self, task_data: dict) -> str:
        """Return a keyword used to look the task up or run it later
        (e.g. a tamper-protection task can derive it from its rule id).

        @param task_data: dict produced by check_task_data
        @return: keyword string
        """
        raise NotImplementedError()

    def get_title(self, task_data: dict) -> str:
        """Return the display title; falls back to the template name.

        @param task_data: dict produced by check_task_data
        """
        if self.title:
            return self.title
        return self.template_name

    def task_run_end_hook(self, res: dict) -> None:
        """Called by the push system after the task has been executed.

        @param res: result of running the task
        """
        return

    def task_config_update_hook(self, task: dict) -> None:
        """Called by the push manager after the task data was updated."""
        return

    def task_config_remove_hook(self, task: dict) -> None:
        """Called by the push manager after the task was removed."""
        return

    def task_config_create_hook(self, task: dict) -> None:
        """Called by the push manager after the task was created."""
        return

    def check_time_rule(self, time_rule: dict) -> Union[dict, str]:
        """Validate/normalize the time-control parameters.

        May add a ``get_by_func`` field naming a method of this class that
        replaces the standard time-rule check (see can_send_by_time_rule).
        @param time_rule: incoming parameters, pre-filled with defaults
        @return: dict on success, error-message string otherwise
        """
        return time_rule

    def check_num_rule(self, num_rule: dict) -> Union[dict, str]:
        """Validate/normalize the count-control parameters.

        May add a ``get_by_func`` field naming a method of this class that
        replaces the standard count-rule check (see can_send_by_num_rule).
        @param num_rule: incoming parameters, pre-filled with defaults
        @return: dict on success, error-message string otherwise
        """
        return num_rule

    def can_send_by_num_rule(self, task_id: str, task_data: dict, number_rule: dict, push_data: dict) -> Optional[str]:
        """Example count-rule hook; not every task needs one.

        @param task_id: task id
        @param task_data: alert parameters
        @param number_rule: count-control settings
        @param push_data: payload about to be sent (from get_push_data)
        @return: None when sending is allowed
        """
        return None

    def can_send_by_time_rule(self, task_id: str, task_data: dict, time_rule: dict, push_data: dict) -> Optional[str]:
        """Example time-rule hook; not every task needs one.

        @param task_id: task id
        @param task_data: alert parameters
        @param time_rule: time-control settings
        @param push_data: payload about to be sent (from get_push_data)
        """
        return None

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Decide whether the task should fire.

        @param task_id: task id
        @param task_data: alert parameters
        @return: a dict payload when the alert triggered, else None.
                 The dict should contain a ``msg_list`` key (List[str]) that
                 drives the automatic serialization for dingding, feishu,
                 mail, weixin and web_hook; SMS and WeChat official-account
                 messages must be implemented per task (length limits).
        """
        raise NotImplementedError()

    def filter_template(self, template: dict) -> Optional[dict]:
        """Filter/adjust the template info; return None when the task cannot
        currently be configured.

        @param template: the task's template info
        """
        raise NotImplementedError()

    def _require_msg_list(self, push_data: dict) -> List[str]:
        """Return push_data['msg_list'], raising if the payload lacks it."""
        msg_list = push_data.get('msg_list', None)
        if msg_list is None:
            raise ValueError("Task: {} alert push data parameter error, there is no msg_list field".format(self.title))
        return msg_list

    # push_public_data fields shared by all renderers:
    #   ip          public ip
    #   local_ip    local ip
    #   time        timestamp as a display string
    #   timestamp   current unix timestamp
    #   server_name server alias
    def to_dingding_msg(self, push_data: dict, push_public_data: dict) -> str:
        msg_list = self._require_msg_list(push_data)
        return self.public_headers_msg(push_public_data, dingding=True) + "\n\n" + "\n\n".join(msg_list)

    def to_feishu_msg(self, push_data: dict, push_public_data: dict) -> str:
        msg_list = self._require_msg_list(push_data)
        return self.public_headers_msg(push_public_data) + "\n\n" + "\n\n".join(msg_list)

    def to_mail_msg(self, push_data: dict, push_public_data: dict) -> str:
        msg_list = self._require_msg_list(push_data)
        public_headers = self.public_headers_msg(push_public_data, "<br>")
        return public_headers + "<br>" + "<br>".join(msg_list)

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Return the SMS alert type and data.

        @return: first item is the type, second is the data
        """
        raise NotImplementedError()

    def to_tg_msg(self, push_data: dict, push_public_data: dict) -> str:
        # NOTE(review): uses "<br>" separators like the mail renderer —
        # Telegram does not render <br> tags; confirm this is intended.
        msg_list = self._require_msg_list(push_data)
        public_headers = self.public_headers_msg(push_public_data, "<br>")
        return public_headers + "<br>" + "<br>".join(msg_list)

    def to_weixin_msg(self, push_data: dict, push_public_data: dict) -> str:
        msg_list = self._require_msg_list(push_data)
        spc = "\n "
        public_headers = self.public_headers_msg(push_public_data, "\n ")
        return public_headers + spc + spc.join(msg_list)

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        raise NotImplementedError()

    def to_web_hook_msg(self, push_data: dict, push_public_data: dict) -> str:
        msg_list = self._require_msg_list(push_data)
        public_headers = self.public_headers_msg(push_public_data, "\n")
        return public_headers + "\n" + "\n".join(msg_list)

    def public_headers_msg(self, push_public_data: dict, spc: str = None, dingding=False) -> str:
        """Render the shared message header (title, server, IPs, send time).

        Leftover debug ``print`` calls were removed from the original.
        @param spc: line separator (defaults to a blank markdown line)
        @param dingding: when True, the keyword "yakpanel" is appended to the
                         title if missing (DingTalk keyword filtering).
        """
        if spc is None:
            spc = "\n\n"
        title = self.title
        if dingding and "yakpanel" not in title:
            title += "yakpanel"
        return spc.join([
            "#### {}".format(title),
            ">Server:" + push_public_data['server_name'],
            ">IPAddress: {}(Internet) {}(Internal)".format(push_public_data['ip'], push_public_data['local_ip']),
            ">SendingTime: " + push_public_data['time']
        ])
|
||||
|
||||
class BaseTaskViewMsg:
    """Base renderer for a task's human-readable summary.

    Subclasses override get_msg to produce a description; the default is an
    empty string.
    """

    def get_msg(self, task: dict) -> Optional[str]:
        """Return a summary string for *task* (empty by default)."""
        return ""
27
mod/base/push_mod/compatible.py
Normal file
27
mod/base/push_mod/compatible.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import os
|
||||
from .util import read_file, write_file
|
||||
|
||||
|
||||
def rsync_compatible():
    """Patch legacy rsync_push.py entry points so they first try the new
    push_rsync_by_task_name() implementation and fall back to the old main().

    Idempotent: files that already contain the new hook are left untouched.
    Fix: removed a leftover debug print of each candidate path.

    NOTE(review): the indentation inside the replaced source snippets is
    reconstructed as standard 4-space — confirm against the shipped
    rsync_push.py before relying on an exact-match replace.
    """
    files = [
        "/www/server/panel/class/push/rsync_push.py",
        "/www/server/panel/plugin/rsync/rsync_push.py",
    ]
    for f in files:
        if not os.path.exists(f):
            continue
        src_data = read_file(f)
        # Already patched: the new entry point is referenced.
        if src_data.find("push_rsync_by_task_name") != -1:
            continue
        src_data = src_data.replace("""if __name__ == "__main__":
    rsync_push().main()""", """
if __name__ == "__main__":
    try:
        sys.path.insert(0, "/www/server/panel")
        from mod.base.push_mod.rsync_push import push_rsync_by_task_name
        push_rsync_by_task_name(sys.argv[1])
    except:
        rsync_push().main()
""")
        write_file(f, src_data)
239
mod/base/push_mod/database_push.py
Normal file
239
mod/base/push_mod/database_push.py
Normal file
@@ -0,0 +1,239 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import ipaddress
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Tuple, Union, Optional
|
||||
|
||||
from .send_tool import WxAccountMsg
|
||||
from .base_task import BaseTask
|
||||
from .util import read_file, DB, GET_CLASS
|
||||
|
||||
try:
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
from panel_msg.collector import DatabasePushMsgCollect
|
||||
except ImportError:
|
||||
DatabasePushMsgCollect = None
|
||||
|
||||
|
||||
def is_ipaddress(ip_data: str) -> bool:
    """Return True when *ip_data* parses as a valid IPv4 or IPv6 address."""
    try:
        ipaddress.ip_address(ip_data)
        return True
    except ValueError:
        return False
|
||||
|
||||
# class MysqlPwdEndTimeTask(BaseTask):
|
||||
#
|
||||
# def __init__(self):
|
||||
# super().__init__()
|
||||
# self.template_name = "MySQL数据库密码到期"
|
||||
# self.source_name = "mysql_pwd_end"
|
||||
#
|
||||
# self.push_db_user = ""
|
||||
#
|
||||
# def get_title(self, task_data: dict) -> str:
|
||||
# return "Msql:" + task_data["project"][1] + "用户密码到期提醒"
|
||||
#
|
||||
# def check_task_data(self, task_data: dict) -> Union[dict, str]:
|
||||
# task_data["interval"] = 600
|
||||
# if not (isinstance(task_data["project"], list) and len(task_data["project"]) == 3):
|
||||
# return "设置的用户格式错误"
|
||||
# project = task_data["project"]
|
||||
# if not (isinstance(project[0], int) and isinstance(project[1], str) and is_ipaddress(project[2])):
|
||||
# return "设置的检测用户格式错误"
|
||||
#
|
||||
# if not (isinstance(task_data["cycle"], int) and task_data["cycle"] >= 1):
|
||||
# return "到期时间参数错误,至少为 1 天"
|
||||
# return task_data
|
||||
#
|
||||
# def get_keyword(self, task_data: dict) -> str:
|
||||
# return "_".join([str(i) for i in task_data["project"]])
|
||||
#
|
||||
# def check_num_rule(self, num_rule: dict) -> Union[dict, str]:
|
||||
# num_rule["day_num"] = 1
|
||||
# return num_rule
|
||||
#
|
||||
# def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
|
||||
# sid = task_data["project"][0]
|
||||
# username = task_data["project"][1]
|
||||
# host = task_data["project"][2]
|
||||
#
|
||||
# if "/www/server/panel/class" not in sys.path:
|
||||
# sys.path.insert(0, "/www/server/panel/class")
|
||||
# try:
|
||||
# import panelMysql
|
||||
# import db_mysql
|
||||
# except ImportError:
|
||||
# return None
|
||||
#
|
||||
# if sid == 0:
|
||||
# try:
|
||||
# db_port = int(panelMysql.panelMysql().query("show global variables like 'port'")[0][1])
|
||||
# if db_port == 0:
|
||||
# db_port = 3306
|
||||
# except:
|
||||
# db_port = 3306
|
||||
# conn_config = {
|
||||
# "db_host": "localhost",
|
||||
# "db_port": db_port,
|
||||
# "db_user": "root",
|
||||
# "db_password": DB("config").where("id=?", (1,)).getField("mysql_root"),
|
||||
# "ps": "local server",
|
||||
# }
|
||||
# else:
|
||||
# conn_config = DB("database_servers").where("id=? AND LOWER(db_type)=LOWER('mysql')", (sid,)).find()
|
||||
# if not conn_config:
|
||||
# return None
|
||||
#
|
||||
# mysql_obj = db_mysql.panelMysql().set_host(conn_config["db_host"], conn_config["db_port"], None,
|
||||
# conn_config["db_user"], conn_config["db_password"])
|
||||
# if isinstance(mysql_obj, bool):
|
||||
# return None
|
||||
#
|
||||
# data_list = mysql_obj.query(
|
||||
# "SELECT password_last_changed FROM mysql.user WHERE user='{}' AND host='{}';".format(username, host))
|
||||
#
|
||||
# if not isinstance(data_list, list) or not data_list:
|
||||
# return None
|
||||
#
|
||||
# try:
|
||||
# # todo:检查这里的时间转化逻辑问题
|
||||
# last_time = data_list[0][0]
|
||||
# expire_time = last_time + timedelta(days=task_data["cycle"])
|
||||
# except:
|
||||
# return None
|
||||
#
|
||||
# if datetime.now() > expire_time:
|
||||
# self.title = self.get_title(task_data)
|
||||
# self.push_db_user = username
|
||||
# return {"msg_list": [
|
||||
# ">告警类型:MySQL密码即将到期",
|
||||
# ">Content of alarm: {} {}@{} 密码过期时间<font color=#ff0000>{} 天</font>".format(
|
||||
# conn_config["ps"], username, host, expire_time.strftime("%Y-%m-%d %H:%M:%S"))
|
||||
# ]}
|
||||
#
|
||||
# def filter_template(self, template: dict) -> Optional[dict]:
|
||||
# return template
|
||||
#
|
||||
# def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
|
||||
# return "", {}
|
||||
#
|
||||
# def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
|
||||
# msg = WxAccountMsg.new_msg()
|
||||
# msg.thing_type = "MySQL数据库密码到期"
|
||||
# msg.msg = "Mysql用户:{}的密码即将过期,请注意".format(self.push_db_user)
|
||||
# msg.next_msg = "Please log in to the panel to view the host status"
|
||||
# return msg
|
||||
|
||||
|
||||
class MysqlReplicateStatusTask(BaseTask):
    """Alert task that watches MySQL master-slave replication via the
    mysql_replicate plugin and optionally attempts an automatic repair."""

    def __init__(self):
        super().__init__()
        self.template_name = "MySQL主从复制异常告警"
        self.source_name = "mysql_replicate_status"
        self.title = "MySQL主从复制异常告警"

        # Slave address of the most recent alert; consumed by to_wx_account_msg.
        self.slave_ip = ''

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the task settings; return the data on success or a
        (Chinese) error message string on failure."""
        if not (isinstance(task_data["project"], str) and task_data["project"]):
            return "请选择告警的从库!"

        # count: 1 = auto-repair, 2 = report only.
        if not (isinstance(task_data["count"], int) and task_data["count"] in (1, 2)):
            return "是否自动修复选择错误!"

        if not (isinstance(task_data["interval"], int) and task_data["interval"] >= 60):
            return "检查间隔时间错误,至少需要60s的间隔"
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """The slave address doubles as the task's unique keyword."""
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Query replication status through the plugin; return a push payload
        ({"msg_list": [...]}) when an alert is needed, else None."""
        import PluginLoader

        args = GET_CLASS()
        args.slave_ip = task_data["project"]
        res = PluginLoader.plugin_run("mysql_replicate", "get_replicate_status", args)
        if res.get("status", False) is False:
            return None

        self.slave_ip = task_data["project"]
        # No status rows at all: replication has stopped entirely.
        if len(res.get("data", [])) == 0:
            s_list = [">告警类型:MySQL主从复制异常告警",
                      ">Content of alarm: <font color=#ff0000>从库 {} 主从复制已停止,请尽快登录面板查看详情</font>".format(
                          task_data["project"])]
            return {"msg_list": s_list}

        # Both IO and SQL threads must report "Yes" for a healthy slave.
        sql_status = io_status = False
        for item in res.get("data", []):
            if item["name"] == "Slave_IO_Running" and item["value"] == "Yes":
                io_status = True
            if item["name"] == "Slave_SQL_Running" and item["value"] == "Yes":
                sql_status = True
            if io_status is True and sql_status is True:
                break

        if io_status is False or sql_status is False:
            repair_txt = "请尽快登录面板查看详情"
            if task_data["count"] == 1:  # auto-repair requested
                PluginLoader.plugin_run("mysql_replicate", "repair_replicate", args)
                repair_txt = ",正在尝试修复"

            s_list = [">告警类型:MySQL主从复制异常告警",
                      ">Content of alarm: <font color=#ff0000>从库 {} 主从复制发生异常{}</font>".format(
                          task_data["project"], repair_txt)]
            return {"msg_list": s_list}
        return None

    @staticmethod
    def _get_mysql_replicate():
        """Read the mysql_replicate plugin config and return its slaves as
        select-options ([{"title":..., "value":...}]); empty list on any error."""
        slave_list = []
        mysql_replicate_path = os.path.join("/www/server/panel/plugin", "mysql_replicate", "config.json")
        if os.path.isfile(mysql_replicate_path):
            conf = read_file(mysql_replicate_path)
            try:
                conf = json.loads(conf)
                slave_list = [{"title": slave_ip, "value": slave_ip} for slave_ip in conf["slave"].keys()]
            except:
                pass
        return slave_list

    def filter_template(self, template: dict) -> Optional[dict]:
        """Populate the slave selector; hide the template when no slaves exist."""
        template["field"][0]["items"] = self._get_mysql_replicate()
        if not template["field"][0]["items"]:
            return None
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS is not supported for this alert.
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account notification for the last alert."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "MySQL主从复制异常告警"
        msg.msg = "从库 {} 主从复制发生异常".format(self.slave_ip)
        msg.next_msg = "请登录面板,在[软件商店-MySQL主从复制(重构版)]中查看"
        return msg
|
||||
class ViewMsgFormat(object):
    """Render a short HTML summary for the database-push task templates."""

    _FORMAT = {
        "30": (lambda task: "<span>剩余时间小于{}天{}</span>".format(
            task["task_data"].get("cycle"),
            "(如未处理,次日会重新发送1次,持续%d天)" % task.get("number_rule", {}).get("day_num", 0)
            if task.get("number_rule", {}).get("day_num", 0) else ""
        )),
        "31": (lambda task: "<span>MySQL主从复制异常告警</span>"),
    }

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the summary for the task's template id, or None if unknown."""
        formatter = self._FORMAT.get(task["template_id"])
        return None if formatter is None else formatter(task)
81
mod/base/push_mod/database_push_template.json
Normal file
81
mod/base/push_mod/database_push_template.json
Normal file
@@ -0,0 +1,81 @@
|
||||
[
|
||||
{
|
||||
"id": "31",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "mysql_replicate_status",
|
||||
"title": "Msql主从同步告警",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.database_push",
|
||||
"name": "MysqlReplicateStatusTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "选择监控的从库",
|
||||
"type": "select",
|
||||
"default": null,
|
||||
"items": []
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "自动修复",
|
||||
"type": "radio",
|
||||
"suffix": "",
|
||||
"default": 1,
|
||||
"items": [
|
||||
{
|
||||
"title": "自动尝试修复",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"title": "不做修复尝试",
|
||||
"value": 2
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "interval",
|
||||
"name": "间隔时间",
|
||||
"type": "number",
|
||||
"unit": "秒",
|
||||
"suffix": "后再次监控检测条件",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
],
|
||||
[
|
||||
"interval"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "",
|
||||
"count": 2,
|
||||
"interval": 600
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"wx_account",
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
}
|
||||
]
|
||||
178
mod/base/push_mod/domain_blcheck_push.py
Normal file
178
mod/base/push_mod/domain_blcheck_push.py
Normal file
@@ -0,0 +1,178 @@
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
import psutil
|
||||
from datetime import datetime
|
||||
from importlib import import_module
|
||||
from typing import Tuple, Union, Optional, List
|
||||
|
||||
from .send_tool import WxAccountMsg, WxAccountLoginMsg
|
||||
from .base_task import BaseTask
|
||||
from .mods import PUSH_DATA_PATH, TaskConfig, SenderConfig, PANEL_PATH
|
||||
from .util import read_file, DB, write_file, check_site_status,GET_CLASS, ExecShell, get_config_value, public_get_cache_func, \
|
||||
public_set_cache_func, get_network_ip, public_get_user_info, public_http_post, panel_version
|
||||
from mod.base.web_conf import RealSSLManger
|
||||
|
||||
# 邮局域名进入黑名单
|
||||
class MailDomainBlcheck(BaseTask):
    """Alert template for "your mail IP/domain is on an email blacklist".

    get_push_data returns None, so nothing is polled by this class itself;
    the create/update hooks persist the first configured sender's type to
    *push_tip_file*, and the remove hook deletes that file.
    """
    push_tip_file = "/www/server/panel/data/mail_domain_blcheck_send_type.pl"

    def __init__(self):
        super().__init__()
        self.source_name = "mail_domain_black"
        self.template_name = "Your IP is on the email blacklist"
        self.title = "Your IP is on the email blacklist"

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        # This alert has no configurable fields.
        return {}

    def get_keyword(self, task_data: dict) -> str:
        return "mail_domain_black"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        # Nothing to poll here.
        return None

    def filter_template(self, template) -> dict:
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS is not supported for this alert.
        return "", {}

    def task_config_update_hook(self, task: dict) -> None:
        """Record the first sender's type so the mail side can read it."""
        senders = task["sender"]
        if not senders:
            return
        sender_data = SenderConfig().get_by_id(senders[0])
        if sender_data:
            write_file(self.push_tip_file, sender_data["sender_type"])

    def task_config_create_hook(self, task: dict) -> None:
        return self.task_config_update_hook(task)

    def task_config_remove_hook(self, task: dict) -> None:
        if os.path.exists(self.push_tip_file):
            os.remove(self.push_tip_file)
||||
# 邮局服务异常告警
|
||||
class MailServerDown(BaseTask):
    """Alert template for "mail service is down".

    get_push_data returns None, so nothing is polled by this class itself;
    the create/update hooks persist the first configured sender's type to
    *push_tip_file*, and the remove hook deletes that file.
    """
    push_tip_file = "/www/server/panel/data/mail_server_down_send_type.pl"

    def __init__(self):
        super().__init__()
        self.source_name = "mail_server_status"
        self.template_name = "Your Mail Service is down"
        self.title = "Your Mail Service is down"

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        # This alert has no configurable fields.
        return {}

    def get_keyword(self, task_data: dict) -> str:
        return "mail_server_status"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        # Nothing to poll here.
        return None

    def filter_template(self, template) -> dict:
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS is not supported for this alert.
        return "", {}

    def task_config_update_hook(self, task: dict) -> None:
        """Record the first sender's type so the mail side can read it."""
        senders = task["sender"]
        if not senders:
            return
        sender_data = SenderConfig().get_by_id(senders[0])
        if sender_data:
            write_file(self.push_tip_file, sender_data["sender_type"])

    def task_config_create_hook(self, task: dict) -> None:
        return self.task_config_update_hook(task)

    def task_config_remove_hook(self, task: dict) -> None:
        if os.path.exists(self.push_tip_file):
            os.remove(self.push_tip_file)
||||
# 邮局服务异常告警
|
||||
class MailDomainQuota(BaseTask):
    """Alert template for "mail domain usage exceeds quota".

    get_push_data returns None, so nothing is polled by this class itself;
    the create/update hooks persist the first configured sender's type to
    *push_tip_file*, and the remove hook deletes that file.
    """
    push_tip_file = "/www/server/panel/data/mail_domain_quota_alert_send_type.pl"

    def __init__(self):
        super().__init__()
        self.source_name = "mail_domain_quota_alert"
        self.template_name = "Your Mail Domain Quota Alert"
        self.title = "Your Mail Domain Quota Alert"

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        # This alert has no configurable fields.
        return {}

    def get_keyword(self, task_data: dict) -> str:
        return "mail_domain_quota_alert"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        # Nothing to poll here.
        return None

    def filter_template(self, template) -> dict:
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS is not supported for this alert.
        return "", {}

    def task_config_update_hook(self, task: dict) -> None:
        """Record the first sender's type so the mail side can read it."""
        senders = task["sender"]
        if not senders:
            return
        sender_data = SenderConfig().get_by_id(senders[0])
        if sender_data:
            write_file(self.push_tip_file, sender_data["sender_type"])

    def task_config_create_hook(self, task: dict) -> None:
        return self.task_config_update_hook(task)

    def task_config_remove_hook(self, task: dict) -> None:
        if os.path.exists(self.push_tip_file):
            os.remove(self.push_tip_file)
||||
|
||||
class ViewMsgFormat(object):
    """Render a short description for the mail alert task templates."""

    _FORMAT = {
        "1": (
            lambda x: "<span>When your MailServer domain is blacklisted, an alarm is generated</span>"
        ),
        "2": (
            lambda x: "<span>When your Mail Service is down, an alarm is generated</span>"
        ),
        "3": (
            lambda x: "<span>When your Mail domain usage exceeds quota, an alarm is generated</span>"
        )
    }

    # Template ids used by the mail alert tasks, mapped onto _FORMAT keys.
    _TEMPLATE_ALIASES = {"80": "1", "81": "2", "82": "3"}

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the description for the task's template id, or None."""
        template_id = task["template_id"]
        key = self._TEMPLATE_ALIASES.get(template_id, template_id)
        formatter = self._FORMAT.get(key)
        return None if formatter is None else formatter(task)
105
mod/base/push_mod/domain_blcheck_push_template.json
Normal file
105
mod/base/push_mod/domain_blcheck_push_template.json
Normal file
@@ -0,0 +1,105 @@
|
||||
[
|
||||
{
|
||||
"id": "80",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "mail_domain_black",
|
||||
"title": "Your IP is on the email blacklist",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.domain_blcheck_push",
|
||||
"name": "MailDomainBlcheck"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
],
|
||||
"sorted": [
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "81",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "mail_server_status",
|
||||
"title": "Your Mail Service is down",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.domain_blcheck_push",
|
||||
"name": "MailServerDown"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
],
|
||||
"sorted": [
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "82",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "mail_domain_quota_alert",
|
||||
"title": "Your Mail Domain Quota Alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.domain_blcheck_push",
|
||||
"name": "MailDomainQuota"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
],
|
||||
"sorted": [
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 1
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
}
|
||||
]
|
||||
|
||||
274
mod/base/push_mod/load_push.py
Normal file
274
mod/base/push_mod/load_push.py
Normal file
@@ -0,0 +1,274 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from typing import Tuple, Union, Optional
|
||||
|
||||
from .mods import PUSH_DATA_PATH, TaskTemplateConfig
|
||||
from .send_tool import WxAccountMsg
|
||||
from .base_task import BaseTask
|
||||
from .util import read_file, DB, GET_CLASS, write_file
|
||||
|
||||
|
||||
class NginxLoadTask(BaseTask):
    """Alert task that checks nginx load-balancer upstream nodes via the
    load_balance plugin and pushes an alarm after repeated failures.

    Fixes applied:
      * removed a leftover debug print() in _check_func;
      * task_config_create_hook previously added a new entry to the legacy
        dict while iterating it (RuntimeError: dictionary changed size during
        iteration) and added nothing at all when the dict was empty;
      * task_config_remove_hook filtered the legacy config but never wrote it
        back to disk.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "nginx_load_push"
        self.template_name = "Load balancing alarm"
        # Lazily-loaded failure counter: {ping_url: [recent failure timestamps]}
        self._tip_counter = None

    @property
    def tip_counter(self) -> dict:
        """Failure counter, loaded from load_balance_push.json on first access."""
        if self._tip_counter is not None:
            return self._tip_counter
        tip_counter = '{}/load_balance_push.json'.format(PUSH_DATA_PATH)
        if os.path.exists(tip_counter):
            try:
                self._tip_counter = json.loads(read_file(tip_counter))
            except json.JSONDecodeError:
                self._tip_counter = {}
        else:
            self._tip_counter = {}
        return self._tip_counter

    def save_tip_counter(self):
        """Persist the failure counter to the push data directory."""
        tip_counter = '{}/load_balance_push.json'.format(PUSH_DATA_PATH)
        write_file(tip_counter, json.dumps(self.tip_counter))

    def get_title(self, task_data: dict) -> str:
        """Alarm title, including the upstream name unless watching all."""
        if task_data["project"] == "all":
            return "Load balancing alarm"
        return "Load balancing alarm -- [{}] ".format(task_data["project"])

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the settings; return normalized data or an error string."""
        all_upstream_name = DB("upstream").field("name").select()
        if isinstance(all_upstream_name, str) and all_upstream_name.startswith("error"):
            return 'Alarms cannot be set without load balancing configuration'
        all_upstream_name = [i["name"] for i in all_upstream_name]
        if not bool(all_upstream_name):
            return 'Alarms cannot be set without load balancing configuration'
        if task_data["project"] not in all_upstream_name and task_data["project"] != "all":
            return 'Without this load balancer configuration, alarms cannot be set'

        # Keep only well-formed HTTP status codes from the "cycle" field.
        cycle = []
        for i in task_data["cycle"].split("|"):
            if bool(i) and i.isdecimal():
                code = int(i)
                if 100 <= code < 600:
                    cycle.append(str(code))
        if not bool(cycle):
            return 'If no error code is specified, the alarm cannot be set'

        task_data["cycle"] = "|".join(cycle)
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        return task_data["project"]

    def _check_func(self, upstream_name: str, codes: str) -> list:
        """Run the plugin health check; return nodes that failed 3 times
        within the 4-minute sliding window."""
        import PluginLoader
        get_obj = GET_CLASS()
        get_obj.upstream_name = upstream_name
        # Ask the load_balance plugin for the upstreams to check.
        upstreams = PluginLoader.plugin_run("load_balance", "get_check_upstream", get_obj)
        access_codes = [int(i) for i in codes.split("|") if bool(i.strip())]
        res_list = []
        for upstream in upstreams:
            # check_nodes returns the nodes whose status code failed the check.
            res = upstream.check_nodes(access_codes, return_nodes=True)
            for ping_url in res:
                if ping_url in self.tip_counter:
                    self.tip_counter[ping_url].append(int(time.time()))
                    # Drop failure records older than 4 minutes (records are
                    # appended chronologically, so a prefix cut is enough).
                    idx = 0
                    for i in self.tip_counter[ping_url]:
                        if time.time() - i > 60 * 4:
                            idx += 1
                    self.tip_counter[ping_url] = self.tip_counter[ping_url][idx:]
                    # Three recent failures -> alarm, then reset the counter.
                    if len(self.tip_counter[ping_url]) >= 3:
                        res_list.append(ping_url)
                        self.tip_counter[ping_url] = []
                else:
                    self.tip_counter[ping_url] = [int(time.time()), ]
        self.save_tip_counter()
        return res_list

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Return the push payload when failing nodes were found, else None."""
        err_nodes = self._check_func(task_data["project"], task_data["cycle"])
        if not err_nodes:
            return None
        pj = "load balancing:【{}】".format(task_data["project"]) if task_data["project"] != "all" else "load balancing"
        nodes = '、'.join(err_nodes)
        self.title = self.get_title(task_data)
        return {
            "msg_list": [
                ">Notification type: Enterprise Edition load balancing alarm",
                ">Content of alarm: <font color=#ff0000>{}The node [{}] under the configuration has access error, please pay attention to the node situation in time and deal with it.</font> ".format(
                    pj, nodes),
            ],
            "pj": pj,
            "nodes": nodes
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Populate the upstream selector; hide the template when the plugin
        or any upstream configuration is missing."""
        if not os.path.exists("/www/server/panel/plugin/load_balance/load_balance_main.py"):
            return None
        all_upstream = DB("upstream").field("name").select()
        if isinstance(all_upstream, str) and all_upstream.startswith("error"):
            return None
        all_upstream_name = [i["name"] for i in all_upstream]
        if not all_upstream_name:
            return None
        for name in all_upstream_name:
            template["field"][0]["items"].append({
                "title": name,
                "value": name
            })
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        # SMS is not supported for this alert.
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account notification."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Load balancing alarm"
        msg.msg = "If the node is abnormal, log in to the panel"
        return msg

    def task_config_create_hook(self, task: dict) -> None:
        """Mirror the task into the legacy push.json for backward compat."""
        old_config_file = "/www/server/panel/class/push/push.json"
        try:
            old_config = json.loads(read_file(old_config_file))
        except:
            return
        if "load_balance_push" not in old_config:
            old_config["load_balance_push"] = {}
        old_data = {
            "push_count": task["number_rule"].get("day_num", 2),
            "cycle": task["task_data"].get("cycle", "200|301|302|403|404"),
            "interval": task["task_data"].get("interval", 60),
            "title": task["title"],
            "status": task['status'],
            "module": ",".join(task["sender"])
        }
        project = task["task_data"]["project"]
        # Update an existing entry for this project; only add a new one when
        # none matched (the old code inserted while iterating the dict).
        matched = False
        for v in old_config["load_balance_push"].values():
            if v["project"] == project:
                v.update(old_data)
                matched = True
        if not matched:
            old_data["project"] = project
            old_config["load_balance_push"][int(time.time())] = old_data

        write_file(old_config_file, json.dumps(old_config))

    def task_config_update_hook(self, task: dict) -> None:
        return self.task_config_create_hook(task)

    def task_config_remove_hook(self, task: dict) -> None:
        """Drop this project's entry from the legacy push.json."""
        old_config_file = "/www/server/panel/class/push/push.json"
        try:
            old_config = json.loads(read_file(old_config_file))
        except:
            return
        if "load_balance_push" not in old_config:
            old_config["load_balance_push"] = {}
        old_config["load_balance_push"] = {
            k: v for k, v in old_config["load_balance_push"].items()
            if v["project"] != task["task_data"]["project"]
        }
        # The old code computed the filtered dict but never persisted it.
        write_file(old_config_file, json.dumps(old_config))
|
||||
def load_load_template():
    """Register the load-balancing alert template (id "50") if it is not
    already present in the task template config.

    Returns None either way; registration happens as a side effect via
    load_task_template_by_config.
    """
    # Already registered: nothing to do.
    if TaskTemplateConfig().get_by_id("50"):
        return None

    # Imported lazily to avoid a circular import at module load time.
    from .mods import load_task_template_by_config
    load_task_template_by_config(
        [{
            "id": "50",
            "ver": "1",
            "used": True,
            "source": "nginx_load_push",
            "title": "load balancing",
            # The task class is resolved by dotted path at runtime.
            "load_cls": {
                "load_type": "path",
                "cls_path": "mod.base.push_mod.load_push",
                "name": "NginxLoadTask"
            },
            "template": {
                "field": [
                    {
                        "attr": "project",
                        "name": "The name of the payload",
                        "type": "select",
                        "default": "all",
                        "unit": "",
                        "suffix": (
                            "<i style='color: #999;font-style: initial;font-size: 12px;margin-right: 5px'>*</i>"
                            "<span style='color:#999'>If a node fails to access a node in the selected load configuration, an alarm is triggered</span>"
                        ),
                        # Upstream names are appended by NginxLoadTask.filter_template.
                        "items": [
                            {
                                "title": "All configured loads",
                                "value": "all"
                            }
                        ]
                    },
                    {
                        "attr": "cycle",
                        "name": "The status code of the success",
                        "type": "textarea",
                        "unit": "",
                        "suffix": (
                            "<br><i style='color: #999;font-style: initial;font-size: 12px;margin-right: 5px'>*</i>"
                            "<span style='color:#999'>Status codes are separated by vertical bars, for example:200|301|302|403|404</span>"
                        ),
                        "width": "400px",
                        "style": {
                            'height': '70px',
                        },
                        "default": "200|301|302|403|404"
                    }
                ],
                "sorted": [
                    [
                        "project"
                    ],
                    [
                        "cycle"
                    ]
                ],
            },
            "default": {
                "project": "all",
                "cycle": "200|301|302|403|404"
            },
            "advanced_default": {
                "number_rule": {
                    "day_num": 3
                }
            },
            "send_type_list": [
                "wx_account",
                "dingding",
                "feishu",
                "mail",
                "weixin",
                "webhook",
                "tg",
            ],
            "unique": False
        }]
    )
|
||||
|
||||
class ViewMsgFormat(object):
    """Render a short description for the load-balancing alert template."""

    @staticmethod
    def get_msg(task: dict) -> Optional[str]:
        """Return the summary for template id "50", or None otherwise.

        Fix: the closing tag was "<span>" instead of "</span>", producing
        invalid HTML in the panel view.
        """
        if task["template_id"] == "50":
            return "<span>When the node access is abnormal, the alarm message is pushed (it is not pushed after {} times per day)</span>".format(
                task.get("number_rule", {}).get("day_num"))
        return None
296
mod/base/push_mod/manager.py
Normal file
296
mod/base/push_mod/manager.py
Normal file
@@ -0,0 +1,296 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import Union, Optional
|
||||
|
||||
from mod.base import json_response
|
||||
from .mods import TaskTemplateConfig, TaskConfig, SenderConfig, TaskRecordConfig
|
||||
from .system import PushSystem
|
||||
|
||||
sys.path.insert(0, "/www/server/panel/class/")
|
||||
import public
|
||||
|
||||
|
||||
class PushManager:
|
||||
    def __init__(self):
        # Config accessors shared by all manager operations.
        self.template_conf = TaskTemplateConfig()
        self.task_conf = TaskConfig()
        self.send_config = SenderConfig()
        # Per-manager cache of sender configs, keyed by sender id
        # (filled lazily by _get_sender_conf).
        self._send_conf_cache = {}
||||
def _get_sender_conf(self, sender_id):
|
||||
if sender_id in self._send_conf_cache:
|
||||
return self._send_conf_cache[sender_id]
|
||||
tmp = self.send_config.get_by_id(sender_id)
|
||||
self._send_conf_cache[sender_id] = tmp
|
||||
return tmp
|
||||
|
||||
def normalize_task_config(self, task, template) -> Union[dict, str]:
|
||||
result = {}
|
||||
sender = task.get("sender", None)
|
||||
if sender is None:
|
||||
return "No alarm channel is configured"
|
||||
if not isinstance(sender, list):
|
||||
return "The alarm channel is incorrect"
|
||||
|
||||
new_sender = []
|
||||
for i in sender:
|
||||
sender_conf = self._get_sender_conf(i)
|
||||
if not sender_conf:
|
||||
continue
|
||||
else:
|
||||
new_sender.append(i)
|
||||
if sender_conf["sender_type"] not in template["send_type_list"]:
|
||||
if sender_conf["sender_type"] == "sms":
|
||||
return "SMS alerts are not supported"
|
||||
return "Unsupported alerting methods:{}".format(sender_conf['data']["title"])
|
||||
if not sender_conf["used"]:
|
||||
if sender_conf["sender_type"] == "sms":
|
||||
return "The SMS alert channel has been closed"
|
||||
return "Closed alert mode:{}".format(sender_conf['data']["title"])
|
||||
|
||||
result["sender"] = new_sender
|
||||
|
||||
if "default" in template and template["default"]:
|
||||
task_data = task.get("task_data", {})
|
||||
for k, v in template["default"].items():
|
||||
if k not in task_data:
|
||||
task_data[k] = v
|
||||
|
||||
result["task_data"] = task_data
|
||||
# 避免default为空时,无数据
|
||||
else:
|
||||
result["task_data"] = task.get("task_data", {})
|
||||
|
||||
if "task_data" not in result:
|
||||
result["task_data"] = {}
|
||||
|
||||
time_rule = task.get("time_rule", {})
|
||||
|
||||
if "send_interval" in time_rule:
|
||||
if not isinstance(time_rule["send_interval"], int):
|
||||
return "The minimum interval is set incorrectly"
|
||||
if time_rule["send_interval"] < 0:
|
||||
return "The minimum interval is set incorrectly"
|
||||
|
||||
if "time_range" in time_rule:
|
||||
if not isinstance(time_rule["time_range"], list):
|
||||
return "The time range is set incorrectly"
|
||||
if not len(time_rule["time_range"]) == 2:
|
||||
del time_rule["time_range"]
|
||||
else:
|
||||
time_range = time_rule["time_range"]
|
||||
if not (isinstance(time_range[0], int) and isinstance(time_range[1], int) and
|
||||
0 <= time_range[0] < time_range[1] <= 60 * 60 * 24):
|
||||
return "The time range is set incorrectly"
|
||||
|
||||
result["time_rule"] = time_rule
|
||||
|
||||
number_rule = task.get("number_rule", {})
|
||||
if "day_num" in number_rule:
|
||||
if not (isinstance(number_rule["day_num"], int) and number_rule["day_num"] >= 0):
|
||||
return "The minimum number of times per day is set incorrectly"
|
||||
|
||||
if "total" in number_rule:
|
||||
if not (isinstance(number_rule["total"], int) and number_rule["total"] >= 0):
|
||||
return "The maximum number of alarms is set incorrectly"
|
||||
|
||||
result["number_rule"] = number_rule
|
||||
|
||||
if "status" not in task:
|
||||
result["status"] = True
|
||||
if "status" in task:
|
||||
if isinstance(task["status"], bool):
|
||||
result["status"] = task["status"]
|
||||
|
||||
return result
|
||||
|
||||
def set_task_conf_data(self, push_data: dict) -> Optional[str]:
|
||||
task_id = push_data.get("task_id", None)
|
||||
template_id = push_data.get("template_id")
|
||||
task = push_data.get("task_data")
|
||||
|
||||
target_task_conf = None
|
||||
if task_id is not None:
|
||||
tmp = self.task_conf.get_by_id(task_id)
|
||||
if tmp is None:
|
||||
target_task_conf = tmp
|
||||
|
||||
template = self.template_conf.get_by_id(template_id)
|
||||
|
||||
if not template:
|
||||
# 如果没有找到模板,则尝试加载默认的安全推送模板
|
||||
from .mods import load_task_template_by_file
|
||||
if not os.path.exists("/www/server/panel/mod/base/push_mod/safe_mod_push_template.json"):
|
||||
return "No alarm template was found"
|
||||
load_task_template_by_file("/www/server/panel/mod/base/push_mod/safe_mod_push_template.json")
|
||||
self.template_conf = TaskTemplateConfig()
|
||||
template = self.template_conf.get_by_id(template_id)
|
||||
if not template:
|
||||
return "No alarm template was found"
|
||||
|
||||
if template["unique"] and not target_task_conf:
|
||||
for i in self.task_conf.config:
|
||||
if i["template_id"] == template["id"]:
|
||||
target_task_conf = i
|
||||
break
|
||||
|
||||
task_obj = PushSystem().get_task_object(template_id, template["load_cls"])
|
||||
if not task_obj:
|
||||
return "Loading task type error, you can try to fix the panel"
|
||||
|
||||
res = self.normalize_task_config(task, template)
|
||||
if isinstance(res, str):
|
||||
return res
|
||||
|
||||
task_data = task_obj.check_task_data(res["task_data"])
|
||||
if isinstance(task_data, str):
|
||||
return task_data
|
||||
|
||||
number_rule = task_obj.check_num_rule(res["number_rule"])
|
||||
if isinstance(number_rule, str):
|
||||
return number_rule
|
||||
|
||||
time_rule = task_obj.check_time_rule(res["time_rule"])
|
||||
if isinstance(time_rule, str):
|
||||
return time_rule
|
||||
|
||||
res["task_data"] = task_data
|
||||
res["number_rule"] = number_rule
|
||||
res["time_rule"] = time_rule
|
||||
|
||||
res["keyword"] = task_obj.get_keyword(task_data)
|
||||
res["source"] = task_obj.source_name
|
||||
res["title"] = task_obj.get_title(task_data)
|
||||
|
||||
if not target_task_conf:
|
||||
tmp = self.task_conf.get_by_keyword(res["source"], res["keyword"])
|
||||
if tmp:
|
||||
target_task_conf = tmp
|
||||
|
||||
if not target_task_conf:
|
||||
res["id"] = self.task_conf.nwe_id()
|
||||
res["template_id"] = template_id
|
||||
res["status"] = True
|
||||
res["pre_hook"] = {}
|
||||
res["after_hook"] = {}
|
||||
res["last_check"] = 0
|
||||
res["last_send"] = 0
|
||||
res["number_data"] = {}
|
||||
res["create_time"] = time.time()
|
||||
res["record_time"] = 0
|
||||
self.task_conf.config.append(res)
|
||||
task_obj.task_config_create_hook(res)
|
||||
else:
|
||||
target_task_conf.update(res)
|
||||
target_task_conf["last_check"] = 0
|
||||
target_task_conf["number_data"] = {} # 次数控制数据置空
|
||||
task_obj.task_config_update_hook(target_task_conf)
|
||||
|
||||
self.task_conf.save_config()
|
||||
return None
|
||||
|
||||
def update_task_status(self, get):
|
||||
# 先调用 set_task_conf 修改任务配置
|
||||
set_conf_response = self.set_task_conf(get)
|
||||
|
||||
if set_conf_response['status'] != 0:
|
||||
return set_conf_response # 返回错误信息
|
||||
|
||||
# 读取任务数据
|
||||
file_path = '{}/data/mod_push_data/task.json'.format(public.get_panel_path())
|
||||
try:
|
||||
with open(file_path, 'r') as file:
|
||||
tasks = json.load(file)
|
||||
except (IOError, json.JSONDecodeError):
|
||||
return json_response(status=False, msg=public.lang("Unable to read task data."))
|
||||
# 查找对应的 task_id
|
||||
task_title = get.title.strip() # 假设 get 中有 title 参数
|
||||
task_id = None
|
||||
|
||||
for task in tasks:
|
||||
if task.get('title') == task_title:
|
||||
task_id = task.get('id')
|
||||
break
|
||||
|
||||
if not task_id:
|
||||
return json_response(status=False, msg=public.lang("The task has not been found."))
|
||||
|
||||
# 调用 change_task_conf 修改任务状态
|
||||
get.task_id = task_id
|
||||
return self.change_task_conf(get)
|
||||
|
||||
def set_task_conf(self, get):
|
||||
task_id = None
|
||||
try:
|
||||
if hasattr(get, "task_id"):
|
||||
task_id = get.task_id.strip()
|
||||
if not task_id:
|
||||
task_id = None
|
||||
else:
|
||||
self.remove_task_conf(get)
|
||||
template_id = get.template_id.strip()
|
||||
task = json.loads(get.task_data.strip())
|
||||
except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
|
||||
return json_response(status=False, msg="The parameter is incorrect")
|
||||
push_data = {
|
||||
"task_id": task_id,
|
||||
"template_id": template_id,
|
||||
"task_data": task,
|
||||
}
|
||||
res = self.set_task_conf_data(push_data)
|
||||
if res:
|
||||
return json_response(status=False, msg=res)
|
||||
return json_response(status=True, msg="The alarm task is saved successfully")
|
||||
|
||||
def change_task_conf(self, get):
|
||||
try:
|
||||
task_id = get.task_id.strip()
|
||||
status = int(get.status) # 获取status字段并转换为整数
|
||||
except (AttributeError, ValueError):
|
||||
return json_response(status=False, msg="Parameter error")
|
||||
|
||||
if status not in [0, 1]:
|
||||
return json_response(status=False, msg="Invalid status value")
|
||||
|
||||
tmp = self.task_conf.get_by_id(task_id)
|
||||
if tmp is None:
|
||||
return json_response(status=True, msg="No alarm task was queried")
|
||||
|
||||
tmp["status"] = bool(status) # 将status转换为布尔值并设置
|
||||
|
||||
self.task_conf.save_config()
|
||||
return json_response(status=True, msg="operate successfully")
|
||||
|
||||
def change_task(self, task_id, status):
|
||||
tmp = self.task_conf.get_by_id(task_id)
|
||||
tmp["status"] = bool(status) # 将status转换为布尔值并设置
|
||||
self.task_conf.save_config()
|
||||
|
||||
def remove_task_conf(self, get):
|
||||
try:
|
||||
task_id = get.task_id.strip()
|
||||
except AttributeError:
|
||||
return json_response(status=False, msg="The parameter is incorrect")
|
||||
|
||||
tmp = self.task_conf.get_by_id(task_id)
|
||||
if tmp is None:
|
||||
return json_response(status=True, msg="No alarm task was queried")
|
||||
|
||||
self.task_conf.config.remove(tmp)
|
||||
|
||||
self.task_conf.save_config()
|
||||
template = self.template_conf.get_by_id(tmp["template_id"])
|
||||
if template:
|
||||
task_obj = PushSystem().get_task_object(template["id"], template["load_cls"])
|
||||
if task_obj:
|
||||
task_obj.task_config_remove_hook(tmp)
|
||||
|
||||
return json_response(status=True, msg="operate successfully")
|
||||
|
||||
@staticmethod
|
||||
def clear_task_record_by_task_id(task_id):
|
||||
tr_conf = TaskRecordConfig(task_id)
|
||||
if os.path.exists(tr_conf.config_file_path):
|
||||
os.remove(tr_conf.config_file_path)
|
||||
371
mod/base/push_mod/mods.py
Normal file
371
mod/base/push_mod/mods.py
Normal file
@@ -0,0 +1,371 @@
|
||||
# coding: utf-8
|
||||
# -------------------------------------------------------------------
|
||||
# yakpanel
|
||||
# -------------------------------------------------------------------
|
||||
# Copyright (c) 2015-2017 yakpanel(https://www.yakpanel.com) All rights reserved.
|
||||
# -------------------------------------------------------------------
|
||||
# Author: baozi <baozi@yakpanel.com>
|
||||
# -------------------------------------------------------------------
|
||||
# 新告警的所有数据库操作
|
||||
# ------------------------------
|
||||
import json
|
||||
import os
|
||||
import types
|
||||
from typing import Any, Dict, Optional, List
|
||||
from threading import Lock
|
||||
from uuid import uuid4
|
||||
|
||||
import fcntl
|
||||
|
||||
from .util import Sqlite, write_log, read_file, write_file
|
||||
|
||||
_push_db_lock = Lock()
|
||||
|
||||
|
||||
# Replacement for the two awkward query helpers in class/db.py.
def msg_db_query_func(self, sql, param=()):
    """Execute *sql* with *param* and return the raw result set.

    On failure the exception text is returned as a string prefixed with
    ``error: `` instead of being raised.
    """
    self._Sql__GetConn()
    try:
        prepared = self._Sql__to_tuple(param)
        return self._Sql__DB_CONN.execute(sql, prepared)
    except Exception as ex:
        return "error: " + str(ex)
|
||||
|
||||
|
||||
def get_push_db():
    """Return a ``Sqlite`` handle bound to the mod_push database file.

    The handle's ``query`` method is replaced by :func:`msg_db_query_func`.
    """
    db_file = "/www/server/panel/data/db/mod_push.db"
    db_dir = os.path.dirname(db_file)
    if not os.path.isdir(db_dir):
        os.makedirs(db_dir)

    handle = Sqlite()
    setattr(handle, "_Sql__DB_FILE", db_file)
    setattr(handle, "query", types.MethodType(msg_db_query_func, handle))
    return handle
|
||||
|
||||
|
||||
def get_table(table_name: str):
    """Return a push-db handle with its active table set to *table_name*."""
    handle = get_push_db()
    handle.table = table_name
    return handle
|
||||
|
||||
|
||||
# File object holding the flock between lock/unlock calls. Closing a locked
# descriptor releases its flock, so it must stay open for the lock's lifetime.
_push_db_lock_fd = None


def lock_push_db():
    """Take an exclusive lock over the push database.

    Serializes both threads (via ``_push_db_lock``) and processes (via
    ``fcntl.flock`` on the database file). The original implementation
    opened the file in a ``with`` block, which closed the descriptor and
    therefore released the flock immediately; it also called
    ``Lock.locked()`` (a no-op query) instead of acquiring the thread lock.
    """
    global _push_db_lock_fd
    _push_db_lock.acquire()
    _push_db_lock_fd = open("/www/server/panel/data/db/mod_push.db", mode="rb")
    fcntl.flock(_push_db_lock_fd.fileno(), fcntl.LOCK_EX)


def unlock_push_db():
    """Release the lock taken by :func:`lock_push_db`.

    Safe to call when no lock is held. The original called
    ``Lock.acquire()`` here, which acquired (never released) the thread
    lock and deadlocked the second lock/unlock cycle.
    """
    global _push_db_lock_fd
    if _push_db_lock_fd is not None:
        fcntl.flock(_push_db_lock_fd.fileno(), fcntl.LOCK_UN)
        _push_db_lock_fd.close()
        _push_db_lock_fd = None
    if _push_db_lock.locked():
        _push_db_lock.release()
|
||||
|
||||
|
||||
def push_db_locker(func):
    """Decorator: run *func* while holding the push-database lock.

    The lock is always released, whether *func* returns or raises —
    ``try/finally`` replaces the original duplicated except/else unlock
    paths.
    """
    def inner_func(*args, **kwargs):
        lock_push_db()
        try:
            return func(*args, **kwargs)
        finally:
            # unlock on both the success and the exception path
            unlock_push_db()

    return inner_func
|
||||
|
||||
|
||||
# Set to True by init_db() when any table fails to create; checked before
# template updates are allowed.
DB_INIT_ERROR = False

PANEL_PATH = "/www/server/panel"
# Directory holding the JSON config files used by the push module.
PUSH_DATA_PATH = "{}/data/mod_push_data".format(PANEL_PATH)
# Marker files used to signal pending panel / push-module updates.
UPDATE_VERSION_FILE = "{}/update_panel.pl".format(PUSH_DATA_PATH)
UPDATE_MOD_PUSH_FILE = "{}/update_mod.pl".format(PUSH_DATA_PATH)
|
||||
|
||||
|
||||
class BaseConfig:
    """Base class for JSON-file-backed config stores under PUSH_DATA_PATH.

    Subclasses set ``config_file_path``; the file holds a JSON list of
    dicts, loaded lazily and cached on the instance.
    """
    config_file_path = ""
    # e.g. /www/server/panel/data/mod_push_data/task.json
    #      /www/server/panel/data/mod_push_data/sender.json

    def __init__(self):
        # exist_ok avoids the race between the exists() check and makedirs()
        os.makedirs(PUSH_DATA_PATH, exist_ok=True)
        self._config: Optional[List[Dict[str, Any]]] = None

    @property
    def config(self) -> List[Dict[str, Any]]:
        """Lazily load and cache the config list.

        A missing or unparsable file yields an empty list (best effort);
        the bare ``except:`` was narrowed to ``Exception`` so that
        KeyboardInterrupt/SystemExit still propagate.
        """
        if self._config is None:
            try:
                self._config = json.loads(read_file(self.config_file_path))
            except Exception:
                self._config = []
        return self._config

    def save_config(self) -> None:
        """Persist the in-memory config list back to its JSON file."""
        write_file(self.config_file_path, json.dumps(self.config))

    @staticmethod
    def nwe_id() -> str:
        """Return a random 16-char hex id (misspelled name kept for callers)."""
        return uuid4().hex[::2]

    def get_by_id(self, target_id: str) -> Optional[Dict[str, Any]]:
        """Return the first entry whose "id" equals *target_id*, else None."""
        for i in self.config:
            if i.get("id", None) == target_id:
                return i
|
||||
|
||||
|
||||
class TaskTemplateConfig(BaseConfig):
    """JSON-backed store of alarm task templates (one dict per template)."""
    config_file_path = "{}/task_template.json".format(PUSH_DATA_PATH)
|
||||
|
||||
|
||||
class TaskConfig(BaseConfig):
    """JSON-backed store of the configured alarm tasks."""
    config_file_path = "{}/task.json".format(PUSH_DATA_PATH)

    def get_by_keyword(self, source: str, keyword: str) -> Optional[Dict[str, Any]]:
        """Return the first task matching *source* and *keyword*, else None."""
        for entry in self.config:
            if entry.get("source") == source and entry.get("keyword") == keyword:
                return entry
        return None
|
||||
|
||||
|
||||
class TaskRecordConfig(BaseConfig):
    """Per-task store of alarm records (one JSON file per task id)."""
    # %-formats the directory now; str.format fills in the task id later.
    config_file_path_fmt = "%s/task_record_{}.json" % PUSH_DATA_PATH

    def __init__(self, task_id: str):
        # Bind this store to the record file of *task_id*.
        super().__init__()
        self.config_file_path = self.config_file_path_fmt.format(task_id)
|
||||
|
||||
|
||||
class SenderConfig(BaseConfig):
    """JSON-backed store of alarm channels (senders).

    On first use the file is seeded with a single built-in SMS channel.
    """
    config_file_path = "{}/sender.json".format(PUSH_DATA_PATH)

    def __init__(self):
        super(SenderConfig, self).__init__()
        if not os.path.exists(self.config_file_path):
            # seed the default ("original") SMS sender entry
            write_file(self.config_file_path, json.dumps([{
                "id": self.nwe_id(),
                "used": True,
                "sender_type": "sms",
                "data": {},
                "original": True
            }]))
|
||||
|
||||
|
||||
def init_db():
    """Create the push-module SQLite tables and seed the initial data.

    Sets the module flag ``DB_INIT_ERROR`` when any table cannot be
    created, then loads the initial task templates from the panel config.
    The push-db lock is now released via ``finally`` — the original leaked
    it on every early error return.
    """
    global DB_INIT_ERROR
    # task_template columns:
    #   id             template id, must be unique (agreed with the backend)
    #   ver            template version, used for updates
    #   used           whether the template is in use
    #   source         origin, e.g. waf, rsync
    #   title          display title
    #   load_cls       class to load / how to obtain the task handler object
    #   template       data handed to the frontend for display
    #   default        default data, used for filtering and default values
    #   unique         whether only a single task may be configured
    #   create_time    creation time
    create_task_template_sql = (
        "CREATE TABLE IF NOT EXISTS 'task_template' ("
        "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
        "'ver' TEXT NOT NULL DEFAULT '1.0.0', "
        "'used' INTEGER NOT NULL DEFAULT 1, "
        "'source' TEXT NOT NULL DEFAULT 'site_push', "
        "'title' TEXT NOT NULL DEFAULT '', "
        "'load_cls' TEXT NOT NULL DEFAULT '{}', "
        "'template' TEXT NOT NULL DEFAULT '{}', "
        "'default' TEXT NOT NULL DEFAULT '{}', "
        "'send_type_list' TEXT NOT NULL DEFAULT '[]', "
        "'unique' INTEGER NOT NULL DEFAULT 0, "
        "'create_time' INTEGER NOT NULL DEFAULT (strftime('%s'))"
        ");"
    )
    # task columns:
    #   source         origin, e.g. waf (firewall), rsync (file sync)
    #   keyword        key the origin uses to look its task up (origin-scoped)
    #   task_data      free-form task data dict
    #   sender         alarm channel info dict (get_by_func may supply it)
    #   time_rule      timing rules: send_interval, time_range
    #   number_rule    count rules: day_num, total, get_by_func
    #   status         whether the task is enabled
    #   pre_hook, after_hook   pre-/post-processing hooks
    #   record_time    record retention time; 0 means keep indefinitely
    #   last_check     time of the last check run
    #   last_send      time of the last send
    #   number_data    send-count bookkeeping
    create_task_sql = (
        "CREATE TABLE IF NOT EXISTS 'task' ("
        "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
        "'template_id' INTEGER NOT NULL DEFAULT 0, "
        "'source' TEXT NOT NULL DEFAULT '', "
        "'keyword' TEXT NOT NULL DEFAULT '', "
        "'title' TEXT NOT NULL DEFAULT '', "
        "'task_data' TEXT NOT NULL DEFAULT '{}', "
        "'sender' TEXT NOT NULL DEFAULT '[]', "
        "'time_rule' TEXT NOT NULL DEFAULT '{}', "
        "'number_rule' TEXT NOT NULL DEFAULT '{}', "
        "'status' INTEGER NOT NULL DEFAULT 1, "
        "'pre_hook' TEXT NOT NULL DEFAULT '{}', "
        "'after_hook' TEXT NOT NULL DEFAULT '{}', "
        "'last_check' INTEGER NOT NULL DEFAULT 0, "
        "'last_send' INTEGER NOT NULL DEFAULT 0, "
        "'number_data' TEXT NOT NULL DEFAULT '{}', "
        "'create_time' INTEGER NOT NULL DEFAULT (strftime('%s')), "
        "'record_time' INTEGER NOT NULL DEFAULT 0"
        ");"
    )

    create_task_record_sql = (
        "CREATE TABLE IF NOT EXISTS 'task_record' ("
        "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
        "'template_id' INTEGER NOT NULL DEFAULT 0, "
        "'task_id' INTEGER NOT NULL DEFAULT 0, "
        "'do_send' TEXT NOT NULL DEFAULT '{}', "
        "'send_data' TEXT NOT NULL DEFAULT '{}', "
        "'result' TEXT NOT NULL DEFAULT '{}', "
        "'create_time' INTEGER NOT NULL DEFAULT (strftime('%s'))"
        ");"
    )

    create_send_record_sql = (
        "CREATE TABLE IF NOT EXISTS 'send_record' ("
        "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
        "'record_id' INTEGER NOT NULL DEFAULT 0, "
        "'sender_name' TEXT NOT NULL DEFAULT '', "
        "'sender_id' INTEGER NOT NULL DEFAULT 0, "
        "'sender_type' TEXT NOT NULL DEFAULT '', "
        "'send_data' TEXT NOT NULL DEFAULT '{}', "
        "'result' TEXT NOT NULL DEFAULT '{}', "
        "'create_time' INTEGER NOT NULL DEFAULT (strftime('%s'))"
        ");"
    )

    create_sender_sql = (
        "CREATE TABLE IF NOT EXISTS 'sender' ("
        "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
        "'used' INTEGER NOT NULL DEFAULT 1, "
        "'sender_type' TEXT NOT NULL DEFAULT '', "
        "'name' TEXT NOT NULL DEFAULT '', "
        "'data' TEXT NOT NULL DEFAULT '{}', "
        "'create_time' INTEGER NOT NULL DEFAULT (strftime('%s'))"
        ");"
    )

    lock_push_db()
    try:
        with get_push_db() as db:
            db.execute("pragma journal_mode=wal")

            # create every table; abort (flagging DB_INIT_ERROR) on the
            # first failure — the loop replaces five duplicated blocks
            for table_name, create_sql in (
                ("task_template", create_task_template_sql),
                ("task", create_task_sql),
                ("task_record", create_task_record_sql),
                ("send_record", create_send_record_sql),
                ("sender", create_sender_sql),
            ):
                res = db.execute(create_sql)
                if isinstance(res, str) and res.startswith("error"):
                    write_log("warning system",
                              "{} Data table creation error:".format(table_name) + res)
                    DB_INIT_ERROR = True
                    return

            db.execute(
                "INSERT INTO 'sender' (id, sender_type, data) VALUES (?,?,?)",
                (1, 'sms', json.dumps({"count": 0, "total": 0}))
            )  # seed the SMS sender row
    finally:
        # release even on an early error return (the original leaked the lock)
        unlock_push_db()

    init_template_file = "/www/server/panel/config/mod_push_init.json"
    err = load_task_template_by_file(init_template_file)
    if err:
        # BUGFIX: was `+ res` — res held the last execute() result (not a
        # string), which raised TypeError instead of logging the error
        write_log("warning system", "task template data table initial data load failed:" + err)
|
||||
|
||||
|
||||
def _check_fields(template: dict) -> bool:
|
||||
if not isinstance(template, dict):
|
||||
return False
|
||||
|
||||
fields = ("id", "ver", "used", "source", "title", "load_cls", "template", "default", "unique", "create_time")
|
||||
for field in fields:
|
||||
if field not in template:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def load_task_template_by_config(templates: List[Dict]) -> None:
    """
    Apply one template update pass from an in-memory template list.

    Existing templates (matched by id) are updated in place; unknown
    ones are appended. The merged list is then written back to disk.

    @param templates: template contents, a list of dicts
    @return: None
    """
    store = TaskTemplateConfig()
    pending = []
    for template in templates:
        existing = store.get_by_id(template['id'])
        if existing is None:
            pending.append(template)
        else:
            existing.update(template)

    store.config.extend(pending)
    store.save_config()
|
||||
|
||||
|
||||
def load_task_template_by_file(template_file: str) -> Optional[str]:
    """
    Run one template update pass from a JSON file.

    @param template_file: path of the template file
    @return: error message string, or None on success
    """
    if not os.path.isfile(template_file):
        return "The template file does not exist and the update fails"

    if DB_INIT_ERROR:
        return "An error is reported during database initialization and cannot be updated"

    res = read_file(template_file)
    if not isinstance(res, str):
        return "Data read failed"

    try:
        templates = json.loads(res)
    # BUGFIX: the original caught json.JSONDecoder (the codec class, not an
    # exception) — matching a non-exception class raises TypeError at
    # runtime. json.JSONDecodeError is the actual parse-failure exception.
    except (json.JSONDecodeError, TypeError, ValueError):
        return "Only JSON data is supported"

    if not isinstance(templates, list):
        return "The data is in the wrong format and should be a list"

    return load_task_template_by_config(templates)
|
||||
302
mod/base/push_mod/rsync_push.py
Normal file
302
mod/base/push_mod/rsync_push.py
Normal file
@@ -0,0 +1,302 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Tuple, Union, Optional, Iterator
|
||||
|
||||
from .send_tool import WxAccountMsg
|
||||
from .base_task import BaseTask
|
||||
from .mods import TaskTemplateConfig
|
||||
from .util import read_file
|
||||
|
||||
|
||||
def rsync_ver_is_38() -> Optional[bool]:
    """
    Check whether the installed rsync plugin version is in the 3.x series
    at or below 3.8.

    Takes no arguments.

    Returns:
        - None: the version cannot be determined or plugin files are missing.
        - True: version is 3.8 or lower (3.x series).
        - False: otherwise.
    """
    push_file = "/www/server/panel/plugin/rsync/rsync_push.py"
    if not os.path.exists(push_file):
        return None
    ver_info_file = "/www/server/panel/plugin/rsync/info.json"
    if not os.path.exists(ver_info_file):
        return None
    try:
        info = json.loads(read_file(ver_info_file))
    except (json.JSONDecodeError, TypeError):
        return None
    ver = info["versions"]
    ver_tuples = [int(i) for i in ver.split(".")]
    if len(ver_tuples) < 3:
        # BUGFIX: pad in place — list.extend() returns None, and the
        # original re-assigned its result, crashing on short versions
        ver_tuples.extend([0] * (3 - len(ver_tuples)))
    if ver_tuples[0] < 3:
        return None
    if ver_tuples[1] <= 8 and ver_tuples[0] == 3:
        return True

    return False
|
||||
|
||||
|
||||
class Rsync38Task(BaseTask):
    """File-sync alarm task for rsync plugin versions <= 3.8.

    These versions have no real-time hook, so errors are detected by
    periodically scanning the lsyncd log.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "rsync_push"
        self.template_name = "File synchronization alarm"
        self.title = "File synchronization alarm"

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Default the log-scan interval to 600s when missing or invalid."""
        if "interval" not in task_data or not isinstance(task_data["interval"], int):
            task_data["interval"] = 600
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """All rsync tasks share one keyword (single task per source)."""
        return "rsync_push"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Scan the log; return a message payload when errors were found."""
        has_err = self._check(task_data.get("interval", 600))
        if not has_err:
            return None

        return {
            "msg_list": [
                ">Notification type: File synchronization alarm",
                ">Content of alarm: <font color=#ff0000>If an error occurs during file synchronization, please pay attention to the file synchronization situation and handle it in a timely manner.</font> ",
            ]
        }

    @staticmethod
    def _check(interval: int) -> bool:
        """Return True when the lsyncd log shows errors within ~interval."""
        if not isinstance(interval, int):
            return False
        # 1.2x margin so log entries at the window edge are not missed
        start_time = datetime.now() - timedelta(seconds=interval * 1.2)
        log_file = "{}/plugin/rsync/lsyncd.log".format("/www/server/panel")
        if not os.path.exists(log_file):
            return False
        return LogChecker(log_file=log_file, start_time=start_time)()

    def check_time_rule(self, time_rule: dict) -> Union[dict, str]:
        """Clamp the minimum send interval to >= 60s (default 180s)."""
        # BUGFIX: the original tested time_rule["interval"], which raised
        # KeyError whenever send_interval was set; use "send_interval" as
        # Rsync39Task does
        if "send_interval" not in time_rule or not isinstance(time_rule["send_interval"], int):
            time_rule["send_interval"] = 3 * 60
        if time_rule["send_interval"] < 60:
            time_rule["send_interval"] = 60
        return time_rule

    def filter_template(self, template: dict) -> Optional[dict]:
        """Expose this template only when rsync <= 3.8 is installed."""
        res = rsync_ver_is_38()
        if res is None:
            return None
        if res:
            return template
        else:
            return None

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """SMS is not supported for this task type."""
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account message for a sync failure."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "File synchronization alarm"
        msg.msg = "There was an error in the synchronization. Please keep an eye on the synchronization"
        return msg
|
||||
|
||||
|
||||
class Rsync39Task(BaseTask):
    """File-sync alarm task for rsync plugin versions newer than 3.8.

    Pushes are triggered in real time by the plugin itself, so this task
    produces no polling data.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "rsync_push"
        self.template_name = "File synchronization alarm"
        self.title = "File synchronization alarm"

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Default the interval to 600 seconds when missing or invalid."""
        if not isinstance(task_data.get("interval"), int):
            task_data["interval"] = 600
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """All rsync tasks share one keyword (single task per source)."""
        return "rsync_push"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """
        Return no data: alarms are triggered in real time, not by polling.
        """
        return None

    def check_time_rule(self, time_rule: dict) -> Union[dict, str]:
        """Clamp the minimum send interval to >= 60s (default 180s)."""
        if not isinstance(time_rule.get("send_interval"), int):
            time_rule["send_interval"] = 3 * 60
        if time_rule["send_interval"] < 60:
            time_rule["send_interval"] = 60
        return time_rule

    def filter_template(self, template: dict) -> Optional[dict]:
        """Expose this template only when rsync is newer than 3.8."""
        ver_is_38 = rsync_ver_is_38()
        if ver_is_38 is None:
            return None
        return template if ver_is_38 is False else None

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """SMS is not supported for this task type."""
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account message for a sync failure."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "File synchronization alarm"
        task_name = push_data.get("task_name", None)
        if task_name:
            msg.msg = "An error occurred on file synchronization task {}".format(task_name)
        else:
            msg.msg = "There was an error in the synchronization. Please keep an eye on the synchronization"
        return msg
|
||||
|
||||
|
||||
class LogChecker:
    """
    Scan a log file backwards (newest lines first) and report whether any
    error marker appears in entries newer than ``start_time``.
    """
    # Matches a timestamp like "Mon Jan  1 00:00:00 2024" at the head of a line.
    rep_time = re.compile(r'(?P<target>(\w{3}\s+){2}(\d{1,2})\s+(\d{2}:?){3}\s+\d{4})')
    format_str = '%a %b %d %H:%M:%S %Y'
    # Sentinel returned when a timestamp fails to parse (epoch).
    err_datetime = datetime.fromtimestamp(0)
    # Substrings that mark a line as an error.
    err_list = ("error", "Error", "ERROR", "exitcode = 10", "failed")

    def __init__(self, log_file: str, start_time: datetime):
        self.log_file = log_file
        self.start_time = start_time
        # None: no timestamp seen yet; False: may keep scanning upward;
        # True: reached entries older than start_time — stop scanning.
        self.is_over_time = None
        # Whether any scanned line contained an error marker.
        self.has_err = False

    def _format_time(self, log_line) -> Optional[datetime]:
        # Parse the leading timestamp of a log line; return err_datetime on
        # a malformed match, or None when the line carries no timestamp.
        try:
            date_str_res = self.rep_time.search(log_line)
            if date_str_res:
                time_str = date_str_res.group("target")
                return datetime.strptime(time_str, self.format_str)
        except Exception:
            return self.err_datetime
        return None

    # Walk the file backwards in 1 KiB chunks; return True as soon as an
    # error marker is seen among entries newer than start_time.
    def __call__(self):
        _buf = b""
        file_size, fp = os.stat(self.log_file).st_size - 1, open(self.log_file, mode="rb")
        fp.seek(-1, 2)
        while file_size:
            read_size = min(1024, file_size)
            fp.seek(-read_size, 1)
            buf: bytes = fp.read(read_size) + _buf
            fp.seek(-read_size, 1)
            if file_size > 1024:
                # carry the (possibly partial) first line over to the next,
                # earlier chunk so it is never split across reads
                idx = buf.find(ord("\n"))
                _buf, buf = buf[:idx], buf[idx + 1:]
            for i in self._get_log_line_from_buf(buf):
                self._check(i)
                if self.is_over_time:
                    return self.has_err
            file_size -= read_size
        return False

    # Yield complete log lines from the buffer, last line first.
    @staticmethod
    def _get_log_line_from_buf(buf: bytes) -> Iterator[str]:
        n, m = 0, 0
        buf_len = len(buf) - 1
        for i in range(buf_len, -1, -1):
            if buf[i] == ord("\n"):
                log_line = buf[buf_len + 1 - m: buf_len - n + 1].decode("utf-8")
                yield log_line
                n = m = m + 1
            else:
                m += 1
        # emit the remaining head of the buffer as the earliest line
        yield buf[0: buf_len - n + 1].decode("utf-8")

    # Update has_err / is_over_time from a single log line.
    def _check(self, log_line: str) -> None:
        # flag error markers
        for err in self.err_list:
            if err in log_line:
                self.has_err = True

        ck_time = self._format_time(log_line)
        if ck_time:
            self.is_over_time = self.start_time > ck_time
|
||||
|
||||
|
||||
def load_rsync_template():
    """
    Register the rsync alarm template (id "40"); no-op when it already exists.
    """
    if TaskTemplateConfig().get_by_id("40"):
        return None
    from .mods import load_task_template_by_config
    load_task_template_by_config(
        [{
            "id": "40",
            "ver": "1",
            "used": True,
            "source": "rsync_push",
            "title": "File synchronization alarm",
            "load_cls": {
                "load_type": "path",
                "cls_path": "mod.base.push_mod.rsync_push",
                "name": "RsyncTask"
            },
            "template": {
                "field": [
                ],
                "sorted": [
                ]
            },
            "default": {
            },
            "advanced_default": {
                "number_rule": {
                    "day_num": 3
                }
            },
            "send_type_list": [
                "wx_account",
                "dingding",
                "feishu",
                "mail",
                "weixin",
                "webhook",
                "tg",
            ],
            "unique": True
        }]
    )
|
||||
|
||||
|
||||
# Pick the task implementation matching the installed rsync plugin version
# (3.8-and-below polls the log; newer versions push in real time).
RsyncTask = Rsync39Task
if rsync_ver_is_38() is True:
    RsyncTask = Rsync38Task
|
||||
|
||||
|
||||
def push_rsync_by_task_name(task_name: str):
    """Trigger a real-time file-sync failure alarm for *task_name*."""
    from .system import push_by_task_keyword

    payload = {
        "task_name": task_name,
        "msg_list": [
            ">Notification type: File synchronization alarm",
            ">Content of alarm: <font color=#ff0000>File synchronization task {} has failed during the execution, please pay attention to the file synchronization situation and deal with it.</font> ".format(
                task_name),
        ],
    }
    push_by_task_keyword("rsync_push", "rsync_push", push_data=payload)
|
||||
|
||||
|
||||
class ViewMsgFormat(object):
    """Human-readable summary for the rsync alert task (template id "40")."""

    @staticmethod
    def get_msg(task: dict) -> Optional[str]:
        """Return the summary line for *task*, or None for other templates.

        @param task: task config dict; reads "template_id" and the optional
                     "number_rule" -> "day_num" daily push cap.
        """
        if task["template_id"] == "40":
            # Fixed: the original string ended with a second opening "<span>"
            # instead of the closing "</span>" tag.
            return "<span>Push alarm information when there is an exception in file synchronization (push {} times per day and then not push)</span>".format(
                task.get("number_rule", {}).get("day_num"))
        return None
|
||||
615
mod/base/push_mod/safe_mod_push.py
Normal file
615
mod/base/push_mod/safe_mod_push.py
Normal file
@@ -0,0 +1,615 @@
|
||||
import json
|
||||
from typing import Tuple, Union, Optional, Dict, List
|
||||
|
||||
from . import WxAccountMsg
|
||||
from .mods import TaskConfig
|
||||
from .base_task import BaseTask, BaseTaskViewMsg
|
||||
from .site_push import web_info
|
||||
from .util import get_db_by_file, DB, GET_CLASS, read_file, write_file
|
||||
from .manager import PushManager
|
||||
from .system import push_by_task_keyword
|
||||
|
||||
|
||||
|
||||
class SiteMonitorViolationWordTask(BaseTask):  # Website violation-keyword alert check
    """Alert task bound to a website violation-word scan (template id "121").

    Mirrors its enabled/sender state into the scanner's own SQLite table
    (``monitor_site``) so both sides stay in sync.
    """
    # SQLite database maintained by the violation-word scanner.
    DB_FILE = "/www/server/panel/class/projectModel/content/content.db"

    def __init__(self):
        super().__init__()
        self.source_name: str = 'site_monitor_violation_word'
        self.title: str = 'Website violation keyword alert'
        self.template_name: str = 'Website violation keyword alert'

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """
        Validate the alert parameters (sanity check).
        @param task_data: incoming alert parameters; defaults have already been merged in
        @return: on success, a dict used for the subsequent create/update;
                 on failure, an error-message string
        """
        # Drop empty/irrelevant fields rather than rejecting the payload.
        if "interval" in task_data:
            task_data.pop("interval")
        if "mvw_id" in task_data and not task_data["mvw_id"]:
            task_data.pop("mvw_id")
        if "site_name" in task_data and not task_data["site_name"]:
            task_data.pop("site_name")
        return task_data

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        # The scanner triggers pushes itself; this task never self-triggers.
        return None

    def get_title(self, task_data: dict) -> str:
        """Return the per-site display title for this alert."""
        return "Violation keyword check on website {}".format(task_data["site_name"])

    def get_keyword(self, task_data: dict) -> str:
        """Return the lookup keyword, derived from the scan-task id (mvw_id)."""
        return "site_mvw_{}".format(task_data.get("mvw_id", 0))

    def filter_template(self, template: dict) -> Optional[dict]:
        """
        Filter/adjust the template; returning None means the task cannot be
        set up right now.
        @param template: the task's template info
        @return:
        """
        # Populate the first form field with the server's PHP sites.
        _, items_type = web_info(project_types=("PHP",))
        template["field"][0]["items"].extend(items_type["PHP"])
        return template

    def task_config_create_hook(self, task: dict) -> Optional[str]:
        """Reject creation unless a scan task (mvw_id + site_name) is bound."""
        task_data = task["task_data"]
        if "mvw_id" not in task_data or not task_data["mvw_id"]:
            return "No corresponding website violation word scanning task, unable to add alert"
        if "site_name" not in task_data or not task_data["site_name"]:
            return "No corresponding website violation word scanning task, unable to add alert"

    def task_config_update_hook(self, task: dict) -> Optional[str]:
        """Validate the update and mirror status/senders into the scanner DB."""
        task_data = task["task_data"]
        if "mvw_id" not in task_data or not task_data["mvw_id"]:
            return "No corresponding website violation word scanning task, unable to add alert"
        if "site_name" not in task_data or not task_data["site_name"]:
            return "No corresponding website violation word scanning task, unable to add alert"

        db_obj = get_db_by_file(self.DB_FILE)
        if not db_obj:
            return "Website violation word scanning task database file does not exist"
        pdata = {
            "send_msg": int(task["status"]),
            "send_type": ",".join(task["sender"])
        }
        try:
            db_obj.table("monitor_site").where("id=?", task_data["mvw_id"]).update(pdata)
            db_obj.close()
        except:
            # Any DB failure is reported as a plain error string to the caller.
            return "Website violation word scanning task update failed"
        # Keep the keyword consistent with the (possibly changed) mvw_id.
        task["keyword"] = self.get_keyword(task_data)

    def task_config_remove_hook(self, task: dict) -> None:
        """Best-effort: disable pushing (send_msg=0) in the scanner DB on removal."""
        task_data = task["task_data"]
        if "mvw_id" not in task_data or not task_data["mvw_id"]:
            return
        db_obj = get_db_by_file(self.DB_FILE)
        if not db_obj:
            return
        try:
            db_obj.table("monitor_site").where("id=?", task_data["mvw_id"]).update({"send_msg": 0})
            db_obj.close()
        except:
            # Removal must not fail because the scanner DB is unavailable.
            return

    @classmethod
    def set_push_task(cls, mvw_id: int, site_name: str, status: bool, sender: list):
        """Create the alert task for a scan, or update an existing one.

        @param mvw_id: violation-word scan task id
        @param site_name: website name shown in the alert title
        @param status: enabled flag (applied only on the update path)
        @param sender: list of send-channel names
        """
        task_conf = TaskConfig()
        old_task = task_conf.get_by_keyword("site_monitor_violation_word", "site_mvw_{}".format(mvw_id))
        if not old_task:
            push_data = {
                "template_id": "121",
                "task_data": {
                    # NOTE(review): new tasks are always created enabled — the
                    # `status` argument is ignored here but honored in the
                    # update branch below. Confirm this asymmetry is intended.
                    "status": True,
                    "sender": sender,
                    "task_data": {
                        "site_name": site_name,
                        "mvw_id": mvw_id,
                    }
                }
            }
            from .manager import PushManager
            PushManager().set_task_conf_data(push_data)
        else:
            old_task["sender"] = sender
            old_task["status"] = status
            task_conf.save_config()

    @classmethod
    def remove_push_task(cls, mvw_id: int):
        """Remove the alert task bound to scan *mvw_id*, if present."""
        task_conf = TaskConfig()
        old_task = task_conf.get_by_keyword("site_monitor_violation_word", "site_mvw_{}".format(mvw_id))
        if old_task:
            task_conf.config.remove(old_task)

        # Persist even when nothing was removed (keeps original behavior).
        task_conf.save_config()
|
||||
|
||||
|
||||
class VulnerabilityScanningTask(BaseTask):
    """Website vulnerability alert task (template id "122").

    Scheduling is delegated to a panel crontab entry that runs the scan
    script every N days rather than to the push manager's own interval loop.
    """

    def __init__(self):
        super().__init__()
        self.source_name: str = 'vulnerability_scanning'
        self.title: str = 'Website vulnerability alert'
        self.template_name: str = 'Website vulnerability alert'

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """
        Validate the alert parameters (sanity check).
        @param task_data: incoming alert parameters; defaults have already been merged in
        @return: on success, a dict used for the subsequent create/update;
                 on failure, an error-message string
        """
        if "cycle" not in task_data or not task_data["cycle"] or not isinstance(task_data["cycle"], int):
            return "Cycle cannot be empty"

        # interval is cycle+1 days in seconds; the actual run cadence comes
        # from the crontab entry created below.
        return {"interval": 60*60*24 * (task_data["cycle"] + 1), "cycle": task_data["cycle"]}

    def get_keyword(self, task_data: dict) -> str:
        """
        Return a keyword used for later lookups/executions of this task
        (e.g. the tamper alert derives one from its rule id).
        @param task_data: alert parameter dict produced by check_task_data
        @return: a keyword string
        """
        return "vulnerability_scanning"

    def get_title(self, task_data: dict) -> str:
        """
        Return a display title.
        @param task_data: alert parameter dict produced by check_task_data
        @return: a title string
        """
        return 'Website vulnerability alert'

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """
        Decide whether this task should push now.
        @param task_id: task id
        @param task_data: the task's alert parameters
        @return: a dict payload when triggered, else None. The dict should
                 carry a "msg_list" key (List[str]) with the main lines,
                 auto-serialized for [dingding, feishu, mail, weixin,
                 web_hook]; SMS and the WeChat official account need manual
                 per-task handling because of length limits.
        """
        # Pushing is done by the cron script, never by the manager loop.
        return None

    def filter_template(self, template: dict) -> Optional[dict]:
        """
        Filter/adjust the template; returning None means the task cannot be
        set up right now.
        @param template: the task's template info
        @return:
        """
        return template

    @classmethod
    def set_push_task(cls, status: bool, day: int, sender: list):
        """Create/update the scanning alert via the push manager.

        @param status: enabled flag
        @param day: scan cycle in days
        @param sender: list of send-channel names
        """
        push_data = {
            "template_id": "122",
            "task_data": {
                "status": status,
                "sender": sender,
                "task_data": {
                    "cycle": day,
                }
            }
        }
        return PushManager().set_task_conf_data(push_data)

    @staticmethod
    def del_crontab():
        """
        @name Remove the scheduled vulnerability-scan crontab entries
        @auther hezhihong<2022-10-31>
        @return
        """
        cron_name = '[Do not delete] Vulnerability scanning scheduled task'
        cron_list = DB('crontab').where("name=?", (cron_name,)).select()

        if cron_list:
            for i in cron_list:
                if not i:
                    continue
                args = {"id": i['id']}
                import crontab
                crontab.crontab().DelCrontab(args)

    def add_crontab(self, day, channel):
        """
        @name Build the scheduled scan task (every *day* days at 10:30)
        @param day: interval in days
        @param channel: comma-joined send channels, passed to the cron script
        @return True on success, False when AddCrontab reported no id
        """
        cron_name = '[Do not delete] Vulnerability scanning scheduled task'
        cron_list = DB('crontab').where("name=?", (cron_name,)).select()
        if cron_list:
            # Recreate from scratch so the schedule matches the new settings.
            self.del_crontab()
        if not DB('crontab').where('name=?',(cron_name,)).count():
            args = {
                "name": cron_name,
                "type": 'day-n',
                "where1": day,
                "hour": '10',
                "minute": '30',
                "sName": "",
                "sType": 'toShell',
                "notice": '0',
                "notice_channel": channel,
                "save": '',
                "save_local": '1',
                "backupTo": '',
                "sBody": 'btpython /www/server/panel/script/cron_scaning.py {}'.format(channel),
                "urladdress": '',
                "user": 'root'
            }
            import crontab
            res = crontab.crontab().AddCrontab(args)
            if res and "id" in res.keys():
                return True
            return False
        return True

    def task_config_create_hook(self, task: dict) -> Optional[str]:
        """Creation uses the same crontab-sync logic as an update."""
        return self.task_config_update_hook(task)

    def task_config_update_hook(self, task: dict) -> Optional[str]:
        """(Re)install the crontab when enabled, remove it when disabled."""
        if task["status"]:
            day = task['task_data']['cycle']
            channel = ",".join(task['sender'])
            if self.add_crontab(day, channel):
                return None
            return "Failed to add scheduled task"
        else:
            self.del_crontab()

    def task_config_remove_hook(self, task: dict) -> None:
        """Deleting the alert also removes its crontab entry."""
        self.del_crontab()
|
||||
|
||||
|
||||
class FileDetectTask(BaseTask):
    """System file integrity reminder task (template id "123").

    Runs a daily crontab entry at a user-chosen hour:minute that executes
    the file-integrity check script.
    """

    def __init__(self):
        super().__init__()
        self.source_name: str = 'file_detect'
        self.title: str = 'System file integrity reminder'
        self.template_name: str = 'System file integrity reminder'

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """
        Validate the alert parameters (sanity check).
        @param task_data: incoming alert parameters; defaults have already been merged in
        @return: on success, a dict used for the subsequent create/update;
                 on failure, an error-message string
        """
        if not isinstance(task_data["hour"], int) or not isinstance(task_data["minute"], int):
            return "Hours and minutes must be integers"

        if task_data["hour"] < 0 or task_data["hour"] > 23:
            return "The hour must be an integer between 0 and 23"

        if task_data["minute"] < 0 or task_data["minute"] > 59:
            return "Minutes must be an integer between 0 and 59"

        return {
            # Nominal once-a-day interval; scheduling is done by crontab.
            "interval": 60 * 60 * 24,
            "hour": task_data["hour"],
            "minute": task_data["minute"],
        }

    def get_keyword(self, task_data: dict) -> str:
        """
        Return a keyword used for later lookups/executions of this task
        (e.g. the tamper alert derives one from its rule id).
        @param task_data: alert parameter dict produced by check_task_data
        @return: a keyword string
        """
        return "file_detect"

    def get_title(self, task_data: dict) -> str:
        """
        Return a display title.
        @param task_data: alert parameter dict produced by check_task_data
        @return: a title string
        """
        return 'System file integrity reminder'

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        # Pushing is done by the cron script, never by the manager loop.
        return None

    def filter_template(self, template: dict) -> Optional[dict]:
        """
        Filter/adjust the template; returning None means the task cannot be
        set up right now.
        @param template: the task's template info
        @return:
        """
        return template

    def add_crontab(self, hour, minute, channel):
        """
        @name Build the daily integrity-check scheduled task
        @param hour: hour of day (0-23)
        @param minute: minute of hour (0-59)
        @param channel: comma-joined send channels, passed to the cron script
        @return True on success, False when AddCrontab reported no id
        """
        cron_name = '[Do not delete] File integrity monitoring scheduled task'
        cron_list = DB('crontab').where("name=?", (cron_name,)).select()

        if cron_list:
            # Recreate from scratch so the schedule matches the new settings.
            self.del_crontab()
        if not DB('crontab').where('name=?', (cron_name,)).count():
            args = {
                "name": cron_name,
                "type": 'day',
                "where1": '',
                "hour": hour,
                "minute": minute,
                "sName": "",
                "sType": 'toShell',
                "notice": '0',
                "notice_channel": channel,
                "save": '',
                "save_local": '1',
                "backupTo": '',
                "sBody": 'btpython /www/server/panel/script/cron_file.py {}'.format(channel),
                "urladdress": ''
            }
            import crontab
            res = crontab.crontab().AddCrontab(args)
            if res and "id" in res.keys():
                return True
            return False
        return True

    # Remove the scheduled integrity-check task(s).
    @staticmethod
    def del_crontab():
        cron_name = '[Do not delete] File integrity monitoring scheduled task'
        cron_list = DB('crontab').where("name=?", (cron_name,)).select()
        if cron_list:
            for i in cron_list:
                if not i: continue
                args = {"id": i['id']}
                import crontab
                crontab.crontab().DelCrontab(args)

    def task_config_create_hook(self, task: dict) -> Optional[str]:
        """Creation uses the same crontab-sync logic as an update."""
        return self.task_config_update_hook(task)

    def task_config_update_hook(self, task: dict) -> Optional[str]:
        """(Re)install the crontab when enabled, remove it when disabled."""
        if task["status"]:
            hour = task['task_data']['hour']
            minute = task['task_data']['minute']
            channel = ",".join(task['sender'])
            if self.add_crontab(hour, minute, channel):
                return None
            return "Failed to add scheduled task"
        else:
            self.del_crontab()

    def task_config_remove_hook(self, task: dict) -> None:
        """Deleting the alert also removes its crontab entry."""
        self.del_crontab()

    @classmethod
    def set_push_task(cls, status: bool, hour: int, minute: int, sender: list):
        """Create/update the integrity alert via the push manager.

        @param status: enabled flag
        @param hour: daily run hour
        @param minute: daily run minute
        @param sender: list of send-channel names
        """
        push_data = {
            "template_id": "123",
            "task_data": {
                "status": status,
                "sender": sender,
                "task_data": {
                    "hour": hour,
                    "minute": minute,
                }
            }
        }
        from .manager import PushManager
        return PushManager().set_task_conf_data(push_data)
|
||||
|
||||
|
||||
class SafeCloudTask(BaseTask):
    """Cloud Security Center alert task (template id "124").

    Alert settings are mirrored into the safeCloud plugin's JSON config file
    (``alertable`` section) rather than acted on directly here.
    """
    # safeCloud plugin configuration file.
    _config_file = "/www/server/panel/data/safeCloud/config.json"
    # Whitelist of accepted safe_type values.
    _all_safe_type = ("webshell", )

    def __init__(self):
        super().__init__()
        self.source_name = "safe_cloud_hinge"
        self.title = "Yakpanel Cloud Security Center Alarm"
        self.template_name = "Yakpanel Cloud Security Center Alarm"

        # Lazily-loaded cache of the safeCloud config (see safe_cloud_conf).
        self._safe_cloud_conf: Optional[dict] = None

    @property
    def safe_cloud_conf(self) -> Optional[dict]:
        """
        Return the cloud-security configuration (cached after first load).
        :return: config dict, or None when it cannot be read even after
                 re-initializing the plugin's config file
        """
        if self._safe_cloud_conf and isinstance(self._safe_cloud_conf, dict):
            return self._safe_cloud_conf
        try:
            self._safe_cloud_conf = json.loads(read_file(self._config_file))
            return self._safe_cloud_conf
        except:
            # File missing/corrupt: ask the plugin to rebuild it, then retry once.
            self._init_config()
            try:
                self._safe_cloud_conf = json.loads(read_file(self._config_file))
                return self._safe_cloud_conf
            except:
                pass
        return None


    def filter_template(self, template: dict) -> Optional[dict]:
        """
        Filter the template (passed through unchanged).
        :param template: template dict
        :return: the filtered template
        """
        return template

    def save_safe_cloud_conf(self):
        """
        Persist the cached cloud-security configuration back to disk.
        """
        write_file(self._config_file, json.dumps(self._safe_cloud_conf))

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """
        Validate the task data.
        :param task_data: task data
        :return: validated task data dict, or an error string
        """
        if "safe_type" in task_data:
            for i in task_data["safe_type"]:
                if i not in self._all_safe_type:
                    return "Security type error"
        else:
            task_data["safe_type"] = ["webshell"]

        # Fixed 3-hour scan interval.
        task_data["interval"] = 60 * 60 * 3
        return task_data

    def check_num_rule(self, num_rule: dict) -> Union[dict, str]:
        """
        Validate the alert-count rule: hard-capped at 20 alerts per day.
        :param num_rule: alert-count rule
        :return: the adjusted rule
        """
        num_rule["day_num"] = 20
        return num_rule

    def check_time_rule(self, time_rule: dict) -> Union[dict, str]:
        """
        Validate the alert-time rule [hard-coded].
        :param time_rule: alert-time rule
        :return: the adjusted rule
        """
        # Test value was 1 second; production is 1200 (20*60): one alert per 20 min.
        time_rule["send_interval"] = 1200
        return time_rule

    def get_keyword(self, task_data: dict) -> str:
        """
        Return a keyword used for later lookups/executions of this task
        (e.g. the tamper alert derives one from its rule id).
        @param task_data: alert parameter dict produced by check_task_data
        @return: a keyword string
        """
        return "safe_cloud_hinge"

    def get_title(self, task_data: dict) -> str:
        """
        Return a display title.
        @param task_data: alert parameter dict produced by check_task_data
        @return: a title string
        """
        return 'Yakpanel Cloud Security Center Alarm'

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """
        Decide whether this task should push now.
        @param task_id: task id
        @param task_data: the task's alert parameters
        @return: a dict payload when triggered, else None. The dict should
                 carry a "msg_list" key (List[str]) with the main lines,
                 auto-serialized for [dingding, feishu, mail, weixin,
                 web_hook]; SMS and the WeChat official account need manual
                 per-task handling because of length limits.
        """
        # Pushing is driven by the safeCloud plugin via its config, not here.

        return None

    def task_config_create_hook(self, task: dict) -> Optional[str]:
        """Creation uses the same config-mirroring logic as an update."""
        return self.task_config_update_hook(task)

    def task_config_update_hook(self, task: dict) -> Optional[str]:
        """
        Mirror the alert settings into the safeCloud config file.
        :param task: task dict
        :return: error string on failure, None on success
        """
        if not self.safe_cloud_conf:
            return "Failed to initialize configuration file, unable to add"

        alert_data = self.safe_cloud_conf["alertable"]
        alert_data["safe_type"] = task["task_data"].get("safe_type", ["webshell"])
        alert_data["interval"] = task["task_data"].get("interval", 60*60*3)
        alert_data["status"] = task["status"]
        alert_data["sender"] = task["sender"]
        alert_data["time_rule"] = task["time_rule"]
        alert_data["number_rule"] = task["number_rule"]
        self.save_safe_cloud_conf()

    def task_config_remove_hook(self, task: dict) -> Optional[str]:
        """
        Disable the alert in the safeCloud config file on task removal.
        :param task: task dict
        :return: None
        """
        if not self.safe_cloud_conf:
            return None

        alert_data = self.safe_cloud_conf["alertable"]
        alert_data["safe_type"] = task["task_data"].get("safe_type", ["webshell"])
        alert_data["interval"] = task["task_data"].get("interval", 60*60*3)
        # Disabled with no senders, but the rule fields are kept.
        alert_data["status"] = False
        alert_data["sender"] = []
        alert_data["time_rule"] = task["time_rule"]
        alert_data["number_rule"] = task["number_rule"]
        self.save_safe_cloud_conf()

    # Update the alert configuration.
    @staticmethod
    def set_push_conf(alert_data: dict) -> Optional[str]:
        """
        Push the alert settings into the alert-task manager.
        :param alert_data: alertable section of the safeCloud config
        :return: result of PushManager.set_task_conf_data
        """
        pm = PushManager()
        p_data = {
            "template_id": "124",
            "task_data": {
                "status": alert_data.get("status", True),
                "sender": alert_data.get("sender", []),
                "task_data": {
                    "safe_type": alert_data.get("safe_type", ["webshell"]),
                    "interval": alert_data.get("interval", 60*60*3),
                },
                "time_rule": alert_data.get("time_rule", {}),
                "number_rule": alert_data.get("number_rule", {}),
            }
        }
        return pm.set_task_conf_data(p_data)


    @staticmethod
    def _init_config():
        """
        Ask the safeCloud plugin to (re)create its configuration file.
        Best-effort: any failure is silently ignored; the caller re-checks
        whether the file is readable afterwards.
        """
        try:
            import PluginLoader

            args = GET_CLASS()
            args.model_index = 'project'
            PluginLoader.module_run("safecloud", "init_config", args)
        except:
            pass
|
||||
|
||||
|
||||
|
||||
class ViewMsgFormat(BaseTaskViewMsg):
    """Human-readable summaries for the security alert tasks (122/123/124)."""

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the summary line for *task*, or None for unknown templates."""
        template_id = task["template_id"]
        task_data = task["task_data"]
        if template_id == "122":
            return "Every {} days, identify and scan for vulnerabilities in common open-source CMS programs across all websites and send alerts".format(task_data["cycle"])
        if template_id == "123":
            return "Scan the critical executable files in the system daily at 【{}:{}】, and send alerts when changes are detected".format(
                task_data["hour"], task_data["minute"]
            )
        if template_id == "124":
            interval_hours = int(task_data["interval"] / 3600)
            return "Every {} hours, scan the server files to identify abnormal situations such as high resource consumption and malicious control of the server, and send alerts".format(
                interval_hours
            )
        return None
|
||||
|
||||
|
||||
# All four security tasks share the same view-message formatter.
SiteMonitorViolationWordTask.VIEW_MSG = ViewMsgFormat
VulnerabilityScanningTask.VIEW_MSG = ViewMsgFormat
FileDetectTask.VIEW_MSG = ViewMsgFormat
SafeCloudTask.VIEW_MSG = ViewMsgFormat
|
||||
196
mod/base/push_mod/safe_mod_push_template.json
Normal file
196
mod/base/push_mod/safe_mod_push_template.json
Normal file
@@ -0,0 +1,196 @@
|
||||
[
|
||||
{
|
||||
"id": "122",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "vulnerability_scanning",
|
||||
"title": "Website vulnerability alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.safe_mod_push",
|
||||
"name": "VulnerabilityScanningTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Interval period",
|
||||
"type": "number",
|
||||
"suffix": "",
|
||||
"unit": "day",
|
||||
"default": 1
|
||||
},
|
||||
{
|
||||
"attr": "help",
|
||||
"name": "source",
|
||||
"type": "help",
|
||||
"unit": "",
|
||||
"style": {
|
||||
"margin-top": "6px"
|
||||
},
|
||||
"list": [
|
||||
"【Website】-> 【Vulnerability Scanning】-> 【Automatic Scanning】"
|
||||
],
|
||||
"suffix": "",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"help"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 1
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true,
|
||||
"tags": [
|
||||
"safe",
|
||||
"site"
|
||||
],
|
||||
"description": "Regularly scan all websites on the server and identify various popular open-source CMS programs to help users quickly discover potential security vulnerabilities on the website and send alert notifications",
|
||||
"is_pro": true
|
||||
},
|
||||
{
|
||||
"id": "123",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "file_detect",
|
||||
"title": "System file integrity reminder",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.safe_mod_push",
|
||||
"name": "FileDetectTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "hour",
|
||||
"name": "daily",
|
||||
"type": "number",
|
||||
"suffix": "",
|
||||
"unit": "hour",
|
||||
"default": 4
|
||||
},
|
||||
{
|
||||
"attr": "minute",
|
||||
"name": " ",
|
||||
"type": "number",
|
||||
"suffix": "Execute detection tasks",
|
||||
"unit": "minute",
|
||||
"default": 10
|
||||
},
|
||||
{
|
||||
"attr": "help",
|
||||
"name": "source",
|
||||
"type": "help",
|
||||
"unit": "",
|
||||
"style": {
|
||||
"margin-top": "6px"
|
||||
},
|
||||
"list": [
|
||||
"【Security】-> 【Security testing】-> 【File integrity check】-> 【Regular scanning】"
|
||||
],
|
||||
"suffix": "",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"hour",
|
||||
"minute"
|
||||
],
|
||||
[
|
||||
"help"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 1
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true,
|
||||
"tags": [
|
||||
"safe"
|
||||
],
|
||||
"is_pro": true,
|
||||
"description": "Regularly scan the system's critical file directory (such as /var/bin) according to the rules, identify abnormal situations such as file tampering, deletion, or movement, and send alert notifications to the administrator"
|
||||
},
|
||||
{
|
||||
"id": "124",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "safe_cloud_hinge",
|
||||
"title": "Cloud Security Center Alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.safe_mod_push",
|
||||
"name": "SafeCloudTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "help",
|
||||
"name": "help",
|
||||
"type": "link",
|
||||
"unit": "",
|
||||
"style": {
|
||||
"margin-top": "6px"
|
||||
},
|
||||
"list": [
|
||||
"<a href=\"/\">Go to 【 Home>Baota Cloud Security Center 】 to view details</a>"
|
||||
]
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"help"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {},
|
||||
"advanced_default": {},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"wx_account",
|
||||
"tg"
|
||||
],
|
||||
"unique": true,
|
||||
"tags": [
|
||||
"safe"
|
||||
],
|
||||
"is_pro": true,
|
||||
"description": "The Baota Cloud Security Center will scan server files, identify abnormal situations such as occupying a large amount of resources or maliciously controlling servers, and send alert notifications to administrators."
|
||||
}
|
||||
]
|
||||
133
mod/base/push_mod/send_tool.py
Normal file
133
mod/base/push_mod/send_tool.py
Normal file
@@ -0,0 +1,133 @@
|
||||
import ipaddress
|
||||
import re
|
||||
|
||||
from .util import get_config_value
|
||||
|
||||
|
||||
class WxAccountMsgBase:
    """Do-nothing base for WeChat official-account message builders."""

    @classmethod
    def new_msg(cls):
        """Factory: return a fresh message instance of the concrete class."""
        return cls()

    def set_ip_address(self, server_ip, local_ip):
        """Record the server/local IP pair; the base class ignores it."""
        pass

    def to_send_data(self):
        """Return (template_id, field_dict); the base class sends nothing."""
        return "", {}
|
||||
|
||||
|
||||
class WxAccountMsg(WxAccountMsgBase):
    """Generic alert message for the WeChat official-account channel."""

    def __init__(self):
        self.ip_address: str = ""
        self.thing_type: str = ""
        self.msg: str = ""
        self.next_msg: str = ""

    def set_ip_address(self, server_ip, local_ip):
        combined = "{}({})".format(server_ip, local_ip)
        # Template fields are capped at 32 chars; truncate with an ellipsis.
        if len(combined) > 32:
            combined = combined[:29] + "..."
        self.ip_address = combined

    def to_send_data(self):
        fields = {
            "first": {},
            "keyword1": {
                "value": self.ip_address,
            },
            "keyword2": {
                "value": self.thing_type,
            },
            "keyword3": {
                "value": self.msg,
            }
        }

        # keyword4 is optional and only sent when a follow-up line exists.
        if self.next_msg != "":
            fields["keyword4"] = {"value": self.next_msg}

        return "", fields
|
||||
|
||||
|
||||
class WxAccountLoginMsg(WxAccountMsgBase):
    """Panel-login notification for the WeChat official-account channel."""

    # WeChat template id for the login-notification message.
    tid = "RJNG8dBZ5Tb9EK6j6gOlcAgGs2Fjn5Fb07vZIsYg1P4"

    def __init__(self):
        self.login_name: str = ""
        self.login_ip: str = ""
        self.thing_type: str = ""
        self.login_type: str = ""
        self.address: str = ""
        self._server_name: str = ""

    def set_ip_address(self, server_ip, local_ip):
        # Only the public server IP is shown; keep the first value recorded.
        if self._server_name == "":
            self._server_name = "服务器IP{}".format(server_ip)

    def _get_server_name(self):
        data = get_config_value("title")  # prefer the configured panel alias
        if data != "":
            self._server_name = data

    def to_send_data(self):
        """Build the (template_id, fields) pair for the login notification."""
        self._get_server_name()
        prefix = ">Place of Ownership:"
        if self.address.startswith(prefix):
            # Fixed: the old code sliced off only 5 characters (the length of
            # the original Chinese prefix), leaving half the English prefix
            # behind; strip the whole prefix instead.
            self.address = self.address[len(prefix):]
        if self.address == "":
            self.address = "Unknown place of ownership"

        # The character_string field cannot represent an IPv6 address.
        if not _is_ipv4(self.login_ip):
            self.login_ip = "ipv6-can not show"

        res = {
            "thing10": {
                "value": self._server_name,
            },
            "character_string9": {
                "value": self.login_ip,
            },
            "thing7": {
                "value": self.login_type,
            },
            "thing11": {
                "value": self.address,
            },
            "thing2": {
                "value": self.login_name,
            }
        }
        return self.tid, res
|
||||
|
||||
|
||||
# Work around SMS gateway restrictions on alert message arguments.
def sms_msg_normalize(sm_args: dict) -> dict:
    """Normalize every SMS template argument in place; return the same dict."""
    for key in sm_args:
        sm_args[key] = _norm_sms_push_argv(str(sm_args[key]))
    return sm_args
|
||||
|
||||
|
||||
def _norm_sms_push_argv(data):
    """
    Rewrite an SMS template argument so the gateway does not reject it.

    IPv4 addresses are masked to "<first>_***_***_<last>"; otherwise dots are
    replaced with underscores. (Removed the old ``.replace("+", "+")`` call,
    which replaced "+" with itself and had no effect.)
    """
    if _is_ipv4(data):
        octets = data.split('.')
        return '{}_***_***_{}'.format(octets[0], octets[3])

    return data.replace(".", "_")
|
||||
|
||||
|
||||
def _is_ipv4(data: str) -> bool:
    """Return True when *data* parses as an IPv4 address."""
    try:
        ipaddress.IPv4Address(data)
    except ValueError:
        # AddressValueError subclasses ValueError. Narrowed from a bare
        # except, which also swallowed KeyboardInterrupt/SystemExit.
        return False
    return True
|
||||
|
||||
|
||||
def _is_domain(domain):
    """Return True when *domain* looks like a (possibly wildcard) hostname."""
    pattern = re.compile(r"^([\w\-*]{1,100}\.){1,10}([\w\-]{1,24}|[\w\-]{1,24}\.[\w\-]{1,24})$")
    return bool(pattern.match(domain))
|
||||
1151
mod/base/push_mod/site_push.py
Normal file
1151
mod/base/push_mod/site_push.py
Normal file
File diff suppressed because it is too large
Load Diff
538
mod/base/push_mod/site_push_template.json
Normal file
538
mod/base/push_mod/site_push_template.json
Normal file
@@ -0,0 +1,538 @@
|
||||
[
|
||||
{
|
||||
"id": "1",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "ssl",
|
||||
"title": "Certificate (SSL) expiration",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "SSLCertificateTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "Model",
|
||||
"type": "select",
|
||||
"default": "all",
|
||||
"items": [
|
||||
{
|
||||
"title": "ALL SSL",
|
||||
"value": "all"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Remaining days",
|
||||
"type": "number",
|
||||
"suffix": "",
|
||||
"unit": "day(s)",
|
||||
"default": 15
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"cycle"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "all",
|
||||
"cycle": 15
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"total": 2
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"sms",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "site_end_time",
|
||||
"title": "Website expiration",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "SiteEndTimeTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Remaining days",
|
||||
"type": "number",
|
||||
"unit": "day(s)",
|
||||
"suffix": "",
|
||||
"default": 7
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 7
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"total": 2
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "panel_pwd_end_time",
|
||||
"title": "YakPanel password expiration date",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "PanelPwdEndTimeTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Remaining days",
|
||||
"type": "number",
|
||||
"unit": "day(s)",
|
||||
"suffix": "",
|
||||
"default": 15
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 15
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"total": 2
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "ssh_login_error",
|
||||
"title": "SSH login failure alarm",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "SSHLoginErrorTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Trigger conditions",
|
||||
"type": "number",
|
||||
"unit": "minute(s)",
|
||||
"suffix": "less than ",
|
||||
"default": 30
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "Login failed",
|
||||
"type": "number",
|
||||
"unit": "time(s)",
|
||||
"suffix": "",
|
||||
"default": 3
|
||||
},
|
||||
{
|
||||
"attr": "interval",
|
||||
"name": "Interval",
|
||||
"type": "number",
|
||||
"unit": "second(s)",
|
||||
"suffix": "more than ",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle",
|
||||
"count"
|
||||
],
|
||||
[
|
||||
"interval"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 30,
|
||||
"count": 3,
|
||||
"interval": 600
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
},
|
||||
"time_rule": {
|
||||
"send_interval": 600
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "5",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "services",
|
||||
"title": "Service Stop Alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "ServicesTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "Notification type",
|
||||
"type": "select",
|
||||
"default": null,
|
||||
"items": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "Auto-restart",
|
||||
"type": "radio",
|
||||
"suffix": "",
|
||||
"default": 1,
|
||||
"items": [
|
||||
{
|
||||
"title": "Automatically attempt to restart the project",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"title": "Do not attempt to restart",
|
||||
"value": 2
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "interval",
|
||||
"name": "Interval",
|
||||
"type": "number",
|
||||
"unit": "second(s)",
|
||||
"suffix": "more than ",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
],
|
||||
[
|
||||
"interval"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "",
|
||||
"count": 2,
|
||||
"interval": 600
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
},
|
||||
{
|
||||
"id": "6",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "panel_safe_push",
|
||||
"title": "YakPanel security alarms",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "PanelSafePushTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "help",
|
||||
"name": "Alarm content",
|
||||
"type": "help",
|
||||
"unit": "",
|
||||
"style": {
|
||||
"margin-top": "6px"
|
||||
},
|
||||
"list": [
|
||||
"Panel user changes, panel logs are deleted, panels are opened for developers"
|
||||
],
|
||||
"suffix": "",
|
||||
"default": 600
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"help"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "7",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "ssh_login",
|
||||
"title": "SSH login alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "SSHLoginTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
],
|
||||
"sorted": [
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "8",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "panel_login",
|
||||
"title": "YakPanel login alarm",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "PanelLoginTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
],
|
||||
"sorted": [
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"sms",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "9",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "project_status",
|
||||
"title": "Project Stop Alert",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.site_push",
|
||||
"name": "ProjectStatusTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "project type",
|
||||
"type": "select",
|
||||
"default": 1,
|
||||
"items": [
|
||||
{
|
||||
"title": "Node Project",
|
||||
"value": 2
|
||||
},
|
||||
{
|
||||
"title": "Go Project",
|
||||
"value": 3
|
||||
},
|
||||
{
|
||||
"title": "Python Project",
|
||||
"value": 4
|
||||
},
|
||||
{
|
||||
"title": "Other Project",
|
||||
"value": 5
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "Project name",
|
||||
"type": "select",
|
||||
"default": null,
|
||||
"all_items": null,
|
||||
"items": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "interval",
|
||||
"name": "Interval",
|
||||
"type": "number",
|
||||
"unit": "second(s)",
|
||||
"suffix": "more than ",
|
||||
"default": 600
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "Auto-restart",
|
||||
"type": "radio",
|
||||
"suffix": "",
|
||||
"default": 1,
|
||||
"items": [
|
||||
{
|
||||
"title": "Automatically attempt to restart the project",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"title": "Do not attempt to restart",
|
||||
"value": 2
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"interval"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 1,
|
||||
"project": "",
|
||||
"interval": 600,
|
||||
"count": 2
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"day_num": 3
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"sms",
|
||||
"tg"
|
||||
],
|
||||
"unique": false,
|
||||
"tags": ["site", "common"]
|
||||
}
|
||||
]
|
||||
|
||||
348
mod/base/push_mod/ssl_push.py
Normal file
348
mod/base/push_mod/ssl_push.py
Normal file
@@ -0,0 +1,348 @@
|
||||
import json
|
||||
import time
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Tuple, Union, Optional
|
||||
|
||||
from .send_tool import WxAccountMsg
|
||||
from .base_task import BaseTask
|
||||
from .mods import PUSH_DATA_PATH, TaskConfig, PANEL_PATH
|
||||
from .util import read_file, DB, write_file
|
||||
from mod.base.web_conf import RealSSLManger
|
||||
import public
|
||||
|
||||
class DomainEndTimeTask(BaseTask):
    """Push task: remind the user when a tracked domain is close to expiry.

    Per-task notification counts are persisted to a ".tip" JSON file so a
    given domain is only re-notified up to the configured total.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "domain_endtime"
        self.template_name = "Domain expiration"
        # self.title = "Domain expiration"
        self._tip_file = "{}/domain_endtime.tip".format(PUSH_DATA_PATH)
        self._tip_data: Optional[dict] = None  # lazy cache of the tip file
        self._task_config = TaskConfig()

        # Per-run state, populated by get_push_data().
        self.domain_list = []
        self.push_keys = []
        self.task_id = None

    @property
    def tips(self) -> dict:
        """Lazily load ``{task_id: {domain: times_notified}}`` from disk."""
        if self._tip_data is not None:
            return self._tip_data
        try:
            self._tip_data = json.loads(read_file(self._tip_file))
        except Exception:
            # Missing or corrupt tip file: start with an empty record.
            self._tip_data = {}
        return self._tip_data

    def save_tip(self):
        """Persist the notification counters back to the tip file."""
        write_file(self._tip_file, json.dumps(self.tips))

    def get_keyword(self, task_data: dict) -> str:
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Collect expiring domains; return ``{"msg_list": [...]}`` or None."""
        self.title = self.get_title(task_data)
        # Domains that have their own dedicated reminder task are excluded
        # from the catch-all "all" task.
        not_push_web = [i["task_data"]["project"] for i in self._task_config.config if i["source"] == self.source_name]

        sql = DB("ssl_domains")
        total = self._task_config.get_by_id(task_id).get("number_rule", {}).get("total", 1)
        if "all" in not_push_web:
            not_push_web.remove("all")
        if task_data["project"] == "all":
            # Check every tracked domain.
            domain_list = sql.select()
            for domain in domain_list:
                if domain['domain'] in not_push_web:
                    continue
                if self.tips.get(task_id, {}).get(domain['domain'], 0) > total:
                    continue  # already notified the allowed number of times
                end_time = datetime.strptime(domain['endtime'], '%Y-%m-%d')
                if int((end_time.timestamp() - time.time()) / 86400) <= task_data['cycle']:
                    self.push_keys.append(domain['domain'])
                    self.domain_list.append(domain)
        else:
            find = sql.where('domain=?', (task_data['project'],)).find()
            if not find:
                return None

            end_time = datetime.strptime(find['endtime'], '%Y-%m-%d')
            if int((end_time.timestamp() - time.time()) / 86400) <= task_data['cycle']:
                self.push_keys.append(find['domain'])
                self.domain_list.append(find)

        if len(self.domain_list) == 0:
            return None

        s_list = ['>About to expire: <font color=#ff0000>{} </font>'.format(len(self.domain_list))]
        for x in self.domain_list:
            s_list.append(">Domain: {} Expiration:{}".format(x['domain'], x['endtime']))

        self.task_id = task_id
        return {"msg_list": s_list}

    @staticmethod
    def _check_end_time(site_name, limit, prefix) -> Optional[dict]:
        """Return the site's SSL info dict when it expires within *limit* days."""
        info = RealSSLManger(conf_prefix=prefix).get_site_ssl_info(site_name)
        if info is not None:
            end_time = datetime.strptime(info['notAfter'], '%Y-%m-%d')
            if int((end_time.timestamp() - time.time()) / 86400) <= limit:
                return info
        return None

    def get_title(self, task_data: dict) -> str:
        if task_data["project"] == "all":
            return "Domain expiration -- All"
        return "Domain expiration -- [{}]".format(task_data["project"])

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Return the SMS template id and its argument dict."""
        return 'domain_end|Domain expiration reminders', {
            "name": push_public_data["ip"],
            "domain": self.domain_list[0]['domain'],
            'time': self.domain_list[0]["endtime"],
            'total': len(self.domain_list)
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Return the WeChat-account message summarizing the expiring domains."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Domain expiration reminders"
        msg.msg = "There are {} domain names that will expire and will affect access".format(len(self.domain_list))
        return msg

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate user-supplied settings; return them or an error string."""
        task_data["interval"] = 60 * 60 * 24  # default check interval: 1 day
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] > 1):
            return "The remaining time parameter is incorrect, at least 1 day"
        return task_data

    def filter_template(self, template) -> dict:
        """Add every tracked domain to the template's select field."""
        domain_list = DB("ssl_domains").select()

        items = [{"title": i["domain"], "value": i["domain"]} for i in domain_list]

        template["field"][0]["items"].extend(items)
        return template

    def check_num_rule(self, num_rule: dict) -> Union[dict, str]:
        num_rule["get_by_func"] = "can_send_by_num_rule"
        return num_rule

    # The actual count check is already done inside get_push_data.
    def can_send_by_num_rule(self, task_id: str, task_data: dict, number_rule: dict, push_data: dict) -> Optional[str]:
        return None

    def task_run_end_hook(self, res) -> None:
        """After a successful send, bump the per-domain notification counters."""
        if not res["do_send"]:
            return
        if self.task_id:
            if self.task_id not in self.tips:
                self.tips[self.task_id] = {}

            for w in self.push_keys:
                if w in self.tips[self.task_id]:
                    self.tips[self.task_id][w] += 1
                else:
                    self.tips[self.task_id][w] = 1

            self.save_tip()

    def task_config_update_hook(self, task: dict) -> None:
        """Reset counters when the task's configuration changes."""
        if task["id"] in self.tips:
            self.tips.pop(task["id"])
            self.save_tip()

    def task_config_remove_hook(self, task: dict) -> None:
        """Drop counters when the task is deleted."""
        if task["id"] in self.tips:
            self.tips.pop(task["id"])
            self.save_tip()
|
||||
|
||||
|
||||
class CertEndTimeTask(BaseTask):
    """Push task: warn when an uploaded SSL certificate is close to expiry.

    Per-task notification counts are persisted to a ".tip" JSON file so a
    given certificate is only re-notified up to the configured total.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "cert_endtime"
        self.template_name = "Certificate expiration"
        # self.title = "Certificate expiration"
        self._tip_file = "{}/cert_endtime.tip".format(PUSH_DATA_PATH)
        self._tip_data: Optional[dict] = None  # lazy cache of the tip file
        self._task_config = TaskConfig()

        # Per-run state, populated by get_push_data().
        self.cert_list = []
        self.push_keys = []
        self.task_id = None

    @property
    def tips(self) -> dict:
        """Lazily load ``{task_id: {ssl_id: times_notified}}`` from disk."""
        if self._tip_data is not None:
            return self._tip_data
        try:
            self._tip_data = json.loads(read_file(self._tip_file))
        except Exception:
            # Missing or corrupt tip file: start with an empty record.
            self._tip_data = {}
        return self._tip_data

    def save_tip(self):
        """Persist the notification counters back to the tip file."""
        write_file(self._tip_file, json.dumps(self.tips))

    def get_keyword(self, task_data: dict) -> str:
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Collect expiring certificates; return ``{"msg_list": [...]}`` or None."""
        from .util import get_cert_list, to_dict_obj

        # Certificates that have their own dedicated reminder task are
        # excluded from the catch-all "all" task.
        exclude_ids = [i["task_data"]["project"] for i in self._task_config.config if i["source"] == self.source_name]
        total = self._task_config.get_by_id(task_id).get("number_rule", {}).get("total", 1)

        if "all" in exclude_ids:
            exclude_ids.remove("all")
        data = get_cert_list(to_dict_obj({"status_id": 1}))['data']
        if task_data["project"] == "all":
            for cert in data:
                if cert["ssl_id"] in exclude_ids:
                    continue
                if self.tips.get(task_id, {}).get(cert['ssl_id'], 0) > total:
                    continue  # already notified the allowed number of times
                if not cert.get("endDay") and cert.get("endDay") != 0:
                    continue  # no expiry info available
                if cert["endDay"] <= task_data["cycle"]:
                    self.cert_list.append(cert)
        else:
            for cert in data:
                if cert["ssl_id"] != task_data["project"]:
                    continue
                if not cert.get("endDay") and cert.get("endDay") != 0:
                    continue
                if cert["endDay"] <= task_data["cycle"]:
                    self.cert_list.append(cert)
        self.title = self.get_title(task_data)
        if len(self.cert_list) == 0:
            return None

        s_list = ['>About to expire: <font color=#ff0000>{} </font>'.format(len(self.cert_list))]
        for x in self.cert_list:
            # Join the domain list FIRST, then fall back to "None". The old
            # code applied `or "None"` to the list, so an empty list made
            # join() iterate the string "None" and render "N,o,n,e".
            domains = ",".join(x.get("domainName", [])) or "None"
            s_list.append(
                ">Certificates: {} [{}]expire in days Websites that may be affected:{}".format(
                    "{} | {}".format(x["title"], domains),
                    x['endDay'],
                    ','.join(x.get('use_site', [])) or "None"
                )
            )

        self.task_id = task_id
        return {"msg_list": s_list}

    @staticmethod
    def _check_end_time(site_name, limit, prefix) -> Optional[dict]:
        """Return the site's SSL info dict when it expires within *limit* days."""
        info = RealSSLManger(conf_prefix=prefix).get_site_ssl_info(site_name)
        if info is not None:
            end_time = datetime.strptime(info['notAfter'], '%Y-%m-%d')
            if int((end_time.timestamp() - time.time()) / 86400) <= limit:
                return info
        return None

    def get_title(self, task_data: dict) -> str:
        from .util import get_cert_list, to_dict_obj
        if task_data["project"] == "all":
            return "Certificate expiration -- All"
        data = get_cert_list(to_dict_obj({}))['data']
        for cert in data:
            if cert["ssl_id"] == task_data["project"]:
                # Join first, then fall back (see get_push_data).
                return "Certificate expiration -- [{} | {}]".format(
                    cert["title"], ",".join(cert.get("domainName", [])) or "None")
        return "Domain [{}] expiration reminder".format(task_data["project"])

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Return the SMS template id and its argument dict."""
        # NOTE(review): cert rows from get_cert_list carry "title"/"endDay",
        # not "domain"/"endtime" as the old direct indexing assumed (which
        # raised KeyError). Fall back to the keys that actually exist.
        first = self.cert_list[0]
        return 'cert_end|Certificate expiration reminders', {
            "name": push_public_data["ip"],
            "cert": first.get('domain') or first.get("title", ""),
            'time': first.get("endtime") or first.get("endDay", ""),
            'total': len(self.cert_list)
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Return the WeChat-account message summarizing the expiring certs."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Certificate expiration reminders"
        msg.msg = "There are {} certificates that will expire and will affect access".format(len(self.cert_list))
        return msg

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate user-supplied settings; return them or an error string."""
        task_data["interval"] = 60 * 60 * 24  # default check interval: 1 day
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] > 1):
            return "The remaining time parameter is incorrect, at least 1 day"
        return task_data

    def filter_template(self, template) -> dict:
        """Add every certificate that has expiry info to the select field."""
        from .util import get_cert_list, to_dict_obj

        items = [
            # Join first, then fall back (see get_push_data).
            {"title": "{} | {}".format(i["title"], ",".join(i.get("domainName", [])) or "None"), "value": i["ssl_id"]}
            for i in get_cert_list(to_dict_obj({}))['data']
            if i.get("endDay")
        ]

        template["field"][0]["items"].extend(items)

        return template

    def check_num_rule(self, num_rule: dict) -> Union[dict, str]:
        num_rule["get_by_func"] = "can_send_by_num_rule"
        return num_rule

    # The actual count check is already done inside get_push_data.
    def can_send_by_num_rule(self, task_id: str, task_data: dict, number_rule: dict, push_data: dict) -> Optional[str]:
        return None

    def task_run_end_hook(self, res) -> None:
        """After a successful send, bump the per-key notification counters."""
        if not res["do_send"]:
            return
        if self.task_id:
            if self.task_id not in self.tips:
                self.tips[self.task_id] = {}

            for w in self.push_keys:
                if w in self.tips[self.task_id]:
                    self.tips[self.task_id][w] += 1
                else:
                    self.tips[self.task_id][w] = 1

            self.save_tip()

    def task_config_update_hook(self, task: dict) -> None:
        """Reset counters when the task's configuration changes."""
        if task["id"] in self.tips:
            self.tips.pop(task["id"])
            self.save_tip()

    def task_config_remove_hook(self, task: dict) -> None:
        """Drop counters when the task is deleted."""
        if task["id"] in self.tips:
            self.tips.pop(task["id"])
            self.save_tip()
|
||||
|
||||
class ViewMsgFormat(object):
    """Map template ids to renderers producing a short HTML summary of a task."""

    _FORMAT = {
        "1": (lambda x: "<span>Time remaining less than {} days {}</span>".format(
            x["task_data"].get("cycle"),
            ("(If it is not processed, it will be resent 1 time the next day for %d days)"
             % x.get("number_rule", {}).get("total", 0))
            if x.get("number_rule", {}).get("total", 0) else ""
        ))
    }

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the rendered message for *task*, or None when no renderer matches."""
        # Template ids 70/71 (domain / certificate expiry) share renderer "1".
        template_id = "1" if task["template_id"] in ("70", "71") else task["template_id"]
        formatter = self._FORMAT.get(template_id)
        return formatter(task) if formatter is not None else None
|
||||
127
mod/base/push_mod/ssl_push_template.json
Normal file
127
mod/base/push_mod/ssl_push_template.json
Normal file
@@ -0,0 +1,127 @@
|
||||
[
|
||||
{
|
||||
"id": "70",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "domain_endtime",
|
||||
"title": "Domain expiration",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.ssl_push",
|
||||
"name": "DomainEndTimeTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "domain",
|
||||
"type": "select",
|
||||
"default": "all",
|
||||
"items": [
|
||||
{
|
||||
"title": "All domain",
|
||||
"value": "all"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Remaining days",
|
||||
"type": "number",
|
||||
"suffix": "",
|
||||
"unit": "day(s)",
|
||||
"default": 30
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"cycle"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "all",
|
||||
"cycle": 30
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"total": 2
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
},
|
||||
{
|
||||
"id": "71",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "cert_endtime",
|
||||
"title": "Certificate expiration",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.ssl_push",
|
||||
"name": "CertEndTimeTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "Certificate",
|
||||
"type": "select",
|
||||
"default": "all",
|
||||
"items": [
|
||||
{
|
||||
"title": "all certificates",
|
||||
"value": "all"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "Remaining days",
|
||||
"type": "number",
|
||||
"suffix": "",
|
||||
"unit": "day(s)",
|
||||
"default": 30
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"cycle"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "all",
|
||||
"cycle": 30
|
||||
},
|
||||
"advanced_default": {
|
||||
"number_rule": {
|
||||
"total": 2
|
||||
}
|
||||
},
|
||||
"send_type_list": [
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
}
|
||||
]
|
||||
|
||||
491
mod/base/push_mod/system.py
Normal file
491
mod/base/push_mod/system.py
Normal file
@@ -0,0 +1,491 @@
|
||||
import datetime
|
||||
import time
|
||||
from threading import Thread
|
||||
from typing import Optional, List, Dict, Type, Union, Any
|
||||
|
||||
import public
|
||||
from .base_task import BaseTask
|
||||
from .compatible import rsync_compatible
|
||||
from .mods import TaskTemplateConfig, TaskConfig, TaskRecordConfig, SenderConfig
|
||||
from .send_tool import sms_msg_normalize
|
||||
from .tool import load_task_cls_by_path, load_task_cls_by_function, T_CLS
|
||||
from .util import get_server_ip, get_network_ip, format_date, get_config_value
|
||||
|
||||
WAIT_TASK_LIST: List[Thread] = []
|
||||
|
||||
|
||||
class PushSystem:
    """Drives one pass of the push/alert subsystem.

    Loads the enabled tasks and their templates, instantiates each task's
    handler class (with caching), and runs them via ``PushRunner``.
    """

    def __init__(self):
        self.task_cls_cache: Dict[str, Type[T_CLS]] = {}  # NOQA
        self._today_zero: Optional[datetime.datetime] = None
        self._sender_type_class: Optional[dict] = {}
        self.sd_cfg = SenderConfig()

    def sender_cls(self, sender_type: str):
        """Return the message-sender class for *sender_type* (lazily imported)."""
        if not self._sender_type_class:
            from mod.base.msg import WeiXinMsg, MailMsg, WebHookMsg, FeiShuMsg, DingDingMsg, SMSMsg, TgMsg
            self._sender_type_class = {
                "weixin": WeiXinMsg,
                "mail": MailMsg,
                "webhook": WebHookMsg,
                "feishu": FeiShuMsg,
                "dingding": DingDingMsg,
                "sms": SMSMsg,
                # "wx_account": WeChatAccountMsg,
                "tg": TgMsg,
            }
        return self._sender_type_class[sender_type]

    @staticmethod
    def remove_old_task(task: dict):
        """Best-effort removal of a legacy task configuration (never raises)."""
        if not task.get("id"):
            return
        task_id = task["id"]
        try:
            from . import PushManager
            PushManager().remove_task_conf(public.to_dict_obj(
                {"task_id": task_id}
            ))
        except Exception:
            pass

    @staticmethod
    def can_run_task_list():
        """Return (runnable tasks, {task_id: template}) for this pass."""
        result = []
        result_template = {}
        for task in TaskConfig().config:  # all tasks
            # ======== remove legacy tasks ==============
            if task.get("source") == "cert_endtime" and task.get("task_data", {}).get(
                    "title") == "Certificate expiration":
                PushSystem.remove_old_task(task)  # drop the old SSL notification
            # ======== remove legacy tasks End ==========

            if not task["status"]:
                continue
            # Skip tasks whose check interval has not yet elapsed.
            if "interval" in task["task_data"] and isinstance(task["task_data"]["interval"], int):
                if time.time() < task["last_check"] + task["task_data"]["interval"]:
                    continue
            result.append(task)
            for template in TaskTemplateConfig().config:  # the task's template
                if template.get("id") == task["template_id"] and template.get("used"):
                    result_template.update({task["id"]: template})
                    break
        return result, result_template

    def get_task_object(self, template_id, load_cls_data: dict) -> Optional[BaseTask]:
        """Instantiate the task handler class described by *load_cls_data*.

        Classes are cached per template id; returns None when the class
        cannot be resolved.
        """
        if template_id in self.task_cls_cache:
            return self.task_cls_cache[template_id]()
        if "load_type" not in load_cls_data:
            return None
        if load_cls_data["load_type"] == "func":
            cls = load_task_cls_by_function(
                name=load_cls_data["name"],
                func_name=load_cls_data["func_name"],
                is_model=load_cls_data.get("is_model", False),
                # Fixed copy-paste slip: this used to read the "is_model"
                # key for the model_index argument.
                model_index=load_cls_data.get("model_index", ''),
                args=load_cls_data.get("args", None),
                sub_name=load_cls_data.get("sub_name", None),
            )
        else:
            cls_path = load_cls_data["cls_path"]
            cls = load_task_cls_by_path(cls_path, load_cls_data["name"])

        if not cls:
            return None
        self.task_cls_cache[template_id] = cls
        return cls()

    def run(self):
        """Run every due task once, then wait for any spawned worker threads."""
        rsync_compatible()
        task_list, task_template = self.can_run_task_list()
        try:
            for t in task_list:
                template = task_template[t["id"]]
                print(PushRunner(t, template, self)())
        except Exception as e:
            import traceback
            # The old call passed `e` as a stray second argument to an
            # f-string containing a never-filled %s placeholder.
            public.print_log("run task error: {}".format(e))
            public.print_log(traceback.format_exc())

        global WAIT_TASK_LIST
        # Some tasks spawn worker threads; wait for them before exiting.
        if WAIT_TASK_LIST:
            for i in WAIT_TASK_LIST:
                i.join()

    def get_today_zero(self) -> datetime.datetime:
        """Return today's midnight (cached for the lifetime of this object)."""
        if self._today_zero is None:
            t = datetime.datetime.today()
            t_zero = datetime.datetime.combine(t, datetime.time.min)
            self._today_zero = t_zero
        return self._today_zero
|
||||
|
||||
|
||||
class PushRunner:
    """Execute one alarm task: collect push data, apply hooks and rate limits,
    send to every configured channel, and persist the outcome.

    The instance is single-use: call it once via ``__call__``.
    """

    def __init__(self, task: dict, template: dict, push_system: PushSystem, custom_push_data: Optional[dict] = None):
        self._public_push_data: Optional[dict] = None
        self.result: dict = {
            "do_send": False,
            "stop_msg": "",
            "push_data": {},
            "check_res": False,
            "check_stop_on": "",
            "send_data": {},
        }  # record of this run's outcome
        self.change_fields = set()  # task fields modified during the run (persisted in save_result)
        self.task_obj: Optional[BaseTask] = None
        self.task = task
        self.template = template
        self.push_system = push_system
        self._add_hook_msg: Optional[str] = None  # extra message appended by pre-hooks
        self.custom_push_data = custom_push_data

        self.tr_cfg = TaskRecordConfig(task["id"])
        # True when the task uses a custom count check (number_rule "get_by_func");
        # in that case the default day/total counters must NOT be updated.
        self.is_number_rule_by_func = False

    def save_result(self):
        """Write changed task fields back to the task config and append a send record."""
        t = TaskConfig()
        tmp = t.get_by_id(self.task["id"])
        if tmp:
            for f in self.change_fields:
                tmp[f] = self.task[f]

            if self.result["do_send"]:
                tmp["last_send"] = int(time.time())
            tmp["last_check"] = int(time.time())

            t.save_config()

        if self.result["push_data"]:
            result_data = self.result.copy()
            self.tr_cfg.config.append(
                {
                    "id": self.tr_cfg.nwe_id(),
                    "template_id": self.template["id"],
                    "task_id": self.task["id"],
                    "do_send": result_data.pop("do_send"),
                    "send_data": result_data.pop("push_data"),
                    "result": result_data,
                    "create_time": int(time.time()),
                }
            )
            self.tr_cfg.save_config()

    @property
    def public_push_data(self) -> dict:
        """Server-identity fields shared by every message, plus a fresh timestamp."""
        if self._public_push_data is None:
            self._public_push_data = {
                'ip': get_server_ip(),
                'local_ip': get_network_ip(),
                'server_name': get_config_value('title')
            }
        data = self._public_push_data.copy()
        data['time'] = format_date()
        data['timestamp'] = int(time.time())
        return data

    def __call__(self):
        """Run the task, persist the result, fire the end hook, and return the result."""
        self.run()
        self.save_result()
        if self.task_obj:
            self.task_obj.task_run_end_hook(self.result)
        return self.result_to_return()

    def result_to_return(self) -> dict:
        """Hook point for subclasses to reshape the returned result."""
        return self.result

    def _append_msg_list_for_hook(self, push_data: dict) -> dict:
        """Append a human-readable line to push_data['msg_list'] for each task-level hook."""
        for key in ["pre_hook", "after_hook"]:
            if not self.task.get("task_data", {}).get(key):
                continue
            for k, v in self.task["task_data"][key].items():
                try:
                    val = ", ".join(v) if isinstance(v, list) else str(v)
                    act = k.capitalize() if k and isinstance(k, str) else k
                    push_data['msg_list'].append(f">{key.capitalize()}: {act} - {val} ")
                except Exception as e:
                    public.print_log(f"Append {key} hook msg error: {e}")
                    continue
        return push_data

    def run(self):
        """Main pipeline: load task class, build push data, run hooks/rules, send."""
        self.task_obj = self.push_system.get_task_object(self.template["id"], self.template["load_cls"])
        if not self.task_obj:
            self.result["stop_msg"] = "The task class failed to load"
            return
        if self.custom_push_data is None:
            push_data = None
            try:
                push_data = self.task_obj.get_push_data(self.task["id"], self.task["task_data"])
            except Exception:
                import traceback
                public.print_log(f"get_push_data error: {traceback.format_exc()}")
            if not push_data:
                # No data means the alarm condition is not met; silently stop.
                return
        else:
            push_data = self.custom_push_data

        self.result["push_data"] = push_data
        # Run the global pre hook
        if self.task.get("pre_hook"):
            if not self.run_hook(self.task["pre_hook"], "pre_hook"):
                self.result["stop_msg"] = "Task global pre hook stopped execution"
                return
        # Run the task's own pre hook
        if self.task.get("task_data", {}).get("pre_hook"):
            if not self.run_hook(self.task["task_data"]["pre_hook"], "pre_hook"):
                self.result["stop_msg"] = "Task pre hook stopped execution"
                return

        # Time-based rules (minimum interval, allowed time range)
        if not self.run_time_rule(self.task["time_rule"]):
            return

        # Frequency-based rules (per-day / total limits)
        if not self.number_rule(self.task["number_rule"]):
            return

        # Add hook descriptions to the message; global hooks stay silent.
        push_data = self._append_msg_list_for_hook(push_data)

        # Send to every configured channel.
        self.send_message(push_data)

        # BUGFIX: tasks with a custom count check must not update the default
        # counters (the flag existed but was never honored here).
        if not self.is_number_rule_by_func:
            self.change_fields.add("number_data")
            if "day_num" not in self.task["number_data"]:
                self.task["number_data"]["day_num"] = 0
            if "total" not in self.task["number_data"]:
                self.task["number_data"]["total"] = 0
            self.task["number_data"]["day_num"] += 1
            self.task["number_data"]["total"] += 1
            self.task["number_data"]["time"] = int(time.time())

        # Run the task's own after hook
        if self.task.get("task_data", {}).get("after_hook"):
            self.run_hook(self.task["task_data"]["after_hook"], "after_hook")
        # Run the global after hook
        if self.task.get("after_hook"):
            self.run_hook(self.task["after_hook"], "after_hook")

    # Hook function, reserved for extension
    def run_hook(self, hook_data: Dict[str, List[Any]], hook_name: str) -> bool:
        """
        Execute a hook and report whether the run should continue.

        @param hook_name: hook identifier, e.g. "after_hook", "pre_hook"
        @param hook_data: mapping of action name to its arguments
        @return: True to continue the pipeline, False to stop it
        """
        if not isinstance(hook_data, dict) or not isinstance(hook_name, str):
            return False

        if hook_name == "pre_hook":
            # Pre hooks currently have no actions; never block the run.
            return True

        elif hook_name == "after_hook":
            from script.restart_services import ServicesHelper
            # restart action
            if hook_data.get("restart"):
                for s in hook_data.get("restart", []):
                    if not s or not isinstance(s, str):
                        continue
                    service_obj = ServicesHelper(s.strip())
                    if not service_obj.is_install:
                        continue
                    service_obj.script("restart", "Alarm Triggered")
                return True

            # module action
            elif hook_data.get("module"):
                return True

        return False

    def run_time_rule(self, time_rule: dict) -> bool:
        """Return True when sending is allowed by the interval / time-range rules."""
        if "send_interval" in time_rule and time_rule["send_interval"] > 0:
            if self.task["last_send"] + time_rule["send_interval"] > time.time():
                self.result['stop_msg'] = 'If the minimum send time is less, no sending will be made'
                self.result['check_stop_on'] = "time_rule_send_interval"
                return False

        # time_range is [start_offset, end_offset] in seconds from midnight
        time_range = time_rule.get("time_range", None)
        if time_range and isinstance(time_range, list) and len(time_range) == 2:
            t_zero = self.push_system.get_today_zero()
            start_time = t_zero + datetime.timedelta(seconds=time_range[0])  # NOQA
            end_time = t_zero + datetime.timedelta(seconds=time_range[1])  # NOQA
            if not start_time < datetime.datetime.now() < end_time:
                self.result['stop_msg'] = 'It is not within the time frame within which the alarm can be sent'
                self.result['check_stop_on'] = "time_rule_time_range"
                return False
        return True

    def number_rule(self, number_rule: dict) -> bool:
        """Return True when sending is still allowed by the count limits."""
        number_data = self.task.get("number_data", {})
        # Delegate the check to a task-defined function when configured.
        if "get_by_func" in number_rule and isinstance(number_rule["get_by_func"], str):
            f = getattr(self.task_obj, number_rule["get_by_func"], None)
            if f is not None and callable(f):
                # BUGFIX: mark the task as custom-checked so run() skips the
                # default counter update (the flag was never set before).
                self.is_number_rule_by_func = True
                res = f(self.task["id"], self.task["task_data"], number_data, self.result["push_data"])
                if isinstance(res, str):
                    self.result['stop_msg'] = res
                    self.result['check_stop_on'] = "number_rule_get_by_func"
                    return False

                # When the custom check is used, the default counters below are
                # skipped and "number_data" is not added to change_fields.
                return True

        if "day_num" in number_rule and isinstance(number_rule["day_num"], int) and number_rule["day_num"] > 0:
            record_time = number_data.get("time", 0)
            if record_time < self.push_system.get_today_zero().timestamp():  # last trigger was before today
                self.task["number_data"]["day_num"] = record_num = 0
                self.task["number_data"]["time"] = time.time()
                self.change_fields.add("number_data")
            else:
                # BUGFIX: default to 0 so a missing "day_num" key cannot make
                # the ">=" comparison below raise TypeError on None.
                record_num = self.task["number_data"].get("day_num", 0)
            if record_num >= number_rule["day_num"]:
                self.result['stop_msg'] = "Exceeding the daily limit:{}".format(number_rule["day_num"])
                self.result['check_stop_on'] = "number_rule_day_num"
                return False

        if "total" in number_rule and isinstance(number_rule["total"], int) and number_rule["total"] > 0:
            record_total = number_data.get("total", 0)
            if record_total >= number_rule["total"]:
                self.result['stop_msg'] = "The maximum number of times the limit is exceeded:{}".format(
                    number_rule["total"])
                self.result['check_stop_on'] = "number_rule_total"
                return False

        return True

    def send_message(self, push_data: dict):
        """Send push_data to every sender configured on the task.

        Per-sender outcomes are recorded in result["send_data"]:
        a string for errors/skips, or 1 on success.
        """
        self.result["do_send"] = True
        self.result["push_data"] = push_data
        for sender_id in self.task["sender"]:
            conf = self.push_system.sd_cfg.get_by_id(sender_id)
            if conf is None:
                continue
            if not conf["used"]:
                self.result["send_data"][sender_id] = "The alarm channel {} is closed, skip sending".format(
                    conf["data"].get("title"))
                continue
            sd_cls = self.push_system.sender_cls(conf["sender_type"])
            res = None
            if conf["sender_type"] == "weixin":
                res = sd_cls(conf).send_msg(
                    self.task_obj.to_weixin_msg(push_data, self.public_push_data),
                    self.task_obj.title
                )

            elif conf["sender_type"] == "mail":
                res = sd_cls(conf).send_msg(
                    self.task_obj.to_mail_msg(push_data, self.public_push_data),
                    self.task_obj.title
                )

            elif conf["sender_type"] == "webhook":
                res = sd_cls(conf).send_msg(
                    self.task_obj.to_web_hook_msg(push_data, self.public_push_data),
                    self.task_obj.title,
                )

            elif conf["sender_type"] == "feishu":
                res = sd_cls(conf).send_msg(
                    self.task_obj.to_feishu_msg(push_data, self.public_push_data),
                    self.task_obj.title
                )
            elif conf["sender_type"] == "dingding":
                res = sd_cls(conf).send_msg(
                    self.task_obj.to_dingding_msg(push_data, self.public_push_data),
                    self.task_obj.title
                )
            elif conf["sender_type"] == "sms":
                sm_type, sm_args = self.task_obj.to_sms_msg(push_data, self.public_push_data)
                if not sm_type or not sm_args:
                    continue
                sm_args = sms_msg_normalize(sm_args)
                res = sd_cls(conf).send_msg(sm_type, sm_args)

            elif conf["sender_type"] == "tg":
                from mod.base.msg import TgMsg
                # Example of the rendered message:
                # Home CPU alarms<br>
                # >Server:xxx<br>
                # >IPAddress: xxx.xxx.xxx.xxx(Internet) xxx.xxx.xxx.xxx(Internal)<br>
                # >SendingTime: 2024-00-00 00:00:00<br>
                # >Notification type: High CPU usage alarm<br>
                # >Content of alarm: The average CPU usage of the machine in the last 5 minutes is 3.24%, ...
                try:
                    res = sd_cls(conf).send_msg(
                        self.task_obj.to_tg_msg(push_data, self.public_push_data),
                        self.task_obj.title
                    )
                except:
                    public.print_log(public.get_error_info())
            else:
                continue
            # BUGFIX: these branches were two independent "if"s, so the
            # Traceback-specific message was always overwritten by "res".
            if isinstance(res, str) and res.find("Traceback") != -1:
                self.result["send_data"][sender_id] = ("An error occurred during the execution of the message "
                                                       "transmission, and the transmission was not successful")
            elif isinstance(res, str):
                self.result["send_data"][sender_id] = res
            else:
                self.result["send_data"][sender_id] = 1
|
||||
|
||||
|
||||
def push_by_task_keyword(source: str, keyword: str, push_data: Optional[dict] = None) -> Union[str, dict]:
    """
    Find an alarm task by its source and keyword and trigger it.

    @param push_data: optional pre-built push payload (bypasses get_push_data)
    @param source: task source identifier, e.g. "system_cpu"
    @param keyword: task keyword as returned by the task's get_keyword()
    @return: an error string, or the PushRunner result dict
    """
    push_system = PushSystem()
    target_task = {}
    for i in TaskConfig().config:
        if i["source"] == source and i["keyword"] == keyword:
            target_task = i
            break
    if not target_task:
        return "The task was not found"

    target_template = TaskTemplateConfig().get_by_id(target_task["template_id"])  # NOQA
    # BUGFIX: get_by_id may return None; guard before subscripting.
    if not target_template:
        return "The task template was not found"
    if not target_template["used"]:
        return "This task type has been banned"
    if not target_task['status']:
        return "The task has been stopped"

    return PushRunner(target_task, target_template, push_system, push_data)()
|
||||
|
||||
|
||||
def push_by_task_id(task_id: str, push_data: Optional[dict] = None):
    """
    Trigger an alarm task by its id and send the message.

    @param push_data: optional pre-built push payload (bypasses get_push_data)
    @param task_id: id of the task in TaskConfig
    @return: an error string, or the PushRunner result dict
    """
    push_system = PushSystem()
    target_task = TaskConfig().get_by_id(task_id)
    if not target_task:
        return "The task was not found"

    target_template = TaskTemplateConfig().get_by_id(target_task["template_id"])
    # BUGFIX: get_by_id may return None; guard before subscripting.
    if not target_template:
        return "The task template was not found"
    if not target_template["used"]:
        return "This task type has been banned"
    if not target_task['status']:
        return "The task has been stopped"

    return PushRunner(target_task, target_template, push_system, push_data)()
|
||||
|
||||
|
||||
def get_push_public_data():
    """Collect the server-identity fields shared by every push message."""
    return {
        'ip': get_server_ip(),
        'local_ip': get_network_ip(),
        'server_name': get_config_value('title'),
        'time': format_date(),
        'timestamp': int(time.time()),
    }
|
||||
418
mod/base/push_mod/system_push.py
Normal file
418
mod/base/push_mod/system_push.py
Normal file
@@ -0,0 +1,418 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from importlib import import_module
|
||||
from typing import Tuple, Union, Optional, List
|
||||
|
||||
import psutil
|
||||
|
||||
from .base_task import BaseTask
|
||||
from .mods import PUSH_DATA_PATH
|
||||
from .send_tool import WxAccountMsg
|
||||
from .system import WAIT_TASK_LIST
|
||||
from .util import read_file, write_file, get_config_value, generate_fields
|
||||
|
||||
try:
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
from panel_msg.collector import SitePushMsgCollect, SystemPushMsgCollect
|
||||
except ImportError:
|
||||
SitePushMsgCollect = None
|
||||
SystemPushMsgCollect = None
|
||||
|
||||
|
||||
def _get_panel_name() -> str:
    """Return the configured panel alias, falling back to the default name."""
    title = get_config_value("title")
    # An empty alias means "not configured": use the product default.
    return "YakPanel" if title == "" else title
|
||||
|
||||
|
||||
class PanelSysDiskTask(BaseTask):
    """Push task: alert when a mounted disk runs low on free space
    or its usage percentage exceeds a threshold."""

    def __init__(self):
        super().__init__()
        self.source_name = "system_disk"
        self.template_name = "Home disk alerts"
        self.title = "Home disk alerts"

        # Short summary reused by to_wx_account_msg, set during get_push_data.
        self.wx_msg = ""

    def get_title(self, task_data: dict) -> str:
        """Build a per-task title that includes the monitored mount point."""
        return "Home disk alerts -- Mount directory[{}]".format(task_data["project"])

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate user settings; return the cleaned dict or an error string.

        cycle 1 = alert on remaining capacity (GB); cycle 2 = alert on used %.
        """
        if task_data["project"] not in [i[0] for i in self._get_disk_name()]:
            return "The specified disk does not exist"
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] in (1, 2)):
            return "The type parameter is incorrect"
        if not (isinstance(task_data['count'], int) and task_data['count'] >= 1):
            return "The threshold parameter is incorrect"
        if task_data['cycle'] == 2 and task_data['count'] >= 100:
            return "The threshold parameter is incorrect, and the set check range is incorrect"
        task_data['interval'] = 600  # check every 10 minutes
        return task_data

    @staticmethod
    def _get_disk_name() -> list:
        """Return (mount_path, total_size) pairs for every mounted disk."""
        if "/www/server/panel" not in sys.path:
            sys.path.insert(0, "/www/server/panel")

        system_modul = import_module('.system', package="class")
        system = getattr(system_modul, "system")

        disk_info = system.GetDiskInfo2(None, human=False)

        return [(d.get("path"), d.get("size")[0]) for d in disk_info]

    @staticmethod
    def _get_disk_info() -> list:
        """Return the raw disk-info dicts from the panel's system module."""
        if "/www/server/panel" not in sys.path:
            sys.path.insert(0, "/www/server/panel")

        system_modul = import_module('.system', package="class")
        system = getattr(system_modul, "system")

        disk_info = system.GetDiskInfo2(None, human=False)

        return disk_info

    def get_keyword(self, task_data: dict) -> str:
        """One disk task per mount point: the keyword is the monitored path."""
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Check the monitored disk against the threshold; return the alarm
        payload, or None when the disk is within limits."""
        disk_info = self._get_disk_info()
        unsafe_disk_list = []

        for d in disk_info:
            if task_data["project"] != d["path"]:
                continue
            # size[2] appears to be free space in KB; /1048576 converts to GB — TODO confirm
            free = int(d["size"][2]) / 1048576
            # size[3] is the usage percentage, possibly with a trailing "%"
            proportion = int(d["size"][3] if d["size"][3][-1] != "%" else d["size"][3][:-1])

            if task_data["cycle"] == 1 and free < task_data["count"]:
                unsafe_disk_list.append(
                    "The remaining capacity of the disk mounted on {} is {}G, which is less than the alarm value {}G.".format(
                        d["path"], round(free, 2), task_data["count"])
                )
                self.wx_msg = "The remaining capacity is less than {}G".format(task_data["count"])

            elif task_data["cycle"] == 2 and proportion > task_data["count"]:
                unsafe_disk_list.append(
                    "The used capacity of the disk mounted on {} is {}%, which is greater than the alarm value {}%.".format(
                        d["path"], round(proportion, 2), task_data["count"])
                )
                self.wx_msg = "Occupancy greater than {}%".format(task_data["count"])

        if len(unsafe_disk_list) == 0:
            return None

        return {
            "msg_list": [
                ">Notification type: Disk Balance Alert",
                ">Alarm content:\n" + "\n".join(unsafe_disk_list)
            ]
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Populate the template's disk selector with every mounted disk.

        count_default presumably suggests 20% of total size in GB — TODO confirm units.
        """
        for (path, total_size) in self._get_disk_name():
            template["field"][0]["items"].append({
                "title": "[{}] disk".format(path),
                "value": path,
                "count_default": round((int(total_size) * 0.2) / 1024 / 1024, 1)
            })
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Map to the SMS template id and its substitution arguments."""
        return 'machine_exception|Disk Balance Alert', {
            'name': _get_panel_name(),
            'type': "Insufficient disk space",
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account template message (body max ~20 chars)."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Home disk alerts"
        if len(self.wx_msg) > 20:
            self.wx_msg = self.wx_msg[:17] + "..."
        msg.msg = self.wx_msg
        return msg
|
||||
|
||||
|
||||
class PanelSysCPUTask(BaseTask):
    """Push task: alert when average CPU usage over a window exceeds a threshold.

    Samples are collected one per run (a background thread takes a 10-second
    psutil reading) and cached on disk between runs.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "system_cpu"
        self.template_name = "Home CPU alarms"
        self.title = "Home CPU alarms"

        # Last averaged CPU percentage, reused by to_wx_account_msg.
        self.cpu_count = 0

        # On-disk cache of (timestamp, cpu_percent) samples.
        self._tip_file = "{}/system_cpu.tip".format(PUSH_DATA_PATH)
        self._tip_data: Optional[List[Tuple[float, float]]] = None

    @property
    def cache_list(self) -> List[Tuple[float, float]]:
        """Lazily load the sample cache from disk; fall back to empty on any error."""
        if self._tip_data is not None:
            return self._tip_data
        try:
            self._tip_data = json.loads(read_file(self._tip_file))
        except:
            self._tip_data = []
        return self._tip_data

    def save_cache_list(self):
        """Persist the in-memory sample cache back to disk."""
        write_file(self._tip_file, json.dumps(self.cache_list))

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the task settings; return the cleaned dict or an error string."""
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] >= 1):
            return "The time parameter is incorrect"
        if not (isinstance(task_data['count'], int) and task_data['count'] >= 1):
            return "Threshold parameter error, at least 1%"
        task_data['interval'] = 60  # sample once per minute
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """CPU monitoring is unique per panel, so the keyword is fixed."""
        return "system_cpu"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Prune stale samples, start the next sampling thread, and build the
        alarm payload when the windowed average exceeds the threshold."""
        # Drop samples older than the averaging window (plus 10s slack).
        expiration = datetime.now() - timedelta(seconds=task_data["cycle"] * 60 + 10)
        for i in range(len(self.cache_list) - 1, -1, -1):
            data_time, _ = self.cache_list[i]
            if datetime.fromtimestamp(data_time) < expiration:
                del self.cache_list[i]

        # Record the next sample in a thread; cpu_percent(10) blocks ~10s.
        def thread_get_cpu_data():
            self.cache_list.append((time.time(), psutil.cpu_percent(10)))
            self.save_cache_list()

        thread_active = threading.Thread(target=thread_get_cpu_data, args=())
        thread_active.start()
        # The main process joins WAIT_TASK_LIST threads before exiting.
        WAIT_TASK_LIST.append(thread_active)

        if len(self.cache_list) < task_data["cycle"]:  # fewer samples than the window requires: no push
            return None

        if len(self.cache_list) > 0:
            avg_data = sum(i[1] for i in self.cache_list) / len(self.cache_list)
        else:
            avg_data = 0

        if avg_data < task_data["count"]:
            return None
        else:
            # Reset so the next alarm averages a fresh window.
            self.cache_list.clear()
            self.cpu_count = round(avg_data, 2)
            s_list = [
                ">Notification type: High CPU usage alarm",
                ">Content of alarm: The average CPU usage of the machine in the last {} minutes is {}%, which is higher than the alarm value {}%.".format(
                    task_data["cycle"], round(avg_data, 2), task_data["count"]),
            ]

            return {
                "msg_list": s_list,
            }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Extend the template with the optional service-restart hook fields."""
        template = generate_fields(template, "restart")
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Map to the SMS template id and its substitution arguments."""
        return 'machine_exception|High CPU usage alarm', {
            'name': _get_panel_name(),
            'type': "High CPU usage",
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account template message."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Home CPU alarms"
        msg.msg = "The CPU usage of the host is exceeded:{}%".format(self.cpu_count)
        msg.next_msg = "Please log in to the panel to view the host status"
        return msg
|
||||
|
||||
|
||||
class PanelSysLoadTask(BaseTask):
    """Push task: alert when the system load average exceeds a threshold.

    Load is expressed as a percentage of (cpu_count * 2) — i.e. 100% means
    a load average equal to twice the core count.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "system_load"
        self.template_name = "Home load alerts"
        self.title = "Home load alerts"

        # Last measured load percentage, reused by to_wx_account_msg.
        self.avg_data = 0

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the task settings; return the cleaned dict or an error string."""
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] >= 1):
            return "The time parameter is incorrect"
        if not (isinstance(task_data['count'], int) and task_data['count'] >= 1):
            return "Threshold parameter error, at least 1%"
        task_data['interval'] = 60 * task_data['cycle']  # check once per averaging window
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """Load monitoring is unique per panel, so the keyword is fixed."""
        return "system_load"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Read the 1/5/15-minute load averages and build the alarm payload
        when the configured window's value exceeds the threshold."""
        now_load = os.getloadavg()
        cpu_count = psutil.cpu_count()
        # Normalize each load average to a percentage of 2x core count.
        now_load = [i / (cpu_count * 2) * 100 for i in now_load]
        need_push = False
        avg_data = 0
        # getloadavg() returns (1min, 5min, 15min); pick the one matching cycle.
        if task_data["cycle"] == 15 and task_data["count"] < now_load[2]:
            avg_data = now_load[2]
            need_push = True
        elif task_data["cycle"] == 5 and task_data["count"] < now_load[1]:
            avg_data = now_load[1]
            need_push = True
        elif task_data["cycle"] == 1 and task_data["count"] < now_load[0]:
            avg_data = now_load[0]
            need_push = True

        if not need_push:
            return None

        self.avg_data = avg_data

        return {
            "msg_list": [
                ">Notification type: Alarm when the load exceeds the standard",
                ">Content of alarm: The average load factor of the machine in the last {} minutes is {}%, which is higher than the alarm value of {}%.".format(
                    task_data["cycle"], round(avg_data, 2), task_data["count"]),
            ]
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Extend the template with the optional service-restart hook fields."""
        template = generate_fields(template, "restart")
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Map to the SMS template id and its substitution arguments."""
        return 'machine_exception|Alarm when the load exceeds the standard', {
            'name': _get_panel_name(),
            'type': "The average load is too high",
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account template message."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Home load alerts"
        msg.msg = "The host load exceeds:{}%".format(round(self.avg_data, 2))
        msg.next_msg = "Please log in to the panel to view the host status"
        return msg
|
||||
|
||||
|
||||
class PanelSysMEMTask(BaseTask):
    """Push task: alert on sustained high memory usage.

    Each run records one usage sample; samples are cached on disk and
    averaged over the configured window.
    """

    def __init__(self):
        super().__init__()
        self.source_name = "system_mem"
        self.template_name = "Home memory alarms"
        self.title = "Home memory alarms"

        # Last averaged usage percentage, reused by to_wx_account_msg.
        self.wx_data = 0

        # On-disk cache of (timestamp, usage-fraction) samples.
        self._tip_file = "{}/system_mem.tip".format(PUSH_DATA_PATH)
        self._tip_data: Optional[List[Tuple[float, float]]] = None

    @property
    def cache_list(self) -> List[Tuple[float, float]]:
        """Lazily load the sample cache from disk; fall back to empty on any error."""
        if self._tip_data is not None:
            return self._tip_data
        try:
            self._tip_data = json.loads(read_file(self._tip_file))
        except:
            self._tip_data = []
        return self._tip_data

    def save_cache_list(self):
        """Persist the in-memory sample cache back to disk."""
        write_file(self._tip_file, json.dumps(self.cache_list))

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate the task settings; return the cleaned dict or an error string."""
        if not (isinstance(task_data['cycle'], int) and task_data['cycle'] >= 1):
            return "The number parameter is incorrect"
        if not (isinstance(task_data['count'], int) and task_data['count'] >= 1):
            return "Threshold parameter error, at least 1%"
        task_data['interval'] = task_data['cycle'] * 60  # check once per averaging window
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """Memory monitoring is unique per panel, so the keyword is fixed."""
        return "system_mem"

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Sample memory usage, average over the window, and build the alarm
        payload when the average exceeds the threshold; None otherwise."""
        mem = psutil.virtual_memory()
        # Fraction of RAM actually in use (excludes buffers and page cache).
        real_used: float = (mem.total - mem.free - mem.buffers - mem.cached) / mem.total
        stime = datetime.now()
        expiration = stime - timedelta(seconds=task_data["cycle"] * 60 + 10)

        self.cache_list.append((stime.timestamp(), real_used))

        # Drop samples older than the averaging window (plus 10s slack).
        for i in range(len(self.cache_list) - 1, -1, -1):
            data_time, _ = self.cache_list[i]
            if datetime.fromtimestamp(data_time) < expiration:
                del self.cache_list[i]

        avg_data = sum(i[1] for i in self.cache_list) / len(self.cache_list)

        if avg_data * 100 < task_data["count"]:
            self.save_cache_list()
            return None
        else:
            # Reset so the next alarm averages a fresh window.
            self.cache_list.clear()
            self.save_cache_list()
            self.wx_data = round(avg_data * 100, 2)
            return {
                'msg_list': [
                    ">Notification type: High memory usage alarm",
                    ">Content of alarm: The average memory usage of the machine in the last {} minutes is {}%, which is higher than the alarm value {}%.".format(
                        task_data["cycle"], round(avg_data * 100, 2), task_data["count"]),
                ]
            }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Extend the template with the optional service-restart hook fields."""
        template = generate_fields(template, "restart")
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """Map to the SMS template id and its substitution arguments."""
        return 'machine_exception|High memory usage alarm', {
            'name': _get_panel_name(),
            'type': "High memory usage",
        }

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Build the WeChat official-account template message."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Home memory alarms"
        msg.msg = "Host memory usage exceeded: {}%".format(self.wx_data)
        msg.next_msg = "Please log in to the panel to view the host status"
        return msg
|
||||
|
||||
|
||||
class ViewMsgFormat(object):
    """Render a human-readable HTML description of a system push task's trigger."""

    # template_id -> formatter over the task's task_data dict
    _FORMAT = {
        "20": (
            lambda x: "<span>Triggered by {} disk mounted on {}</span>".format(
                x.get("project"),
                # cycle 1 = remaining-capacity threshold (GB); cycle 2 = used %.
                # BUGFIX: the percentage message was truncated ("ake up ...").
                "The margin is less than %.1f G" % round(x.get("count"), 1) if x.get(
                    "cycle") == 1 else "Take up more than %d%%" % x.get("count"),
            )
        ),
        "21": (
            lambda x: "<span>Triggers when the average CPU usage exceeds {}% in {} minutes</span>".format(
                x.get("count"), x.get("cycle")
            )
        ),
        "22": (
            lambda x: "<span>Triggered by an average load exceeding {}% in {} minutes</span>".format(
                x.get("count"), x.get("cycle")
            )
        ),
        "23": (
            lambda x: "<span>Triggered if the memory usage exceeds {}% within {} minutes</span>".format(
                x.get("count"), x.get("cycle")
            )
        )
    }

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the formatted trigger description for *task*, or None when
        no formatter is registered for its template_id."""
        if task["template_id"] in self._FORMAT:
            return self._FORMAT[task["template_id"]](task["task_data"])
        return None
|
||||
294
mod/base/push_mod/system_push_template.json
Normal file
294
mod/base/push_mod/system_push_template.json
Normal file
@@ -0,0 +1,294 @@
|
||||
[
|
||||
{
|
||||
"id": "20",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "system_disk",
|
||||
"title": "Home disk alerts",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.system_push",
|
||||
"name": "PanelSysDiskTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "project",
|
||||
"name": "disk information",
|
||||
"type": "select",
|
||||
"items": [
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "detection type",
|
||||
"type": "radio",
|
||||
"suffix": "",
|
||||
"default": 2,
|
||||
"items": [
|
||||
{
|
||||
"title": "Remaining capacity",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"title": "percentage occupied",
|
||||
"value": 2
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "occupancy rate exceeds",
|
||||
"type": "number",
|
||||
"unit": "%",
|
||||
"suffix": "will trigger an alarm",
|
||||
"default": 80,
|
||||
"err_msg_prefix": "disk threshold"
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"project"
|
||||
],
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"project": "/",
|
||||
"cycle": 2,
|
||||
"count": 80
|
||||
},
|
||||
"send_type_list": [
|
||||
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"sms",
|
||||
"tg"
|
||||
],
|
||||
"unique": false
|
||||
},
|
||||
{
|
||||
"id": "21",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "system_cpu",
|
||||
"title": "Home CPU alarms",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.system_push",
|
||||
"name": "PanelSysCPUTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "every time",
|
||||
"type": "select",
|
||||
"unit": "minute(s)",
|
||||
"suffix": "average ",
|
||||
"width": "70px",
|
||||
"disabled": true,
|
||||
"default": 5,
|
||||
"items": [
|
||||
{
|
||||
"title": "1",
|
||||
"value": 3
|
||||
},
|
||||
{
|
||||
"title": "5",
|
||||
"value": 5
|
||||
},
|
||||
{
|
||||
"title": "15",
|
||||
"value": 15
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "CPU usage exceeded",
|
||||
"type": "number",
|
||||
"unit": "%",
|
||||
"suffix": "will trigger an alarm",
|
||||
"default": 80,
|
||||
"err_msg_prefix": "CPU"
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 5,
|
||||
"count": 80
|
||||
},
|
||||
"send_type_list": [
|
||||
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"sms",
|
||||
"tg"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "22",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "system_load",
|
||||
"title": "Home load alerts",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.system_push",
|
||||
"name": "PanelSysLoadTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "every time",
|
||||
"type": "select",
|
||||
"unit": "minute(s)",
|
||||
"suffix": "average ",
|
||||
"default": 5,
|
||||
"width": "70px",
|
||||
"disabled": true,
|
||||
"items": [
|
||||
{
|
||||
"title": "1",
|
||||
"value": 1
|
||||
},
|
||||
{
|
||||
"title": "5",
|
||||
"value": 5
|
||||
},
|
||||
{
|
||||
"title": "15",
|
||||
"value": 15
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": " load over",
|
||||
"type": "number",
|
||||
"unit": "%",
|
||||
"suffix": "will trigger an alarm",
|
||||
"default": 80,
|
||||
"err_msg_prefix": "Load"
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 5,
|
||||
"count": 80
|
||||
},
|
||||
"send_type_list": [
|
||||
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg",
|
||||
"sms"
|
||||
],
|
||||
"unique": true
|
||||
},
|
||||
{
|
||||
"id": "23",
|
||||
"ver": "1",
|
||||
"used": true,
|
||||
"source": "system_mem",
|
||||
"title": "Home memory alarms",
|
||||
"load_cls": {
|
||||
"load_type": "path",
|
||||
"cls_path": "mod.base.push_mod.system_push",
|
||||
"name": "PanelSysMEMTask"
|
||||
},
|
||||
"template": {
|
||||
"field": [
|
||||
{
|
||||
"attr": "cycle",
|
||||
"name": "every time",
|
||||
"type": "select",
|
||||
"unit": "minute(s)",
|
||||
"suffix": "average ",
|
||||
"width": "70px",
|
||||
"disabled": true,
|
||||
"default": 5,
|
||||
"items": [
|
||||
{
|
||||
"title": "1",
|
||||
"value": 3
|
||||
},
|
||||
{
|
||||
"title": "5",
|
||||
"value": 5
|
||||
},
|
||||
{
|
||||
"title": "15",
|
||||
"value": 15
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"attr": "count",
|
||||
"name": "Memory usage is more than",
|
||||
"type": "number",
|
||||
"unit": "%",
|
||||
"suffix": "will trigger an alarm",
|
||||
"default": 80,
|
||||
"err_msg_prefix": "Memory"
|
||||
}
|
||||
],
|
||||
"sorted": [
|
||||
[
|
||||
"cycle"
|
||||
],
|
||||
[
|
||||
"count"
|
||||
]
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cycle": 5,
|
||||
"count": 80
|
||||
},
|
||||
"send_type_list": [
|
||||
|
||||
"dingding",
|
||||
"feishu",
|
||||
"mail",
|
||||
"weixin",
|
||||
"webhook",
|
||||
"tg",
|
||||
"sms"
|
||||
],
|
||||
"unique": true
|
||||
}
|
||||
]
|
||||
506
mod/base/push_mod/task_manager_push.py
Normal file
506
mod/base/push_mod/task_manager_push.py
Normal file
@@ -0,0 +1,506 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from importlib import import_module
|
||||
from typing import Tuple, Union, Optional, List
|
||||
|
||||
import psutil
|
||||
|
||||
from .send_tool import WxAccountMsg
|
||||
from .base_task import BaseTask
|
||||
from .mods import PUSH_DATA_PATH, TaskTemplateConfig
|
||||
from .util import read_file, write_file, get_config_value, GET_CLASS
|
||||
|
||||
|
||||
class _ProcessInfo:
|
||||
|
||||
def __init__(self):
|
||||
self.data = None
|
||||
self.last_time = 0
|
||||
|
||||
def __call__(self) -> list:
|
||||
if self.data is not None and time.time() - self.last_time < 60:
|
||||
return self.data
|
||||
|
||||
try:
|
||||
import PluginLoader
|
||||
get_obj = GET_CLASS()
|
||||
get_obj.sort = "status"
|
||||
p_info = PluginLoader.plugin_run("task_manager", "get_process_list", get_obj)
|
||||
except:
|
||||
return []
|
||||
|
||||
if isinstance(p_info, dict) and "process_list" in p_info and isinstance(
|
||||
p_info["process_list"], list):
|
||||
self._process_info = p_info["process_list"]
|
||||
self.last_time = time.time()
|
||||
return self._process_info
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
get_process_info = _ProcessInfo()
|
||||
|
||||
|
||||
def have_task_manager_plugin():
    """Detect the task-manager plugin by probing for its push-module file."""
    plugin_file = "/www/server/panel/plugin/task_manager/task_manager_push.py"
    return os.path.exists(plugin_file)
||||
|
||||
|
||||
def load_task_manager_template():
    """Register the three Task Manager alarm templates (ids 60-62) once.

    If template id "60" already exists in the template config the whole
    group is assumed to be registered and nothing is done.
    """
    if TaskTemplateConfig().get_by_id("60"):
        return None

    from .mods import load_task_template_by_config

    def _template(tpl_id, source, title, cls_name,
                  count_name, count_unit, count_default, count_err_prefix):
        # The three templates only differ in their identity fields and in the
        # configuration of the "count" form field; everything else (project
        # selector, interval field, defaults, send types) is shared.
        return {
            "id": tpl_id,
            "ver": "1",
            "used": True,
            "source": source,
            "title": title,
            "load_cls": {
                "load_type": "path",
                "cls_path": "mod.base.push_mod.task_manager_push",
                "name": cls_name
            },
            "template": {
                "field": [
                    {
                        "attr": "project",
                        "name": "project name",
                        "type": "select",
                        "items": {
                            "url": "plugin?action=a&name=task_manager&s=get_process_list_to_push"
                        }
                    },
                    {
                        "attr": "count",
                        "name": count_name,
                        "type": "number",
                        "unit": count_unit,
                        "suffix": "trigger an alarm",
                        "default": count_default,
                        "err_msg_prefix": count_err_prefix
                    },
                    {
                        "attr": "interval",
                        "name": "Interval",
                        "type": "number",
                        "unit": "second(s)",
                        "suffix": "monitor the detection conditions again",
                        "default": 600
                    }
                ],
                "sorted": [
                    ["project"],
                    ["count"],
                    ["interval"]
                ],
            },
            "default": {
                "project": '',
                "count": 80,
                "interval": 600
            },
            "advanced_default": {
                "number_rule": {
                    "day_num": 3
                }
            },
            "send_type_list": [
                "dingding",
                "feishu",
                "mail",
                "weixin",
                "webhook",
                "tg",
            ],
            "unique": False
        }

    load_task_template_by_config([
        _template(
            "60", "task_manager_cpu", "Task Manager CPU usage alarm",
            "TaskManagerCPUTask",
            "Occupancy exceeded", "%", 80, "CPU occupancy",
        ),
        _template(
            "61", "task_manager_mem", "Task Manager memory usage alarm",
            "TaskManagerMEMTask",
            "The occupancy is more than", "MB", None, "Occupancy",
        ),
        _template(
            "62", "task_manager_process", "Task Manager Process Overhead Alert",
            "TaskManagerProcessTask",
            "Number of processes exceeds", "of them", 20, "NumberOfProcesses",
        ),
    ])
||||
|
||||
|
||||
class TaskManagerCPUTask(BaseTask):
    """Alarm task: fires when the summed CPU usage of a Task Manager project
    exceeds the configured percentage threshold."""

    def __init__(self):
        super().__init__()
        self.source_name = "task_manager_cpu"
        self.template_name = "Task Manager CPU usage alarm"

    def get_title(self, task_data: dict) -> str:
        """Per-project alarm title."""
        return "Task Manager CPU usage alarm -- [{}]".format(task_data["project"])

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate and normalize settings; return the dict or an error string.

        ``interval`` defaults to 600s and is floored at 60s; ``count`` must be
        an int in [1, 100); a project must be selected.
        """
        if "interval" not in task_data or not isinstance(task_data["interval"], int):
            task_data["interval"] = 600
        if task_data["interval"] < 60:
            task_data["interval"] = 60
        if "count" not in task_data or not isinstance(task_data["count"], int):
            return "The check range is set incorrectly"
        if not 1 <= task_data["count"] < 100:
            return "The check range is set incorrectly"
        if not task_data["project"]:
            return "Please select a process"
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """Deduplication keyword: the project name."""
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Return the alarm payload when the project's total CPU% exceeds the
        threshold, otherwise None."""
        process_info = get_process_info()
        self.title = self.get_title(task_data)
        count = used = 0
        for p in process_info:
            if p["name"] == task_data['project']:
                used += p["cpu_percent"]
                # a parent process counts itself plus all of its children
                count += 1 if "children" not in p else len(p["children"]) + 1

        if used <= task_data['count']:
            return None

        return {
            'msg_list':
                [
                    ">Notification type: Task Manager CPU usage alarm",
                    # BUGFIX: arguments were (project, count, used, threshold)
                    # while the placeholders read (count, project, used,
                    # threshold) -- order fixed.
                    ">Alarm content: There are {} processes with the process name [{}], and the proportion of CPU resources consumed is {}%, which is greater than the alarm threshold {}%.".format(
                        count, task_data['project'], used, task_data['count']
                    )
                ],
            "project": task_data['project'],
            "count": int(task_data['count'])
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Hide this template when the task_manager plugin is not installed."""
        if not have_task_manager_plugin():
            return None
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """SMS is not supported for this alarm."""
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Compact WeChat official-account message (project name trimmed to 11 chars)."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Task Manager CPU usage alarm"
        if len(push_data["project"]) > 11:
            project = push_data["project"][:9] + ".."
        else:
            project = push_data["project"]

        msg.msg = "The CPU of {} exceeds {}%".format(project, push_data["count"])
        return msg
||||
|
||||
|
||||
class TaskManagerMEMTask(BaseTask):
    """Alarm task: fires when the summed memory usage of a Task Manager
    project exceeds the configured threshold in MB."""

    def __init__(self):
        super().__init__()
        self.source_name = "task_manager_mem"
        self.template_name = "Task Manager memory usage alarm"

    def get_title(self, task_data: dict) -> str:
        """Per-project alarm title."""
        return "Task Manager memory usage alarm -- [{}].".format(task_data["project"])

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate and normalize settings; return the dict or an error string."""
        if not task_data["project"]:
            return "Please select a process"
        if "interval" not in task_data or not isinstance(task_data["interval"], int):
            task_data["interval"] = 600
        task_data["interval"] = max(60, task_data["interval"])  # floor at one minute
        if "count" not in task_data or not isinstance(task_data["count"], int):
            return "The check range is set incorrectly"
        if task_data["count"] < 1:
            return "The check range is set incorrectly"
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """Deduplication keyword: the project name."""
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Return the alarm payload when the project's total memory exceeds the
        threshold (``count`` is in MB; ``memory_used`` is in bytes), else None."""
        process_info = get_process_info()
        self.title = self.get_title(task_data)

        used = count = 0
        for p in process_info:
            if p["name"] == task_data['project']:
                used += p["memory_used"]
                # a parent process counts itself plus all of its children
                count += 1 if "children" not in p else len(p["children"]) + 1

        if used <= task_data['count'] * 1024 * 1024:
            return None
        return {
            'msg_list': [
                ">Notification type: Task Manager memory usage alarm",
                # BUGFIX: arguments were (project, count, MB, threshold) while
                # the placeholders read (count, project, MB, threshold).
                ">Alarm content: There are {} processes with process name [{}], and the memory resources consumed are {}MB, which is greater than the alarm threshold {}MB.".format(
                    count, task_data['project'], int(used / 1024 / 1024), task_data['count']
                )
            ],
            "project": task_data['project']
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Hide this template when the task_manager plugin is not installed."""
        if not have_task_manager_plugin():
            return None
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """SMS is not supported for this alarm."""
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Compact WeChat official-account message (project name trimmed to 11 chars)."""
        msg = WxAccountMsg.new_msg()
        if len(push_data["project"]) > 11:
            project = push_data["project"][:9] + ".."
        else:
            project = push_data["project"]
        msg.thing_type = "Task Manager memory usage alarm"
        msg.msg = "The memory of {} exceeds the alarm value".format(project)
        return msg
||||
|
||||
|
||||
class TaskManagerProcessTask(BaseTask):
    """Alarm task: fires when the number of processes belonging to a Task
    Manager project exceeds the configured count."""

    def __init__(self):
        super().__init__()
        self.source_name = "task_manager_process"
        self.template_name = "Task Manager Process Overhead Alert"
        self.title = "Task Manager Process Overhead Alert"

    def get_title(self, task_data: dict) -> str:
        """Per-project alarm title."""
        return "Task Manager Process Overhead Alert [{}]".format(task_data["project"])

    def check_task_data(self, task_data: dict) -> Union[dict, str]:
        """Validate and normalize settings; return the dict or an error string."""
        if not task_data["project"]:
            return "Please select a process"
        if "interval" not in task_data or not isinstance(task_data["interval"], int):
            task_data["interval"] = 600
        task_data["interval"] = max(60, task_data["interval"])  # floor at one minute
        if "count" not in task_data or not isinstance(task_data["count"], int):
            return "The check range is set incorrectly"
        if task_data["count"] < 1:
            return "The check range is set incorrectly"
        return task_data

    def get_keyword(self, task_data: dict) -> str:
        """Deduplication keyword: the project name."""
        return task_data["project"]

    def get_push_data(self, task_id: str, task_data: dict) -> Optional[dict]:
        """Return the alarm payload when the project's process count exceeds
        the threshold, otherwise None."""
        process_info = get_process_info()
        count = 0
        for p in process_info:
            if p["name"] == task_data['project']:
                # a parent process counts itself plus all of its children
                count += 1 if "children" not in p else len(p["children"]) + 1

        if count <= task_data['count']:
            return None

        return {
            'msg_list':
                [
                    ">Notification type: Task Manager Process Overhead Alert",
                    # BUGFIX: the original message had two placeholders but was
                    # given three arguments in the wrong order (project filled
                    # the count slot and the threshold was never shown).
                    ">Alarm content: There are {} processes with process name {}, which is greater than the alarm threshold {}.".format(
                        count, task_data['project'], task_data['count']
                    )
                ],
            "project": task_data['project'],
            "count": task_data['count'],
        }

    def filter_template(self, template: dict) -> Optional[dict]:
        """Hide this template when the task_manager plugin is not installed."""
        if not have_task_manager_plugin():
            return None
        return template

    def to_sms_msg(self, push_data: dict, push_public_data: dict) -> Tuple[str, dict]:
        """SMS is not supported for this alarm."""
        return '', {}

    def to_wx_account_msg(self, push_data: dict, push_public_data: dict) -> WxAccountMsg:
        """Compact WeChat official-account message (project name trimmed to 11 chars)."""
        msg = WxAccountMsg.new_msg()
        msg.thing_type = "Task Manager Process Overhead Alert"
        if len(push_data["project"]) > 11:
            project = push_data["project"][:9] + ".."
        else:
            project = push_data["project"]

        if push_data["count"] > 100:  # keep the message short
            push_data["count"] = "LIMIT"

        msg.msg = "{} has more children than {}".format(project, push_data["count"])
        return msg
||||
|
||||
|
||||
class ViewMsgFormat(object):
    """Render a short HTML summary line for each task-manager template id."""

    # template_id -> formatter over the task's task_data dict
    _FORMAT = {
        "60": (
            lambda x: "<span>Process: The CPU occupation of {} is more than {}% triggered</span>".format(
                x.get("project"), x.get("count")
            )
        ),
        "61": (
            lambda x: "<span>Process: Triggered when the memory usage of {} exceeds {}MB</span>".format(
                x.get("project"), x.get("count")
            )
        ),
        "62": (
            # BUGFIX: the original passed (project, count) to a string with a
            # single placeholder, so the project name was rendered where the
            # process count belongs.
            lambda x: "<span>Process: Triggered when the number of child processes exceeds {}</span>".format(
                x.get("count")
            )
        ),
    }

    def get_msg(self, task: dict) -> Optional[str]:
        """Return the formatted message for *task*, or None for unknown ids."""
        if task["template_id"] in self._FORMAT:
            return self._FORMAT[task["template_id"]](task["task_data"])
        return None
||||
92
mod/base/push_mod/tool.py
Normal file
92
mod/base/push_mod/tool.py
Normal file
@@ -0,0 +1,92 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional, Type, TypeVar
|
||||
import traceback
|
||||
from importlib import import_module
|
||||
|
||||
from .base_task import BaseTask
|
||||
from .util import GET_CLASS, get_client_ip, debug_log
|
||||
|
||||
|
||||
T_CLS = TypeVar('T_CLS', bound=BaseTask)
|
||||
|
||||
|
||||
def load_task_cls_by_function(
        name: str,
        func_name: str,
        is_model: bool = False,
        model_index: str = '',
        args: Optional[dict] = None,
        sub_name: Optional[str] = None,
) -> Optional[Type[T_CLS]]:
    """
    Obtain a task class from the result of invoking a panel function.

    @param model_index: module origin, e.g. "mod" for the new-scenario layout
    @param name: module/plugin name
    @param func_name: name of the function to invoke
    @param is_model: True when the target lives in a Model; otherwise it is a plugin
    @param args: parameters for the request; empty by default
    @param sub_name: sub-category name; when given it is joined to the main name
    @return: None, or a valid task class
    """
    import PluginLoader
    real_name = name
    if isinstance(sub_name, str):
        real_name = "{}/{}".format(name, sub_name)

    # Build a request-like object carrying any caller-supplied attributes.
    get_obj = GET_CLASS()
    if args is not None and isinstance(args, dict):
        for key, value in args.items():
            setattr(get_obj, key, value)
    try:
        if is_model:
            get_obj.model_index = model_index
            res = PluginLoader.module_run(real_name, func_name, get_obj)
        else:
            get_obj.fun = func_name
            get_obj.s = func_name
            # NOTE(review): this assigns the get_client_ip function object
            # itself, not its return value -- looks like a missing call;
            # confirm the intended behavior.
            get_obj.client_ip = get_client_ip
            res = PluginLoader.plugin_run(name, func_name, get_obj)
    except:
        debug_log(traceback.format_exc())
        return None
    # A dict result is treated as an error/JSON response rather than a class.
    if isinstance(res, dict):
        return None
    elif isinstance(res, BaseTask):
        # an instance was returned: hand back its class
        return res.__class__
    elif issubclass(res, BaseTask):
        return res
    return None
||||
|
||||
|
||||
def load_task_cls_by_path(path: str, cls_name: str) -> Optional[Type[T_CLS]]:
    """Import ``cls_name`` from module ``path`` and return it as a BaseTask subclass.

    Returns None when the plugin directory is missing, the module cannot be
    imported, or the attribute is absent / not a BaseTask.
    """
    try:
        # Plugins: verify the plugin directory exists before importing.
        path_sep = path.split(".")
        if len(path_sep) >= 2 and path_sep[0] == "plugin":
            plugin_path = "/www/server/panel/plugin/{}".format(path_sep[1])
            if not os.path.isdir(plugin_path):
                return None

        module = import_module(path)
        cls = getattr(module, cls_name, None)
        # BUGFIX: the original called issubclass() first, which raises
        # TypeError for None or for an instance; the error was silently
        # swallowed by the broad except below.  Check the instance case and
        # the "not a class" case explicitly.
        if isinstance(cls, BaseTask):
            return cls.__class__
        if isinstance(cls, type) and issubclass(cls, BaseTask):
            return cls
        debug_log("Error: The loaded class is not a subclass of BaseTask")
        return None
    except ModuleNotFoundError as e:
        # TODO: temporarily ignore ssl_push
        if 'mod.base.push_mod.ssl_push' in str(e):
            return None
        else:
            debug_log(traceback.format_exc())
            debug_log("ModuleNotFoundError: {}".format(str(e)))
            return None
    except Exception:
        # (stray print() debugging removed; debug_log keeps the traceback)
        debug_log(traceback.format_exc())
        return None
||||
193
mod/base/push_mod/util.py
Normal file
193
mod/base/push_mod/util.py
Normal file
@@ -0,0 +1,193 @@
|
||||
import sys
|
||||
from typing import Optional, Callable
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
from db import Sql
|
||||
import os
|
||||
from sslModel import certModel
|
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """
    Write content to a file.

    @filename target file name (created if missing, per ``mode``)
    @s_body content to write
    return bool -- True on success, False on any error
    """
    try:
        # Default open() uses the locale encoding; fall back to utf-8 below.
        # ``with`` guarantees the handle is closed even when write() raises
        # (the original leaked the handle on a failed write).
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except Exception:
        try:
            with open(filename, mode=mode, encoding="utf-8") as fp:
                fp.write(s_body)
            return True
        except Exception:
            return False
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """
    Read file content.

    @filename file name
    return the content, or None when the file is missing or unreadable
    """
    if not os.path.exists(filename):
        return None
    try:
        # ``with`` closes the handle on every path (success or error).
        with open(filename, mode=mode) as fp:
            return fp.read()
    except Exception:
        return None
||||
|
||||
|
||||
# Thin aliases over the panel "public"/db helpers so this package exposes a
# single, consistently named import surface.
ExecShell: Callable = public.ExecShell

write_log: Callable = public.WriteLog

Sqlite: Callable = Sql

GET_CLASS: Callable = public.dict_obj

debug_log: Callable = public.print_log

get_config_value: Callable = public.GetConfigValue

get_server_ip: Callable = public.get_server_ip

get_network_ip: Callable = public.get_network_ip

format_date: Callable = public.format_date

public_get_cache_func: Callable = public.get_cache_func

public_set_cache_func: Callable = public.set_cache_func

public_get_user_info: Callable = public.get_user_info

public_http_post = public.httpPost

panel_version = public.version

try:
    # NOTE(review): presumably these helpers only exist on some panel builds;
    # the try keeps this module importable when they are missing -- confirm.
    get_cert_list = certModel.main().get_cert_list
    to_dict_obj = public.to_dict_obj

except:
    public.print_log(public.get_error_info())


def get_client_ip() -> str:
    """Return the client IP of the current request (delegates to public)."""
    return public.GetClientIp()
||||
|
||||
|
||||
class _DB:
    """Callable helper: ``DB("table")`` returns a db.Sql handle bound to *table*."""

    def __call__(self, table: str):
        import db
        # NOTE(review): the handle is returned after the ``with`` block has
        # already exited -- confirm db.Sql keeps the handle usable after
        # __exit__ runs.
        with db.Sql() as t:
            t.table(table)
            return t


# Module-level singleton used as ``DB(table)``.
DB = _DB()
||||
|
||||
|
||||
def check_site_status(web):
    """Return True when the project described by *web* is running, else None.

    *web* must carry ``project_type`` and ``name`` keys.  Unknown project
    types fall through every check and are reported as running (True) --
    NOTE(review): confirm that is intended.
    """
    panelPath = '/www/server/panel/'
    os.chdir(panelPath)  # project models expect the panel dir as CWD
    sys.path.insert(0, panelPath)

    if web['project_type'] == "Java":
        from mod.project.java.projectMod import main as java
        if not java().get_project_stat(web)['pid']:
            return None
    if web['project_type'] == "Node":
        from projectModelV2.nodejsModel import main as nodejs
        if not nodejs().get_project_run_state(project_name=web['name']):
            return None
    if web['project_type'] == "Go":
        from projectModel.goModel import main as go  # NOQA
        if not go().get_project_run_state(project_name=web['name']):
            return None
    if web['project_type'] == "Python":
        from projectModelV2.pythonModel import main as python
        if not python().get_project_run_state(project_name=web['name']):
            return None
    if web['project_type'] == "Other":
        from projectModel.otherModel import main as other  # NOQA
        if not other().get_project_run_state(project_name=web['name']):
            return None
    return True
||||
|
||||
|
||||
def get_db_by_file(file: str):
    """Return a db.Sql handle bound to the SQLite file *file*, or None if missing."""
    import db
    if not os.path.exists(file):
        return None
    db_obj = db.Sql()
    # HACK: overwrite the name-mangled private attribute db.Sql.__DB_FILE to
    # point the handle at an arbitrary database file.
    db_obj._Sql__DB_FILE = file
    return db_obj
||||
|
||||
|
||||
def generate_fields(template: dict, add_type: str) -> dict:
    """Dynamic-form hook dispatcher: attach extra option fields by hook type.

    Unknown ``add_type`` values leave the template untouched.
    """
    if add_type == "restart":
        return generate_restart_fields(template)
    if add_type == "module":
        return generate_module_fields(template)
    return template
||||
|
||||
|
||||
def generate_restart_fields(template: dict) -> dict:
    """Dynamic-form hook: append a "restart service after alarm" multi-select."""
    from script.restart_services import SERVICES_MAP, ServicesHelper
    f = {
        "attr": "after_hook",
        "name": "After the alarm excutes",
        "suffix": "select after alarm action (Optional)",
        "type": "multiple-select",
        # Only services that are actually installed are offered as choices.
        "items": [
            {
                "title": f"Restart {x}",
                "type": "restart",
                "value": x
            } for x in SERVICES_MAP.keys() if ServicesHelper(x).is_install
        ],
        "default": []
    }
    if "field" in template:
        template["field"].append(f)
    else:
        template["field"] = [f]

    if "sorted" in template and isinstance(template["sorted"], list):
        # avoid duplicating the hook entry on repeated calls
        if ["after_hook"] not in template["sorted"]:
            template["sorted"].append(["after_hook"])
    elif "sorted" not in template:
        template["sorted"] = [["after_hook"]]
    else:
        # "sorted" exists but is not a list: wrap the old value alongside it
        template["sorted"] = [template["sorted"], ["after_hook"]]
    return template
||||
|
||||
|
||||
def generate_module_fields(template: dict) -> dict:
    """Dynamic-form hook for module-call options (not implemented yet)."""
    # TODO: populate module-call fields once the hook is specified.
    return template
||||
20
mod/base/ssh_executor/__init__.py
Normal file
20
mod/base/ssh_executor/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from .ssh_executor import SSHExecutor, CommandResult
|
||||
from .rate_limiter import (
|
||||
RateLimiter,
|
||||
ProgressTracker,
|
||||
TokenBucketRateLimiter,
|
||||
LeakyBucketRateLimiter
|
||||
)
|
||||
from .util import test_ssh_config
|
||||
|
||||
__all__ = [
|
||||
"CommandResult",
|
||||
"SSHExecutor",
|
||||
"RateLimiter",
|
||||
"ProgressTracker",
|
||||
"TokenBucketRateLimiter",
|
||||
"LeakyBucketRateLimiter",
|
||||
"test_ssh_config"
|
||||
]
|
||||
|
||||
|
||||
255
mod/base/ssh_executor/rate_limiter.py
Normal file
255
mod/base/ssh_executor/rate_limiter.py
Normal file
@@ -0,0 +1,255 @@
|
||||
"""
|
||||
Rate limiter for file transfers with clean separation of concerns.
|
||||
|
||||
NOTE: These rate limiters are NOT thread-safe by design.
|
||||
Rate limiting operations are typically single-threaded, and removing
|
||||
threading overhead improves performance.
|
||||
"""
|
||||
|
||||
import time
|
||||
from typing import Optional, Callable
|
||||
|
||||
|
||||
class TokenBucketRateLimiter:
    """Token-bucket throttle for byte transfers (sustained rate + burst).

    NOT thread-safe by design: transfer loops are single-threaded and the
    lock-free implementation keeps per-chunk overhead minimal.

    The token bucket provides a sustained rate (tokens/second), a burst
    allowance (bucket capacity) and peak-rate control.

    Usage:
        limiter = TokenBucketRateLimiter(rate=1024*1024, capacity=2*1024*1024)
        limiter.start()
        for chunk in data_chunks:
            limiter.wait_if_needed(len(chunk))
            # transfer chunk
    """

    def __init__(self,
                 rate: Optional[int] = None,
                 capacity: Optional[int] = None,
                 initial_tokens: Optional[int] = None):
        """
        Args:
            rate: tokens (bytes) per second; None disables limiting.
            capacity: bucket size in bytes; defaults to ``rate``.
            initial_tokens: tokens available at start; defaults to ``capacity``.
        """
        self.rate = rate
        self.capacity = rate if capacity is None else capacity
        self.initial_tokens = self.capacity if initial_tokens is None else initial_tokens

        # runtime state
        self.tokens = self.initial_tokens
        self.last_update = None
        self._started = False

    def _refill(self):
        # Add rate * elapsed tokens, clamped at capacity, and reset the clock.
        now = time.time()
        self.tokens = min(self.capacity,
                          self.tokens + (now - self.last_update) * self.rate)
        self.last_update = now

    def start(self):
        """Arm the limiter; subsequent calls are no-ops."""
        if not self.rate or self._started:
            return
        self._started = True
        self.last_update = time.time()
        self.tokens = self.initial_tokens

    def wait_if_needed(self, chunk_size: int):
        """Block just long enough so the sustained rate is not exceeded.

        Args:
            chunk_size: size in bytes of the chunk just transferred.
        """
        if not self.rate or not self.last_update:
            return  # limiting disabled or start() never called

        self._refill()
        deficit = chunk_size - self.tokens
        if deficit > 0:
            # Sleep exactly long enough to accumulate the missing tokens,
            # then account for the time actually spent sleeping.
            time.sleep(deficit / self.rate)
            self._refill()

        self.tokens -= chunk_size
||||
|
||||
|
||||
class LeakyBucketRateLimiter:
    """
    Leaky bucket rate limiter for strict rate limiting without burst support.

    NOT thread-safe: rate limiting operations are typically single-threaded,
    and removing threading overhead improves performance.

    The leaky bucket algorithm provides:
    - Strict rate limiting (no burst)
    - Predictable output rate
    - Better for network protocols that can't handle bursts

    Usage:
        limiter = LeakyBucketRateLimiter(rate=1024*1024)  # 1MB/s strict rate
        limiter.start()
        for chunk in data_chunks:
            limiter.wait_if_needed(len(chunk))
            # transfer chunk
    """

    def __init__(self, rate: Optional[int] = None):
        """
        Args:
            rate: Tokens (bytes) per second. None means no rate limiting.
        """
        self.rate = rate
        self.last_update = None  # epoch seconds of the previous chunk
        self._started = False    # guards against repeated start() resets

    def start(self):
        """Start timing for rate limiting (idempotent)."""
        if self.rate and not self._started:
            # BUGFIX: the flag was checked but never set, so every start()
            # call reset the clock; now matches TokenBucketRateLimiter.
            self._started = True
            self.last_update = time.time()

    def wait_if_needed(self, chunk_size: int):
        """
        Wait if necessary to maintain the specified transfer rate.

        Args:
            chunk_size: Size of the chunk just transferred in bytes.
        """
        if not self.rate or not self.last_update:
            return  # limiting disabled or start() never called

        now = time.time()
        elapsed = now - self.last_update

        # Minimum wall-clock time this chunk is allowed to take at `rate`.
        min_time = chunk_size / self.rate

        if elapsed < min_time:
            time.sleep(min_time - elapsed)

        self.last_update = time.time()
||||
|
||||
|
||||
class RateLimiter:
    """Unified front-end over the two rate-limiting backends.

    NOT thread-safe: rate limiting is expected to run on a single thread.

    ``algorithm`` selects the implementation:
      - "token_bucket": burst-capable limiting (TokenBucketRateLimiter)
      - "leaky_bucket": strict limiting (LeakyBucketRateLimiter)
    """

    def __init__(self,
                 bytes_per_second: Optional[int] = None,
                 algorithm: str = "token_bucket",
                 burst_capacity: Optional[int] = None):
        """
        Args:
            bytes_per_second: transfer rate limit in bytes per second.
            algorithm: "token_bucket" or "leaky_bucket".
            burst_capacity: token-bucket burst size in bytes;
                defaults to ``bytes_per_second``.

        Raises:
            ValueError: for an unknown ``algorithm`` name.
        """
        if algorithm == "leaky_bucket":
            self._limiter = LeakyBucketRateLimiter(bytes_per_second)
        elif algorithm == "token_bucket":
            self._limiter = TokenBucketRateLimiter(
                rate=bytes_per_second,
                capacity=burst_capacity,
            )
        else:
            raise ValueError(f"Unknown algorithm: {algorithm}. Use 'token_bucket' or 'leaky_bucket'")

        self.bytes_per_second = bytes_per_second
        self.algorithm = algorithm

    def start(self):
        """Arm the underlying limiter."""
        self._limiter.start()

    def wait_if_needed(self, chunk_size: int):
        """Delegate throttling for a chunk of ``chunk_size`` bytes.

        Args:
            chunk_size: size of the chunk just transferred in bytes.
        """
        self._limiter.wait_if_needed(chunk_size)
||||
|
||||
|
||||
class ProgressTracker:
    """Throttled progress reporting through a user callback.

    NOT thread-safe: progress updates are expected from a single transfer loop.
    """

    def __init__(self,
                 callback: Optional[Callable[[int, int], None]] = None,
                 update_interval: float = 0.1):
        """
        Args:
            callback: invoked as ``callback(transferred, total)``.
            update_interval: minimum seconds between two callback invocations.
        """
        self.callback = callback
        self.update_interval = update_interval
        self.last_update_time = 0

    def start(self):
        """Reset throttling so the next update() fires immediately."""
        self.last_update_time = 0

    def update(self, transferred: int, total: int):
        """Invoke the callback unless the previous call was too recent.

        Args:
            transferred: bytes transferred so far.
            total: total bytes to transfer.
        """
        if not self.callback:
            return

        now = time.time()
        if now - self.last_update_time < self.update_interval:
            return  # throttled
        self.callback(transferred, total)
        self.last_update_time = now

    def finish(self, total: int):
        """Emit the final 100% update unconditionally."""
        if self.callback:
            self.callback(total, total)
||||
808
mod/base/ssh_executor/ssh_executor.py
Normal file
808
mod/base/ssh_executor/ssh_executor.py
Normal file
@@ -0,0 +1,808 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import stat
|
||||
from typing import Optional, Tuple, Callable, Union, Dict, Any, Iterator
|
||||
import time
|
||||
import io
|
||||
import math
|
||||
|
||||
import paramiko
|
||||
from dataclasses import dataclass
|
||||
from .rate_limiter import RateLimiter, ProgressTracker
|
||||
|
||||
|
||||
@dataclass
class CommandResult:
    """Outcome of one remote command execution."""

    # Process exit status as reported by the remote shell (0 = success).
    exit_code: int
    # Decoded standard output of the command.
    stdout: str
    # Decoded standard error of the command.
    stderr: str
|
||||
|
||||
|
||||
class SSHExecutor:
|
||||
"""
|
||||
High-level SSH executor wrapping Paramiko for command execution and SFTP upload.
|
||||
|
||||
Usage:
|
||||
with SSHExecutor(host, user, password=...) as ssh:
|
||||
code, out, err = ssh.run("uname -a")
|
||||
ssh.upload("./local.txt", "/tmp/remote.txt")
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
host: str,
|
||||
username: str,
|
||||
port: int = 22,
|
||||
password: Optional[str] = None,
|
||||
key_file: Optional[str] = None,
|
||||
passphrase: Optional[str] = None,
|
||||
key_data: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
strict_host_key_checking: bool = False,
|
||||
allow_agent: bool = False,
|
||||
look_for_keys: bool = False,
|
||||
threading_mod: bool = False, # 线程模式,默认为False,当线程模式,会在每次获取sftp客户端时重新获取,
|
||||
) -> None:
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.key_file = key_file
|
||||
self.passphrase = passphrase
|
||||
self.key_data = key_data
|
||||
self.timeout = timeout or 20
|
||||
self.strict_host_key_checking = strict_host_key_checking
|
||||
self.allow_agent = allow_agent
|
||||
self.look_for_keys = look_for_keys
|
||||
self._client: Optional[paramiko.SSHClient] = None
|
||||
self._sftp: Optional[paramiko.SFTPClient] = None
|
||||
self._threading_mod = threading_mod
|
||||
|
||||
    def open(self) -> None:
        """Establish the SSH connection (idempotent; no-op if already connected)."""
        if self._client is not None:
            return
        client = paramiko.SSHClient()
        if self.strict_host_key_checking:
            client.set_missing_host_key_policy(paramiko.RejectPolicy())
        else:
            # Auto-accept unknown host keys (convenient, but no MITM protection).
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        connect_kwargs: Dict[str, Any] = {
            "hostname": self.host,
            "port": self.port,
            "username": self.username,
            "look_for_keys": self.look_for_keys,
            "allow_agent": self.allow_agent,
        }
        if self.timeout is not None:
            # Apply one timeout value to TCP connect, banner exchange and auth.
            connect_kwargs.update({
                "timeout": self.timeout,
                "banner_timeout": self.timeout,
                "auth_timeout": self.timeout,
            })
        if self.password:
            connect_kwargs["password"] = self.password

        if self.key_file or self.key_data:
            # Attach private-key material when provided (file takes precedence
            # over inline data inside _load_private_key).
            pkey = self._load_private_key(self.key_file, self.key_data, self.passphrase)
            connect_kwargs["pkey"] = pkey

        try:
            client.connect(**connect_kwargs)
        except Exception as e:
            # Release the half-open socket before surfacing the error.
            client.close()
            raise RuntimeError(f"SSH connection failed: {e}")

        self._client = client
|
||||
|
||||
def close(self) -> None:
|
||||
if self._sftp is not None:
|
||||
try:
|
||||
self._sftp.close()
|
||||
finally:
|
||||
self._sftp = None
|
||||
if self._client is not None:
|
||||
try:
|
||||
self._client.close()
|
||||
finally:
|
||||
self._client = None
|
||||
|
||||
def __enter__(self) -> "SSHExecutor":
|
||||
self.open()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc, tb) -> None:
|
||||
self.close()
|
||||
|
||||
def run(self, command: str, timeout: Optional[int] = None) -> Tuple[int, str, str]:
|
||||
client = self._require_client()
|
||||
try:
|
||||
effective_timeout = timeout if timeout is not None else self.timeout
|
||||
stdin, stdout, stderr = client.exec_command(command, timeout=effective_timeout)
|
||||
exit_status = stdout.channel.recv_exit_status()
|
||||
out = stdout.read().decode("utf-8", errors="replace")
|
||||
err = stderr.read().decode("utf-8", errors="replace")
|
||||
return exit_status, out, err
|
||||
except Exception as e:
|
||||
raise RuntimeError(f"Command execution failed: {e}")
|
||||
|
||||
    def upload(
        self,
        local_path: str,
        remote_path: str,
        rate_limit: Optional[int] = None,
        progress_callback: Optional[Callable[[int, int], None]] = None,
        resume: bool = False,
        rate_algorithm: str = "token_bucket",
        burst_capacity: Optional[int] = None,
        rate_limiter: Optional[RateLimiter] = None,
    ) -> None:
        """
        Upload a file via SFTP with optional rate limiting and resume support.

        Args:
            local_path: Local file path
            remote_path: Remote destination path
            rate_limit: Rate limit in bytes per second (None = no limit)
            progress_callback: Callback(transferred_bytes, total_bytes) for progress updates
            resume: Whether to resume upload if remote file exists and is smaller.
                WARNING: Only checks file size, no content verification. Use with caution.
            rate_algorithm: Rate limiting algorithm ("token_bucket" or "leaky_bucket")
            burst_capacity: For token bucket, maximum burst capacity in bytes
            rate_limiter: External RateLimiter instance for unified control across multiple transfers

        Raises:
            FileNotFoundError: If the local file does not exist.
            RuntimeError: If the SFTP transfer fails.
        """
        expanded_local = os.path.expanduser(local_path)
        if not os.path.isfile(expanded_local):
            raise FileNotFoundError(f"Local file not found: {expanded_local}")

        local_size = os.path.getsize(expanded_local)
        resume_offset = 0

        if resume:
            sftp = None
            try:
                sftp = self.get_sftp()
                remote_stat = sftp.stat(remote_path)
                # Resume only when the remote copy is strictly smaller; an
                # equal or larger remote file is rewritten from scratch.
                if remote_stat.st_size < local_size:
                    resume_offset = remote_stat.st_size
                    if progress_callback:
                        progress_callback(resume_offset, local_size)
            except FileNotFoundError:
                # No remote file yet -> full upload.
                pass
            except Exception:
                # Best-effort probe: any other SFTP error falls back to full upload.
                pass
            finally:
                # In threading mode each get_sftp() opens a fresh session; close it.
                if self._threading_mod and sftp:
                    sftp.close()

        sftp = None
        try:
            sftp = self.get_sftp()
            # Use external rate limiter if provided, otherwise create new one
            if rate_limiter is None:
                rate_limiter = RateLimiter(rate_limit, rate_algorithm, burst_capacity)
            progress_tracker = ProgressTracker(progress_callback)

            # Use chunked transfer for better control
            self._upload_chunked(sftp, expanded_local, remote_path, resume_offset, rate_limiter, progress_tracker)

        except Exception as e:
            raise RuntimeError(f"SFTP upload failed: {e}")
        finally:
            if self._threading_mod and sftp:
                sftp.close()
|
||||
|
||||
    def _ensure_remote_dir(self, sftp, path):
        """Create remote directory *path* (with parents) if it does not exist."""
        try:
            sftp.stat(path)
        except FileNotFoundError:
            self._create_remote_dir_recursive(sftp, path)
|
||||
|
||||
@staticmethod
|
||||
def _create_remote_dir_recursive(sftp, path):
|
||||
dirs = [d for d in path.split('/') if d]
|
||||
current = ''
|
||||
for d in dirs:
|
||||
current += '/' + d
|
||||
try:
|
||||
sftp.stat(current)
|
||||
except FileNotFoundError:
|
||||
sftp.mkdir(current)
|
||||
|
||||
    def _upload_chunked(
        self,
        sftp: paramiko.SFTPClient,
        local_path: str,
        remote_path: str,
        resume_offset: int,
        rate_limiter: Optional[RateLimiter] = None,
        progress_tracker: Optional[ProgressTracker] = None,
    ) -> None:
        """Upload file in chunks with optional rate limiting and progress tracking."""
        local_size = os.path.getsize(local_path)
        chunk_size = 32768  # 32KB chunks
        transferred = resume_offset

        # Initialize components if provided
        if rate_limiter:
            rate_limiter.start()
        if progress_tracker:
            progress_tracker.start()

        with open(local_path, "rb") as local_file:
            if resume_offset > 0:
                # Skip the bytes already present on the remote side.
                local_file.seek(resume_offset)

            # Ensure the remote parent directory exists, creating it if missing.
            self._ensure_remote_dir(sftp, os.path.dirname(remote_path))

            # Append when resuming, otherwise truncate and write from scratch.
            with sftp.file(remote_path, "ab" if resume_offset > 0 else "wb") as remote_file:
                while transferred < local_size:
                    chunk = local_file.read(chunk_size)
                    if not chunk:
                        break

                    # Apply rate limiting before transfer
                    if rate_limiter:
                        rate_limiter.wait_if_needed(len(chunk))

                    # Perform the actual transfer
                    remote_file.write(chunk)
                    transferred += len(chunk)

                    # Update progress after transfer
                    if progress_tracker:
                        progress_tracker.update(transferred, local_size)

        # Final progress update
        if progress_tracker:
            progress_tracker.finish(local_size)
|
||||
|
||||
    def download(
        self,
        remote_path: str,
        local_path: str,
        rate_limit: Optional[int] = None,
        progress_callback: Optional[Callable[[int, int], None]] = None,
        resume: bool = False,
        rate_algorithm: str = "token_bucket",
        burst_capacity: Optional[int] = None,
        rate_limiter: Optional[RateLimiter] = None,
    ) -> None:
        """
        Download a file via SFTP with optional rate limiting and resume support.

        Args:
            remote_path: Remote file path
            local_path: Local destination path
            rate_limit: Rate limit in bytes per second (None = no limit)
            progress_callback: Callback(transferred_bytes, total_bytes) for progress updates
            resume: Whether to resume download if local file exists and is smaller.
                WARNING: Only checks file size, no content verification. Use with caution.
            rate_algorithm: Rate limiting algorithm ("token_bucket" or "leaky_bucket")
            burst_capacity: For token bucket, maximum burst capacity in bytes
            rate_limiter: External RateLimiter instance for unified control across multiple transfers

        Raises:
            RuntimeError: If the SFTP transfer fails.
        """
        expanded_local = os.path.expanduser(local_path)
        resume_offset = 0

        sftp = None
        if resume and os.path.exists(expanded_local):
            local_size = os.path.getsize(expanded_local)
            try:
                sftp = self.get_sftp()
                remote_stat = sftp.stat(remote_path)
                # Resume only when the local copy is strictly smaller than the remote.
                if local_size < remote_stat.st_size:
                    resume_offset = local_size
                    if progress_callback:
                        progress_callback(resume_offset, remote_stat.st_size)
            except Exception:
                # Best-effort probe: on any error fall back to a full download.
                pass
            finally:
                # In threading mode each get_sftp() opens a fresh session; close it.
                if self._threading_mod and sftp:
                    sftp.close()

        try:
            sftp = self.get_sftp()
            # Use external rate limiter if provided, otherwise create new one
            if rate_limiter is None:
                rate_limiter = RateLimiter(rate_limit, rate_algorithm, burst_capacity)
            progress_tracker = ProgressTracker(progress_callback)

            # Use chunked transfer for better control
            self._download_chunked(sftp, remote_path, expanded_local, resume_offset, rate_limiter, progress_tracker)

        except Exception as e:
            raise RuntimeError(f"SFTP download failed: {e}")
        finally:
            if self._threading_mod and sftp:
                sftp.close()
|
||||
|
||||
    @staticmethod
    def _download_chunked(
        sftp,
        remote_path: str,
        local_path: str,
        resume_offset: int,
        rate_limiter: Optional[RateLimiter] = None,
        progress_tracker: Optional[ProgressTracker] = None,
    ) -> None:
        """Download file in chunks with optional rate limiting and progress tracking."""
        remote_size = sftp.stat(remote_path).st_size
        chunk_size = 32768  # 32KB chunks
        transferred = resume_offset

        # Initialize components if provided
        if rate_limiter:
            rate_limiter.start()
        if progress_tracker:
            progress_tracker.start()

        # Append when resuming, otherwise truncate and write from scratch.
        mode = "ab" if resume_offset > 0 else "wb"
        with open(local_path, mode) as local_file:
            with sftp.file(remote_path, "rb") as remote_file:
                if resume_offset > 0:
                    remote_file.seek(resume_offset)

                while transferred < remote_size:
                    # Apply rate limiting before transfer
                    # NOTE(review): this charges a full chunk_size even for the
                    # final partial chunk, slightly over-throttling the tail.
                    if rate_limiter:
                        rate_limiter.wait_if_needed(chunk_size)

                    # Perform the actual transfer
                    chunk = remote_file.read(chunk_size)
                    if not chunk:
                        break

                    local_file.write(chunk)
                    transferred += len(chunk)

                    # Update progress after transfer
                    if progress_tracker:
                        progress_tracker.update(transferred, remote_size)

        # Final progress update
        if progress_tracker:
            progress_tracker.finish(remote_size)
|
||||
|
||||
def _require_client(self) -> paramiko.SSHClient:
|
||||
if self._client is None:
|
||||
raise RuntimeError("SSH client is not connected. Call open() or use a with-context.")
|
||||
return self._client
|
||||
|
||||
def get_sftp(self) -> paramiko.SFTPClient:
|
||||
if self._threading_mod:
|
||||
th_sftp = self._require_client().open_sftp()
|
||||
return th_sftp
|
||||
if self._sftp is None:
|
||||
self._sftp = self._require_client().open_sftp()
|
||||
return self._sftp
|
||||
|
||||
    @staticmethod
    def _load_private_key(
        key_file: Optional[str],
        key_data: Optional[str],
        passphrase: Optional[str],
    ) -> paramiko.PKey:
        """Load a private key by normalizing to key_data and parsing it.

        Priority is mutually exclusive by design: key_file > key_data.
        Supported types: RSA, DSS, ECDSA, Ed25519.

        Raises:
            RuntimeError: If no key is provided, the file cannot be read,
                the key is encrypted without a passphrase, or no supported
                key class can parse the material.
        """
        if not key_data and key_file:
            path = os.path.expanduser(key_file)
            try:
                with open(path, "r", encoding="utf-8") as f:
                    key_data = f.read()
            except Exception as e:
                raise RuntimeError(f"Failed to read private key file: {e}")

        if not key_data:
            raise RuntimeError("No private key provided")

        stream = io.StringIO(key_data)
        last_error: Optional[Exception] = None
        # Try each supported key class in turn until one parses the material.
        key_classes = [paramiko.RSAKey, paramiko.ECDSAKey, paramiko.Ed25519Key]
        if hasattr(paramiko, "DSSKey"):  # compatibility with paramiko builds lacking DSSKey
            key_classes.append(paramiko.DSSKey)
        for key_cls in key_classes:
            try:
                # Rewind for every attempt; from_private_key consumes the stream.
                stream.seek(0)
                return key_cls.from_private_key(stream, password=passphrase)
            except paramiko.PasswordRequiredException:
                raise RuntimeError("Private key is encrypted; provide passphrase.")
            except Exception as e:
                last_error = e
        raise RuntimeError(f"Failed to load private key from data: {last_error}")
|
||||
|
||||
    def run_streaming(
        self,
        command: str,
        on_stdout: Optional[Callable[[bytes], None]] = None,
        on_stderr: Optional[Callable[[bytes], None]] = None,
        timeout: Optional[int] = None,
        read_chunk_size: int = 32768,
        poll_interval_sec: float = 0.05,
    ) -> int:
        """
        Execute a remote command and stream output chunks to callbacks to minimize memory usage.

        Args:
            command: Shell command to execute remotely.
            on_stdout: Called with each raw stdout chunk (bytes); may be None.
            on_stderr: Called with each raw stderr chunk (bytes); may be None.
            timeout: Wall-clock limit in seconds; falls back to self.timeout.
            read_chunk_size: Maximum bytes pulled from the channel per read.
            poll_interval_sec: Sleep between polls when no data is ready.

        Returns the process exit status when the command completes.

        Raises:
            RuntimeError: If no SSH transport is available.
            TimeoutError: If the wall-clock limit is exceeded.
        """
        client = self._require_client()
        transport = client.get_transport()
        if transport is None:
            raise RuntimeError("SSH transport is not available")

        effective_timeout = timeout if timeout is not None else self.timeout
        chan = transport.open_session(timeout=effective_timeout)
        chan.exec_command(command)

        start_time = time.time()

        try:
            while True:
                # Forward any pending stdout/stderr chunks without blocking.
                if chan.recv_ready():
                    data = chan.recv(read_chunk_size)
                    if data and on_stdout is not None:
                        on_stdout(data)
                if chan.recv_stderr_ready():
                    data = chan.recv_stderr(read_chunk_size)
                    if data and on_stderr is not None:
                        on_stderr(data)

                # Stop only once the process has exited AND both streams are drained.
                if chan.exit_status_ready() and not chan.recv_ready() and not chan.recv_stderr_ready():
                    break

                # Wall-clock timeout covers the whole command, not one read.
                if effective_timeout is not None and (time.time() - start_time) > effective_timeout:
                    chan.close()
                    raise TimeoutError("Command execution timed out")

                time.sleep(poll_interval_sec)

            exit_code = chan.recv_exit_status()
            return exit_code
        finally:
            # Closing an already-closed channel is harmless; swallow errors.
            try:
                chan.close()
            except Exception:
                pass
|
||||
|
||||
def execute_script_streaming(
|
||||
self,
|
||||
script_content: str,
|
||||
script_type: str = "shell",
|
||||
remote_dir: str = "/tmp",
|
||||
script_name: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
cleanup: bool = True,
|
||||
env_vars: Optional[Dict[str, str]] = None,
|
||||
on_stdout: Optional[Callable[[bytes], None]] = None,
|
||||
on_stderr: Optional[Callable[[bytes], None]] = None,
|
||||
) -> int:
|
||||
"""
|
||||
Execute a bash script with streaming output.
|
||||
|
||||
Args:
|
||||
script_content: The bash script content to execute
|
||||
remote_dir: Remote directory to place the script (default: /tmp)
|
||||
script_name: Name for the script file (auto-generated if None)
|
||||
timeout: Command execution timeout in seconds
|
||||
cleanup: Whether to delete the script file after execution
|
||||
env_vars: Environment variables to set before script execution
|
||||
on_stdout: Callback to receive stdout chunks (bytes)
|
||||
on_stderr: Callback to receive stderr chunks (bytes)
|
||||
|
||||
Returns:
|
||||
int: Exit code of the script execution
|
||||
|
||||
Raises:
|
||||
RuntimeError: If script upload or execution fails
|
||||
"""
|
||||
remote_script_path = self._prepare_script(script_content, remote_dir, script_name)
|
||||
|
||||
try:
|
||||
command = self._build_command(remote_script_path, script_type, env_vars)
|
||||
return self.run_streaming(
|
||||
command,
|
||||
on_stdout=on_stdout,
|
||||
on_stderr=on_stderr,
|
||||
timeout=timeout,
|
||||
)
|
||||
finally:
|
||||
if cleanup:
|
||||
self._cleanup_script(remote_script_path)
|
||||
|
||||
def execute_script_collect(
|
||||
self,
|
||||
script_content: str,
|
||||
script_type: str = "shell",
|
||||
remote_dir: str = "/tmp",
|
||||
script_name: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
cleanup: bool = True,
|
||||
env_vars: Optional[Dict[str, str]] = None,
|
||||
) -> CommandResult:
|
||||
"""
|
||||
Execute a bash script and collect all output.
|
||||
|
||||
Args:
|
||||
script_content: The bash script content to execute
|
||||
remote_dir: Remote directory to place the script (default: /tmp)
|
||||
script_name: Name for the script file (auto-generated if None)
|
||||
timeout: Command execution timeout in seconds
|
||||
cleanup: Whether to delete the script file after execution
|
||||
env_vars: Environment variables to set before script execution
|
||||
|
||||
Returns:
|
||||
CommandResult: The execution result with exit_code, stdout, stderr
|
||||
|
||||
Raises:
|
||||
RuntimeError: If script upload or execution fails
|
||||
"""
|
||||
remote_script_path = self._prepare_script(script_content, remote_dir, script_name)
|
||||
|
||||
try:
|
||||
command = self._build_command(remote_script_path, script_type, env_vars)
|
||||
code, out, err = self.run(command, timeout=timeout)
|
||||
return CommandResult(exit_code=code, stdout=out, stderr=err)
|
||||
finally:
|
||||
if cleanup:
|
||||
self._cleanup_script(remote_script_path)
|
||||
|
||||
def execute_local_script_streaming(
|
||||
self,
|
||||
local_script_path: str,
|
||||
script_type: str = "shell",
|
||||
remote_dir: str = "/tmp",
|
||||
script_name: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
cleanup: bool = True,
|
||||
env_vars: Optional[Dict[str, str]] = None,
|
||||
on_stdout: Optional[Callable[[bytes], None]] = None,
|
||||
on_stderr: Optional[Callable[[bytes], None]] = None,
|
||||
) -> int:
|
||||
"""
|
||||
Execute a local bash script with streaming output.
|
||||
|
||||
Args:
|
||||
local_script_path: Path to the local script file
|
||||
remote_dir: Remote directory to place the script (default: /tmp)
|
||||
script_name: Name for the script file (uses basename if None)
|
||||
timeout: Command execution timeout in seconds
|
||||
cleanup: Whether to delete the script file after execution
|
||||
env_vars: Environment variables to set before script execution
|
||||
on_stdout: Callback to receive stdout chunks (bytes)
|
||||
on_stderr: Callback to receive stderr chunks (bytes)
|
||||
|
||||
Returns:
|
||||
int: Exit code of the script execution
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If local script file not found
|
||||
RuntimeError: If script upload or execution fails
|
||||
"""
|
||||
if not os.path.isfile(local_script_path):
|
||||
raise FileNotFoundError(f"Local script not found: {local_script_path}")
|
||||
|
||||
if not script_name:
|
||||
script_name = os.path.basename(local_script_path)
|
||||
|
||||
remote_script_path = f"{remote_dir.rstrip('/')}/{script_name}"
|
||||
|
||||
# Upload the local script file via SFTP with LF normalization
|
||||
sftp = self.get_sftp()
|
||||
with open(local_script_path, "r", encoding="utf-8", newline="") as f:
|
||||
content = f.read()
|
||||
content_lf = content.replace("\r\n", "\n").replace("\r", "\n")
|
||||
with sftp.file(remote_script_path, "w") as remote_file:
|
||||
remote_file.write(content_lf.encode("utf-8"))
|
||||
|
||||
try:
|
||||
command = self._build_command(remote_script_path, script_type, env_vars)
|
||||
return self.run_streaming(
|
||||
command,
|
||||
on_stdout=on_stdout,
|
||||
on_stderr=on_stderr,
|
||||
timeout=timeout,
|
||||
)
|
||||
finally:
|
||||
if cleanup:
|
||||
self._cleanup_script(remote_script_path)
|
||||
|
||||
def execute_local_script_collect(
|
||||
self,
|
||||
local_script_path: str,
|
||||
script_type: str = "shell",
|
||||
remote_dir: str = "/tmp",
|
||||
script_name: Optional[str] = None,
|
||||
timeout: Optional[int] = None,
|
||||
cleanup: bool = True,
|
||||
env_vars: Optional[Dict[str, str]] = None,
|
||||
) -> CommandResult:
|
||||
"""
|
||||
Execute a local bash script and collect all output.
|
||||
|
||||
Args:
|
||||
local_script_path: Path to the local script file
|
||||
remote_dir: Remote directory to place the script (default: /tmp)
|
||||
script_name: Name for the script file (uses basename if None)
|
||||
timeout: Command execution timeout in seconds
|
||||
cleanup: Whether to delete the script file after execution
|
||||
env_vars: Environment variables to set before script execution
|
||||
|
||||
Returns:
|
||||
CommandResult: The execution result with exit_code, stdout, stderr
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If local script file not found
|
||||
RuntimeError: If script upload or execution fails
|
||||
"""
|
||||
if not os.path.isfile(local_script_path):
|
||||
raise FileNotFoundError(f"Local script not found: {local_script_path}")
|
||||
|
||||
if not script_name:
|
||||
script_name = os.path.basename(local_script_path)
|
||||
|
||||
remote_script_path = f"{remote_dir.rstrip('/')}/{script_name}"
|
||||
|
||||
# Upload the local script file via SFTP with LF normalization
|
||||
sftp = self.get_sftp()
|
||||
with open(local_script_path, "r", encoding="utf-8", newline="") as f:
|
||||
content = f.read()
|
||||
content_lf = content.replace("\r\n", "\n").replace("\r", "\n")
|
||||
with sftp.file(remote_script_path, "w") as remote_file:
|
||||
remote_file.write(content_lf.encode("utf-8"))
|
||||
|
||||
try:
|
||||
command = self._build_command(remote_script_path, script_type, env_vars)
|
||||
code, out, err = self.run(command, timeout=timeout)
|
||||
return CommandResult(exit_code=code, stdout=out, stderr=err)
|
||||
finally:
|
||||
if cleanup:
|
||||
self._cleanup_script(remote_script_path)
|
||||
|
||||
def _prepare_script(self, script_content: str, remote_dir: str, script_name: Optional[str]) -> str:
|
||||
"""Prepare script by uploading content (LF normalized)."""
|
||||
if not script_name:
|
||||
import uuid
|
||||
script_name = f"script_{uuid.uuid4().hex[:8]}"
|
||||
|
||||
remote_script_path = f"{remote_dir.rstrip('/')}/{script_name}"
|
||||
|
||||
sftp = None
|
||||
try:
|
||||
# Upload script content directly via SFTP (normalize to LF)
|
||||
sftp = self.get_sftp()
|
||||
content_lf = script_content.replace("\r\n", "\n").replace("\r", "\n")
|
||||
with sftp.file(remote_script_path, "w") as remote_file:
|
||||
remote_file.write(content_lf.encode("utf-8"))
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
if sftp and self._threading_mod:
|
||||
sftp.close()
|
||||
|
||||
return remote_script_path
|
||||
|
||||
@staticmethod
|
||||
def _build_command(
|
||||
remote_script_path: str,
|
||||
script_type: str = "shell",
|
||||
env_vars: Optional[Dict[str, str]] = None) -> str:
|
||||
"""Build the command string with environment variables."""
|
||||
env_string = ""
|
||||
if env_vars:
|
||||
env_pairs = [f"{k}='{v}'" for k, v in env_vars.items()]
|
||||
env_string = " ".join(env_pairs) + " "
|
||||
|
||||
if script_type == "shell":
|
||||
return f"{env_string}bash {remote_script_path}"
|
||||
elif script_type == "python":
|
||||
get_py_bin = "pyBin=$(which python3 2> /dev/null || which python 2> /dev/null || echo 'python')"
|
||||
py_info = "echo ""; echo \"Current Python environment:${pyBin} $(${pyBin} -c 'import sys,platform;print(sys.version.split()[0],platform.platform())')\""
|
||||
cmd = "%s;${pyBin} %s; ret=$?; [ $ret -eq 0 ] && exit $ret; %s;exit $ret;" % (
|
||||
get_py_bin, remote_script_path, py_info
|
||||
)
|
||||
return cmd
|
||||
else:
|
||||
raise ValueError("Invalid script type")
|
||||
|
||||
def _cleanup_script(self, remote_script_path: str) -> None:
|
||||
"""Clean up the remote script file via SFTP without invoking shell."""
|
||||
sftp = None
|
||||
try:
|
||||
sftp = self.get_sftp()
|
||||
# Ensure path exists before removal
|
||||
try:
|
||||
sftp.stat(remote_script_path)
|
||||
except FileNotFoundError:
|
||||
return
|
||||
sftp.remove(remote_script_path)
|
||||
except Exception:
|
||||
# Swallow cleanup errors
|
||||
pass
|
||||
finally:
|
||||
if sftp and self._threading_mod:
|
||||
sftp.close()
|
||||
|
||||
def path_exists(self, path: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
Check if a path exists on the remote server.
|
||||
|
||||
Args:
|
||||
path: Path to check
|
||||
|
||||
Returns:
|
||||
Tuple[bool, str]: A tuple containing a boolean indicating whether the path exists and an error message
|
||||
"""
|
||||
sftp = None
|
||||
try:
|
||||
sftp = self.get_sftp()
|
||||
try:
|
||||
sftp.stat(path)
|
||||
return True, ""
|
||||
except FileNotFoundError:
|
||||
return False, ""
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
finally:
|
||||
if sftp and self._threading_mod:
|
||||
sftp.close()
|
||||
|
||||
|
||||
def create_dir(self, path: str):
|
||||
"""
|
||||
Create a directory on the remote server.
|
||||
|
||||
Args:
|
||||
path: Path to create
|
||||
|
||||
Returns:
|
||||
Tuple[bool, str]: A tuple containing a boolean indicating whether the directory was created successfully and an error message
|
||||
"""
|
||||
sftp = None
|
||||
try:
|
||||
sftp = self.get_sftp()
|
||||
self._ensure_remote_dir(sftp, path)
|
||||
return True, ""
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
finally:
|
||||
if sftp and self._threading_mod:
|
||||
return sftp.close()
|
||||
|
||||
def path_info(self, path: str) -> Dict:
|
||||
"""
|
||||
Get information about a path on the remote server.
|
||||
|
||||
Args:
|
||||
path: Path to get information about
|
||||
|
||||
Returns:
|
||||
Dict: A dictionary containing information about the path, including path, isdir, size, mtime, mode, uid, gid, and exists
|
||||
"""
|
||||
sftp = None
|
||||
not_found = {"path": path,"isdir": False,"size": 0,"mtime": 0,"mode": 0,"uid": 0,"gid": 0, "exists": False}
|
||||
try:
|
||||
sftp = self.get_sftp()
|
||||
info = sftp.stat(path)
|
||||
return {
|
||||
"path": path,
|
||||
"isdir": stat.S_ISDIR(info.st_mode),
|
||||
"size": info.st_size,
|
||||
"mtime": info.st_mtime,
|
||||
"mode": info.st_mode,
|
||||
"uid": info.st_uid,
|
||||
"gid": info.st_gid,
|
||||
"exists": True
|
||||
}
|
||||
except FileNotFoundError:
|
||||
return not_found
|
||||
except:
|
||||
return not_found
|
||||
finally:
|
||||
if sftp and self._threading_mod:
|
||||
sftp.close()
|
||||
|
||||
51
mod/base/ssh_executor/util.py
Normal file
51
mod/base/ssh_executor/util.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import io
|
||||
import paramiko
|
||||
|
||||
|
||||
def test_ssh_config(host, port, username, password, pkey, pkey_passwd, timeout: int = 10) -> str:
    """Validate SSH connection settings by attempting a real connection.

    Returns an empty string on success, otherwise a human-readable English
    error message describing why authentication/connection failed.
    """
    try:
        ssh = paramiko.SSHClient()
        pkey_obj = None
        if pkey:
            pky_io = io.StringIO(pkey)
            # Try each supported key class; DSSKey only when this paramiko build has it.
            key_cls_list = [paramiko.RSAKey, paramiko.ECDSAKey, paramiko.Ed25519Key]
            if hasattr(paramiko, "DSSKey"):
                key_cls_list.append(paramiko.DSSKey)
            for key_cls in key_cls_list:
                # Rewind before every attempt; parsing consumes the stream.
                pky_io.seek(0)
                try:
                    pkey_obj = key_cls.from_private_key(pky_io, password=(pkey_passwd if pkey_passwd else None))
                except Exception as e:
                    # Map well-known parse failures to actionable messages.
                    if "base64 decoding error" in str(e):
                        return "Private key data error, please check if it is a complete copy of the private key information"
                    elif "Private key file is encrypted" in str(e):
                        return "The private key has been encrypted, but the password for the private key has not been provided, so the private key information cannot be verified"
                    elif "Invalid key" in str(e):
                        return "Private key parsing error, please check if the password for the private key is correct"
                    continue
                else:
                    # Parsed successfully -> stop probing key classes.
                    break
            else:
                # for/else: no key class could parse the material.
                return "Private key parsing error, please confirm that the entered key format is correct"
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # look_for_keys MUST be False to avoid errors caused by unnecessary
        # attempts with local private keys.
        ssh.connect(hostname=host, port=port, username=username, password=(password if password else None),
                    pkey=pkey_obj, look_for_keys=False, auth_timeout=timeout)
        ssh.close()
        return ""
    except Exception as e:
        # Translate common paramiko failure strings into friendlier messages.
        err_str = str(e)
        auth_str = "{}@{}:{}".format(username, host, port)
        if err_str.find('Authentication timeout') != -1:
            return 'Authentication timeout, [{}] error:{}'.format(auth_str, e)
        if err_str.find('Authentication failed') != -1:
            if pkey:
                return 'Authentication failed, please check if the private key is correct: ' + auth_str
            return 'Account or password error:' + auth_str
        if err_str.find('Bad authentication type; allowed types') != -1:
            return 'Unsupported authentication type: {}'.format(err_str)
        if err_str.find('Connection reset by peer') != -1:
            return 'The target server actively rejects the connection'
        if err_str.find('Error reading SSH protocol banner') != -1:
            return 'Protocol header response timeout, error:' + err_str
        return "Connection failed:" + err_str
|
||||
66
mod/base/web_conf/__init__.py
Normal file
66
mod/base/web_conf/__init__.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import json
|
||||
import os.path
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
from .ip_restrict import IpRestrict, RealIpRestrict
|
||||
from .redirect import RealRedirect, Redirect
|
||||
from .access_restriction import AccessRestriction, RealAccessRestriction
|
||||
from .domain_tool import domain_to_puny_code, check_domain, normalize_domain, NginxDomainTool, ApacheDomainTool, \
|
||||
is_domain
|
||||
from .dir_tool import DirTool
|
||||
from .referer import Referer, RealReferer
|
||||
from .logmanager import LogMgr, RealLogMgr
|
||||
from .proxy import Proxy, RealProxy
|
||||
from .ssl import SSLManager, RealSSLManger
|
||||
from .config_mgr import ConfigMgr
|
||||
from .default_site import set_default_site, get_default_site, check_default
|
||||
from .server_extension import NginxExtension as ng_ext, ApacheExtension as ap_ext
|
||||
|
||||
|
||||
def remove_sites_service_config(site_name: str, config_prefix: str = ""):
    """Delete every nginx/apache artifact that belongs to one site.

    Covers: the vhost config files, access restrictions, reverse proxy,
    redirects, hotlink protection (referer), the certificate directory,
    IP black/white lists, history config snapshots, the default-site
    marker, log-format records and the rewrite (pseudo-static) config.

    :param site_name: the site whose configuration is being removed
    :param config_prefix: optional config-file name prefix (e.g. "net_")
    """
    # vhost config files
    ng_file = "/www/server/panel/vhost/nginx/{}{}.conf".format(config_prefix, site_name)
    if os.path.exists(ng_file):
        os.remove(ng_file)
    ap_file = "/www/server/panel/vhost/apache/{}{}.conf".format(config_prefix, site_name)
    if os.path.exists(ap_file):
        os.remove(ap_file)
    # access restrictions
    RealAccessRestriction(config_prefix=config_prefix).remove_site_access_restriction_info(site_name)
    # reverse proxy
    RealProxy(config_prefix=config_prefix).remove_site_proxy_info(site_name)
    # redirects
    RealRedirect(config_prefix=config_prefix).remove_site_redirect_info(site_name)
    # hotlink protection (referer rules)
    RealReferer(config_prefix=config_prefix).remove_site_referer_info(site_name)
    # certificate directory
    cert_path = "/www/server/panel/vhost/cert/" + site_name
    if os.path.isdir(cert_path):
        shutil.rmtree(cert_path)
    # IP black/white lists
    RealIpRestrict(config_prefix=config_prefix).remove_site_ip_restrict_info(site_name)
    # history config snapshots
    ConfigMgr(site_name=site_name, config_prefix=config_prefix).clear_history_file()
    # default-site marker: clear it only if this site is the current default
    d_site_name, d_prefix = get_default_site()
    if d_site_name == site_name and d_prefix == config_prefix:
        d_file = "/www/server/panel/data/mod_default_site.pl"
        # fix: the file handle was previously opened and never closed
        with open(d_file, mode="w+") as f:
            json.dump({"name": None, "prefix": None}, f)

    # log-format config records
    RealLogMgr(conf_prefix=config_prefix).remove_site_log_format_info(site_name)

    # rewrite (pseudo-static) config
    rewrite_path = "/www/server/panel/vhost/rewrite/{}{}.conf".format(config_prefix, site_name)
    # fix: this is a regular file; the old `os.path.isdir` check was always
    # False, so the rewrite config was never actually deleted
    if os.path.isfile(rewrite_path):
        os.remove(rewrite_path)
|
||||
BIN
mod/base/web_conf/__pycache__/ssl.cpython-314.pyc
Normal file
BIN
mod/base/web_conf/__pycache__/ssl.cpython-314.pyc
Normal file
Binary file not shown.
608
mod/base/web_conf/access_restriction.py
Normal file
608
mod/base/web_conf/access_restriction.py
Normal file
@@ -0,0 +1,608 @@
|
||||
# 访问限制, 目前不兼容之前版本的访问限制
|
||||
# nginx 使用 if 和 正则实现,保障与反向代理、重定向的兼容性
|
||||
# apache 实现方案未变
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import shutil
|
||||
import warnings
|
||||
from typing import Optional, Union, List, Dict
|
||||
from itertools import chain
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload, get_log_path, pre_re_key
|
||||
from mod.base import json_response
|
||||
warnings.filterwarnings("ignore", category=SyntaxWarning)
|
||||
|
||||
class _ConfigObject:
    """Lazy JSON-backed configuration store shared by the restriction classes."""

    # subclasses point this at their own JSON state file
    _config_file_path = ""
    panel_path = "/www/server/panel"

    def __init__(self):
        # cache of the parsed JSON file; loaded on first access of `config`
        self._config: Optional[dict] = None

    @property
    def config(self) -> Dict[str, dict]:
        """Parsed config dict; an unreadable/invalid file yields an empty dict."""
        if self._config is not None:
            return self._config
        try:
            parsed = json.loads(read_file(self._config_file_path))
        except (json.JSONDecodeError, TypeError, ValueError):
            parsed = {}
        self._config = parsed
        return self._config

    def save_config(self):
        """Persist the cached config (no-op while nothing has been loaded/changed)."""
        if not self._config:
            return
        write_file(self._config_file_path, json.dumps(self._config))
|
||||
|
||||
|
||||
class ServerConfig:
    """Common base for per-web-server config writers (vhost path + prefix)."""

    _vhost_path = "/www/server/panel/vhost"

    def __init__(self, config_prefix: str):
        # config-file name prefix, e.g. "net_" (empty for the default site set)
        self.config_prefix: str = config_prefix

    @staticmethod
    def crypt_password(password) -> str:
        """Hash *password* for auth_basic/AuthUserFile credential files.

        Fix: the salt used to be the password itself, which leaked its first
        two characters into the stored hash. A random traditional-crypt salt
        keeps the same hash format (still validated by nginx/apache via
        libc crypt) without that leak.
        """
        # NOTE(review): the `crypt` module is deprecated (removed in 3.13);
        # consider vendoring a SHA-512-crypt implementation when upgrading.
        import crypt
        return crypt.crypt(password, crypt.mksalt(crypt.METHOD_CRYPT))
|
||||
|
||||
|
||||
# nginx配置文件相关操作
|
||||
# nginx config-file operations for access restrictions
class _NginxAccessConf(ServerConfig):

    # Splice an `include .../access/<site>/*.conf;` line into the site's
    # nginx vhost file (creating the access dir), so per-site restriction
    # files get loaded. Returns an error string on failure, None on success.
    def set_nginx_access_include(self, site_name) -> Optional[str]:
        ng_file = "{}/nginx/{}{}.conf".format(self._vhost_path, self.config_prefix, site_name)
        ng_conf = read_file(ng_file)
        if not ng_conf:
            return "配置文件丢失"
        access_dir = "{}/nginx/access/{}".format(self._vhost_path, site_name)
        # ensure .../nginx/access and .../nginx/access/<site> both exist
        if not os.path.isdir(os.path.dirname(access_dir)):
            os.makedirs(os.path.dirname(access_dir))

        if not os.path.isdir(access_dir):
            os.makedirs(access_dir)

        include_conf = (
            "    #引用访问限制规则,注释后配置的访问限制将无效\n"
            "    include /www/server/panel/vhost/nginx/access/%s/*.conf;\n"
        ) % site_name

        # already wired in? then nothing to do (bare `return` -> None = success)
        rep_include = re.compile(r"\s*include.*/access/.*/\*\.conf\s*;", re.M)
        if rep_include.search(ng_conf):
            return
        # candidate anchors to insert the include at, tried in order;
        # the bool says whether to insert BEFORE (True) or AFTER (False) the match
        rep_list = [
            (re.compile(r"#SSL-END"), False),  # after the SSL block marker
            (re.compile(r"(\s*#.*)?\s*include\s+.*/redirect/.*\.conf;"), True),  # before redirect includes
            (re.compile(r"(\s*#.*)?\s*include\s+.*/ip-restrict/.*\.conf;"), True),  # before IP black/white list includes
        ]

        # Insert include_conf at the anchor matched by tmp_rep; use_start picks
        # insertion before vs. after the match. Writes the file, then rolls the
        # write back if nginx is active and the new config fails validation.
        def set_by_rep_idx(tmp_rep: re.Pattern, use_start: bool) -> bool:
            tmp_res = tmp_rep.search(ng_conf)
            if not tmp_res:
                return False
            if use_start:
                new_conf = ng_conf[:tmp_res.start()] + include_conf + tmp_res.group() + ng_conf[tmp_res.end():]
            else:
                new_conf = ng_conf[:tmp_res.start()] + tmp_res.group() + include_conf + ng_conf[tmp_res.end():]

            write_file(ng_file, new_conf)
            if webserver() == "nginx" and check_server_config() is not None:
                # validation failed: restore the original file contents
                write_file(ng_file, ng_conf)
                return False
            return True

        for r, s in rep_list:
            if set_by_rep_idx(r, s):
                break
        else:
            # no anchor matched anywhere in the vhost file
            return "无法在配置文件中定位到需要添加的项目"

    # Render and write the per-site access-restriction conf file from the
    # stored rule dict. Returns an error string on failure, None on success.
    def set_nginx_access_by_conf(self, site_name: str, configs: Dict[str, List[Dict[str, str]]]) -> Optional[str]:
        """Example structure of ``configs``:
        configs = {
            "auth_dir": [
                {
                    "name": "aaa",
                    "dir_path": "/",
                    "auth_file": "/www/server/pass/www.cache.com/aaa.pass",
                    "username":"aaaa",
                    "password":"aaaa",
                }
            ],
            "file_deny": [
                {
                    "name": "bbb",
                    "dir_path": "/",
                    "suffix": ["png", "jpg"]
                }
            ]
        }
        """

        # merge auth_dir and file_deny rules that target the same path,
        # so one location block can carry both restrictions
        path_map = {}
        for c in chain(configs.get("auth_dir", []), configs.get("file_deny", [])):
            if c["dir_path"] not in path_map:
                path_map[c["dir_path"]] = {"path": c["dir_path"]}
            path_map[c["dir_path"]].update(c)

        # deepest paths first so more specific locations win in nginx
        path_list = list(path_map.values())
        path_list.sort(key=lambda x: len(x["path"].split("/")), reverse=True)
        conf_template = r"""location ~ "^%s.*$" {
    auth_basic "Authorization";
    auth_basic_user_file %s;
    %s
}
"""
        suffix_template = r'{tmp_pre}if ( $uri ~ "\.({suffix})$" ) {{\n{tmp_pre}    return 404;\n{tmp_pre}}}'
        suffix_template2 = r'if ( $uri ~ "^{path}.*\.({suffix})$" ) {{\n    return 404;\n}}\n'
        tmp_conf_list = []
        for i in path_list:
            if "auth_file" in i and "suffix" in i:
                # combined rule: basic auth plus a nested suffix deny
                tmp_pre = "    "
                tmp_conf = conf_template % (
                    i["path"], i["auth_file"], suffix_template.format(tmp_pre=tmp_pre, suffix="|".join(i["suffix"]))
                )
                write_file(i["auth_file"], "{}:{}".format(i["username"], self.crypt_password(i["password"])))

            elif "auth_file" in i:
                # basic-auth only
                tmp_conf = conf_template % (i["path"], i["auth_file"], "")
                write_file(i["auth_file"], "{}:{}".format(i["username"], self.crypt_password(i["password"])))
            else:
                # suffix deny only
                tmp_conf = suffix_template2.format(path=i["path"], suffix="|".join(i["suffix"]))

            tmp_conf_list.append(tmp_conf)

        config_data = "\n".join(tmp_conf_list)
        config_file = "{}/nginx/access/{}/{}{}.conf".format(self._vhost_path, site_name, self.config_prefix, site_name)
        old_config = read_file(config_file)
        write_file(config_file, config_data)
        if webserver() == "nginx" and check_server_config() is not None:
            # validation failed: restore the previous rule file (or empty it
            # if it did not exist before)
            if isinstance(old_config, str):
                write_file(config_file, old_config)
            else:
                write_file(config_file, "")
            return "配置失败"
||||
|
||||
|
||||
class _ApacheAccessConf(ServerConfig):
    """Apache-side writer for access-restriction config files."""

    def set_apache_access_include(self, site_name) -> Optional[str]:
        """Splice an ``IncludeOptional .../access/<site>/*.conf`` line into the
        site's apache vhost file, creating the access and password dirs.

        :return: error string on failure, None on success
        """
        ap_file = "{}/apache/{}{}.conf".format(self._vhost_path, self.config_prefix, site_name)
        ap_conf = read_file(ap_file)
        if not ap_conf:
            return "配置文件丢失"
        access_dir = "{}/apache/access/{}".format(self._vhost_path, site_name)
        if not os.path.isdir(os.path.dirname(access_dir)):
            os.makedirs(os.path.dirname(access_dir))

        if not os.path.isdir(access_dir):
            os.makedirs(access_dir)

        # directory that holds the AuthUserFile password files for this site
        pass_dir = "/www/server/pass/" + site_name
        if not os.path.isdir(os.path.dirname(pass_dir)):
            os.makedirs(os.path.dirname(pass_dir))

        if not os.path.isdir(pass_dir):
            os.makedirs(pass_dir)

        include_conf = (
            "\n    #引用访问限制规则,注释后配置的访问限制将无效\n"
            "    IncludeOptional /www/server/panel/vhost/apache/access/%s/*.conf\n"
        ) % site_name

        # already wired in? then nothing to do (bare `return` -> None = success)
        rep_include = re.compile(r"\s*IncludeOptional.*/access/.*/\*\.conf", re.M)
        if rep_include.search(ap_conf):
            return
        # insert just before the closing </VirtualHost>
        rep_vhost_r = re.compile(r"</VirtualHost>")
        new_conf = rep_vhost_r.sub(include_conf + "</VirtualHost>", ap_conf)
        if not rep_include.search(new_conf):
            return "配置添加失败"

        write_file(ap_file, new_conf)
        # fix: this rolled back on `webserver() == "nginx"` (copy-paste from
        # the nginx class); only a failing *apache* validation should revert
        if webserver() == "apache" and check_server_config() is not None:
            write_file(ap_file, ap_conf)
            return "配置添加失败"

    def set_apache_access_by_conf(self, site_name: str, configs: Dict[str, List[Dict[str, str]]]) -> Optional[str]:
        """Write one apache conf fragment per rule and prune stale ones.

        Example structure of ``configs``:
        configs = {
            "auth_dir": [
                {
                    "name": "aaa",
                    "dir_path": "/",
                    "auth_file": "/www/server/pass/www.cache.com/aaa.pass",
                    "username":"aaaa",
                    "password":"aaaa",
                }
            ],
            "file_deny": [
                {
                    "name": "bbb",
                    "dir_path": "/",
                    "suffix": ["png", "jpg"]
                }
            ]
        }

        :return: error string on failure, None on success
        """
        site_path = DB("sites").where("name=?", (site_name, )).find()["path"]
        names = []
        old_configs = []
        access_dir = "{}/apache/access/{}".format(self._vhost_path, site_name)
        # snapshot the existing fragments so we can roll back on failure
        for i in os.listdir(access_dir):
            if not os.path.isfile(os.path.join(access_dir, i)):
                continue
            old_configs.append((i, read_file(os.path.join(access_dir, i))))

        for c in chain(configs.get("auth_dir", []), configs.get("file_deny", [])):
            if "suffix" in c:
                self._set_apache_file_deny(c, site_name)
                names.append("deny_{}.conf".format(c["name"]))
            else:
                self._set_apache_auth_dir(c, site_name, site_path)
                names.append("auth_{}.conf".format(c["name"]))

        # remove fragments whose rule no longer exists
        for i in os.listdir(access_dir):
            if i not in names:
                os.remove(os.path.join(access_dir, i))

        if webserver() == "apache" and check_server_config() is not None:
            # validation failed: restore the previous fragment set
            for i in os.listdir(access_dir):
                os.remove(os.path.join(access_dir, i))
            for n, data in old_configs:
                write_file(os.path.join(access_dir, n), data)
            return "配置保存失败"

    def _set_apache_file_deny(self, data: dict, site_name: str):
        """Write a <Directory> deny fragment for one suffix-deny rule."""
        conf = r'''
#BEGIN_DENY_{n}
<Directory ~ "{d}.*\.({s})$">
    Order allow,deny
    Deny from all
</Directory>
#END_DENY_{n}
'''.format(n=data["name"], d=data["dir_path"], s="|".join(data["suffix"]))
        access_file = "{}/apache/access/{}/deny_{}.conf".format(self._vhost_path, site_name, data["name"])
        write_file(access_file, conf)

    def _set_apache_auth_dir(self, data: dict, site_name: str, site_path: str):
        """Write a basic-auth <Directory> fragment plus its password file.

        Fix: the parameters were declared ``(data, site_path, site_name)`` but
        every caller passes ``(data, site_name, site_path)``, so the site NAME
        ended up in the <Directory> filesystem path. Signature and usages are
        now consistent with the call sites.
        """
        conf = '''
<Directory "{site_path}{site_dir}">
    #AUTH_START
    AuthType basic
    AuthName "Authorization "
    AuthUserFile {auth_file}
    Require user {username}
    #AUTH_END
    SetOutputFilter DEFLATE
    Options FollowSymLinks
    AllowOverride All
    #Require all granted
    DirectoryIndex index.php index.html index.htm default.php default.html default.htm
</Directory>'''.format(site_path=site_path, site_dir=data["dir_path"], auth_file=data["auth_file"],
                       username=data["username"])
        write_file(data["auth_file"], "{}:{}".format(data["username"], self.crypt_password(data["password"])))
        access_file = "{}/apache/access/{}/auth_{}.conf".format(self._vhost_path, site_name, data["name"])
        write_file(access_file, conf)
||||
|
||||
|
||||
class RealAccessRestriction(_ConfigObject, _ApacheAccessConf, _NginxAccessConf):
    """Access-restriction manager: JSON rule store + nginx/apache writers."""

    _config_file_path = "/www/server/panel/data/site_access.json"

    def __init__(self, config_prefix: str):
        # initialise the _ConfigObject side (lazy JSON config cache) ...
        super(RealAccessRestriction, self).__init__()
        # ... then ServerConfig (next in the MRO after _ApacheAccessConf)
        # to record the config-file prefix
        super(_ApacheAccessConf, self).__init__(config_prefix)

    def _refresh_web_server_conf(self, site_name: str, site_access_conf: dict, web_server=None) -> Optional[str]:
        """Push the rule dict into both servers' files.

        Both are always written so a later web-server switch keeps working,
        but only a failure on the currently active server is fatal.
        """
        if web_server is None:
            web_server = webserver()
        error_msg = self.set_apache_access_by_conf(site_name, site_access_conf)
        if web_server == "apache" and error_msg is not None:
            return error_msg
        error_msg = self.set_nginx_access_by_conf(site_name, site_access_conf)
        if web_server == "nginx" and error_msg is not None:
            return error_msg

    def _set_web_server_conf_include(self, site_name, web_server=None) -> Optional[str]:
        """Ensure both vhost files include the per-site access fragments."""
        if web_server is None:
            web_server = webserver()
        error_msg = self.set_apache_access_include(site_name)
        if web_server == "apache" and error_msg is not None:
            return error_msg
        error_msg = self.set_nginx_access_include(site_name)
        if web_server == "nginx" and error_msg is not None:
            return error_msg

    def check_auth_dir_args(self, get, is_modify=False) -> Union[str, dict]:
        """Validate request args for a basic-auth rule.

        :return: an error string, or the cleaned value dict on success
        """
        values = {}
        try:
            values["site_name"] = get.site_name.strip()
            values["dir_path"] = get.dir_path.strip()
        except AttributeError:
            return "parameter error"

        if hasattr(get, "password"):
            password = get.password.strip()
            if len(password) < 3:
                return '密码不能少于3位'
            if re.search(r'\s', password):
                return '密码不能存在空格'
            values['password'] = password
        else:
            return '请输入密码!'

        if hasattr(get, "username"):
            username = get.username.strip()
            if len(username) < 3:
                return '账号不能少于3位'
            if re.search(r'\s', username):
                return '账号不能存在空格'
            values['username'] = username
        else:
            return '请输入用户!'

        if hasattr(get, "name"):
            name = get.name.strip()
            if len(name) < 3:
                return '名称不能少于3位'
            if re.search(r'\s', name):
                return '名称不能存在空格'
            if not re.search(r'^\w+$', name):
                return '名称格式错误,仅支持数字字母下划线,请参考格式:aaa_bbb'
            values['name'] = name
        else:
            return '请输入名称!'
        # creation must not duplicate an existing path or rule name
        if not is_modify:
            data = self.config.get(values["site_name"], {}).get("auth_dir", [])
            for i in data:
                if i["dir_path"] == values["dir_path"]:
                    return "此路径已存在"
                if i["name"] == values["name"]:
                    return "此名称已存在"

        values["auth_file"] = "/www/server/pass/{}/{}.pass".format(values["site_name"], values["name"])
        return values

    def create_auth_dir(self, get) -> Optional[str]:
        """Add a basic-auth rule; returns an error string or None on success."""
        conf = self.check_auth_dir_args(get, is_modify=False)
        if isinstance(conf, str):
            return conf

        web_server = webserver()
        error_msg = self._set_web_server_conf_include(conf["site_name"], web_server)
        if error_msg:
            return error_msg

        if conf["site_name"] not in self.config:
            self.config[conf["site_name"]] = {"auth_dir": [], "file_deny": []}
        self.config[conf["site_name"]]["auth_dir"].append(conf)

        error_msg = self._refresh_web_server_conf(conf["site_name"], self.config[conf["site_name"]], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()

    def modify_auth_dir(self, get) -> Optional[str]:
        """Replace the basic-auth rule with a matching name."""
        conf = self.check_auth_dir_args(get, is_modify=True)
        if isinstance(conf, str):
            return conf

        data = self.config.get(conf["site_name"], {}).get("auth_dir", [])
        target_idx = None
        for idx, i in enumerate(data):
            if i["name"] == conf["name"]:
                target_idx = idx
                break
        if target_idx is None:
            return "没有指定的配置信息"
        web_server = webserver()
        error_msg = self._set_web_server_conf_include(conf["site_name"], web_server)
        if error_msg:
            return error_msg
        if conf["site_name"] not in self.config:
            self.config[conf["site_name"]] = {"auth_dir": [], "file_deny": []}

        self.config[conf["site_name"]]["auth_dir"][target_idx] = conf

        error_msg = self._refresh_web_server_conf(conf["site_name"], self.config[conf["site_name"]], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()

    def remove_auth_dir(self, site_name: str, name: str) -> Optional[str]:
        """Remove the basic-auth rule with the given name."""
        if site_name not in self.config:
            return "没有该网站的配置"

        target = None
        for idx, i in enumerate(self.config[site_name].get("auth_dir", [])):
            if i.get("name", None) == name:
                target = idx

        if target is None:
            return "没有该路径的配置"

        del self.config[site_name]["auth_dir"][target]
        web_server = webserver()
        error_msg = self._refresh_web_server_conf(site_name, self.config[site_name], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()
        return

    def check_file_deny_args(self, get, is_modify=False) -> Union[str, dict]:
        """Validate request args for a suffix-deny rule.

        :return: an error string, or the cleaned value dict on success
        """
        values = {}
        try:
            values["site_name"] = get.site_name.strip()
            values["name"] = get.name.strip()
            values["dir_path"] = get.dir_path.strip()
            values["suffix"] = list(filter(lambda x: bool(x.strip()), json.loads(get.suffix.strip())))
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return "Parameter error"

        if len(values["name"]) < 3:
            return '规则名最少需要输入3个字符串!'
        if not values["suffix"]:
            return '文件扩展名不可为空!'
        if not values["dir_path"]:
            return '目录不可为空!'

        if not is_modify:
            data = self.config.get(values["site_name"], {}).get("file_deny", [])
            for i in data:
                if i["dir_path"] == values["dir_path"]:
                    return "此路径已存在"
                if i["name"] == values["name"]:
                    return "此名称已存在"
        return values

    def create_file_deny(self, get) -> Optional[str]:
        """Add a suffix-deny rule; returns an error string or None on success."""
        conf = self.check_file_deny_args(get, is_modify=False)
        if isinstance(conf, str):
            return conf
        web_server = webserver()
        error_msg = self._set_web_server_conf_include(conf["site_name"], web_server)
        if error_msg:
            return error_msg
        if conf["site_name"] not in self.config:
            self.config[conf["site_name"]] = {"auth_dir": [], "file_deny": []}

        self.config[conf["site_name"]]["file_deny"].append(conf)
        error_msg = self._refresh_web_server_conf(conf["site_name"], self.config[conf["site_name"]], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()

    def modify_file_deny(self, get) -> Optional[str]:
        """Replace the suffix-deny rule with a matching name."""
        conf = self.check_file_deny_args(get, is_modify=True)
        if isinstance(conf, str):
            return conf

        data = self.config.get(conf["site_name"], {}).get("file_deny", [])
        target_idx = None
        for idx, i in enumerate(data):
            if i["name"] == conf["name"]:
                target_idx = idx
                break
        if target_idx is None:
            return "没有指定的配置信息"
        web_server = webserver()
        error_msg = self._set_web_server_conf_include(conf["site_name"], web_server)
        if error_msg:
            return error_msg
        if conf["site_name"] not in self.config:
            self.config[conf["site_name"]] = {"auth_dir": [], "file_deny": []}

        self.config[conf["site_name"]]["file_deny"][target_idx] = conf

        error_msg = self._refresh_web_server_conf(conf["site_name"], self.config[conf["site_name"]], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()

    def remove_file_deny(self, site_name: str, name: str) -> Optional[str]:
        """Remove the suffix-deny rule with the given name."""
        if site_name not in self.config:
            return "没有该网站的配置"

        target = None
        for idx, i in enumerate(self.config[site_name].get("file_deny", [])):
            if i.get("name", None) == name:
                target = idx

        if target is None:
            return "没有该路径的配置"

        del self.config[site_name]["file_deny"][target]
        web_server = webserver()
        error_msg = self._refresh_web_server_conf(site_name, self.config[site_name], web_server)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()
        return

    def site_access_restriction_info(self, site_name: str) -> dict:
        """Return the stored rule dict for a site (empty lists when unknown)."""
        if site_name not in self.config:
            return {"auth_dir": [], "file_deny": []}
        else:
            return self.config[site_name]

    def remove_site_access_restriction_info(self, site_name):
        """Drop every stored rule and generated fragment for a site."""
        if site_name in self.config:
            # fix: previously deleted the literal key "site_name", so the
            # site's entry was never removed (and could raise KeyError)
            del self.config[site_name]
            self.save_config()
        ng_access_dir = "{}/nginx/access/{}".format(self._vhost_path, site_name)
        ap_access_dir = "{}/apache/access/{}".format(self._vhost_path, site_name)
        if os.path.isdir(ng_access_dir):
            shutil.rmtree(ng_access_dir)

        if os.path.isdir(ap_access_dir):
            shutil.rmtree(ap_access_dir)
|
||||
|
||||
|
||||
class AccessRestriction:
    """JSON-response facade over :class:`RealAccessRestriction`."""

    def __init__(self, config_prefix: str = ""):
        self.config_prefix: str = config_prefix
        self._ar = RealAccessRestriction(config_prefix)

    @staticmethod
    def _as_response(result, success_msg):
        # the underlying methods return an error string on failure, None on success
        if isinstance(result, str):
            return json_response(status=False, msg=result)
        return json_response(status=True, msg=success_msg)

    @staticmethod
    def _site_and_name(get):
        # raises AttributeError when either request parameter is missing
        return get.site_name.strip(), get.name.strip()

    def create_auth_dir(self, get):
        """Create a basic-auth rule from the request object."""
        return self._as_response(self._ar.create_auth_dir(get), "Successfully added")

    def modify_auth_dir(self, get):
        """Modify an existing basic-auth rule."""
        return self._as_response(self._ar.modify_auth_dir(get), "修改成功")

    def remove_auth_dir(self, get):
        """Delete a basic-auth rule identified by site_name + name."""
        try:
            site_name, name = self._site_and_name(get)
        except AttributeError:
            return json_response(status=False, msg="请求参数错误")
        return self._as_response(self._ar.remove_auth_dir(site_name, name), "Successfully delete")

    def create_file_deny(self, get):
        """Create a suffix-deny rule from the request object."""
        return self._as_response(self._ar.create_file_deny(get), "Successfully added")

    def modify_file_deny(self, get):
        """Modify an existing suffix-deny rule."""
        return self._as_response(self._ar.modify_file_deny(get), "修改成功")

    def remove_file_deny(self, get):
        """Delete a suffix-deny rule identified by site_name + name."""
        try:
            site_name, name = self._site_and_name(get)
        except AttributeError:
            return json_response(status=False, msg="请求参数错误")
        return self._as_response(self._ar.remove_file_deny(site_name, name), "Successfully delete")

    def site_access_restriction_info(self, get):
        """Return the stored rule dict for one site."""
        try:
            site_name = get.site_name.strip()
        except AttributeError:
            return json_response(status=False, msg="请求参数错误")
        data = self._ar.site_access_restriction_info(site_name)
        return json_response(status=True, data=data)
||||
154
mod/base/web_conf/config_mgr.py
Normal file
154
mod/base/web_conf/config_mgr.py
Normal file
@@ -0,0 +1,154 @@
|
||||
import os
|
||||
import time
|
||||
from hashlib import md5
|
||||
from typing import Optional
|
||||
from .util import service_reload, check_server_config, write_file, read_file
|
||||
|
||||
|
||||
# 支持读取配置文件
|
||||
# 保存并重启配置文件
|
||||
# 历史文件记录
|
||||
# read/save a site's vhost config files, with history snapshots of every save
class ConfigMgr:
    """Manage one site's nginx/apache vhost config and its history copies."""

    _vhost_path = "/www/server/panel/vhost"
    # history snapshots mirror a config file's absolute path under this root
    _his_path = '/www/backup/file_history'

    def __init__(self, site_name: str, config_prefix: str = ""):
        self.site_name = site_name
        self.config_prefix = config_prefix

    def _config_file(self, web_server: str) -> str:
        # /www/server/panel/vhost/<server>/<prefix><site>.conf
        return "{}/{}/{}{}.conf".format(self._vhost_path, web_server, self.config_prefix, self.site_name)

    def _history_dir(self, web_server: str) -> str:
        # directory holding timestamped snapshots of this config file
        return "{}{}".format(self._his_path, self._config_file(web_server))

    def _read_config(self, web_server: str) -> Optional[str]:
        """Return the config file's text, or None if unreadable/missing."""
        res = read_file(self._config_file(web_server))
        if isinstance(res, str):
            return res
        return None

    def nginx_config(self) -> Optional[str]:
        return self._read_config("nginx")

    def apache_config(self) -> Optional[str]:
        return self._read_config("apache")

    def save_config(self, conf_data: str, web_server: str):
        """Write the config, validate it, snapshot it and reload the server.

        :return: the validation error string on failure, None on success
        """
        config_file = self._config_file(web_server)
        old_config = self._read_config(web_server)
        write_file(config_file, conf_data)
        errmsg = check_server_config()
        if errmsg:
            # roll back; guard against the file not having existed before
            write_file(config_file, old_config if old_config is not None else "")
            return errmsg
        self._save_history(web_server)
        service_reload()

    def save_nginx_config(self, conf_data: str) -> Optional[str]:
        return self.save_config(conf_data, "nginx")

    def save_apache_config(self, conf_data: str) -> Optional[str]:
        return self.save_config(conf_data, "apache")

    def history_list(self):
        """Snapshot ids (newest first) for both servers' config files."""
        ng_save_path = self._history_dir("nginx")
        ap_save_path = self._history_dir("apache")
        return {
            "nginx": [] if not os.path.isdir(ng_save_path) else sorted(os.listdir(ng_save_path), reverse=True),
            "apache": [] if not os.path.isdir(ap_save_path) else sorted(os.listdir(ap_save_path), reverse=True)
        }

    def history_conf(self, history_id: str) -> Optional[str]:
        """Return the snapshot's content, searching nginx first, then apache."""
        for web_server in ("nginx", "apache"):
            save_path = self._history_dir(web_server)
            if not os.path.isdir(save_path):
                continue
            if history_id in os.listdir(save_path):
                return read_file(os.path.join(save_path, history_id))
        return None

    def remove_history_file(self, history_id: str) -> None:
        """Delete the snapshot with this id from both servers' history."""
        # fix: the apache branch previously removed the file from the
        # *nginx* history directory (copy-paste bug)
        for web_server in ("nginx", "apache"):
            save_path = self._history_dir(web_server)
            if not os.path.isdir(save_path):
                continue
            target = os.path.join(save_path, history_id)
            if os.path.isfile(target):
                os.remove(target)

    def clear_history_file(self) -> None:
        """Delete every history snapshot of both config files."""
        # fix: the apache branch previously deleted entries out of the
        # *nginx* history directory (copy-paste bug)
        for web_server in ("nginx", "apache"):
            save_path = self._history_dir(web_server)
            if not os.path.isdir(save_path):
                continue
            for i in os.listdir(save_path):
                os.remove(os.path.join(save_path, i))

    @staticmethod
    def _file_md5(filename):
        """MD5 hex digest of a file, or False when it is not a regular file."""
        if not os.path.isfile(filename):
            return False
        md5_obj = md5()
        with open(filename, mode="rb") as f:
            while True:
                b = f.read(8096)
                if not b:
                    break
                md5_obj.update(b)

        return md5_obj.hexdigest()

    def _save_history(self, web_server: str):
        """Snapshot the current config file unless it equals the latest copy."""
        # global opt-out flag for file history
        if os.path.exists('/www/server/panel/data/not_file_history.pl'):
            return True

        filename = self._config_file(web_server)
        save_path = self._history_dir(web_server)
        if not os.path.isdir(save_path):
            os.makedirs(save_path, 0o600)

        his_list = sorted(os.listdir(save_path), reverse=True)  # newest first
        try:
            # NOTE(review): relative path — depends on the panel's CWD; verify
            num = int(read_file('data/history_num.pl'))
        except (ValueError, TypeError):
            num = 100

        # skip the snapshot when nothing changed since the newest copy
        is_write = True
        if len(his_list) > 0:
            is_write = self._file_md5(filename) != self._file_md5(os.path.join(save_path, his_list[0]))

        if is_write:
            new_name = str(int(time.time()))
            write_file(os.path.join(save_path, new_name), read_file(filename, 'rb'), "wb")
            his_list.insert(0, new_name)

        # prune copies beyond the configured limit
        for i in his_list[num:]:
            rm_file = save_path + '/' + i
            if os.path.exists(rm_file):
                os.remove(rm_file)
||||
136
mod/base/web_conf/default_site.py
Normal file
136
mod/base/web_conf/default_site.py
Normal file
@@ -0,0 +1,136 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from typing import Optional, Tuple
|
||||
from .util import listen_ipv6, write_file, read_file, service_reload
|
||||
|
||||
|
||||
def check_default():
    """Ensure a catch-all default vhost config exists for both apache and nginx.

    Writes ``0.default.conf`` into each server's vhost directory unless a
    default config (old or new name) is already present.
    """
    vhost_path = "/www/server/panel/vhost"
    nginx = vhost_path + '/nginx'
    httpd = vhost_path + '/apache'
    httpd_default = '''<VirtualHost *:80>
ServerAdmin webmaster@example.com
DocumentRoot "/www/server/apache/htdocs"
ServerName bt.default.com
<Directory "/www/server/apache/htdocs">
SetOutputFilter DEFLATE
Options FollowSymLinks
AllowOverride All
Order allow,deny
Allow from all
DirectoryIndex index.html
</Directory>
</VirtualHost>
'''

    # also bind [::]:80 when the panel is configured for IPv6 listening
    listen_ipv6_str = ''
    if listen_ipv6():
        listen_ipv6_str = "\n listen [::]:80;"

    nginx_default = '''server
{
listen 80;%s
server_name _;
index index.html;
root /www/server/nginx/html;
}''' % listen_ipv6_str

    # only create the default vhost when neither naming variant exists yet
    if not os.path.exists(httpd + '/0.default.conf') and not os.path.exists(httpd + '/default.conf'):
        write_file(httpd + '/0.default.conf', httpd_default)
    if not os.path.exists(nginx + '/0.default.conf') and not os.path.exists(nginx + '/default.conf'):
        write_file(nginx + '/0.default.conf', nginx_default)
|
||||
|
||||
|
||||
def get_default_site() -> Tuple[Optional[str], Optional[str]]:
    """Return the configured default site as ``(name, prefix)``.

    Migrates the legacy ``defaultSite.pl`` file to the new JSON format on
    first access; returns ``(None, None)`` when no readable config exists.
    """
    panel_path = "/www/server/panel"

    legacy_file = panel_path + "/data/defaultSite.pl"
    json_file = panel_path + "/data/mod_default_site.pl"

    # one-time migration from the legacy plain-text format
    if os.path.exists(legacy_file) and not os.path.exists(json_file):
        migrated = {"name": read_file(legacy_file).strip(), "prefix": ''}
        write_file(json_file, json.dumps(migrated))

    raw = read_file(json_file)
    if not isinstance(raw, str):
        return None, None
    parsed = json.loads(raw)
    return parsed["name"], parsed["prefix"]
|
||||
|
||||
|
||||
# site_name=None disables the default-site feature.
# prefix is the config-file prefix, e.g. "net_" (empty by default).
# domain is the site's domain, e.g. "www.sss.com:8456".
def set_default_site(site_name: Optional[str], prefix="", domain: str = None) -> Optional[str]:
    """Make *site_name* the server-wide default site (or disable with None).

    Removes ``default_server`` from the old default site's nginx listen
    directives and the apache catch-all ``.htaccess``, then marks the new
    site's listen directives as ``default_server`` and writes a rewrite rule
    redirecting unknown hosts to *domain*.  Persists the choice to
    ``data/mod_default_site.pl`` and reloads the web server.
    """
    # Tear down the previous default site first.
    old_default_name, old_prefix = get_default_site()
    panel_path = "/www/server/panel"
    default_site_save = panel_path + '/data/mod_default_site.pl'
    if old_default_name:
        ng_conf_file = os.path.join(panel_path, "vhost/nginx/{}{}.conf".format(old_prefix, old_default_name))
        old_conf = read_file(ng_conf_file)
        if isinstance(old_conf, str):
            # strip "default_server" from every listen directive
            rep_listen_ds = re.compile(r"listen\s+.*default_server.*;")
            new_conf_list = []
            start_idx = 0
            for tmp_res in rep_listen_ds.finditer(old_conf):
                new_conf_list.append(old_conf[start_idx: tmp_res.start()])
                new_conf_list.append(tmp_res.group().replace("default_server", ""))
                start_idx = tmp_res.end()
            new_conf_list.append(old_conf[start_idx:])
            write_file(ng_conf_file, "".join(new_conf_list))

        # drop the apache catch-all redirect of the previous default site
        path = '/www/server/apache/htdocs/.htaccess'
        if os.path.exists(path):
            os.remove(path)

    if site_name is None:
        # feature disabled: persist an empty setting and reload
        write_file(default_site_save, json.dumps({
            "name": None,
            "prefix": None
        }))
        service_reload()
        return

    # Set up the new default site.
    ap_path = '/www/server/apache/htdocs'
    if os.path.exists(ap_path):
        # BUGFIX: dots are now escaped so only the literal host 127.0.0.1 is
        # exempt from the redirect (previously "." matched any character).
        conf = '''<IfModule mod_rewrite.c>
RewriteEngine on
RewriteCond %{{HTTP_HOST}} !^127\\.0\\.0\\.1 [NC]
RewriteRule (.*) http://{}/$1 [L]
</IfModule>'''.format(domain)

        write_file(ap_path + '/.htaccess', conf)

    ng_conf_file = os.path.join(panel_path, "vhost/nginx/{}{}.conf".format(prefix, site_name))
    ng_conf = read_file(ng_conf_file)
    if isinstance(ng_conf, str):
        # append "default_server" to every listen directive that lacks it
        rep_listen = re.compile(r"listen[^;]*;")
        new_conf_list = []

        start_idx = 0
        for tmp_res in rep_listen.finditer(ng_conf):
            new_conf_list.append(ng_conf[start_idx: tmp_res.start()])
            # BUGFIX: removed leftover debug print() of each listen directive
            if tmp_res.group().find("default_server") == -1:
                new_conf_list.append(tmp_res.group()[:-1] + " default_server;")
            else:
                new_conf_list.append(tmp_res.group())
            start_idx = tmp_res.end()

        new_conf_list.append(ng_conf[start_idx:])

        write_file(ng_conf_file, "".join(new_conf_list))

    write_file(default_site_save, json.dumps({
        "name": site_name,
        "prefix": prefix
    }))

    service_reload()
    return
||||
252
mod/base/web_conf/dir_tool.py
Normal file
252
mod/base/web_conf/dir_tool.py
Normal file
@@ -0,0 +1,252 @@
|
||||
# 网站文件相关操作
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import Optional, Union, List
|
||||
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload, pre_re_key, ExecShell
|
||||
|
||||
|
||||
# Website file/path related operations.
class DirTool:
    """Rewrites vhost config files when a site's root path, run path or
    default-document list changes, keeping nginx/apache/openlitespeed in sync."""

    def __init__(self, conf_prefix: str = ""):
        # conf_prefix: vhost config file name prefix, e.g. "net_"
        self.conf_prefix = conf_prefix
        self._vhost_path = "/www/server/panel/vhost"

    # Change the site's root path.
    def modify_site_path(self, site_name: str, old_site_path: str, new_site_path: str) -> Optional[str]:
        """Change the site's root path.

        site_name: site name
        old_site_path: old root path
        new_site_path: new root path
        Returns an error-message string on failure, None on success.
        """
        site_info = DB("sites").where("name=?", (site_name,)).find()
        if not isinstance(site_info, dict):
            return "站点信息查询错误"

        # refuse to touch configs that would not pass the server's config test
        error_msg = check_server_config()
        if error_msg:
            return "服务配置无法重载,请检查配置错误再操作。\n" + error_msg

        if not self._check_site_path(new_site_path):
            return '请不要将网站根目录设置到以下关键目录中'

        if not os.path.exists(new_site_path):
            return '指定的网站根目录不存在,无法设置,请检查输入信息.'
        # normalize away trailing slashes before string comparisons/replaces
        if old_site_path[-1] == '/':
            old_site_path = old_site_path[:-1]

        if new_site_path[-1] == '/':
            new_site_path = new_site_path[:-1]

        old_run_path = self.get_site_run_path(site_name)
        if old_run_path is None:
            return '读取网站当前运行目录失败,请检查配置文件'
        # keep the run-path suffix relative to the old root
        old_run_path_sub = old_run_path.replace(old_site_path, "")
        new_run_path = new_site_path + old_run_path_sub
        # NOTE(review): new_site_path existence was already checked above, so
        # this branch looks unreachable — confirm the intended condition.
        if not os.path.exists(new_site_path):
            new_run_path = new_site_path
        nginx_file = '{}/nginx/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        nginx_conf = read_file(nginx_file)
        if nginx_conf:
            rep_root = re.compile(r'\s*root\s+(.+);', re.M)
            new_conf = rep_root.sub(" root {};".format(new_run_path), nginx_conf)
            write_file(nginx_file, new_conf)

        apache_file = '{}/apache/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        apache_conf = read_file(apache_file)
        if apache_conf:
            rep_doc = re.compile(r"DocumentRoot\s+.*\n")
            new_conf = rep_doc.sub('DocumentRoot "' + new_run_path + '"\n', apache_conf)

            # pre_re_key presumably escapes regex metacharacters in the path — confirm
            rep_dir = re.compile(r'''<Directory\s+['"]%s['"]''' % pre_re_key(old_site_path))
            new_conf = rep_dir.sub('<Directory "' + new_run_path + '">\n', new_conf)
            write_file(apache_file, new_conf)

        # recreate the immutable open_basedir guard (.user.ini) under the new path
        userIni = new_run_path + '/.user.ini'
        if os.path.exists(userIni):
            ExecShell("chattr -i " + userIni)
        write_file(userIni, 'open_basedir=' + new_run_path + '/:/tmp/')
        ExecShell('chmod 644 ' + userIni)
        ExecShell('chown root:root ' + userIni)
        ExecShell('chattr +i ' + userIni)
        service_reload()
        DB("sites").where("id=?", (site_info["id"],)).setField('path', new_site_path)
        return

    # Change the site's run path (sub-directory under the root).
    def modify_site_run_path(self, site_name, site_path, new_run_path_sub: str) -> Optional[str]:
        """Change the site's run path.

        site_name: site name
        site_path: site root path
        new_run_path_sub: run sub-directory under the root, e.g.
            site_path -> /www/wwwroots/aaaa
            new_run_path_sub -> bbb/ccc
            new_run_path -> /www/wwwroots/aaaa/bbb/ccc
        """
        # nginx part
        old_run_path = self.get_site_run_path(site_name)
        if old_run_path is None:
            return '读取网站当前运行目录失败,请检查配置文件'
        if new_run_path_sub.startswith("/"):
            new_run_path_sub = new_run_path_sub[1:]
        new_run_path = os.path.join(site_path, new_run_path_sub)
        filename = '{}/nginx/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        nginx_conf = read_file(filename)
        if nginx_conf:
            tmp = re.search(r'\s*root\s+(.+);', nginx_conf)
            if tmp:
                o_path = tmp.groups()[0]
                # plain substring replace — assumes the old path does not occur
                # elsewhere in the config; TODO confirm
                new_conf = nginx_conf.replace(o_path, new_run_path)
                write_file(filename, new_conf)

        # apache part
        filename = '{}/apache/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        ap_conf = read_file(filename)
        if ap_conf:
            tmp = re.search(r'\s*DocumentRoot\s*"(.+)"\s*\n', ap_conf)
            if tmp:
                o_path = tmp.groups()[0]
                new_conf = ap_conf.replace(o_path, new_run_path)
                write_file(filename, new_conf)

        # move the immutable .user.ini along with the run path
        s_path = old_run_path + "/.user.ini"
        d_path = new_run_path + "/.user.ini"
        if s_path != d_path:
            ExecShell("chattr -i {}".format(s_path))
            ExecShell("mv {} {}".format(s_path, d_path))
            ExecShell("chattr +i {}".format(d_path))

        service_reload()

    # Get the site's run path; the returned path is absolute.
    def get_site_run_path(self, site_name) -> Optional[str]:
        """Read the run path from the active web server's vhost config,
        or None when the config file is missing or has no root directive."""
        web_server = webserver()
        filename = "{}/{}/{}{}.conf".format(self._vhost_path, web_server, self.conf_prefix, site_name)
        if not os.path.exists(filename):
            return None
        run_path = None
        conf = read_file(filename)
        if web_server == 'nginx':
            tmp1 = re.search(r'\s*root\s+(?P<path>.+);', conf)
            if tmp1:
                run_path = tmp1.group("path").strip()
        elif web_server == 'apache':
            tmp1 = re.search(r'\s*DocumentRoot\s*"(?P<path>.+)"\s*\n', conf)
            if tmp1:
                run_path = tmp1.group("path")
        else:
            # openlitespeed layout
            tmp1 = re.search(r"vhRoot\s*(?P<path>.*)", conf)
            if tmp1:
                run_path = tmp1.group("path").strip()

        return run_path

    # Get the default-document (index) list.
    def get_index_conf(self, site_name) -> Union[str, List[str]]:
        """Return the index file names as a list, or an error string.

        NOTE(review): returns an error message string on failure instead of
        raising — callers must type-check the result.
        """
        web_server = webserver()
        filename = "{}/{}/{}{}.conf".format(self._vhost_path, web_server, self.conf_prefix, site_name)
        if not os.path.exists(filename):
            return "配置文件丢失"
        conf = read_file(filename)
        if not conf:
            return "配置文件丢失"
        split_char = " "
        if web_server == 'nginx':
            rep = re.compile(r"\s+index\s+(?P<target>.+);", re.M)
        elif web_server == 'apache':
            rep = re.compile(r"DirectoryIndex\s+(?P<target>.+)", re.M)
        else:
            # openlitespeed uses comma-separated indexFiles
            rep = re.compile(r"indexFiles\s+(?P<target>.+)", re.M)
            split_char = ","
        res = rep.search(conf)
        if not res:
            return "获取失败,配置文件中不存在默认文档"

        # split, strip and drop empty entries
        res_list = list(filter(None, map(lambda x: x.strip(), res.group("target").split(split_char))))

        return res_list

    # Set the index files; pass several names positionally via *filenames*,
    # or a list via *file_list* — both are merged and de-duplicated.
    def set_index_conf(self, site_name, *filenames: str, file_list: Optional[List[str]] = None):
        """Write the default-document list into all three servers' configs.

        NOTE(review): a set is used, so the resulting index order is
        non-deterministic — confirm order does not matter to callers.
        """
        index_list = set()
        for i in filenames:
            f = i.strip()
            if not f:
                continue
            index_list.add(f)

        if file_list is not None:
            for i in file_list:
                f = i.strip()
                if not f:
                    continue
                index_list.add(f)

        # nginx
        file = '{}/nginx/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        conf = read_file(file)
        if conf:
            rep_index = re.compile(r"\s*index\s+.+;")
            new_conf = rep_index.sub(" index {};".format(" ".join(index_list)), conf)
            write_file(file, new_conf)

        # apache
        file = '{}/apache/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        conf = read_file(file)
        if conf:
            rep_index = re.compile(r"\s*DirectoryIndex\s+.+\n")
            new_conf = rep_index.sub(" DirectoryIndex {}\n".format(" ".join(index_list)), conf)
            write_file(file, new_conf)

        # openlitespeed
        file = '{}/openlitespeed/detail/{}{}.conf'.format(self._vhost_path, self.conf_prefix, site_name)
        conf = read_file(file)
        if conf:
            rep_index = re.compile(r"indexFiles\s+.+\n")
            new_conf = rep_index.sub('indexFiles {}\n'.format(",".join(index_list)), conf)
            write_file(file, new_conf)

        service_reload()
        return

    def _check_site_path(self, site_path):
        """Return True when *site_path* is an acceptable web root, i.e. not a
        critical system directory; any internal error is treated as rejection."""
        try:
            # Tencent Lighthouse paths are always allowed
            if site_path.find('/usr/local/lighthouse/') >= 0:
                return True

            if site_path in ['/', '/usr', '/dev', '/home', '/media', '/mnt', '/opt', '/tmp', '/var']:
                return False
            # explicit whitelist of panel-managed prefixes
            whites = ['/www/server/tomcat', '/www/server/stop', '/www/server/phpmyadmin']
            for w in whites:
                if site_path.find(w) == 0:
                    return True
            a, error_paths = self._get_sys_path()
            site_path = site_path.strip()
            if site_path[-1] == '/': site_path = site_path[:-1]
            if site_path in a:
                return False
            # prefix check against forbidden directories
            site_path += '/'
            for ep in error_paths:
                if site_path.find(ep) == 0:
                    return False
            return True
        except:
            # NOTE(review): bare except deliberately rejects on any error
            return False

    @staticmethod
    def _get_sys_path():
        """
        @name critical directories
        @author hwliang<2021-06-11>
        @return tuple — (exact-match forbidden dirs, forbidden dir prefixes)
        """
        a = ['/www', '/usr', '/', '/dev', '/home', '/media', '/mnt', '/opt', '/tmp', '/var']
        c = ['/www/.Recycle_bin/', '/www/backup/', '/www/php_session/', '/www/wwwlogs/', '/www/server/', '/etc/',
             '/usr/', '/var/', '/boot/', '/proc/', '/sys/', '/tmp/', '/root/', '/lib/', '/bin/', '/sbin/', '/run/',
             '/lib64/', '/lib32/', '/srv/']
        return a, c
|
||||
|
||||
1682
mod/base/web_conf/dns_api.py
Normal file
1682
mod/base/web_conf/dns_api.py
Normal file
File diff suppressed because it is too large
Load Diff
335
mod/base/web_conf/domain_tool.py
Normal file
335
mod/base/web_conf/domain_tool.py
Normal file
@@ -0,0 +1,335 @@
|
||||
import os
|
||||
import re
|
||||
from typing import Tuple, Optional, Union, List, Dict
|
||||
|
||||
from .util import webserver, check_server_config, write_file, read_file, service_reload, listen_ipv6, use_http2
|
||||
|
||||
|
||||
def domain_to_puny_code(domain: str) -> str:
    """Convert an IDN domain to its punycode (``xn--``) form.

    ASCII labels pass through unchanged; ``*`` and empty labels are dropped,
    though a leading ``*.`` wildcard is preserved on the result.
    """
    encoded = []
    for label in domain.split('.'):
        if label == '*' or label == '':
            continue
        # labels containing high bytes or CJK characters need punycode encoding
        if re.search(u"[\x80-\xff]+", label) or re.search(u"[\u4e00-\u9fa5]+", label):
            encoded.append('xn--' + label.encode('punycode').decode('utf-8'))
        else:
            encoded.append(label)
    buf = ''.join(part + '.' for part in encoded)
    if domain.startswith('*.'):
        buf = '*.' + buf
    # strip the trailing dot accumulated above
    return buf[:-1]
|
||||
|
||||
|
||||
def check_domain(domain: str) -> Optional[str]:
    """Normalize *domain* to punycode and validate its format.

    Returns the normalized domain string, or None when the format is invalid.
    """
    domain = domain_to_puny_code(domain)

    # a wildcard is only valid in the "*.xxx" form
    if '*' in domain and '*.' not in domain:
        return None

    # label-by-label shape check (up to 25 labels, optional two-part TLD)
    matcher = re.compile(r"^([\w\-*]{1,100}\.){1,24}([\w\-]{1,24}|[\w\-]{1,24}\.[\w\-]{1,24})$")
    return domain if matcher.match(domain) else None
|
||||
|
||||
|
||||
def is_domain(domain: str) -> bool:
    """Loose check that *domain* looks like a dotted host name (case-insensitive)."""
    pattern = re.compile(
        r'(?:[A-Z0-9_](?:[A-Z0-9-_]{0,247}[A-Z0-9])?\.)+(?:[A-Z]{2,6}|[A-Z0-9-]{2,}(?<!-))\Z',
        re.IGNORECASE
    )
    return bool(pattern.match(domain))
|
||||
|
||||
|
||||
# 检查原始的域名列表,返回[(domain, port)] 的格式,并返回其中有错误的项目
|
||||
# Validate raw "domain[:port]" entries; returns ([(domain, port)], [error dicts]).
def normalize_domain(*domains: str) -> Tuple[List[Tuple[str, str]], List[Dict]]:
    """Parse and validate raw domain entries.

    Each entry may carry an explicit port (``host:port``); entries without one
    default to port 80.  Returns de-duplicated valid ``(domain, port)`` pairs
    plus a list of ``{"domain", "msg"}`` dicts describing rejected entries.
    """
    ok: List[Tuple[str, str]] = []
    bad: List[Dict] = []
    for raw in domains:
        if not raw.strip():
            continue
        pieces = [part.strip() for part in raw.split(":")]
        if len(pieces) == 1:
            pieces.append("80")
        else:
            try:
                port_num = int(pieces[1])
            except Exception:
                bad.append({
                    "domain": raw,
                    "msg": "端口范围错误"
                })
                continue
            if not (1 < port_num < 65535):
                bad.append({
                    "domain": raw,
                    "msg": "端口范围错误"
                })
                continue
            pieces[1] = str(port_num)
        name, port = pieces
        name = check_domain(name)
        if isinstance(name, str):
            ok.append((name, port))
            continue
        bad.append({
            "domain": raw,
            "msg": "域名格式错误"
        })

    # de-duplicate (order is not guaranteed)
    ok = list(set(ok))
    return ok, bad
|
||||
|
||||
|
||||
class NginxDomainTool:
    """Rewrites a site's nginx vhost file for domain (server_name) and listen-port changes."""

    ng_vhost = "/www/server/panel/vhost/nginx"

    def __init__(self, conf_prefix: str = ""):
        # conf_prefix: vhost config file name prefix, e.g. "net_"
        self.conf_prefix = conf_prefix

    # Add listen ports to a given nginx config body.
    @staticmethod
    def nginx_add_port_by_config(conf, *port: str, is_http3=False) -> str:
        """Return *conf* with listen directives added for every port in *port*
        that is not already present; optionally add QUIC (HTTP/3) listeners.

        NOTE(review): need_remove_port_idx is collected but never used here —
        surplus listen lines are NOT removed by this method.
        """
        ports = set()
        for p in port:
            ports.add(p)

        # locate existing listen directives (captures port and default_server flag)
        rep_port = re.compile(r"\s*listen\s+[\[\]:]*(?P<port>[0-9]+)(?P<ds>\s*default_server)?.*;[^\n]*\n", re.M)
        use_ipv6 = listen_ipv6()
        last_port_idx = None
        need_remove_port_idx = []
        had_ports = set()
        is_default_server = False
        for tmp_res in rep_port.finditer(conf):
            last_port_idx = tmp_res.end()
            if tmp_res.group("ds") and tmp_res.group("ds").strip():
                is_default_server = True
            if tmp_res.group("port") in ports:
                had_ports.add(tmp_res.group("port"))
            elif tmp_res.group("port") != "443":
                need_remove_port_idx.append((tmp_res.start(), tmp_res.end()))

        if not last_port_idx:
            # no listen lines yet: insert right after the "server {" opener
            last_port_idx = re.search(r"server\s*\{\s*?\n", conf).end()

        need_add_ports = ports - had_ports
        # preserve the default_server role if the config already had one
        d_s = " default_server" if is_default_server else ""
        h2 = " http2" if use_http2() else ""
        if need_add_ports or is_http3:
            listen_add_list = []
            for p in need_add_ports:
                if p == "443":
                    # 443 always gets ssl (and http2 when enabled)
                    tmp = " listen 443 ssl{}{};\n".format(h2, d_s)
                    if use_ipv6:
                        tmp += " listen [::]:443 ssl{}{};\n".format(h2, d_s)
                    listen_add_list.append(tmp)
                    continue

                tmp = " listen {}{};\n".format(p, d_s)
                if use_ipv6:
                    tmp += " listen [::]:{}{};\n".format(p, d_s)
                listen_add_list.append(tmp)

            # NOTE(review): "(had_ports | had_ports)" is a self-union; this was
            # probably meant to be "(ports | had_ports)" — confirm before changing.
            if is_http3 and "443" in (had_ports | had_ports):
                listen_add_list.append(" listen 443 quic{};\n".format(d_s))
                if use_ipv6:
                    listen_add_list.append(" listen [::]:443 quic{};\n".format(d_s))

            new_conf = conf[:last_port_idx] + "".join(listen_add_list) + conf[last_port_idx:]
            return new_conf
        return conf

    # Write the site's domains and ports into its nginx config file.
    def nginx_set_domain(self, site_name, *domain: Tuple[str, str]) -> Optional[str]:
        """Apply (domain, port) pairs to the site's nginx vhost file.

        Replaces the first server_name directive, adds missing listen ports,
        removes surplus non-443 listen lines, then verifies the config and
        rolls back on failure.  Returns an error string or None on success.
        """
        ng_file = '{}/{}{}.conf'.format(self.ng_vhost, self.conf_prefix, site_name)
        ng_conf = read_file(ng_file)
        if not ng_conf:
            return "nginx配置文件丢失"

        domains_set, ports = set(), set()
        for d, p in domain:
            domains_set.add(d)
            ports.add(p)

        # set server_name (first occurrence only)
        rep_server_name = re.compile(r"\s*server_name\s*(.*);", re.M)
        new_conf = rep_server_name.sub("\n server_name {};".format(" ".join(domains_set)), ng_conf, 1)

        # scan existing listen directives
        rep_port = re.compile(r"\s*listen\s+[\[\]:]*(?P<port>[0-9]+)(?P<ds>\s*default_server)?.*;[^\n]*\n", re.M)
        use_ipv6 = listen_ipv6()
        last_port_idx = None
        need_remove_port_idx = []
        had_ports = set()
        is_default_server = False
        for tmp_res in rep_port.finditer(new_conf):
            last_port_idx = tmp_res.end()
            if tmp_res.group("ds") is not None and tmp_res.group("ds").strip():
                is_default_server = True
            if tmp_res.group("port") in ports:
                had_ports.add(tmp_res.group("port"))
            elif tmp_res.group("port") != "443":
                # 443 is managed by the SSL code path, never removed here
                need_remove_port_idx.append((tmp_res.start(), tmp_res.end()))

        if not last_port_idx:
            last_port_idx = re.search(r"server\s*\{\s*?\n", new_conf).end()

        # add listen directives for the ports not present yet
        ports = ports - had_ports
        if ports:
            d_s = " default_server" if is_default_server else ""
            listen_add_list = []
            for p in ports:
                tmp = " listen {}{};\n".format(p, d_s)
                if use_ipv6:
                    tmp += " listen [::]:{}{};\n".format(p, d_s)
                listen_add_list.append(tmp)

            new_conf = new_conf[:last_port_idx] + "".join(listen_add_list) + new_conf[last_port_idx:]

        # Remove surplus listen directives.
        # All collected indices lie before last_port_idx, so the insertion
        # above is not affected by this removal pass.
        if need_remove_port_idx:
            conf_list = []
            idx = 0
            for start, end in need_remove_port_idx:
                conf_list.append(new_conf[idx:start])
                idx = end
            conf_list.append(new_conf[idx:])
            new_conf = "".join(conf_list)

        # save, then verify; restore the original file if nginx rejects it
        write_file(ng_file, new_conf)
        web_server = webserver()
        if web_server == "nginx" and check_server_config() is not None:
            write_file(ng_file, ng_conf)
            return "配置失败"
        if web_server == "nginx":
            service_reload()
|
||||
|
||||
|
||||
class ApacheDomainTool:
    """Rewrites a site's apache vhost file for domain (ServerAlias) and port changes."""

    ap_vhost = "/www/server/panel/vhost/apache"
    ap_path = "/www/server/apache"

    def __init__(self, conf_prefix: str = ""):
        # conf_prefix: vhost config file name prefix, e.g. "net_"
        self.conf_prefix = conf_prefix

    # Write the site's domains and ports into its apache config file.
    def apache_set_domain(self,
                          site_name,  # site name
                          *domain: Tuple[str, str],  # (domain, port) pairs
                          template_path: Optional[str] = None,  # template used when adding new port blocks
                          template_kwargs: Optional[dict] = None,  # fill-in parameters for the template
                          ) -> Optional[str]:
        """Apply (domain, port) pairs to the site's apache vhost file.

        template_path: template used as the body when a new port block is added.
        template_kwargs: fill-in parameters for that template;
            port / domains / server_admin / server_name are generated and
            filled automatically.
        Without template_path, the first <VirtualHost> block is cloned.

        Returns an error string on failure, None on success.
        Raises ValueError when template_kwargs do not match the template.
        """
        ap_file = '{}/{}{}.conf'.format(self.ap_vhost, self.conf_prefix, site_name)
        ap_conf: str = read_file(ap_file)
        if not ap_conf:
            # BUGFIX: message previously said "nginx配置文件丢失" (copy-paste)
            return "apache配置文件丢失"

        domains, ports = set(), set()
        for i in domain:
            domains.add(str(i[0]))
            ports.add(str(i[1]))

        domains_str = " ".join(domains)

        # set the alias domains (every ServerAlias line)
        rep_server_name = re.compile(r"\s*ServerAlias\s*(.*)\n", re.M)
        new_conf = rep_server_name.sub("\n ServerAlias {}\n".format(domains_str), ap_conf)

        # first VirtualHost block: used as the clone template for new ports
        tmp_template_res = re.search(r"<VirtualHost(.|\n)*?</VirtualHost>", new_conf)
        if not tmp_template_res:
            tmp_template = None
        else:
            tmp_template = tmp_template_res.group()

        # classify existing VirtualHost ports: keep wanted, remove surplus (except 443)
        rep_ports = re.compile(r"<VirtualHost +.*:(?P<port>\d+)+\s*>")
        need_remove_port = []
        for tmp in rep_ports.finditer(new_conf):
            if tmp.group("port") in ports:
                ports.remove(tmp.group("port"))
            elif tmp.group("port") != "443":
                need_remove_port.append(tmp.group("port"))

        if need_remove_port:
            for i in need_remove_port:
                tmp_rep = re.compile(r"<VirtualHost.*" + i + r"(.|\n)*?</VirtualHost[^\n]*\n?")
                new_conf = tmp_rep.sub("", new_conf, 1)

        # ports now holds only the ports that still need a VirtualHost block
        if ports:
            other_config_body_list = []
            if template_path is not None:
                try:
                    config_body = read_file(template_path)
                    for p in ports:
                        other_config_body_list.append(config_body.format(
                            port=p,
                            server_admin="admin@{}".format(site_name),
                            server_name='{}.{}'.format(p, site_name),
                            domains=domains_str,
                            **template_kwargs
                        ))
                except:
                    raise ValueError("参数与模板不匹配")
            else:
                if tmp_template is None:
                    return "配置文件格式错误"

                # clone the first VirtualHost block with the new port substituted
                for p in ports:
                    other_config_body_list.append(rep_ports.sub("<VirtualHost *:{}>".format(p), tmp_template, 1))

            new_conf += "\n" + "\n".join(other_config_body_list)
        write_file(ap_file, new_conf)
        # make apache's main config listen on the new ports
        self.apache_add_ports(*ports)
        # verify the result; restore the original file if apache rejects it
        web_server = webserver()
        if web_server == "apache" and check_server_config() is not None:
            write_file(ap_file, ap_conf)
            return "配置失败"

        if web_server == "apache":
            service_reload()

    # Add Listen directives to apache's main config file.
    @classmethod
    def apache_add_ports(cls, *ports: Union[str, int]) -> None:
        """Ensure httpd.conf has a ``Listen`` line for every port in *ports*.

        Also strips ``Listen 443`` from httpd-ssl.conf so 443 is managed in
        one place only.  Silently returns when the config cannot be read or
        contains no existing Listen line to anchor the insertion.
        """
        real_ports = set()
        for p in ports:
            real_ports.add(str(p))

        ssl_conf_file = '{}/conf/extra/httpd-ssl.conf'.format(cls.ap_path)
        if os.path.isfile(ssl_conf_file):
            ssl_conf = read_file(ssl_conf_file)
            if isinstance(ssl_conf, str) and ssl_conf.find('Listen 443') != -1:
                ssl_conf = ssl_conf.replace('Listen 443', '')
                write_file(ssl_conf_file, ssl_conf)

        ap_conf_file = '{}/conf/httpd.conf'.format(cls.ap_path)
        if not os.path.isfile(ap_conf_file):
            return
        ap_conf = read_file(ap_conf_file)
        if ap_conf is None:
            return

        # drop ports that are already listened on; remember the last Listen line
        rep_ports = re.compile(r"Listen\s+(?P<port>[0-9]+)\n", re.M)
        last_idx = None
        for key in rep_ports.finditer(ap_conf):
            last_idx = key.end()
            if key.group("port") in real_ports:
                real_ports.remove(key.group("port"))

        if not last_idx:
            return
        new_conf = ap_conf[:last_idx] + "\n".join(["Listen %s" % i for i in real_ports]) + "\n" + ap_conf[last_idx:]
        write_file(ap_conf_file, new_conf)
|
||||
326
mod/base/web_conf/ip_restrict.py
Normal file
326
mod/base/web_conf/ip_restrict.py
Normal file
@@ -0,0 +1,326 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
from typing import Tuple, Optional, Union
|
||||
from ipaddress import ip_address
|
||||
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload
|
||||
from mod.base import json_response
|
||||
|
||||
|
||||
class _BaseRestrict:
    """Base wrapper around a per-site JSON black/white-list config file."""

    def __init__(self, config_file: str, site_name: str):
        self._conf_file = config_file
        self._conf = self._read_conf()
        self.site_name = site_name

    def _read_conf(self):
        """Load the JSON config file.

        Falls back to a "closed" default when the file is missing, unreadable
        or not valid JSON (best-effort: never raises).
        """
        default_conf = {
            "restrict_type": "closed",
            "black_list": [],
            "white_list": []
        }

        if not os.path.exists(self._conf_file):
            return default_conf
        try:
            conf = json.loads(read_file(self._conf_file))
        except Exception:
            # BUGFIX: was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt; the best-effort fallback is kept.
            conf = default_conf
        return conf

    def to_view(self):
        """Return the raw config dict for API responses."""
        return self._conf
|
||||
|
||||
|
||||
class _IpRestrict(_BaseRestrict):
    """Per-site IP black/white-list config plus the generated nginx include snippet."""

    def __init__(self, site_name: str, config_prefix: str):
        setup_path = "/www/server/panel"
        ip_restrict_conf_dir = "{}/data/ip_restrict_data".format(setup_path)
        if not os.path.exists(ip_restrict_conf_dir):
            os.makedirs(ip_restrict_conf_dir)
        super().__init__("{}/{}{}".format(ip_restrict_conf_dir, config_prefix, site_name), site_name)
        self.config_prefix = config_prefix
        # nginx snippet file that the site's vhost config includes
        self.nginx_sub_file = "{}/vhost/ip-restrict/{}{}.conf".format(setup_path, self.config_prefix, self.site_name)

    @property
    def restrict_type(self):
        # one of "black" | "white" | "closed"
        return self._conf.get("restrict_type", "black")

    @restrict_type.setter
    def restrict_type(self, data: str):
        # silently ignores unknown values
        if data in ("black", "white", "closed"):
            self._conf["restrict_type"] = data

    @property
    def black_list(self):
        return self._conf.get("black_list", [])

    @black_list.setter
    def black_list(self, list_data: list):
        self._conf["black_list"] = list_data

    @property
    def white_list(self):
        return self._conf.get("white_list", [])

    @white_list.setter
    def white_list(self, list_data: list):
        self._conf["white_list"] = list_data

    def save(self) -> Tuple[bool, str]:
        """Persist the JSON config and regenerate the nginx snippet.

        Returns (ok, message).  NOTE(review): despite the tuple return type,
        an unknown restrict_type raises ValueError — confirm callers handle it.
        """
        if not self._conf:  # nothing loaded: no-op
            return True, "operate successfully"
        write_file(self._conf_file, json.dumps(self._conf))

        if self.restrict_type == "closed":
            # feature disabled: empty the snippet so no rule applies
            write_file(self.nginx_sub_file, "")
            service_reload()
            return True, "operate successfully"

        tmp_conf = []
        if self.restrict_type == "white":
            for i in self.white_list:
                tmp_conf.append("allow {};".format(i))

            tmp_conf.append("deny all; # 除开上述IP外,其他IP全部禁止访问")
        elif self.restrict_type == "black":
            for i in self.black_list:
                tmp_conf.append("deny {};".format(i))
        else:
            raise ValueError("错误的类型,无法操作")

        write_file(self.nginx_sub_file, "\n".join(tmp_conf))
        # verify nginx still accepts its config; roll back the snippet otherwise
        error_msg = check_server_config()
        if error_msg is not None:
            write_file(self.nginx_sub_file, "")
            return False, "操作失败"
        service_reload()
        return True, "operate successfully"

    # Called on site deletion: remove both the snippet and the JSON config.
    def remove_config_for_remove_site(self):
        if os.path.isfile(self.nginx_sub_file):
            os.remove(self.nginx_sub_file)

        if os.path.isfile(self._conf_file):
            os.remove(self._conf_file)
|
||||
|
||||
|
||||
class RealIpRestrict:
    """Public API for per-site IP black/white lists (nginx only)."""

    def __init__(self, config_prefix: str = ""):
        # config_prefix: vhost config file name prefix, e.g. "net_"
        self.config_prefix = config_prefix
        self.web_server = webserver()

    # Get a site's IP black/white-list details.
    def restrict_conf(self, site_name: str) -> Tuple[bool, Union[str, dict]]:
        """Return (ok, config-dict) or (False, error message)."""
        if self.web_server != "nginx":
            return False, "不支持除nginx之外的服务器"
        ip_conf = _IpRestrict(site_name, self.config_prefix)
        # report "closed" when the vhost file no longer includes the snippet
        if not self._get_status_in_nginx_conf(ip_conf):
            ip_conf.restrict_type = "closed"
        return True, ip_conf.to_view()

    # Read the enabled/disabled state from the site's nginx config.
    def _get_status_in_nginx_conf(self, ip_conf: _IpRestrict) -> bool:
        """True when the vhost file contains an ip-restrict include line."""
        setup_path = "/www/server/panel"
        ng_file = "{}/vhost/nginx/{}{}.conf".format(setup_path, self.config_prefix, ip_conf.site_name)
        rep_include = re.compile(r"\sinclude +.*/ip-restrict/.*\.conf;", re.M)
        ng_conf = read_file(ng_file)
        if not isinstance(ng_conf, str):
            return False
        if rep_include.search(ng_conf):
            return True
        return False

    def _set_nginx_include(self, ip_conf: _IpRestrict) -> Tuple[bool, str]:
        """Ensure the vhost file includes the site's ip-restrict snippet.

        Creates the snippet file if needed, inserts the include after the
        redirect include (or after #SSL-END), verifies the config and rolls
        back on failure.  Returns (ok, error message).
        """
        setup_path = "/www/server/panel"
        ng_file = "{}/vhost/nginx/{}{}.conf".format(setup_path, self.config_prefix, ip_conf.site_name)
        if not os.path.exists(os.path.dirname(ip_conf.nginx_sub_file)):
            # NOTE(review): 0o600 on a directory prevents traversal — confirm intended
            os.makedirs(os.path.dirname(ip_conf.nginx_sub_file), 0o600)
        if not os.path.isfile(ip_conf.nginx_sub_file):
            write_file(ip_conf.nginx_sub_file, "")

        ng_conf = read_file(ng_file)
        if not isinstance(ng_conf, str):
            return False, "nginx配置文件读取失败"

        # already included: nothing to do
        rep_include = re.compile(r"\s*include\s+.*/ip-restrict/.*\.conf;", re.M)
        if rep_include.search(ng_conf):
            return True, ""

        _include_str = (
            "\n #引用IP黑白名单规则,注释后配置的IP黑白名单将无效\n"
            " include {};"
        ).format(ip_conf.nginx_sub_file)

        # prefer inserting right after the redirect include when one exists
        rep_redirect_include = re.compile(r"\s*include\s+.*/redirect/.*\.conf;", re.M)
        redirect_include_res = rep_redirect_include.search(ng_conf)
        if redirect_include_res:
            new_conf = ng_conf[:redirect_include_res.end()] + _include_str + ng_conf[redirect_include_res.end():]
        else:
            # otherwise anchor on the #SSL-END marker
            if "#SSL-END" not in ng_conf:
                return False, "添加配置失败,无法定位SSL相关配置的位置"

            new_conf = ng_conf.replace("#SSL-END", "#SSL-END" + _include_str)
        write_file(ng_file, new_conf)
        # verify and roll back the vhost file if nginx rejects the change
        if self.web_server == "nginx" and check_server_config() is not None:
            write_file(ng_file, ng_conf)
            return False, "添加配置失败"

        return True, ""

    def set_ip_restrict(self, site_name: str, set_type: str) -> Tuple[bool, str]:
        """Switch the restriction mode: "black", "white" or "closed"."""
        ip_restrict = _IpRestrict(site_name, self.config_prefix)
        if set_type not in ("black", "white", "closed"):
            return False, "不支持的类型【{}】".format(set_type)
        ip_restrict.restrict_type = set_type
        f, msg = self._set_nginx_include(ip_restrict)
        if not f:
            return False, msg

        return ip_restrict.save()

    def add_black_ip_restrict(self, site_name: str, *ips: str) -> Tuple[bool, str]:
        """Add IPs to the site's blacklist (each must parse as an IP address)."""
        try:
            for ip in ips:
                _ = ip_address(ip)  # raises ValueError on bad input
        except ValueError:
            return False, "ip参数解析错误"
        ip_restrict = _IpRestrict(site_name, self.config_prefix)
        black_list = ip_restrict.black_list
        for i in ips:
            if i not in black_list:
                black_list.append(i)

        ip_restrict.black_list = black_list
        f, msg = self._set_nginx_include(ip_restrict)
        if not f:
            return False, msg

        return ip_restrict.save()

    def remove_black_ip_restrict(self, site_name: str, *ips: str):
        """Remove IPs from the site's blacklist; unknown IPs are ignored."""
        ip_restrict = _IpRestrict(site_name, self.config_prefix)
        black_list = ip_restrict.black_list
        for i in ips:
            if i in black_list:
                black_list.remove(i)

        ip_restrict.black_list = black_list
        f, msg = self._set_nginx_include(ip_restrict)
        if not f:
            return False, msg

        return ip_restrict.save()

    def add_white_ip_restrict(self, site_name: str, *ips: str) -> Tuple[bool, str]:
        """Add IPs to the site's whitelist (each must parse as an IP address)."""
        try:
            for ip in ips:
                _ = ip_address(ip)  # raises ValueError on bad input
        except ValueError:
            return False, "ip参数解析错误"
        ip_restrict = _IpRestrict(site_name, self.config_prefix)
        white_list = ip_restrict.white_list
        for i in ips:
            if i not in white_list:
                white_list.append(i)

        ip_restrict.white_list = white_list
        f, msg = self._set_nginx_include(ip_restrict)
        if not f:
            return False, msg

        return ip_restrict.save()

    def remove_white_ip_restrict(self, site_name: str, *ips: str) -> Tuple[bool, str]:
        """Remove IPs from the site's whitelist; unknown IPs are ignored.

        NOTE(review): unlike the other mutators, this one does not call
        _set_nginx_include before saving — confirm whether that is intended.
        """
        ip_restrict = _IpRestrict(site_name, self.config_prefix)
        white_list = ip_restrict.white_list
        for i in ips:
            if i in white_list:
                white_list.remove(i)

        ip_restrict.white_list = white_list

        return ip_restrict.save()
|
||||
|
||||
def remove_site_ip_restrict_info(self, site_name: str):
|
||||
ip_restrict = _IpRestrict(site_name, self.config_prefix)
|
||||
ip_restrict.remove_config_for_remove_site()
|
||||
|
||||
|
||||
class IpRestrict:
    """HTTP-facing facade over the IP black/white-list manager.

    Each handler parses the panel request object ``get``, delegates to the
    real implementation and wraps the outcome with json_response.
    """

    def __init__(self, config_prefix: str = ""):
        self.config_prefix = config_prefix
        self._ri = RealIpRestrict(self.config_prefix)

    # Fetch a site's IP restriction configuration.
    def restrict_conf(self, get):
        try:
            site_name = get.site_name.strip()
        except (AttributeError, json.JSONDecodeError):
            return json_response(status=False, msg="Parameter error")

        ok, payload = self._ri.restrict_conf(site_name)
        if ok:
            return json_response(status=ok, data=payload)
        return json_response(status=ok, msg=payload)

    # Switch a site between black/white/closed restriction modes.
    def set_ip_restrict(self, get):
        try:
            site_name = get.site_name.strip()
            mode = get.set_type.strip()
        except (AttributeError, json.JSONDecodeError):
            return json_response(status=False, msg="Parameter error")

        ok, message = self._ri.set_ip_restrict(site_name, mode)
        return json_response(status=ok, msg=message)

    # Append an IP to a site's blacklist.
    def add_black_ip_restrict(self, get):
        try:
            site_name = get.site_name.strip()
            value = get.value.strip()
        except AttributeError:
            return json_response(status=False, msg="Parameter error")

        ok, message = self._ri.add_black_ip_restrict(site_name, value)
        return json_response(status=ok, msg=message)

    # Remove an IP from a site's blacklist.
    def remove_black_ip_restrict(self, get):
        try:
            site_name = get.site_name.strip()
            value = get.value.strip()
        except (AttributeError, json.JSONDecodeError):
            return json_response(status=False, msg="Parameter error")

        ok, message = self._ri.remove_black_ip_restrict(site_name, value)
        return json_response(status=ok, msg=message)

    # Append an IP to a site's whitelist.
    def add_white_ip_restrict(self, get):
        try:
            site_name = get.site_name.strip()
            value = get.value.strip()
        except (AttributeError, json.JSONDecodeError):
            return json_response(status=False, msg="Parameter error")

        ok, message = self._ri.add_white_ip_restrict(site_name, value)
        return json_response(status=ok, msg=message)

    # Remove an IP from a site's whitelist.
    def remove_white_ip_restrict(self, get):
        try:
            site_name = get.site_name.strip()
            value = get.value.strip()
        except (AttributeError, json.JSONDecodeError):
            return json_response(status=False, msg="Parameter error")

        ok, message = self._ri.remove_white_ip_restrict(site_name, value)
        return json_response(status=ok, msg=message)
|
||||
|
||||
|
||||
238
mod/base/web_conf/limit_net.py
Normal file
238
mod/base/web_conf/limit_net.py
Normal file
@@ -0,0 +1,238 @@
|
||||
import os
|
||||
import re
|
||||
from typing import Tuple, Union
|
||||
|
||||
from .util import webserver
|
||||
|
||||
|
||||
class LimitNet(object):
|
||||
|
||||
    def get_limit_net(self, get) -> Union[Tuple[bool, str], dict]:
        """Read the site's current nginx traffic limits from its vhost file.

        Returns a dict with 'perserver', 'perip' and 'limit_rate' (0 = unset)
        plus a 'value' preset index filled in by _show_limit_net, or a
        returnMsg error dict on failure.

        NOTE(review): the non-nginx branch returns a (False, "") tuple while
        every other failure returns a returnMsg dict — confirm callers handle
        both shapes.  The original annotation (Union[bool, str]) matched
        neither and has been corrected above.
        """
        if webserver() != 'nginx':
            return False, ""
        try:
            site_id = int(get.site_id)
        except (AttributeError, TypeError, ValueError):
            return public.returnMsg(False, "Parameter error")

        if self.config_prefix is None:
            return public.returnMsg(False, "不支持的网站类型")

        # Load this site's vhost configuration file.
        site_name = public.M('sites').where("id=?", (site_id,)).getField('name')
        filename = "{}/vhost/nginx/{}{}.conf".format(self.setup_path, self.config_prefix, site_name)
        conf = public.readFile(filename)
        if not isinstance(conf, str):
            return public.returnMsg(False, "配置文件读取错误")

        # Defaults: 0 means "no limit configured".
        data = {
            'perserver': 0,
            'perip': 0,
            'limit_rate': 0,
        }

        # Site-wide concurrency limit; the 'prefix' group lets us skip
        # directives that are commented out with '#'.
        rep_per_server = re.compile(r"(?P<prefix>.*)limit_conn +perserver +(?P<target>\d+) *; *", re.M)
        tmp_res = rep_per_server.search(conf)
        if tmp_res is not None and tmp_res.group("prefix").find("#") == -1:  # present and not a comment
            data['perserver'] = int(tmp_res.group("target"))

        # Per-IP concurrency limit
        rep_per_ip = re.compile(r"(?P<prefix>.*)limit_conn +perip +(?P<target>\d+) *; *", re.M)
        tmp_res = rep_per_ip.search(conf)
        if tmp_res is not None and tmp_res.group("prefix").find("#") == -1:  # present and not a comment
            data['perip'] = int(tmp_res.group("target"))

        # Bandwidth (rate) limit
        rep_limit_rate = re.compile(r"(?P<prefix>.*)limit_rate +(?P<target>\d+)\w+ *; *", re.M)
        tmp_res = rep_limit_rate.search(conf)
        if tmp_res is not None and tmp_res.group("prefix").find("#") == -1:  # present and not a comment
            data['limit_rate'] = int(tmp_res.group("target"))

        self._show_limit_net(data)  # annotate with the matching preset index
        return data
|
||||
|
||||
@staticmethod
|
||||
def _show_limit_net(data):
|
||||
values = [
|
||||
[300, 25, 512],
|
||||
[200, 10, 1024],
|
||||
[50, 3, 2048],
|
||||
[500, 10, 2048],
|
||||
[400, 15, 1024],
|
||||
[60, 10, 512],
|
||||
[150, 4, 1024],
|
||||
]
|
||||
for i, c in enumerate(values):
|
||||
if data["perserver"] == c[0] and data["perip"] == c[1] and data["limit_rate"] == c[2]:
|
||||
data["value"] = i + 1
|
||||
break
|
||||
else:
|
||||
data["value"] = 0
|
||||
|
||||
@staticmethod
|
||||
def _set_nginx_conf_limit() -> Tuple[bool, str]:
|
||||
# 设置共享内存
|
||||
nginx_conf_file = "/www/server/nginx/conf/nginx.conf"
|
||||
if not os.path.exists(nginx_conf_file):
|
||||
return False, "nginx配置文件丢失"
|
||||
nginx_conf = public.readFile(nginx_conf_file)
|
||||
rep_perip = re.compile(r"\s+limit_conn_zone +\$binary_remote_addr +zone=perip:10m;", re.M)
|
||||
rep_per_server = re.compile(r"\s+limit_conn_zone +\$server_name +zone=perserver:10m;", re.M)
|
||||
perip_res = rep_perip.search(nginx_conf)
|
||||
per_serve_res = rep_per_server.search(nginx_conf)
|
||||
if perip_res and per_serve_res:
|
||||
return True, ""
|
||||
elif perip_res or per_serve_res:
|
||||
tmp_res = perip_res or per_serve_res
|
||||
new_conf = nginx_conf[:tmp_res.start()] + (
|
||||
"\n\t\tlimit_conn_zone $binary_remote_addr zone=perip:10m;"
|
||||
"\n\t\tlimit_conn_zone $server_name zone=perserver:10m;"
|
||||
) + nginx_conf[tmp_res.end():]
|
||||
else:
|
||||
# 通过检查第一个server的位置
|
||||
rep_first_server = re.compile(r"http\s*\{(.*\n)*\s*server\s*\{")
|
||||
tmp_res = rep_first_server.search(nginx_conf)
|
||||
if tmp_res:
|
||||
old_http_conf = tmp_res.group()
|
||||
# 在第一个server项前添加
|
||||
server_idx = old_http_conf.rfind("server")
|
||||
new_http_conf = old_http_conf[:server_idx] + (
|
||||
"\n\t\tlimit_conn_zone $binary_remote_addr zone=perip:10m;"
|
||||
"\n\t\tlimit_conn_zone $server_name zone=perserver:10m;\n"
|
||||
) + old_http_conf[server_idx:]
|
||||
new_conf = rep_first_server.sub(new_http_conf, nginx_conf, 1)
|
||||
else:
|
||||
# 在没有配置其他server项目时,通过检查include server项目检查
|
||||
# 通检查 include /www/server/panel/vhost/nginx/*.conf; 位置
|
||||
rep_include = re.compile(r"http\s*\{(.*\n)*\s*include +/www/server/panel/vhost/nginx/\*\.conf;")
|
||||
tmp_res = rep_include.search(nginx_conf)
|
||||
if not tmp_res:
|
||||
return False, "The global configuration cache configuration failed"
|
||||
old_http_conf = tmp_res.group()
|
||||
|
||||
include_idx = old_http_conf.rfind("include ")
|
||||
new_http_conf = old_http_conf[:include_idx] + (
|
||||
"\n\t\tlimit_conn_zone $binary_remote_addr zone=perip:10m;"
|
||||
"\n\t\tlimit_conn_zone $server_name zone=perserver:10m;\n"
|
||||
) + old_http_conf[include_idx:]
|
||||
new_conf = rep_first_server.sub(new_http_conf, nginx_conf, 1)
|
||||
|
||||
public.writeFile(nginx_conf_file, new_conf)
|
||||
if public.checkWebConfig() is not True: # 检测失败,无法添加
|
||||
public.writeFile(nginx_conf_file, nginx_conf)
|
||||
return False, "The global configuration cache configuration failed"
|
||||
return True, ""
|
||||
|
||||
# 设置流量限制
|
||||
def set_limit_net(self, get):
|
||||
if public.get_webserver() != 'nginx':
|
||||
return public.returnMsg(False, 'SITE_NETLIMIT_ERR')
|
||||
try:
|
||||
site_id = int(get.site_id)
|
||||
per_server = int(get.perserver)
|
||||
perip = int(get.perip)
|
||||
limit_rate = int(get.limit_rate)
|
||||
except (AttributeError, TypeError, ValueError):
|
||||
return public.returnMsg(False, "Parameter error")
|
||||
|
||||
if per_server < 1 or perip < 1 or limit_rate < 1:
|
||||
return public.returnMsg(False, '并发限制,IP限制,流量限制必需大于0')
|
||||
|
||||
# 取配置文件
|
||||
site_info = public.M('sites').where("id=?", (site_id,)).find()
|
||||
if not isinstance(site_info, dict):
|
||||
return public.returnMsg(False, "站点信息查询错误")
|
||||
else:
|
||||
site_name = site_info["name"]
|
||||
filename = "{}/vhost/nginx/{}{}.conf".format(self.setup_path, self.config_prefix, site_name)
|
||||
site_conf: str = public.readFile(filename)
|
||||
if not isinstance(site_conf, str):
|
||||
return public.returnMsg(False, "配置文件读取错误")
|
||||
|
||||
flag, msg = self._set_nginx_conf_limit()
|
||||
if not flag:
|
||||
return public.returnMsg(False, msg)
|
||||
|
||||
per_server_str = ' limit_conn perserver {};'.format(per_server)
|
||||
perip_str = ' limit_conn perip {};'.format(perip)
|
||||
limit_rate_str = ' limit_rate {}k;'.format(limit_rate)
|
||||
|
||||
# 请求并发限制
|
||||
new_conf = site_conf
|
||||
ssl_end_res = re.search(r"#error_page 404/404.html;[^\n]*\n", new_conf)
|
||||
if ssl_end_res is None:
|
||||
return public.returnMsg(False, "未定位到SSL的相关配置,添加失败")
|
||||
ssl_end_idx = ssl_end_res.end()
|
||||
rep_limit_rate = re.compile(r"(.*)limit_rate +(\d+)\w+ *; *", re.M)
|
||||
tmp_res = rep_limit_rate.search(new_conf)
|
||||
if tmp_res is not None :
|
||||
new_conf = rep_limit_rate.sub(limit_rate_str, new_conf)
|
||||
else:
|
||||
new_conf = new_conf[:ssl_end_idx] + limit_rate_str + "\n" + new_conf[ssl_end_idx:]
|
||||
|
||||
# IP并发限制
|
||||
rep_per_ip = re.compile(r"(.*)limit_conn +perip +(\d+) *; *", re.M)
|
||||
tmp_res = rep_per_ip.search(new_conf)
|
||||
if tmp_res is not None:
|
||||
new_conf = rep_per_ip.sub(perip_str, new_conf)
|
||||
else:
|
||||
new_conf = new_conf[:ssl_end_idx] + perip_str + "\n" + new_conf[ssl_end_idx:]
|
||||
|
||||
rep_per_server = re.compile(r"(.*)limit_conn +perserver +(\d+) *; *", re.M)
|
||||
tmp_res = rep_per_server.search(site_conf)
|
||||
if tmp_res is not None:
|
||||
new_conf = rep_per_server.sub(per_server_str, new_conf)
|
||||
else:
|
||||
new_conf = new_conf[:ssl_end_idx] + per_server_str + "\n" + new_conf[ssl_end_idx:]
|
||||
|
||||
public.writeFile(filename, new_conf)
|
||||
is_error = public.checkWebConfig()
|
||||
if is_error is not True:
|
||||
public.writeFile(filename, site_conf)
|
||||
return public.returnMsg(False, 'ERROR:<br><a style="color:red;">' + is_error.replace("\n", '<br>') + '</a>')
|
||||
|
||||
public.serviceReload()
|
||||
public.WriteLog('TYPE_SITE', 'SITE_NETLIMIT_OPEN_SUCCESS', (site_name,))
|
||||
return public.returnMsg(True, 'Successfully set')
|
||||
|
||||
# 关闭流量限制
|
||||
def close_limit_net(self, get):
|
||||
if public.get_webserver() != 'nginx':
|
||||
return public.returnMsg(False, 'SITE_NETLIMIT_ERR')
|
||||
if self.config_prefix is None:
|
||||
return public.returnMsg(False, "不支持的网站类型")
|
||||
try:
|
||||
site_id = int(get.site_id)
|
||||
except (AttributeError, TypeError, ValueError):
|
||||
return public.returnMsg(False, "Parameter error")
|
||||
|
||||
# 取回配置文件
|
||||
site_info = public.M('sites').where("id=?", (site_id,)).find()
|
||||
if not isinstance(site_info, dict):
|
||||
return public.returnMsg(False, "站点信息查询错误")
|
||||
else:
|
||||
site_name = site_info["name"]
|
||||
filename = "{}/vhost/nginx/{}{}.conf".format(self.setup_path, self.config_prefix, site_name)
|
||||
site_conf = public.readFile(filename)
|
||||
if not isinstance(site_conf, str):
|
||||
return public.returnMsg(False, "配置文件读取错误")
|
||||
|
||||
# 清理总并发
|
||||
rep_limit_rate = re.compile(r"(.*)limit_rate +(\d+)\w+ *; *\n?", re.M)
|
||||
rep_per_ip = re.compile(r"(.*)limit_conn +perip +(\d+) *; *\n?", re.M)
|
||||
rep_per_server = re.compile(r"(.*)limit_conn +perserver +(\d+) *; *\n?", re.M)
|
||||
|
||||
new_conf = site_conf
|
||||
new_conf = rep_limit_rate.sub("", new_conf, 1)
|
||||
new_conf = rep_per_ip.sub("", new_conf, 1)
|
||||
new_conf = rep_per_server.sub("", new_conf, 1)
|
||||
|
||||
public.writeFile(filename, new_conf)
|
||||
is_error = public.checkWebConfig()
|
||||
if is_error is not True:
|
||||
public.writeFile(filename, site_conf)
|
||||
return public.returnMsg(False, 'ERROR:<br><a style="color:red;">' + is_error.replace("\n", '<br>') + '</a>')
|
||||
public.serviceReload()
|
||||
public.WriteLog('TYPE_SITE', 'SITE_NETLIMIT_CLOSE_SUCCESS', (site_name,))
|
||||
return public.returnMsg(True, 'SITE_NETLIMIT_CLOSE_SUCCESS')
|
||||
855
mod/base/web_conf/logmanager.py
Normal file
855
mod/base/web_conf/logmanager.py
Normal file
@@ -0,0 +1,855 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import sys
|
||||
from typing import Tuple, Optional, Union, List
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload, get_log_path, pre_re_key
|
||||
from mod.base import json_response
|
||||
|
||||
|
||||
class _BaseLogFormat:
    """Base class managing custom access-log formats for a web server.

    Formats are persisted as JSON in ``_config_file`` and rendered into the
    web server's own configuration via subclass hooks (``_set_to_config`` /
    ``set_site_log_format_in_config``).  Public methods return an error
    message string on failure and None on success.
    """

    panel_path = "/www/server/panel"

    def __init__(self):
        self._config_file = ""                 # JSON store path (set by subclass)
        self._config: Optional[dict] = None    # lazy-loaded format registry
        self._format_dict = None               # lazy cache for log_format property
        self._log_format_dir = ''              # dir of generated *_format.conf (subclass)

    @property
    def config(self) -> dict:
        """Lazy-load the JSON registry; a missing/corrupt file yields {}."""
        if self._config is None:
            try:
                self._config = json.loads(read_file(self._config_file))
            except (json.JSONDecodeError, TypeError, ValueError):
                self._config = {}
        return self._config

    def save_config(self):
        """Persist the registry, but only if it has actually been loaded."""
        if self._config is not None:
            write_file(self._config_file, json.dumps(self._config))

    @property
    def log_format(self) -> dict:
        """Mapping of supported log variables (subclass-specific)."""
        raise NotImplementedError()

    def check_config(self, name: str, keys: List[str], space_character=None) -> Optional[str]:
        """Validate a format definition; return an error message or None."""
        if space_character and len(space_character) > 4:
            return "间隔符过长,请输入小于4位的间隔符"
        rep_name = re.compile(r"^\w+$")
        if rep_name.match(name) is None:
            return "名称只能包含数字、字母和下划线"
        if name in ("combined", "main"):
            return "请勿使用默认名称"
        # Reject keys that aren't recognized log variables.
        error_key = [k for k in keys if k not in self.log_format]
        if error_key:
            return "无法识别以下日志关键字:【{}】".format(",".join(error_key))
        return None

    # Add a log format
    def add_log_format(self, name: str, keys: List[str], space_character=" ") -> Optional[str]:
        """Create a new named log format and render it to the server config."""
        error_msg = self.check_config(name, keys, space_character)
        if error_msg:
            return error_msg
        if name in self.config:
            return "该名称的日志格式已存在"
        error_msg = self._set_to_config(name, keys, space_character, is_modify=False)
        if error_msg:
            return error_msg

        self.config[name] = {"keys": keys, "space_character": space_character, "sites": []}
        self.save_config()
        service_reload()
        return None

    # Modify a log format
    def modify_log_format(self, name: str, keys: List[str], space_character=None) -> Optional[str]:
        """Update an existing format; keeps the old separator when none given."""
        error_msg = self.check_config(name, keys, space_character)
        if error_msg:
            return error_msg
        if name not in self.config:
            return "该名称的日志格式不存在"

        self.config[name]["keys"] = keys
        if space_character:
            self.config[name]["space_character"] = space_character
        else:
            space_character = self.config[name]["space_character"]

        error_msg = self._set_to_config(name, keys, space_character, is_modify=True)
        if error_msg:
            return error_msg
        self.save_config()
        service_reload()
        return None

    # Remove a log format
    def remove_log_format(self, name: str) -> Optional[str]:
        """Delete a format unless some site still references it."""
        if name not in self.config:
            return "该名称的日志格式不存在"
        # BUG FIX: was `> 1`, which allowed deleting a format still used by
        # exactly one site, breaking that site's config.
        if len(self.config[name].get("sites", [])) > 0:
            return "该日志格式在【{}】网站中正在使用,请先移除".format(",".join(self.config[name]["sites"]))
        self._remove_form_config(name)

        del self.config[name]
        self.save_config()
        service_reload()
        return None

    def _set_to_config(self, name: str, keys: List[str], space_character, is_modify=False) -> Optional[str]:
        """Render the format into the web server config (subclass hook)."""
        raise NotImplementedError

    def _remove_form_config(self, name) -> None:
        """Delete the generated per-format config file, if present."""
        conf_file = self._log_format_dir + "/{}_format.conf".format(name)
        if os.path.isfile(conf_file):
            os.remove(conf_file)

    # Apply the format in the vhost config; "" selects the server default.
    def set_site_log_format_in_config(self, site_name, log_format_name, conf_prefix, mutil=False) -> Optional[str]:
        """Subclass hook.  When mutil is True, the config is not auto-reloaded."""
        raise NotImplementedError()

    # Set a site's log format
    def set_site_log_format(self, site_name, log_format_name, conf_prefix, mutil=False) -> Optional[str]:
        """Bind a site to *log_format_name* ('' = server default) and record it."""
        if log_format_name not in self.config and log_format_name != "":
            return "该名称的日志格式不存在"
        error_msg = self.set_site_log_format_in_config(site_name, log_format_name, conf_prefix, mutil=mutil)
        if error_msg is not None:
            return error_msg
        # Book-keeping: attach the site to the chosen format and detach it
        # from any other format it previously used.  Using setdefault here
        # also avoids the KeyError the original raised via
        # self.config[log_format_name] when log_format_name was "".
        for name, sub_conf in self.config.items():
            if name == log_format_name:
                # BUG FIX: the original appended the site here and then the
                # following check (which was not an elif) immediately removed
                # the same entry, so the association was never recorded.
                if site_name not in sub_conf.setdefault("sites", []):
                    sub_conf["sites"].append(site_name)
            elif site_name in sub_conf.get("sites", []):
                sub_conf["sites"].remove(site_name)

        self.save_config()
|
||||
|
||||
|
||||
class _NgLog(_BaseLogFormat):
|
||||
|
||||
@property
|
||||
def log_format(self) -> dict:
|
||||
if self._format_dict is None:
|
||||
self._format_dict = {
|
||||
"server_addr": {
|
||||
"name": "服务器地址",
|
||||
"key": "$server_addr",
|
||||
},
|
||||
"server_port": {
|
||||
"name": "服务器端口",
|
||||
"key": "$server_port",
|
||||
},
|
||||
"host": {
|
||||
"name": "域名",
|
||||
"key": "$http_host",
|
||||
},
|
||||
"remote_addr": {
|
||||
"name": "客户端地址",
|
||||
"key": "$server_addr",
|
||||
},
|
||||
"remote_port": {
|
||||
"name": "客户端端口",
|
||||
"key": "$server_addr",
|
||||
},
|
||||
"protocol": {
|
||||
"name": "服务器协议",
|
||||
"key": "$server_protocol",
|
||||
},
|
||||
"req_length": {
|
||||
"name": "请求长度",
|
||||
"key": "$request_length",
|
||||
},
|
||||
"method": {
|
||||
"name": "请求方法",
|
||||
"key": "$request_method",
|
||||
},
|
||||
"uri": {
|
||||
"name": "请求uri",
|
||||
"key": "$request_uri",
|
||||
},
|
||||
"status": {
|
||||
"name": "状态码",
|
||||
"key": "$status",
|
||||
},
|
||||
"sent_bytes": {
|
||||
"name": "发送字节数",
|
||||
"key": "$body_bytes_sent",
|
||||
},
|
||||
"referer": {
|
||||
"name": "来源地址",
|
||||
"key": "$http_referer",
|
||||
},
|
||||
"user_agent": {
|
||||
"name": "用户代理(User-Agent)",
|
||||
"key": "$http_user_agent",
|
||||
},
|
||||
"take_time": {
|
||||
"name": "请求用时",
|
||||
"key": "$request_time",
|
||||
},
|
||||
}
|
||||
return self._format_dict
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._config_file = "{}/data/ng_log_format.json".format(self.panel_path)
|
||||
self._log_format_dir = "{}/vhost/nginx/log_format".format(self.panel_path)
|
||||
|
||||
    def _set_log_format_include(self) -> Optional[str]:
        """Ensure nginx.conf includes the panel's log_format snippet directory.

        Returns an error message string, or None when the include is already
        present or was added successfully.
        """
        config_file = "/www/server/nginx/conf/nginx.conf"
        config_data = read_file(config_file)
        if not config_data:
            return "配置文件丢失无法操作"
        if not os.path.isdir(self._log_format_dir):
            os.makedirs(self._log_format_dir)
        # NOTE(review): this pattern hardcodes the default panel path while the
        # include written below uses self._log_format_dir — they diverge if
        # panel_path ever changes; confirm intended.
        rep_include = re.compile(r"include\s+/www/server/panel/vhost/nginx/log_format/\*\.conf\s*;")
        if rep_include.search(config_data):
            return

        # Insert the include immediately after the opening of the http { block.
        rep_http = re.compile(r"\s*http\s*\{[^\n]*\n")
        res = rep_http.search(config_data)
        if not res:
            return "主配置文件中缺少http配置项,无法添加"
        include_str = "include {}/*.conf;\n".format(self._log_format_dir)
        new_conf = config_data[:res.end()] + include_str + config_data[res.end():]
        write_file(config_file, new_conf)
|
||||
|
||||
def _set_to_config(self, name: str, keys: List[str], space_character, is_modify=False) -> Optional[str]:
|
||||
error_msg = self._set_log_format_include()
|
||||
if error_msg:
|
||||
return error_msg
|
||||
conf_file = self._log_format_dir + "/{}_format.conf".format(name)
|
||||
write_file(conf_file, (
|
||||
"log_format {} '{}';".format(name, space_character.join(map(lambda x: self.log_format[x]["key"], keys)))
|
||||
))
|
||||
|
||||
    def set_site_log_format_in_config(self, site_name, log_format_name, conf_prefix, mutil=False) -> Optional[str]:
        """Apply *log_format_name* to the site's server-level access_log directive.

        When mutil is True the configuration is not reloaded automatically.
        Returns an error message string, or None on success/no-op.
        """
        config_file = "{}/vhost/nginx/{}{}.conf".format(self.panel_path, conf_prefix, site_name)
        config_data = read_file(config_file)
        if not config_data:
            return "配置文件丢失无法操作"

        start_idx, end_idx = self.get_first_server_log_idx(config_data)
        if start_idx:
            # A server-level access_log exists: rewrite it in place, keeping
            # the original log path.
            rep_access_log = re.compile(r"\s*access_log\s+(?P<path>[^;\s]*)(\s+(?P<name>\w+))?;")
            res = rep_access_log.search(config_data[start_idx: end_idx])
            if res.group("name") == log_format_name:
                return  # already using the requested format — nothing to do
            new_access_log = "\n access_log {} {};".format(res.group("path"), log_format_name)
            new_conf = config_data[:start_idx] + new_access_log + config_data[end_idx:]
        else:
            # No directive found: append one just before the final closing
            # brace (assumed to close the server block).
            last_server_idx = config_data.rfind("}")
            if last_server_idx == -1:
                return "配置文件格式错误无法操作"
            log_path = "{}/{}.log".format(get_log_path(), site_name)
            new_access_log = "\n access_log {} {};\n".format(log_path, log_format_name)
            new_conf = config_data[:last_server_idx] + new_access_log + config_data[last_server_idx:]
        write_file(config_file, new_conf)
        # Validate with nginx and roll back on failure.
        if webserver() == "nginx" and check_server_config() is not None:
            write_file(config_file, config_data)
            return "配置修改失败"
        if webserver() == "nginx" and not mutil:
            service_reload()
|
||||
|
||||
# 获取配置文件中server等级的第一个access_log的位置
|
||||
@staticmethod
|
||||
def get_first_server_log_idx(config_data) -> Tuple[Optional[int], Optional[int]]:
|
||||
rep_server = re.compile(r"\s*server\s*\{")
|
||||
res = rep_server.search(config_data)
|
||||
if res is None:
|
||||
return None, None
|
||||
rep_log = re.compile(r"\s*access_log\s+(?P<path>[^;\s]*)(\s+(?P<name>\w+))?;", re.M)
|
||||
s_idx = res.end()
|
||||
l_n = 1
|
||||
length = len(config_data)
|
||||
while l_n > 0:
|
||||
next_l = config_data[s_idx:].find("{")
|
||||
next_r = config_data[s_idx:].find("}")
|
||||
if next_l == -1 and next_r == -1: # 都没有了跳过
|
||||
return None, None
|
||||
if next_r == -1 and next_l != -1: # 还剩 { 但是没有 } ,跳过
|
||||
return None, None
|
||||
if next_l == -1:
|
||||
next_l = length
|
||||
if next_l < next_r:
|
||||
if l_n == 1:
|
||||
res = rep_log.search(config_data[s_idx: s_idx + next_l])
|
||||
if res:
|
||||
return s_idx + res.start(), s_idx + res.end()
|
||||
l_n += 1
|
||||
else:
|
||||
l_n -= 1
|
||||
if l_n == 0:
|
||||
res = rep_log.search(config_data[s_idx: s_idx + next_l])
|
||||
if res:
|
||||
return s_idx + res.start(), s_idx + res.end()
|
||||
s_idx += min(next_l, next_r) + 1
|
||||
return None, None
|
||||
|
||||
# 设置站点的日志路径
|
||||
    def set_site_log_path(self, site_name, site_log_path, conf_prefix, mutil=False) -> Optional[str]:
        """Point the site's nginx access/error logs at *site_log_path*.

        *site_log_path* must be an existing directory.  When *mutil* is True
        the server is not reloaded (caller batches several changes).  Returns
        an error message string, or None on success.
        """
        if not os.path.isdir(site_log_path):
            return "不是一个存在的文件夹路径"

        if site_log_path[-1] == "/":
            site_log_path = site_log_path[:-1]  # normalize: drop trailing slash

        # nginx vhost file for this site
        nginx_config_path = '/www/server/panel/vhost/nginx/{}{}.conf'.format(conf_prefix, site_name)
        nginx_config = read_file(nginx_config_path)
        if not nginx_config:
            return "网站配置文件丢失,无法配置"

        # Locate the currently configured log paths so we can rewrite them.
        old_log_file = self.nginx_get_log_file_path(nginx_config, site_name, is_error_log=False)
        old_error_log_file = self.nginx_get_log_file_path(nginx_config, site_name, is_error_log=True)

        if old_log_file and old_error_log_file:
            new_nginx_conf = nginx_config
            # pre_re_key() escapes the old paths for use inside a regex.
            log_file_rep = re.compile(r"access_log +" + pre_re_key(old_log_file))
            error_log_file_rep = re.compile(r"error_log +" + pre_re_key(old_error_log_file))
            if log_file_rep.search(nginx_config):
                new_nginx_conf = log_file_rep.sub("access_log {}/{}.log".format(site_log_path, site_name),
                                                 new_nginx_conf, 1)

            if error_log_file_rep.search(nginx_config):
                new_nginx_conf = error_log_file_rep.sub("error_log {}/{}.error.log".format(site_log_path, site_name),
                                                        new_nginx_conf, 1)

            write_file(nginx_config_path, new_nginx_conf)
            # Validate the rewritten config; restore the original on failure.
            if webserver() == "nginx" and check_server_config() is not None:
                write_file(nginx_config_path, nginx_config)
                return "配置修改失败"
            if webserver() == "nginx" and not mutil:
                service_reload()

        else:
            return "未找到日志配置,无法操作"
|
||||
|
||||
@staticmethod
|
||||
def nginx_get_log_file_path(nginx_config: str, site_name: str, is_error_log: bool = False):
|
||||
log_file = None
|
||||
if is_error_log:
|
||||
re_data = re.findall(r"error_log +(/(\S+/?)+) ?(.*?);", nginx_config)
|
||||
else:
|
||||
re_data = re.findall(r"access_log +(/(\S+/?)+) ?(.*?);", nginx_config)
|
||||
if re_data is None:
|
||||
log_file = None
|
||||
else:
|
||||
for i in re_data:
|
||||
file_path = i[0].strip(";")
|
||||
if file_path != "/dev/null" and not file_path.endswith("purge_cache.log"):
|
||||
if os.path.isdir(os.path.dirname(file_path)):
|
||||
log_file = file_path
|
||||
break
|
||||
|
||||
logsPath = '/www/wwwlogs/'
|
||||
if log_file is None:
|
||||
if is_error_log:
|
||||
log_file = logsPath + site_name + '.log'
|
||||
else:
|
||||
log_file = logsPath + site_name + '.error.log'
|
||||
if not os.path.isfile(log_file):
|
||||
log_file = None
|
||||
|
||||
return log_file
|
||||
|
||||
def get_site_log_path(self, site_name, conf_prefix) -> Union[str, dict]:
|
||||
config_path = '/www/server/panel/vhost/nginx/{}{}.conf'.format(conf_prefix, site_name)
|
||||
config = read_file(config_path)
|
||||
if not config:
|
||||
return "站点配置文件丢失"
|
||||
log_file = self.nginx_get_log_file_path(config, site_name, is_error_log=False)
|
||||
error_log_file = self.nginx_get_log_file_path(config, site_name, is_error_log=False)
|
||||
if not (error_log_file and log_file):
|
||||
return "获取失败"
|
||||
return {
|
||||
"log_file": log_file,
|
||||
"error_log_file": error_log_file,
|
||||
}
|
||||
|
||||
def close_access_log(self, site_name, conf_prefix) -> Optional[str]:
|
||||
nginx_config_path = '/www/server/panel/vhost/nginx/{}{}.conf'.format(conf_prefix, site_name)
|
||||
nginx_config = read_file(nginx_config_path)
|
||||
if not nginx_config:
|
||||
return "网站配置文件丢失,无法配置"
|
||||
|
||||
start_idx, end_idx = self.get_first_server_log_idx(nginx_config)
|
||||
if not start_idx:
|
||||
return None
|
||||
new_conf = nginx_config
|
||||
|
||||
while start_idx is not None:
|
||||
new_conf = new_conf[:start_idx] + '# ' + new_conf[start_idx:]
|
||||
start_idx, end_idx = self.get_first_server_log_idx(new_conf)
|
||||
|
||||
write_file(nginx_config_path, new_conf)
|
||||
if webserver() == "nginx" and check_server_config() is not None:
|
||||
write_file(nginx_config_path, nginx_config)
|
||||
return "配置修改失败"
|
||||
|
||||
return None
|
||||
|
||||
# 未完成
|
||||
def open_access_log(self, site_name, conf_prefix) -> Optional[str]:
|
||||
nginx_config_path = '/www/server/panel/vhost/nginx/{}{}.conf'.format(conf_prefix, site_name)
|
||||
nginx_config = read_file(nginx_config_path)
|
||||
if not nginx_config:
|
||||
return "网站配置文件丢失,无法配置"
|
||||
|
||||
new_conf = nginx_config.replace("#")
|
||||
|
||||
write_file(nginx_config_path, new_conf)
|
||||
if webserver() == "nginx" and check_server_config() is not None:
|
||||
write_file(nginx_config_path, nginx_config)
|
||||
return "配置修改失败"
|
||||
|
||||
return None
|
||||
|
||||
def access_log_is_open(self, site_name, conf_prefix) -> bool:
|
||||
nginx_config_path = '/www/server/panel/vhost/nginx/{}{}.conf'.format(conf_prefix, site_name)
|
||||
nginx_config = read_file(nginx_config_path)
|
||||
if not nginx_config:
|
||||
return False
|
||||
|
||||
start_idx, end_idx = self.get_first_server_log_idx(nginx_config)
|
||||
return start_idx is not None
|
||||
|
||||
|
||||
class _ApLog(_BaseLogFormat):
|
||||
|
||||
    def set_site_log_format_in_config(self, site_name, log_format_name, conf_prefix, mutil=False) -> Optional[str]:
        """Rewrite every CustomLog directive in the apache vhost to this format.

        An empty *log_format_name* falls back to apache's predefined
        "combined" format.  When mutil is True the server is not reloaded.
        Returns an error message string, or None on success.
        """
        if log_format_name == "":
            log_format_name = "combined"
        config_file = "{}/vhost/apache/{}{}.conf".format(self.panel_path, conf_prefix, site_name)
        config_data = read_file(config_file)
        if not config_data:
            return "配置文件丢失无法操作"

        custom_log_rep = re.compile(r'''\s*CustomLog\s+['"](?P<path>.*)['"](\s+(?P<name>.*))?''', re.M)
        # '%s' is substituted with the format name now; the '{}' placeholder
        # survives and is filled with each directive's log path in the loop.
        new_custom_log = '\n CustomLog "{}" %s\n' % log_format_name
        new_conf_list = []
        idx = 0
        # Rebuild the config, swapping every CustomLog directive in place.
        for tmp_res in custom_log_rep.finditer(config_data):
            new_conf_list.append(config_data[idx:tmp_res.start()])
            new_conf_list.append(new_custom_log.format(tmp_res.group("path")))
            idx = tmp_res.end()
        new_conf_list.append(config_data[idx:])
        new_conf = "".join(new_conf_list)

        write_file(config_file, new_conf)
        # Validate with apache and roll back on failure.
        if webserver() == "apache" and check_server_config() is not None:
            write_file(config_file, config_data)
            return "配置修改失败"
        if webserver() == "apache" and not mutil:
            service_reload()
|
||||
|
||||
# 设置站点的日志路径
|
||||
def set_site_log_path(self, site_name, site_log_path, conf_prefix, mutil=False) -> Optional[str]:
|
||||
if not os.path.isdir(site_log_path):
|
||||
return "不是一个存在的文件夹路径"
|
||||
|
||||
if site_log_path[-1] == "/":
|
||||
site_log_path = site_log_path[:-1]
|
||||
|
||||
# apache
|
||||
apache_config_path = '/www/server/panel/vhost/apache/{}{}.conf'.format(conf_prefix, site_name)
|
||||
apache_config = read_file(apache_config_path)
|
||||
if not apache_config:
|
||||
return "网站配置文件丢失,无法配置"
|
||||
|
||||
# apache
|
||||
old_log_file = self.apache_get_log_file_path(apache_config, site_name, is_error_log=False)
|
||||
old_error_log_file = self.apache_get_log_file_path(apache_config, site_name, is_error_log=True)
|
||||
|
||||
if old_log_file and old_error_log_file:
|
||||
new_apache_conf = apache_config
|
||||
log_file_rep = re.compile(r'''CustomLog +['"]?''' + pre_re_key(old_log_file) + '''['"]?''')
|
||||
error_log_file_rep = re.compile(r'''ErrorLog +['"]?''' + pre_re_key(old_error_log_file) + '''['"]?''')
|
||||
if log_file_rep.search(apache_config):
|
||||
new_apache_conf = log_file_rep.sub('CustomLog "{}/{}-access_log"'.format(site_log_path, site_name),
|
||||
new_apache_conf)
|
||||
|
||||
if error_log_file_rep.search(apache_config):
|
||||
new_apache_conf = error_log_file_rep.sub('ErrorLog "{}/{}.-error_log"'.format(site_log_path, site_name),
|
||||
new_apache_conf)
|
||||
write_file(apache_config_path, new_apache_conf)
|
||||
print(new_apache_conf)
|
||||
if webserver() == "apache" and check_server_config() is not None:
|
||||
write_file(apache_config_path, apache_config)
|
||||
return "配置修改失败"
|
||||
if webserver() == "apache" and not mutil:
|
||||
service_reload()
|
||||
else:
|
||||
return "未找到日志配置,无法操作"
|
||||
|
||||
@staticmethod
|
||||
def apache_get_log_file_path(apache_config: str, site_name: str, is_error_log: bool = False):
|
||||
log_file = None
|
||||
if is_error_log:
|
||||
re_data = re.findall(r'''ErrorLog +['"]?(/(\S+/?)+)['"]? ?(.*?)\n''', apache_config)
|
||||
else:
|
||||
re_data = re.findall(r'''CustomLog +['"]?(/(\S+/?)+)['"]? ?(.*?)\n''', apache_config)
|
||||
if re_data is None:
|
||||
log_file = None
|
||||
else:
|
||||
for i in re_data:
|
||||
file_path = i[0].strip('"').strip("'")
|
||||
if file_path != "/dev/null":
|
||||
if os.path.isdir(os.path.dirname(file_path)):
|
||||
log_file = file_path
|
||||
break
|
||||
|
||||
logsPath = '/www/wwwlogs/'
|
||||
if log_file is None:
|
||||
if is_error_log:
|
||||
log_file = logsPath + site_name + '-access_log'
|
||||
else:
|
||||
log_file = logsPath + site_name + '-error_log'
|
||||
if not os.path.isfile(log_file):
|
||||
log_file = None
|
||||
|
||||
return log_file
|
||||
|
||||
    @staticmethod
    def close_access_log(site_name, conf_prefix) -> Optional[str]:
        """Disable access logging by commenting out every CustomLog directive
        in the site's apache vhost.

        Returns an error string on failure, None on success.
        NOTE(review): the comment prefix written here is "# " (with a space),
        i.e. lines become "# CustomLog ..." — confirm the reopen/status helpers
        account for that exact form.
        """
        apache_config_path = '/www/server/panel/vhost/apache/{}{}.conf'.format(conf_prefix, site_name)
        apache_config = read_file(apache_config_path)
        if not apache_config:
            return "网站配置文件丢失,无法配置"
        custom_log_rep = re.compile(r'''CustomLog +['"]?(/(\S+/?)+)['"]?(\s*.*)?''', re.M)
        new_conf_list = []
        idx = 0
        # Rebuild the file, prefixing each matched CustomLog line with "# ".
        for tmp_res in custom_log_rep.finditer(apache_config):
            new_conf_list.append(apache_config[idx:tmp_res.start()])
            new_conf_list.append("# " + tmp_res.group())
            idx = tmp_res.end()
        new_conf_list.append(apache_config[idx:])
        new_conf = "".join(new_conf_list)
        write_file(apache_config_path, new_conf)
        # Roll back if apache rejects the edited configuration.
        if webserver() == "apache" and check_server_config() is not None:
            write_file(apache_config_path, apache_config)
            return "配置修改失败"
        return None
|
||||
|
||||
@staticmethod
|
||||
def open_access_log(site_name, conf_prefix) -> Optional[str]:
|
||||
apache_config_path = '/www/server/panel/vhost/apache/{}{}.conf'.format(conf_prefix, site_name)
|
||||
apache_config = read_file(apache_config_path)
|
||||
if not apache_config:
|
||||
return "网站配置文件丢失,无法配置"
|
||||
new_conf = apache_config.replace("#CustomLog", "CustomLog")
|
||||
write_file(apache_config_path, new_conf)
|
||||
if webserver() == "apache" and check_server_config() is not None:
|
||||
write_file(apache_config_path, apache_config)
|
||||
return "配置修改失败"
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def access_log_is_open(site_name, conf_prefix) -> bool:
|
||||
apache_config_path = '/www/server/panel/vhost/apache/{}{}.conf'.format(conf_prefix, site_name)
|
||||
apache_config = read_file(apache_config_path)
|
||||
if not apache_config:
|
||||
return False
|
||||
if apache_config.find("#CustomLog") != -1:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_site_log_path(self, site_name, conf_prefix) -> Union[str, dict]:
|
||||
config_path = '/www/server/panel/vhost/apache/{}{}.conf'.format(conf_prefix, site_name)
|
||||
config = read_file(config_path)
|
||||
if not config:
|
||||
return "站点配置文件丢失"
|
||||
log_file = self.apache_get_log_file_path(config, site_name, is_error_log=False)
|
||||
error_log_file = self.apache_get_log_file_path(config, site_name, is_error_log=False)
|
||||
if not (error_log_file and log_file):
|
||||
return "获取失败"
|
||||
return {
|
||||
"log_file": log_file,
|
||||
"error_log_file": error_log_file,
|
||||
}
|
||||
|
||||
    @property
    def log_format(self) -> dict:
        """Lazily built catalogue of selectable apache log fields.

        Maps a panel-side field id to {"name": display label, "key": the
        mod_log_config format specifier written into LogFormat directives}.
        """
        if self._format_dict is None:
            self._format_dict = {
                "server_addr": {
                    "name": "服务器地址",
                    "key": "%A",
                },
                "server_port": {
                    "name": "服务器端口",
                    "key": "%p",
                },
                "host": {
                    "name": "域名",
                    "key": "%V",
                },
                "remote_addr": {
                    "name": "客户端地址",
                    "key": "%{c}a",
                },
                "remote_port": {
                    "name": "客户端端口",
                    "key": "%{remote}p",
                },
                "protocol": {
                    "name": "服务器协议",
                    "key": "%H",
                },
                "method": {
                    "name": "请求方法",
                    "key": "%m",
                },
                "uri": {
                    "name": "请求uri",
                    # Escaped quotes so the value appears quoted in the log line.
                    "key": r"\"%U\"",
                },
                "status": {
                    "name": "状态码",
                    "key": "%>s",
                },
                "sent_bytes": {
                    "name": "发送字节数",
                    "key": "%B",
                },
                "referer": {
                    "name": "来源地址",
                    "key": r"\"%{Referer}i\"",
                },
                "user_agent": {
                    "name": "用户代理(User-Agent)",
                    "key": r"\"%{User-Agent}i\"",
                },
                "take_time": {
                    "name": "请求用时",
                    "key": "%{ms}T",
                },
            }
        return self._format_dict
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._config_file = "{}/data/ap_log_format.json".format(self.panel_path)
|
||||
self._log_format_dir = "{}/vhost/apache/log_format".format(self.panel_path)
|
||||
|
||||
    def _set_log_format_include(self) -> Optional[str]:
        """Ensure httpd.conf includes the panel's log_format snippet directory.

        Creates the directory if needed and appends an IncludeOptional block
        once. Returns an error string on failure, None/implicit None otherwise.
        """
        config_file = "/www/server/apache/conf/httpd.conf"
        config_data = read_file(config_file)
        if not config_data:
            return "配置文件丢失无法操作"
        if not os.path.isdir(self._log_format_dir):
            os.makedirs(self._log_format_dir)
        rep_include = re.compile(r"IncludeOptional\s+/www/server/panel/vhost/apache/log_format/\*\.conf")
        if rep_include.search(config_data):
            # Already wired up — nothing to do.
            return
        new_conf = config_data + """
<IfModule log_config_module>
    IncludeOptional /www/server/panel/vhost/apache/log_format/*.conf
</IfModule>
"""
        # NOTE(review): no check_server_config/rollback after editing
        # httpd.conf here, unlike the vhost editors — confirm intentional.
        write_file(config_file, new_conf)
|
||||
|
||||
def _set_to_config(self, name: str, keys: List[str], space_character, is_modify=False) -> Optional[str]:
|
||||
error_msg = self._set_log_format_include()
|
||||
if error_msg:
|
||||
return error_msg
|
||||
conf_file = self._log_format_dir + "/{}_format.conf".format(name)
|
||||
write_file(conf_file, (
|
||||
'LogFormat "{}" {}'.format(space_character.join(map(lambda x: self.log_format[x]["key"], keys)), name)
|
||||
))
|
||||
|
||||
|
||||
class RealLogMgr:
    """Backend facade over the per-webserver log tools.

    Picks _NgLog or _ApLog at construction based on the active webserver and
    forwards log-format / log-path operations to it.
    """

    def __init__(self, conf_prefix: str = ""):
        # conf_prefix: filename prefix of the vhost config files.
        self.conf_prefix = conf_prefix
        if webserver() == "nginx":
            self._log_format_tool = _NgLog()
        else:
            self._log_format_tool = _ApLog()

    @staticmethod
    def remove_site_log_format_info(site_name: str):
        """Drop *site_name* from every stored format's "sites" list (both servers)."""
        for logtool in (_NgLog(), _ApLog()):
            for _, conf in logtool.config.items():
                if site_name in conf.get("sites", []):
                    conf["sites"].remove(site_name)
            logtool.save_config()

    def log_format_data(self, site_name: str):
        """Return the format assigned to the site (None = server default),
        plus the field catalogue and all defined formats."""
        log_format_data = None
        for name, data in self._log_format_tool.config.items():
            if site_name in data.get("sites", []):
                log_format_data = data
                # Surface the format's name inside its own payload.
                log_format_data.update(name=name)
        return {
            "log_format": log_format_data,
            "rule": self._log_format_tool.log_format,
            "all_log_format": self._log_format_tool.config
        }

    def add_log_format(self, name: str, keys: List[str], space_character=" ") -> Optional[str]:
        """Create a new named log format; returns an error string on failure."""
        return self._log_format_tool.add_log_format(name, keys, space_character)

    def modify_log_format(self, name: str, keys: List[str], space_character=None) -> Optional[str]:
        """Modify an existing named log format; returns an error string on failure."""
        return self._log_format_tool.modify_log_format(name, keys, space_character)

    def remove_log_format(self, name: str) -> Optional[str]:
        """Delete a named log format; returns an error string on failure."""
        return self._log_format_tool.remove_log_format(name)

    # log_format_name 为空字符串时表示恢复成默认的日志格式
    # (an empty log_format_name restores the server's default format)
    def set_site_log_format(self, site_name, log_format_name, mutil=False) -> Optional[str]:
        return self._log_format_tool.set_site_log_format(site_name, log_format_name, self.conf_prefix, mutil)

    def set_site_log_path(self, site_name, site_log_path, mutil=False) -> Optional[str]:
        """Move the site's log files to *site_log_path*; error string on failure."""
        return self._log_format_tool.set_site_log_path(site_name, site_log_path, self.conf_prefix, mutil)

    def get_site_log_path(self, site_name) -> Union[str, dict]:
        """Return the site's log/error-log paths, or an error string."""
        return self._log_format_tool.get_site_log_path(site_name, self.conf_prefix)

    @staticmethod
    def site_crontab_log(site_name: str, hour: int, minute: int, save: int) -> bool:
        """Register a daily log-rotation crontab task for the site.

        Returns True when a task already exists or was created successfully.
        """
        # NOTE(review): this checks for a task named "ALL", not one for this
        # site — presumably a global rotation task already covers every site;
        # confirm against the crontab module's semantics.
        if DB("crontab").where("sName =? and sType = ?", ("ALL", "logs")).find():
            return True

        # The panel's crontab helper lives outside the normal package path.
        if "/www/server/panel/class" not in sys.path:
            sys.path.insert(0, "/www/server/panel/class")
        import crontab
        crontabs = crontab.crontab()
        args = {
            "name": "切割日志[{}]".format(site_name),
            "type": 'day',
            "where1": '',
            "hour": hour,
            "minute": minute,
            "sName": site_name,
            "sType": 'logs',
            "notice": '',
            "notice_channel": '',
            "save": save,
            "save_local": '1',
            "backupTo": '',
            "sBody": '',
            "urladdress": ''
        }
        res = crontabs.AddCrontab(args)
        # AddCrontab reports success by returning a record containing "id".
        if res and "id" in res.keys():
            return True
        return False
|
||||
|
||||
|
||||
class LogMgr:
    """HTTP-facing wrapper around RealLogMgr.

    Each handler pulls parameters off the request-like *get* object, returns a
    json_response(status=False, ...) on malformed input, and otherwise wraps
    the backend result. Backend methods signal failure by returning a string.
    """

    def __init__(self, conf_prefix: str = ""):
        # conf_prefix: vhost config filename prefix, forwarded to the backend.
        self.conf_prefix = conf_prefix
        self._real_log_mgr = RealLogMgr(self.conf_prefix)

    def log_format_data(self, get):
        """Return log-format info for one site."""
        try:
            site_name = get.site_name.strip()
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")
        data = self._real_log_mgr.log_format_data(site_name)
        return json_response(status=True, data=data)

    def add_log_format(self, get):
        """Create a named log format from format_name / keys (JSON list) / optional space_character."""
        try:
            space_character = " "
            format_name = get.format_name.strip()
            keys = json.loads(get.keys.strip())
            if "space_character" in get:
                space_character = get.space_character
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.add_log_format(format_name, keys, space_character)
        # A string result from the backend is an error message.
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, msg="添加成功")

    def modify_log_format(self, get):
        """Modify an existing named log format."""
        try:
            space_character = None
            format_name = get.format_name.strip()
            keys = json.loads(get.keys.strip())
            if "space_character" in get:
                space_character = get.space_character
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.modify_log_format(format_name, keys, space_character)
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, msg="修改成功")

    def remove_log_format(self, get):
        """Delete a named log format."""
        try:
            format_name = get.format_name.strip()
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.remove_log_format(format_name)
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, msg="Successfully delete")

    def set_site_log_format(self, get):
        """Assign a named log format to a site (empty name restores the default)."""
        try:
            format_name = get.format_name.strip()
            site_name = get.site_name.strip()
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.set_site_log_format(site_name, log_format_name=format_name)
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, msg="添加成功")

    def set_site_log_path(self, get):
        """Change the directory a site's logs are written to."""
        try:
            log_path = get.log_path.strip()
            site_name = get.site_name.strip()
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.set_site_log_path(site_name, site_log_path=log_path)
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, msg="修改路径成功")

    def get_site_log_path(self, get):
        """Return the site's current access/error log paths."""
        try:
            site_name = get.site_name.strip()
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.get_site_log_path(site_name)
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, data=msg)

    def site_crontab_log(self, get):
        """Register a daily log-rotation crontab task for the site."""
        try:
            site_name = get.site_name.strip()
            hour = int(get.hour.strip())
            minute = int(get.minute.strip())
            save = int(get.save.strip())
        except (AttributeError, json.JSONDecodeError, TypeError, ValueError):
            return json_response(status=False, msg="参数类型错误")

        msg = self._real_log_mgr.site_crontab_log(site_name, hour=hour, minute=minute, save=save)
        # Backend returns bool here; the string check mirrors the other handlers.
        if isinstance(msg, str):
            return json_response(status=False, msg=msg)
        return json_response(status=True, data=msg)
||||
90
mod/base/web_conf/nginx_realip.py
Normal file
90
mod/base/web_conf/nginx_realip.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import ipaddress
|
||||
import os
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from .util import webserver, check_server_config, write_file, read_file, service_reload
|
||||
|
||||
|
||||
class NginxRealIP:
    """Manage nginx real-ip (client address restoration behind a proxy).

    Configuration is stored per site in
    /www/server/panel/vhost/nginx/extension/<site>/proxy_real_ip.conf;
    disabling simply removes that file.
    """

    def __init__(self):
        pass

    def set_real_ip(self, site_name: str, ip_header: str, allow_ip: List[str], recursive: bool = False) -> Optional[str]:
        """Enable real-ip for *site_name* using *ip_header* and trusted *allow_ip*.

        Returns an error string on failure, None on success (after reload).
        """
        if not webserver() == 'nginx':
            return "only nginx web server is supported"
        res = check_server_config()
        if res:
            # BUG FIX: was '"...ERROR: %s".format(res)' — %-placeholder with
            # .format() never substituted, so the message always showed "%s"
            # (and "conifg" was misspelled).
            return "config error, please fix it first. ERROR: {}".format(res)
        self._set_ext_real_ip_file(site_name, status=True, ip_header=ip_header, allow_ip=allow_ip, recursive=recursive)
        res = check_server_config()
        if res:
            # New config broke nginx: remove it again and report the error.
            self._set_ext_real_ip_file(site_name, status=False, ip_header="", allow_ip=[], recursive=False)
            return "配置失败:{}".format(res)
        else:
            service_reload()

    def close_real_ip(self, site_name: str):
        """Disable real-ip for the site and reload nginx."""
        self._set_ext_real_ip_file(site_name, status=False, ip_header="", allow_ip=[], recursive=False)
        service_reload()
        return

    def get_real_ip(self, site_name: str) -> Dict[str, Any]:
        """Return the site's current real-ip settings (defaults when disabled)."""
        return self._read_ext_real_ip_file(site_name)

    def _set_ext_real_ip_file(self, site_name: str, status: bool, ip_header: str, allow_ip: List[str],
                              recursive: bool = False):
        """Write (status=True) or remove (status=False) the per-site extension file."""
        ext_file = "/www/server/panel/vhost/nginx/extension/{}/proxy_real_ip.conf".format(site_name)
        if not status:
            if os.path.exists(ext_file):
                os.remove(ext_file)
            return

        if not os.path.exists(os.path.dirname(ext_file)):
            os.makedirs(os.path.dirname(ext_file))
        real_ip_from = ""
        for ip in allow_ip:
            tmp_ip = self.formatted_ip(ip)
            if tmp_ip:
                # BUG FIX: the raw user string was written instead of the
                # validated/normalised tmp_ip returned by formatted_ip.
                real_ip_from += "    set_real_ip_from {};\n".format(tmp_ip)
        if not real_ip_from:
            # No valid entries supplied: trust everything.
            real_ip_from = "set_real_ip_from 0.0.0.0/0;\nset_real_ip_from ::/0;\n"
        conf_data = "{}real_ip_header {};\nreal_ip_recursive {};\n".format(
            real_ip_from, ip_header, "on" if recursive else "off"
        )
        write_file(ext_file, conf_data)

    @staticmethod
    def _read_ext_real_ip_file(site_name: str) -> Dict[str, Any]:
        """Parse the site's extension file back into a settings dict.

        Missing file (feature disabled) yields the defaults.
        """
        ret = {
            "ip_header": "",
            "allow_ip": [],
            "recursive": False
        }
        ext_file = "/www/server/panel/vhost/nginx/extension/{}/proxy_real_ip.conf".format(site_name)
        if os.path.exists(ext_file):
            data = read_file(ext_file)
            if data:
                for line in data.split("\n"):
                    line = line.strip("; ")
                    if line.startswith("real_ip_header"):
                        ret["ip_header"] = line.split()[1]
                    elif line.startswith("set_real_ip_from"):
                        ret["allow_ip"].append(line.split()[1])
                    elif line.startswith("real_ip_recursive"):
                        ret["recursive"] = True if line.split()[1] == "on" else False
        return ret

    @staticmethod
    def formatted_ip(ip: str) -> str:
        """Return the compressed form of an IP address or network, "" if invalid.

        (Bare excepts narrowed to ValueError, which is what ipaddress raises
        for malformed input.)
        """
        try:
            return ipaddress.ip_address(ip).compressed
        except ValueError:
            pass
        try:
            return ipaddress.ip_network(ip).compressed
        except ValueError:
            return ""
|
||||
639
mod/base/web_conf/proxy.py
Normal file
639
mod/base/web_conf/proxy.py
Normal file
@@ -0,0 +1,639 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import shutil
|
||||
import warnings
|
||||
import sys
|
||||
import traceback
|
||||
from hashlib import md5
|
||||
from typing import Tuple, Optional, Union, List, Dict, Any
|
||||
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload, get_log_path, pre_re_key
|
||||
from mod.base import json_response
|
||||
|
||||
# Presumably some regex string literals below trigger SyntaxWarning for
# escape sequences on newer Pythons — silence them module-wide. TODO confirm.
warnings.filterwarnings("ignore", category=SyntaxWarning)
|
||||
|
||||
class RealProxy:
    """Backend reverse-proxy manager for nginx/apache vhosts."""
    # Panel installation root.
    panel_path = "/www/server/panel"
    # JSON file persisting every proxy definition managed by this module.
    _proxy_conf_file = "{}/data/mod_proxy_file.conf".format(panel_path)
|
||||
|
||||
    def __init__(self, config_prefix: str):
        """Bind the vhost filename prefix; config loading is deferred."""
        self.config_prefix: str = config_prefix
        # Lazily populated by the `config` property.
        self._config: Optional[List[dict]] = None
|
||||
|
||||
# {
|
||||
# "proxyname": "yyy",
|
||||
# "sitename": "www.12345test.com",
|
||||
# "proxydir": "/",
|
||||
# "proxysite": "http://www.baidu.com",
|
||||
# "todomain": "www.baidu.com",
|
||||
# "type": 0,
|
||||
# "cache": 0,
|
||||
# "subfilter": [
|
||||
# {"sub1": "", "sub2": ""},
|
||||
# {"sub1": "", "sub2": ""},
|
||||
# {"sub1": "", "sub2": ""}],
|
||||
# "advanced": 1,
|
||||
# "cachetime": 1
|
||||
# }
|
||||
|
||||
@property
|
||||
def config(self) -> List[dict]:
|
||||
if self._config is None:
|
||||
try:
|
||||
self._config = json.loads(read_file(self._proxy_conf_file))
|
||||
except (json.JSONDecodeError, TypeError, ValueError):
|
||||
self._config = []
|
||||
return self._config
|
||||
|
||||
def save_config(self):
|
||||
if self._config is not None:
|
||||
write_file(self._proxy_conf_file, json.dumps(self._config))
|
||||
|
||||
# 检查代理是否存在
|
||||
def _check_even(self, proxy_conf: dict, is_modify) -> bool:
|
||||
for i in self.config:
|
||||
if i["sitename"] == proxy_conf["sitename"]:
|
||||
if is_modify is False:
|
||||
if i["proxydir"] == proxy_conf["proxydir"] or i["proxyname"] == proxy_conf["proxyname"]:
|
||||
return True
|
||||
else:
|
||||
if i["proxyname"] != proxy_conf["proxyname"] and i["proxydir"] == proxy_conf["proxydir"]:
|
||||
return True
|
||||
|
||||
# 检测全局代理和目录代理是否同时存在
|
||||
def _check_proxy_even(self, proxy_conf: dict, is_modify) -> bool:
|
||||
n = 0
|
||||
if is_modify:
|
||||
for i in self.config:
|
||||
if i["sitename"] == proxy_conf["sitename"]:
|
||||
n += 1
|
||||
if n == 1:
|
||||
return False
|
||||
for i in self.config:
|
||||
if i["sitename"] == proxy_conf["sitename"]:
|
||||
if i["advanced"] != proxy_conf["advanced"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def check_args(self, get, is_modify=False) -> Union[str, dict]:
|
||||
if check_server_config():
|
||||
return '配置文件出错请先排查配置'
|
||||
data = {
|
||||
"advanced": 0,
|
||||
"proxydir": "",
|
||||
"cache": 0,
|
||||
"cachetime": 1,
|
||||
"type": 0,
|
||||
"todomain": "$host",
|
||||
}
|
||||
try:
|
||||
data["proxyname"] = get.proxyname.strip()
|
||||
data["sitename"] = get.sitename.strip()
|
||||
if "proxydir" in get:
|
||||
data["proxydir"] = get.proxydir.strip()
|
||||
data["proxysite"] = get.proxysite.strip()
|
||||
if "todomain" in get:
|
||||
data["todomain"] = get.todomain.strip()
|
||||
data["type"] = int(get.type.strip())
|
||||
data["cache"] = int(get.cache.strip())
|
||||
data["subfilter"] = json.loads(get.subfilter.strip())
|
||||
data["advanced"] = int(get.advanced.strip())
|
||||
data["cachetime"] = int(get.cachetime.strip())
|
||||
except:
|
||||
return "Parameter error"
|
||||
|
||||
if is_modify is False:
|
||||
if len(data["proxyname"]) < 3 or len(data["proxyname"]) > 40:
|
||||
return '名称必须大于3小于40个字符串'
|
||||
|
||||
if self._check_even(data, is_modify):
|
||||
return '指定反向代理名称或代理文件夹已存在'
|
||||
# 判断代理,只能有全局代理或目录代理
|
||||
if self._check_proxy_even(data, is_modify):
|
||||
return '不能同时设置目录代理和全局代理'
|
||||
# 判断cachetime类型
|
||||
if data["cachetime"] < 1:
|
||||
return "缓存时间不能为空"
|
||||
|
||||
rep = r"http(s)?\:\/\/"
|
||||
rep_re_key = re.compile(r'''[?=\[\])(*&^%$#@!~`{}><,'"\\]+''')
|
||||
# 检测代理目录格式
|
||||
if rep_re_key.search(data["proxydir"]):
|
||||
return "The agency directory cannot contain the following special symbols ?,=,[,],),(,*,&,^,%,$,#,@,!,~,`,{,},>,<,\,',\"]"
|
||||
# 检测发送域名格式
|
||||
if get.todomain:
|
||||
if re.search("[}{#;\"\']+", data["todomain"]):
|
||||
return '发送域名格式错误:' + data["todomain"] + '<br>不能存在以下特殊字符【 } { # ; \" \' 】 '
|
||||
if webserver() != 'openlitespeed' and not get.todomain:
|
||||
data["todomain"] = "$host"
|
||||
|
||||
# 检测目标URL格式
|
||||
if not re.match(rep, data["proxysite"]):
|
||||
return '域名格式错误 ' + data["proxysite"]
|
||||
if rep_re_key.search(data["proxysite"]):
|
||||
return "目标URL不能有以下特殊符号 ?,=,[,],),(,*,&,^,%,$,#,@,!,~,`,{,},>,<,\\,',\"]"
|
||||
|
||||
if not data["proxysite"].split('//')[-1]:
|
||||
return '目标URL不能为[http://或https://],请填写完整URL,如:https://www.yakpanel.com'
|
||||
|
||||
for s in data["subfilter"]:
|
||||
if not s["sub1"]:
|
||||
continue
|
||||
if not s["sub1"] and s["sub2"]:
|
||||
return '请输入被替换的内容'
|
||||
elif s["sub1"] == s["sub2"]:
|
||||
return '替换内容与被替换内容不能一致'
|
||||
return data
|
||||
|
||||
def check_location(self, site_name, proxy_dir: str) -> Optional[str]:
|
||||
# 伪静态文件路径
|
||||
rewrite_conf_path = "%s/vhost/rewrite/%s%s.conf" % (self.panel_path, self.config_prefix, site_name)
|
||||
# vhost文件
|
||||
vhost_path = "%s/vhost/nginx/%s%s.conf" % (self.panel_path, self.config_prefix, site_name)
|
||||
|
||||
rep_location = re.compile(r"location\s+(\^~\s*)?%s\s*{" % proxy_dir)
|
||||
|
||||
for i in [rewrite_conf_path, vhost_path]:
|
||||
conf = read_file(i)
|
||||
if isinstance(conf, str) and rep_location.search(conf):
|
||||
return '伪静态/站点主配置文件已经存在全局反向代理'
|
||||
|
||||
    @staticmethod
    def _set_nginx_proxy_base():
        """Ensure nginx's global proxy.conf exists and is included by nginx.conf.

        Creates the cache/temp-path defaults on first use, then patches
        nginx.conf to include proxy.conf right after "include mime.types;".
        """
        file = "/www/server/nginx/conf/proxy.conf"
        setup_path = "/www/server"
        if not os.path.exists(file):
            conf = '''proxy_temp_path %s/nginx/proxy_temp_dir;
proxy_cache_path %s/nginx/proxy_cache_dir levels=1:2 keys_zone=cache_one:10m inactive=1d max_size=5g;
client_body_buffer_size 512k;
proxy_connect_timeout 60;
proxy_read_timeout 60;
proxy_send_timeout 60;
proxy_buffer_size 32k;
proxy_buffers 4 64k;
proxy_busy_buffers_size 128k;
proxy_temp_file_write_size 128k;
proxy_next_upstream error timeout invalid_header http_500 http_503 http_404;
proxy_cache cache_one;''' % (setup_path, setup_path)
            write_file(file, conf)

        # NOTE(review): this reads/patches proxy.conf itself, looking for the
        # "include proxy.conf;" reference and the "include mime.types;" anchor
        # — presumably `file` was meant to be nginx.conf here; confirm.
        conf = read_file(file)
        if conf and conf.find('include proxy.conf;') == -1:
            rep = r"include\s+mime.types;"
            conf = re.sub(rep, "include mime.types;\n\tinclude proxy.conf;", conf)
            write_file(file, conf)
|
||||
|
||||
    def set_nginx_proxy_include(self, site_name) -> Optional[str]:
        """Wire the site's nginx vhost to include its per-site proxy directory.

        Idempotent: returns early when the include is already present.
        Returns an error string when no insertion anchor is found; None/implicit
        None otherwise. Every write is verified with check_server_config and
        rolled back on failure.
        """
        self._set_nginx_proxy_base()
        ng_file = "{}/vhost/nginx/{}{}.conf".format(self.panel_path, self.config_prefix, site_name)
        ng_conf = read_file(ng_file)
        if not ng_conf:
            return "配置文件丢失"
        # Cache-purge location served alongside the proxy rules.
        cure_cache = '''location ~ /purge(/.*) {
    proxy_cache_purge cache_one $host$1$is_args$args;
    #access_log /www/wwwlogs/%s_purge_cache.log;
}''' % site_name

        proxy_dir = "{}/vhost/nginx/proxy/{}".format(self.panel_path, site_name)
        if not os.path.isdir(os.path.dirname(proxy_dir)):
            os.makedirs(os.path.dirname(proxy_dir))

        if not os.path.isdir(proxy_dir):
            os.makedirs(proxy_dir)

        include_conf = (
            "\n    #清理缓存规则\n"
            "    %s\n"
            "    #引用反向代理规则,注释后配置的反向代理将无效\n"
            "    include /www/server/panel/vhost/nginx/proxy/%s/*.conf;\n"
        ) % (cure_cache, site_name)

        # Already wired up? Nothing to do.
        rep_include = re.compile(r"\s*include.*/proxy/.*/\*\.conf\s*;", re.M)
        if rep_include.search(ng_conf):
            return
        # Candidate anchors, in priority order; the bool selects insert-before
        # (True) vs insert-after (False) the anchor match.
        rep_list = [
            (re.compile(r"\s*include\s+.*/rewrite/.*\.conf;(\s*#REWRITE-END)?"), False),  # after rewrite include
            (re.compile(r"#PHP-INFO-END"), False),  # after the PHP block
            (re.compile(r"\sinclude +.*/ip-restrict/.*\*\.conf;", re.M), False),  # after IP restrictions
            (re.compile(r"#SECURITY-END"), False),  # after the security/referer block
        ]

        # Insert include_conf at the position determined by the anchor regex,
        # write it out, and roll back if nginx rejects the result.
        def set_by_rep_idx(tmp_rep: re.Pattern, use_start: bool) -> bool:
            tmp_res = tmp_rep.search(ng_conf)
            if not tmp_res:
                return False
            if use_start:
                new_conf = ng_conf[:tmp_res.start()] + include_conf + tmp_res.group() + ng_conf[tmp_res.end():]
            else:
                new_conf = ng_conf[:tmp_res.start()] + tmp_res.group() + include_conf + ng_conf[tmp_res.end():]

            write_file(ng_file, new_conf)
            if webserver() == "nginx" and check_server_config() is not None:
                write_file(ng_file, ng_conf)
                return False
            return True
        for r, s in rep_list:
            if set_by_rep_idx(r, s):
                break
        else:
            # No anchor matched (or every insertion was rolled back).
            return "无法在配置文件中定位到需要添加的项目"

        now_ng_conf = read_file(ng_file)
        # Strip the static-file cache location so proxied responses aren't
        # shadowed by it; verified and rolled back like the insertion above.
        rep_location = re.compile(r"location\s+~\s+\.\*\\\.[^{]*{(\s*(expires|error_log|access_log).*;){3}\s*}\s*")

        new__ng_conf = rep_location.sub("", now_ng_conf)
        write_file(ng_file, new__ng_conf)
        if webserver() == "nginx" and check_server_config() is not None:
            write_file(ng_file, now_ng_conf)
|
||||
|
||||
    def un_set_nginx_proxy_include(self, site_name) -> Optional[str]:
        """Remove the proxy include (and its purge-cache block) from the
        site's nginx vhost.

        Returns an error string when the config is missing or nginx rejects
        the edited file (which is then rolled back); None on success.
        """
        ng_file = "{}/vhost/nginx/{}{}.conf".format(self.panel_path, self.config_prefix, site_name)
        ng_conf = read_file(ng_file)
        if not ng_conf:
            return "配置文件丢失"
        # Pieces inserted by set_nginx_proxy_include, each removed once.
        rep_list = [
            re.compile(r"\s*#清理缓存规则\n"),
            re.compile(r"\s*location\s+~\s+/purge[^{]*{[^}]*}\s*"),
            re.compile(r"(#[^#\n]*\n)?\s*include.*/proxy/.*/\*\.conf\s*;[^\n]*\n"),
        ]
        new_conf = ng_conf
        for rep in rep_list:
            new_conf = rep.sub("", new_conf, 1)

        write_file(ng_file, new_conf)
        if webserver() == "nginx" and check_server_config() is not None:
            # nginx rejected the stripped config: restore the original.
            write_file(ng_file, ng_conf)
            return "配置移除失败"
|
||||
|
||||
def set_apache_proxy_include(self, site_name):
|
||||
ap_file = "{}/vhost/apache/{}{}.conf".format(self.panel_path, self.config_prefix, site_name)
|
||||
ap_conf = read_file(ap_file)
|
||||
if not ap_conf:
|
||||
return "配置文件丢失"
|
||||
proxy_dir = "{}/vhost/apache/proxy/{}".format(self.panel_path, site_name)
|
||||
|
||||
if not os.path.isdir(os.path.dirname(proxy_dir)):
|
||||
os.makedirs(os.path.dirname(proxy_dir))
|
||||
if not os.path.isdir(proxy_dir):
|
||||
os.makedirs(proxy_dir)
|
||||
|
||||
include_conf = (
|
||||
" #引用反向代理规则,注释后配置的反向代理将无效\n"
|
||||
" IncludeOptional /www/server/panel/vhost/apache/proxy/%s/*.conf\n"
|
||||
) % site_name
|
||||
|
||||
rep_include = re.compile(r"\s*IncludeOptional.*/proxy/.*/\*\.conf\s*;", re.M)
|
||||
if rep_include.search(ap_conf):
|
||||
return
|
||||
|
||||
# 添加 引入
|
||||
rep_list = [
|
||||
(re.compile(r"<FilesMatch \\\.php\$>(.|\n)*?</FilesMatch>[^\n]*\n"), False), # 匹配PHP配置, 加到php配置下
|
||||
(re.compile(r"CustomLog[^\n]*\n"), False), # 匹配Referer配置, 加其下
|
||||
]
|
||||
|
||||
# 使用正则匹配确定插入位置
|
||||
def set_by_rep_idx(rep: re.Pattern, use_start: bool) -> bool:
|
||||
new_conf_list = []
|
||||
last_idx = 0
|
||||
for tmp in rep.finditer(ap_conf):
|
||||
new_conf_list.append(ap_conf[last_idx:tmp.start()])
|
||||
if use_start:
|
||||
new_conf_list.append(include_conf)
|
||||
new_conf_list.append(tmp.group())
|
||||
else:
|
||||
new_conf_list.append(tmp.group())
|
||||
new_conf_list.append(include_conf)
|
||||
last_idx = tmp.end()
|
||||
if last_idx == 0:
|
||||
return False
|
||||
|
||||
new_conf_list.append(ap_conf[last_idx:])
|
||||
|
||||
new_conf = "".join(new_conf_list)
|
||||
write_file(ap_file, new_conf)
|
||||
if webserver() == "apache" and check_server_config() is not None:
|
||||
write_file(ap_file, ap_conf)
|
||||
return False
|
||||
return True
|
||||
|
||||
for r, s in rep_list:
|
||||
if set_by_rep_idx(r, s):
|
||||
break
|
||||
else:
|
||||
return "无法在配置文件中定位到需要添加的项目"
|
||||
|
||||
def un_set_apache_proxy_include(self, site_name) -> Optional[str]:
|
||||
ng_file = "{}/vhost/apache/{}{}.conf".format(self.panel_path, self.config_prefix, site_name)
|
||||
ap_conf = read_file(ng_file)
|
||||
if not ap_conf:
|
||||
return "配置文件丢失"
|
||||
rep_include = re.compile(r"(#.*\n)?\s*IncludeOptiona.*/proxy/.*/\*\.conf\s*[^\n]\n")
|
||||
|
||||
new_conf = rep_include.sub("", ap_conf)
|
||||
|
||||
write_file(ng_file, new_conf)
|
||||
if webserver() == "apache" and check_server_config() is not None:
|
||||
write_file(ng_file, ap_conf)
|
||||
return "配置移除失败"
|
||||
|
||||
    def set_nginx_proxy(self, proxy_data: dict) -> Optional[str]:
        """Render and write the per-rule nginx reverse-proxy conf file.

        ``proxy_data`` is the validated dict produced by ``check_args``.
        When ``proxy_data["type"] == 0`` the rule is disabled and its conf
        file is deleted instead.  Returns an error string on failure,
        None on success.
        """
        proxy_name_md5 = self._calc_proxy_name_md5(proxy_data["proxyname"])
        ng_proxy_file = "%s/vhost/nginx/proxy/%s/%s_%s.conf" % (
            self.panel_path, proxy_data["sitename"], proxy_name_md5, proxy_data["sitename"])
        if proxy_data["type"] == 0:
            # Disabled rule: drop the conf file and stop.
            if os.path.isfile(ng_proxy_file):
                os.remove(ng_proxy_file)
            return

        # Unique suffix so the cache-control variables of different rules in
        # the same server block cannot collide.
        random_string = self._random_string()

        # Websocket upgrade map, loaded before all vhosts ("0." prefix).
        map_file = "{}/vhost/nginx/0.websocket.conf".format(self.panel_path)
        if not os.path.exists(map_file):
            write_file(map_file, '''
map $http_upgrade $connection_upgrade {
    default upgrade;
    '' close;
}''')
        # Cache-enabled snippet: static assets get a short expires.
        ng_cache = r"""
    if ( $uri ~* "\.(gif|png|jpg|css|js|woff|woff2)$" )
    {
        expires 1m;
    }
    proxy_ignore_headers Set-Cookie Cache-Control expires;
    proxy_cache cache_one;
    proxy_cache_key $host$uri$is_args$args;
    proxy_cache_valid 200 304 301 302 %sm;""" % proxy_data["cachetime"]
        # Cache-disabled snippet: flag static files, no-cache everything else.
        no_cache = r"""
    set $static_file%s 0;
    if ( $uri ~* "\.(gif|png|jpg|css|js|woff|woff2)$" )
    {
        set $static_file%s 1;
        expires 1m;
    }
    if ( $static_file%s = 0 )
    {
        add_header Cache-Control no-cache;
    }""" % (random_string, random_string, random_string)

        # Placeholders: START marker, location, proxy_pass, Host header,
        # sub_filter block, cache/no-cache block, END marker.
        ng_proxy = '''
#PROXY-START%s

location ^~ %s
{
    proxy_pass %s;
    proxy_set_header Host %s;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header REMOTE-HOST $remote_addr;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;
    proxy_http_version 1.1;
    # proxy_hide_header Upgrade;

    add_header X-Cache $upstream_cache_status;

    #Set Nginx Cache
    %s
    %s
}

#PROXY-END%s'''

        # Build sub_filter content-replacement rules (double quotes escaped).
        ng_sub_data_list = []
        for s in proxy_data["subfilter"]:
            if not s["sub1"]:
                continue
            if '"' in s["sub1"]:
                s["sub1"] = s["sub1"].replace('"', '\\"')
            if '"' in s["sub2"]:
                s["sub2"] = s["sub2"].replace('"', '\\"')
            ng_sub_data_list.append('    sub_filter "%s" "%s";' % (s["sub1"], s["sub2"]))
        if ng_sub_data_list:
            # sub_filter needs uncompressed upstream bodies.
            ng_sub_filter = '''
    proxy_set_header Accept-Encoding "";
    %s
    sub_filter_once off;''' % "\n".join(ng_sub_data_list)
        else:
            ng_sub_filter = ''

        # Normalize both the location and the upstream to a trailing slash.
        if proxy_data["proxydir"][-1] != '/':
            proxy_dir = proxy_data["proxydir"] + "/"
        else:
            proxy_dir = proxy_data["proxydir"]

        if proxy_data["proxysite"][-1] != '/':
            proxy_site = proxy_data["proxysite"] + "/"
        else:
            proxy_site = proxy_data["proxysite"]

        # Render the proxy block with the cache or no-cache snippet.
        if proxy_data["cache"] == 1:
            ng_proxy_cache = ng_proxy % (
                proxy_dir, proxy_dir, proxy_site, proxy_data["todomain"], ng_sub_filter, ng_cache, proxy_dir)
        else:
            ng_proxy_cache = ng_proxy % (
                proxy_dir, proxy_dir, proxy_site, proxy_data["todomain"], ng_sub_filter, no_cache, proxy_dir)

        write_file(ng_proxy_file, ng_proxy_cache)
        # Roll back (delete the new file) if nginx now rejects the config.
        if webserver() == "nginx" and check_server_config() is not None:
            import public
            public.print_log(check_server_config())
            os.remove(ng_proxy_file)
            return "配置添加失败"
def set_apache_proxy(self, proxy_data: dict):
|
||||
proxy_name_md5 = self._calc_proxy_name_md5(proxy_data["proxyname"])
|
||||
ap_proxy_file = "%s/vhost/apache/proxy/%s/%s_%s.conf" % (
|
||||
self.panel_path, proxy_data["sitename"], proxy_name_md5, proxy_data["sitename"])
|
||||
if proxy_data["type"] == 0:
|
||||
if os.path.isfile(ap_proxy_file):
|
||||
os.remove(ap_proxy_file)
|
||||
return
|
||||
|
||||
ap_proxy = '''#PROXY-START%s
|
||||
<IfModule mod_proxy.c>
|
||||
ProxyRequests Off
|
||||
SSLProxyEngine on
|
||||
ProxyPass %s %s/
|
||||
ProxyPassReverse %s %s/
|
||||
</IfModule>
|
||||
#PROXY-END%s''' % (proxy_data["proxydir"], proxy_data["proxydir"], proxy_data["proxysite"],
|
||||
proxy_data["proxydir"],proxy_data["proxysite"], proxy_data["proxydir"])
|
||||
write_file(ap_proxy_file, ap_proxy)
|
||||
|
||||
@staticmethod
|
||||
def _random_string() -> str:
|
||||
from uuid import uuid4
|
||||
return "bt" + uuid4().hex[:6]
|
||||
|
||||
@staticmethod
|
||||
def _calc_proxy_name_md5(data: str) -> str:
|
||||
m = md5()
|
||||
m.update(data.encode("utf-8"))
|
||||
return m.hexdigest()
|
||||
|
||||
    def create_proxy(self, get) -> Optional[str]:
        """Create a reverse-proxy rule from request args.

        Writes confs for both servers, persists the record and reloads.
        Returns an error string on failure, None on success.
        """
        proxy_data = self.check_args(get, is_modify=False)
        if isinstance(proxy_data, str):
            return proxy_data
        # The location can only clash on the active nginx server.
        if webserver() == "nginx":
            error_msg = self.check_location(proxy_data["sitename"], proxy_data["proxydir"])
            if error_msg:
                return error_msg

        # Include failures are only fatal for the currently active server.
        error_msg = self.set_nginx_proxy_include(proxy_data["sitename"])
        if webserver() == "nginx" and error_msg:
            return error_msg
        error_msg = self.set_apache_proxy_include(proxy_data["sitename"])
        if webserver() == "apache" and error_msg:
            return error_msg
        error_msg = self.set_nginx_proxy(proxy_data)
        if webserver() == "nginx" and error_msg:
            return error_msg
        self.set_apache_proxy(proxy_data)
        self.config.append(proxy_data)
        self.save_config()
        service_reload()
    def modify_proxy(self, get) -> Optional[str]:
        """Modify an existing reverse-proxy rule.

        Rewrites the conf files and replaces the stored record.
        Returns an error string on failure, None on success.
        """
        proxy_data = self.check_args(get, is_modify=True)
        if isinstance(proxy_data, str):
            return proxy_data
        idx = None

        # Locate the existing record by (proxyname, sitename).
        for index, i in enumerate(self.config):
            if i["proxyname"] == proxy_data["proxyname"] and i["sitename"] == proxy_data["sitename"]:
                idx = index
                break
        if idx is None:
            return "未找到该名称的反向代理配置"

        # Only re-check the location when the proxied dir actually changed.
        if webserver() == "nginx" and proxy_data["proxydir"] != self.config[idx]["proxydir"]:
            error_msg = self.check_location(proxy_data["sitename"], proxy_data["proxydir"])
            if error_msg:
                return error_msg

        # Include failures are only fatal for the currently active server.
        error_msg = self.set_nginx_proxy_include(proxy_data["sitename"])
        if webserver() == "nginx" and error_msg:
            return error_msg
        error_msg = self.set_apache_proxy_include(proxy_data["sitename"])
        if webserver() == "apache" and error_msg:
            return error_msg
        error_msg = self.set_nginx_proxy(proxy_data)
        if webserver() == "nginx" and error_msg:
            return error_msg
        self.set_apache_proxy(proxy_data)
        self.config[idx] = proxy_data
        self.save_config()
        service_reload()
    def remove_proxy(self, site_name, proxy_name, multiple=False) -> Optional[str]:
        """Delete one proxy rule and its conf files.

        ``multiple`` suppresses the server reload so batch callers can reload
        once at the end.  Returns an error string on failure, None on success.
        """
        idx = None
        # site_other: the site still has other proxy rules, so the include
        # directives must be kept.
        site_other = False
        for index, i in enumerate(self.config):
            if i["proxyname"] == proxy_name and i["sitename"] == site_name:
                idx = index
            if i["sitename"] == site_name and i["proxyname"] != proxy_name:
                site_other = True

        if idx is None:
            return "未找到该名称的反向代理配置"

        proxy_name_md5 = self._calc_proxy_name_md5(proxy_name)
        ng_proxy_file = "%s/vhost/nginx/proxy/%s/%s_%s.conf" % (
            self.panel_path, site_name, proxy_name_md5, site_name)
        ap_proxy_file = "%s/vhost/apache/proxy/%s/%s_%s.conf" % (
            self.panel_path, site_name, proxy_name_md5, site_name)
        if os.path.isfile(ap_proxy_file):
            os.remove(ap_proxy_file)

        if os.path.isfile(ng_proxy_file):
            os.remove(ng_proxy_file)
        del self.config[idx]
        self.save_config()
        # Last rule of the site gone: strip the include directives too.
        if not site_other:
            self.un_set_apache_proxy_include(site_name)
            self.un_set_nginx_proxy_include(site_name)
        if not multiple:
            service_reload()
def get_proxy_list(self, get) -> Union[str, List[Dict[str, Any]]]:
|
||||
try:
|
||||
site_name = get.sitename.strip()
|
||||
except (AttributeError, ValueError, TypeError):
|
||||
return "Parameter error"
|
||||
proxy_list = []
|
||||
web_server = webserver()
|
||||
for conf in self.config:
|
||||
if conf["sitename"] != site_name:
|
||||
continue
|
||||
md5_name = self._calc_proxy_name_md5(conf['proxyname'])
|
||||
conf["proxy_conf_file"] = "%s/vhost/%s/proxy/%s/%s_%s.conf" % (
|
||||
self.panel_path, web_server, site_name, md5_name, site_name)
|
||||
proxy_list.append(conf)
|
||||
return proxy_list
|
||||
|
||||
def remove_site_proxy_info(self, site_name):
|
||||
idx_list = []
|
||||
for index, i in enumerate(self.config):
|
||||
if i["sitename"] == site_name:
|
||||
idx_list.append(index)
|
||||
|
||||
for idx in idx_list[::-1]:
|
||||
del self.config[idx]
|
||||
|
||||
self.save_config()
|
||||
|
||||
ng_proxy_dir = "%s/vhost/nginx/proxy/%s" % (self.panel_path, site_name)
|
||||
ap_proxy_dir = "%s/vhost/apache/proxy/%s" % (self.panel_path, site_name)
|
||||
|
||||
if os.path.isdir(ng_proxy_dir):
|
||||
shutil.rmtree(ng_proxy_dir)
|
||||
|
||||
if os.path.isdir(ap_proxy_dir):
|
||||
shutil.rmtree(ap_proxy_dir)
|
||||
|
||||
|
||||
class Proxy(object):
    """JSON-response facade over RealProxy: every method delegates to the
    underlying implementation and wraps the outcome with json_response()."""

    def __init__(self, config_prefix=""):
        self.config_prefix = config_prefix
        self._p = RealProxy(self.config_prefix)

    def create_proxy(self, get):
        """Create a proxy rule; a truthy result from RealProxy is an error."""
        error = self._p.create_proxy(get)
        if error:
            return json_response(status=False, msg=error)
        return json_response(status=True, msg="Successfully added")

    def modify_proxy(self, get):
        """Modify an existing proxy rule."""
        error = self._p.modify_proxy(get)
        if error:
            return json_response(status=False, msg=error)
        return json_response(status=True, msg="修改成功")

    def remove_proxy(self, get):
        """Delete the proxy rule identified by sitename + proxyname."""
        try:
            site_name = get.sitename.strip()
            proxy_name = get.proxyname.strip()
        except:
            return json_response(status=False, msg="Parameter error")
        error = self._p.remove_proxy(site_name, proxy_name)
        if error:
            return json_response(status=False, msg=error)
        return json_response(status=True, msg="Successfully delete")

    def get_proxy_list(self, get):
        """List the proxy rules of one site."""
        data = self._p.get_proxy_list(get)
        if isinstance(data, str):
            return json_response(status=False, msg=data)
        return json_response(status=True, data=data)
737
mod/base/web_conf/redirect.py
Normal file
737
mod/base/web_conf/redirect.py
Normal file
@@ -0,0 +1,737 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import hashlib
|
||||
import shutil
|
||||
import time
|
||||
from typing import Tuple, Optional, Union, Dict, List, Any
|
||||
from urllib import parse
|
||||
from itertools import product
|
||||
from .util import webserver, check_server_config, write_file, read_file, DB, service_reload
|
||||
from mod.base import json_response
|
||||
|
||||
|
||||
class RealRedirect:
|
||||
panel_path = "/www/server/panel"
|
||||
_redirect_conf_file = "{}/data/redirect.conf".format(panel_path)
|
||||
|
||||
_ng_redirect_domain_format = """
|
||||
if ($host ~ '^%s'){
|
||||
return %s %s%s;
|
||||
}
|
||||
"""
|
||||
_ng_redirect_path_format = """
|
||||
rewrite ^%s(.*) %s%s %s;
|
||||
"""
|
||||
_ap_redirect_domain_format = """
|
||||
<IfModule mod_rewrite.c>
|
||||
RewriteEngine on
|
||||
RewriteCond %%{HTTP_HOST} ^%s [NC]
|
||||
RewriteRule ^(.*) %s%s [L,R=%s]
|
||||
</IfModule>
|
||||
"""
|
||||
_ap_redirect_path_format = """
|
||||
<IfModule mod_rewrite.c>
|
||||
RewriteEngine on
|
||||
RewriteRule ^%s(.*) %s%s [L,R=%s]
|
||||
</IfModule>
|
||||
"""
|
||||
|
||||
def __init__(self, config_prefix: str):
|
||||
self._config: Optional[List[Dict[str, Union[str, int]]]] = None
|
||||
self.config_prefix = config_prefix
|
||||
self._webserver = None
|
||||
|
||||
@property
|
||||
def webserver(self) -> str:
|
||||
if self._webserver is not None:
|
||||
return self._webserver
|
||||
self._webserver = webserver()
|
||||
return self._webserver
|
||||
|
||||
@property
|
||||
def config(self) -> List[Dict[str, Union[str, int, List]]]:
|
||||
if self._config is not None:
|
||||
return self._config
|
||||
try:
|
||||
self._config = json.loads(read_file(self._redirect_conf_file))
|
||||
except (json.JSONDecodeError, TypeError, ValueError):
|
||||
self._config = []
|
||||
if not isinstance(self._config, list):
|
||||
self._config = []
|
||||
return self._config
|
||||
|
||||
def save_config(self):
|
||||
if self._config is not None:
|
||||
return write_file(self._redirect_conf_file, json.dumps(self._config))
|
||||
|
||||
def _check_redirect_domain_exist(self, site_name,
|
||||
redirect_domain: list,
|
||||
redirect_name: str = None,
|
||||
is_modify=False) -> Optional[List[str]]:
|
||||
res = set()
|
||||
redirect_domain_set = set(redirect_domain)
|
||||
for c in self.config:
|
||||
if c["sitename"] != site_name:
|
||||
continue
|
||||
if is_modify:
|
||||
if c["redirectname"] != redirect_name:
|
||||
res |= set(c["redirectdomain"]) & redirect_domain_set
|
||||
else:
|
||||
res |= set(c["redirectdomain"]) & redirect_domain_set
|
||||
return list(res) if res else None
|
||||
|
||||
def _check_redirect_path_exist(self, site_name,
|
||||
redirect_path: str,
|
||||
redirect_name: str = None) -> bool:
|
||||
for c in self.config:
|
||||
if c["sitename"] == site_name:
|
||||
if c["redirectname"] != redirect_name and c["redirectpath"] == redirect_path:
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def _parse_url_domain(url: str):
|
||||
return parse.urlparse(url).netloc
|
||||
|
||||
@staticmethod
|
||||
def _parse_url_path(url: str):
|
||||
return parse.urlparse(url).path
|
||||
|
||||
# 计算name md5
|
||||
@staticmethod
|
||||
def _calc_redirect_name_md5(redirect_name) -> str:
|
||||
md5 = hashlib.md5()
|
||||
md5.update(redirect_name.encode('utf-8'))
|
||||
return md5.hexdigest()
|
||||
|
||||
def _check_redirect(self, site_name, redirect_name, is_error=False):
|
||||
for i in self.config:
|
||||
if i["sitename"] != site_name:
|
||||
continue
|
||||
if is_error and "errorpage" in i and i["errorpage"] in [1, '1']:
|
||||
return i
|
||||
if i["redirectname"] == redirect_name:
|
||||
return i
|
||||
return None
|
||||
|
||||
    # Validation shared by create_redirect and modify_redirect.
    def _check_redirect_args(self, get, is_modify=False) -> Union[str, Dict]:
        """Validate request args and normalize them into a redirect dict.

        Returns the normalized record on success, or an error string.
        """
        if check_server_config() is not None:
            return '配置文件出错请先排查配置'

        try:
            site_name = get.sitename.strip()
            redirect_path = get.redirectpath.strip()
            redirect_type = get.redirecttype.strip()
            domain_or_path = get.domainorpath.strip()
            hold_path = int(get.holdpath)

            # Optional fields with their defaults.
            to_url = ""
            to_path = ""
            error_page = 0
            redirect_domain = []
            redirect_name = ""
            status_type = 1

            if "redirectname" in get and get.redirectname.strip():
                redirect_name = get.redirectname.strip()
            if "tourl" in get:
                to_url = get.tourl.strip()
            if "topath" in get:
                to_path = get.topath.strip()
            if "redirectdomain" in get:
                redirect_domain = json.loads(get.redirectdomain.strip())
            if "type" in get:
                status_type = int(get.type)
            if "errorpage" in get:
                error_page = int(get.errorpage)
        except (AttributeError, ValueError):
            return '参数错误'

        if not is_modify:
            if not redirect_name:
                return "Parameter error, configuration name cannot be empty"
            # Name length / duplication checks only apply on creation.
            if not (3 <= len(redirect_name) < 15):
                return '名称必须大于2小于15个字符串'

            if self._check_redirect(site_name, redirect_name, error_page == 1):
                return '指定重定向名称已存在'

        site_info = DB('sites').where("name=?", (site_name,)).find()
        if not isinstance(site_info, dict):
            return "站点信息查询错误"
        else:
            site_name = site_info["name"]

        # Validate the target URL format.
        rep = r"http(s)?\:\/\/([a-zA-Z0-9][-a-zA-Z0-9]{0,62}\.)+([a-zA-Z0-9][a-zA-Z0-9]{0,62})+.?"
        if to_url and not re.match(rep, to_url):
            return '目标URL格式不对【%s】' % to_url

        # Checks for non-404-page redirects.
        if error_page != 1:
            # Domain-based redirect.
            if domain_or_path == "domain":
                if not redirect_domain:
                    return '请选择重定向域名'
                # The chosen domains must not be claimed by another rule.
                repeat_domain = self._check_redirect_domain_exist(site_name, redirect_domain, redirect_name, is_modify)
                if repeat_domain:
                    return '重定向域名重复 %s' % repeat_domain

                # The target host must differ from every redirected domain,
                # or the redirect would loop.
                tu = self._parse_url_domain(to_url)
                for d in redirect_domain:
                    if d == tu:
                        return '域名 "%s" 和目标域名一致请取消选择' % d
            # Path-based redirect.
            else:
                if not redirect_path:
                    return '请输入重定向路径'
                if redirect_path[0] != "/":
                    return "路径格式不正确,格式为/xxx"
                # The path must not be claimed by another rule.
                if self._check_redirect_path_exist(site_name, redirect_path, redirect_name):
                    return '重定向路径重复 %s' % redirect_path

                # Target path starting with the redirected path loops forever.
                to_url_path = self._parse_url_path(to_url)
                if to_url_path.startswith(redirect_path):
                    return '目标URL[%s]以被重定向的路径[%s]开头,会导致循环匹配' % (to_url_path, redirect_path)
        # Checks for 404-page redirects.
        else:
            if not to_url and not to_path:
                return '首页或自定义页面必须二选一'
            if to_path:
                # NOTE(review): any non-empty custom page is coerced to "/"
                # (the site home) here — confirm this is intentional.
                to_path = "/"

        return {
            "tourl": to_url,
            "topath": to_path,
            "errorpage": error_page,
            "redirectdomain": redirect_domain,
            # Unnamed rules get a timestamp-derived name.
            "redirectname": redirect_name if redirect_name else str(int(time.time())),
            "type": status_type,
            "sitename": site_name,
            "redirectpath": redirect_path,
            "redirecttype": redirect_type,
            "domainorpath": domain_or_path,
            "holdpath": hold_path,
        }
||||
    def create_redirect(self, get) -> Tuple[bool, str]:
        """Create a redirect rule from request args.

        Returns (flag, message); a False flag carries the error message.
        """
        res_conf = self._check_redirect_args(get, is_modify=False)
        if isinstance(res_conf, str):
            return False, res_conf

        # Wire the include directives, then write the rule conf files.
        res = self._set_include(res_conf)
        if res is not None:
            return False, res
        res = self._write_config(res_conf)
        if res is not None:
            return False, res
        self.config.append(res_conf)
        self.save_config()
        service_reload()
        return True, '创建成功'
||||
def _set_include(self, res_conf) -> Optional[str]:
|
||||
flag, msg = self._set_nginx_redirect_include(res_conf)
|
||||
if not flag and webserver() == "nginx":
|
||||
return msg
|
||||
flag, msg = self._set_apache_redirect_include(res_conf)
|
||||
if not flag and webserver() == "apache":
|
||||
return msg
|
||||
|
||||
def _write_config(self, res_conf) -> Optional[str]:
|
||||
if res_conf["errorpage"] != 1:
|
||||
res = self.write_nginx_redirect_file(res_conf)
|
||||
if res is not None:
|
||||
return res
|
||||
res = self.write_apache_redirect_file(res_conf)
|
||||
if res is not None:
|
||||
return res
|
||||
else:
|
||||
self.unset_nginx_404_conf(res_conf["sitename"])
|
||||
res = self.write_nginx_404_redirect_file(res_conf)
|
||||
if res is not None:
|
||||
return res
|
||||
res = self.write_apache_404_redirect_file(res_conf)
|
||||
if res is not None:
|
||||
return res
|
||||
|
||||
    def modify_redirect(self, get) -> Tuple[bool, str]:
        """Modify / enable / disable a redirect rule.

        Args (attributes of ``get``):
            sitename: site name
            redirectname: redirect rule name
            tourl: target URL
            redirectdomain: domains to redirect
            redirectpath: path to redirect
            redirecttype: HTTP redirect code (e.g. 301/302)
            type: rule state, 0 = disabled, 1 = enabled
            domainorpath: "domain" for domain redirect, "path" for path redirect
            holdpath: keep the request path, 0 = no, 1 = yes

        Returns:
            (flag, message) tuple.
        """
        # Basic argument validation / normalization.
        res_conf = self._check_redirect_args(get, is_modify=True)
        if isinstance(res_conf, str):
            return False, res_conf

        # Find the record being replaced (may be absent).
        old_idx = None
        for i, conf in enumerate(self.config):
            if conf["redirectname"] == res_conf["redirectname"] and conf["sitename"] == res_conf["sitename"]:
                old_idx = i

        res = self._set_include(res_conf)
        if res is not None:
            return False, res
        res = self._write_config(res_conf)
        if res is not None:
            return False, res

        # Update in place, or append when no existing record matched.
        if old_idx is not None:
            self.config[old_idx].update(res_conf)
        else:
            self.config.append(res_conf)
        self.save_config()
        service_reload()
        return True, '修改成功'
||||
    def _set_nginx_redirect_include(self, redirect_conf: dict) -> Tuple[bool, str]:
        """Insert the redirect include line into the site's nginx vhost conf.

        Returns (True, "") when the include already exists or was added,
        (False, error) on failure.
        """
        ng_redirect_dir = "%s/vhost/nginx/redirect/%s" % (self.panel_path, redirect_conf["sitename"])
        ng_file = "{}/vhost/nginx/{}{}.conf".format(self.panel_path, self.config_prefix, redirect_conf["sitename"])
        if not os.path.exists(ng_redirect_dir):
            # NOTE(review): 0o600 on a *directory* omits the execute bit, so
            # non-root users cannot traverse it — confirm this is intended.
            os.makedirs(ng_redirect_dir, 0o600)
        ng_conf = read_file(ng_file)
        if not isinstance(ng_conf, str):
            return False, "nginx配置文件读取失败"

        rep_include = re.compile(r"\sinclude +.*/redirect/.*\*\.conf;", re.M)
        # Already wired up: nothing to do.
        if rep_include.search(ng_conf):
            return True, ""
        # Anchor the include directly after the #SSL-END marker.
        redirect_include = (
            "#SSL-END\n"
            "    #引用重定向规则,注释后配置的重定向代理将无效\n"
            "    include {}/*.conf;"
        ).format(ng_redirect_dir)

        if "#SSL-END" not in ng_conf:
            return False, "添加配置失败,无法定位SSL相关配置的位置"

        new_conf = ng_conf.replace("#SSL-END", redirect_include)
        write_file(ng_file, new_conf)
        # Roll back if nginx now rejects its configuration.
        if self.webserver == "nginx" and check_server_config() is not None:
            write_file(ng_file, ng_conf)
            return False, "添加配置失败"

        return True, ""
||||
    def _un_set_nginx_redirect_include(self, redirect_conf: dict) -> Tuple[bool, str]:
        """Remove the redirect include line from the site's nginx vhost conf.

        Returns (True, "") on success or nothing to remove, (False, error)
        otherwise.
        """
        ng_file = "{}/vhost/nginx/{}{}.conf".format(self.panel_path, self.config_prefix, redirect_conf["sitename"])
        ng_conf = read_file(ng_file)
        if not isinstance(ng_conf, str):
            return False, "nginx配置文件读取失败"

        # Also swallow an immediately preceding comment line, if present.
        rep_include = re.compile(r"(#(.*)\n)?\s*include +.*/redirect/.*\*\.conf;")
        if not rep_include.search(ng_conf):
            return True, ""

        # Only the first occurrence is removed (count=1).
        new_conf = rep_include.sub("", ng_conf, 1)
        write_file(ng_file, new_conf)
        # Roll back if nginx now rejects its configuration.
        if self.webserver == "nginx" and check_server_config() is not None:
            write_file(ng_file, ng_conf)
            return False, "移除配置失败"

        return True, ""
||||
    def _set_apache_redirect_include(self, redirect_conf: dict) -> Tuple[bool, str]:
        """Insert the redirect IncludeOptional into every apache VirtualHost.

        Tries to place the include after each ``CustomLog`` line, falling
        back to placing it before each ``#DENY FILES`` marker.  Returns
        (True, "") when present or added, (False, error) otherwise.
        """
        ap_redirect_dir = "%s/vhost/apache/redirect/%s" % (self.panel_path, redirect_conf["sitename"])
        ap_file = "{}/vhost/apache/{}{}.conf".format(self.panel_path, self.config_prefix, redirect_conf["sitename"])
        if not os.path.exists(ap_redirect_dir):
            # NOTE(review): 0o600 on a directory omits the execute bit —
            # confirm this is intended.
            os.makedirs(ap_redirect_dir, 0o600)

        ap_conf = read_file(ap_file)
        if not isinstance(ap_conf, str):
            return False, "apache配置文件读取失败"

        rep_include = re.compile(r"\sIncludeOptional +.*/redirect/.*\*\.conf", re.M)
        include_count = len(list(rep_include.finditer(ap_conf)))
        # Every VirtualHost already carries the include: nothing to do.
        if ap_conf.count("</VirtualHost>") == include_count:
            return True, ""

        if include_count > 0:
            # Partial coverage: clear the existing includes first, then
            # re-add one per VirtualHost below.
            self._un_set_apache_redirect_include(redirect_conf)

        rep_custom_log = re.compile(r"CustomLog .*\n")
        rep_deny_files = re.compile(r"\n\s*#DENY FILES")

        include_conf = (
            "\n    # 引用重定向规则,注释后配置的重定向代理将无效\n"
            "    IncludeOptional {}/*.conf\n"
        ).format(ap_redirect_dir)

        new_conf = None

        def set_by_rep_idx(rep: re.Pattern, use_start: bool) -> bool:
            # Splice include_conf before (use_start=True) or after each match
            # of *rep*, write the result, and roll back if apache rejects it.
            new_conf_list = []
            last_idx = 0
            for tmp in rep.finditer(ap_conf):
                new_conf_list.append(ap_conf[last_idx:tmp.start()])
                if use_start:
                    new_conf_list.append(include_conf)
                    new_conf_list.append(tmp.group())
                else:
                    new_conf_list.append(tmp.group())
                    new_conf_list.append(include_conf)
                last_idx = tmp.end()

            new_conf_list.append(ap_conf[last_idx:])

            nonlocal new_conf
            new_conf = "".join(new_conf_list)
            write_file(ap_file, new_conf)
            if self.webserver == "apache" and check_server_config() is not None:
                write_file(ap_file, ap_conf)
                return False
            return True

        # First anchor choice: after CustomLog lines.
        if set_by_rep_idx(rep_custom_log, False) and rep_include.search(new_conf):
            return True, ""

        # Fallback anchor: before the #DENY FILES markers.
        if set_by_rep_idx(rep_deny_files, True) and rep_include.search(new_conf):
            return True, ""
        return False, "设置失败"
||||
    def _un_set_apache_redirect_include(self, redirect_conf: dict) -> Tuple[bool, str]:
        """Remove every redirect IncludeOptional from the apache vhost conf.

        Returns (True, "") on success or nothing to remove, (False, error)
        otherwise.
        """
        ap_file = "{}/vhost/apache/{}{}.conf".format(self.panel_path, self.config_prefix, redirect_conf["sitename"])
        ap_conf = read_file(ap_file)
        if not isinstance(ap_conf, str):
            return False, "apache配置文件读取失败"

        # Also swallow an immediately preceding comment line, if present.
        rep_include = re.compile(r"(#(.*)\n)?\s*IncludeOptional +.*/redirect/.*\*\.conf")
        if not rep_include.search(ap_conf):
            return True, ""

        # All occurrences are removed (one per VirtualHost).
        new_conf = rep_include.sub("", ap_conf)
        write_file(ap_file, new_conf)
        # Roll back if apache now rejects its configuration.
        if self.webserver == "apache" and check_server_config() is not None:
            write_file(ap_file, ap_conf)
            return False, "移除配置失败"

        return True, ""
||||
    def write_nginx_redirect_file(self, redirect_conf: dict) -> Optional[str]:
        """Write (or remove, when the rule is disabled) the per-rule nginx
        redirect conf.  Returns an error string on failure, None on success.
        """
        conf_file = "{}/vhost/nginx/redirect/{}/{}_{}.conf".format(
            self.panel_path, redirect_conf["sitename"], self._calc_redirect_name_md5(redirect_conf["redirectname"]),
            redirect_conf["sitename"]
        )
        if redirect_conf["type"] == 1:
            to_url = redirect_conf["tourl"]
            conf_list = ["#REWRITE-START"]
            if redirect_conf["domainorpath"] == "domain":
                # $request_uri carries the original path+query to the target.
                hold_path = "$request_uri" if redirect_conf["holdpath"] == 1 else ""
                for sd in redirect_conf["redirectdomain"]:
                    if sd.startswith("*."):
                        # Wildcard domain -> regex matching any subdomain.
                        sd = r"[\w.]+\." + sd[2:]

                    conf_list.append(self._ng_redirect_domain_format % (
                        sd, redirect_conf["redirecttype"], to_url, hold_path
                    ))
            else:
                redirect_path = redirect_conf["redirectpath"]
                # 301 -> permanent rewrite flag, anything else -> temporary.
                if redirect_conf["redirecttype"] == "301":
                    redirect_type = "permanent"
                else:
                    redirect_type = "redirect"
                hold_path = "$1" if redirect_conf["holdpath"] == 1 else ""
                conf_list.append(self._ng_redirect_path_format % (redirect_path, to_url, hold_path, redirect_type))

            conf_list.append("#REWRITE-END")

            conf_data = "\n".join(conf_list)
            write_file(conf_file, conf_data)

            # Roll back (delete the new file) if nginx rejects the config.
            if self.webserver == "nginx":
                error_msg = check_server_config()
                if error_msg is not None:
                    if os.path.exists(conf_file):
                        os.remove(conf_file)
                    return 'ERROR: 配置出错<br><a style="color:red;">' + error_msg.replace("\n", '<br>') + '</a>'
        else:
            # Disabled rule: remove its conf file.
            if os.path.exists(conf_file):
                os.remove(conf_file)
||||
    def write_apache_redirect_file(self, redirect_conf: dict) -> Optional[str]:
        """Write (or remove, when the rule is disabled) the per-rule apache
        redirect conf.  Returns an error string on failure, None on success.
        """
        conf_file = "{}/vhost/apache/redirect/{}/{}_{}.conf".format(
            self.panel_path, redirect_conf["sitename"], self._calc_redirect_name_md5(redirect_conf["redirectname"]),
            redirect_conf["sitename"]
        )
        if redirect_conf["type"] != 1:
            # Disabled rule: remove its conf file and stop.
            if os.path.exists(conf_file):
                os.remove(conf_file)
            return

        to_url = redirect_conf["tourl"]
        conf_list = ["#REWRITE-START"]
        # "$1" appends the matched path to the target URL.
        hold_path = "$1" if redirect_conf["holdpath"] == 1 else ""
        if redirect_conf["domainorpath"] == "domain":
            for sd in redirect_conf["redirectdomain"]:
                if sd.startswith("*."):
                    # Wildcard domain -> regex matching any subdomain.
                    sd = r"[\w.]+\." + sd[2:]

                conf_list.append(self._ap_redirect_domain_format % (
                    sd, to_url, hold_path, redirect_conf["redirecttype"]
                ))
        else:
            redirect_path = redirect_conf["redirectpath"]
            conf_list.append(self._ap_redirect_path_format % (redirect_path, to_url, hold_path, redirect_conf["redirecttype"]))

        conf_list.append("#REWRITE-END")

        write_file(conf_file, "\n".join(conf_list))
        # Roll back (delete the new file) if apache rejects the config.
        if self.webserver == "apache":
            error_msg = check_server_config()
            if error_msg is not None:
                if os.path.exists(conf_file):
                    os.remove(conf_file)
                return 'ERROR: 配置出错<br><a style="color:red;">' + error_msg.replace("\n", '<br>') + '</a>'
||||
    def unset_nginx_404_conf(self, site_name):
        """Strip existing, uncommented 404-page directives from the site's
        nginx vhost and rewrite confs.
        """
        need_clear_files = [
            "{}/vhost/nginx/{}{}.conf".format(self.panel_path, self.config_prefix, site_name),
            "{}/vhost/nginx/rewrite/{}{}.conf".format(self.panel_path, self.config_prefix, site_name),
        ]
        # Match "error_page 404 /404.html..." lines and "location = /404.html {...}" blocks;
        # the named "prefix" group lets us detect commented-out matches.
        rep_error_page = re.compile(r'(?P<prefix>.*)error_page +404 +/404\.html[^\n]*\n', re.M)
        rep_location_404 = re.compile(r'(?P<prefix>.*)location += +/404\.html[^}]*}')
        clear_files = [
            {
                "data": read_file(i),
                "path": i,
            } for i in need_clear_files
        ]
        # Apply both patterns to both files; skip unreadable files and
        # matches that are already commented out.
        for file_info, rep in product(clear_files, (rep_error_page, rep_location_404)):
            if not isinstance(file_info["data"], str):
                continue
            tmp_res = rep.search(file_info["data"])
            if not tmp_res or tmp_res.group("prefix").find("#") != -1:
                continue
            file_info["data"] = rep.sub("", file_info["data"])

        for i in clear_files:
            if not isinstance(i["data"], str):
                continue
            write_file(i["path"], i["data"])
||||
    def write_nginx_404_redirect_file(self, redirect_conf: dict) -> Optional[str]:
        """Write the nginx conf for a 404-page redirect rule.

        Returns an error string on failure, None on success.
        """
        r_name_md5 = self._calc_redirect_name_md5(redirect_conf["redirectname"])
        file_path = "{}/vhost/nginx/redirect/{}".format(self.panel_path, redirect_conf["sitename"])
        file_name = '%s_%s.conf' % (r_name_md5, redirect_conf["sitename"])
        conf_file = os.path.join(file_path, file_name)
        if redirect_conf["type"] != 1:
            # Disabled rule: remove any existing conf and stop.
            if os.path.exists(conf_file):
                os.remove(conf_file)
            return

        # Prefer the absolute target URL; otherwise use the custom page path.
        _path = redirect_conf["tourl"] if redirect_conf["tourl"] else redirect_conf["topath"]
        conf_data = (
            '#REWRITE-START\n'
            'error_page 404 = @notfound;\n'
            'location @notfound {{\n'
            '    return {} {};\n'
            '}}\n#REWRITE-END'
        ).format(redirect_conf["redirecttype"], _path)

        write_file(conf_file, conf_data)
        # Roll back (delete the new file) if nginx rejects the config.
        if self.webserver == "nginx":
            error_msg = check_server_config()
            if error_msg is not None:
                if os.path.exists(conf_file):
                    os.remove(conf_file)
                return 'ERROR: 配置出错<br><a style="color:red;">' + error_msg.replace("\n", '<br>') + '</a>'
||||
    def write_apache_404_redirect_file(self, redirect_conf: dict) -> Optional[str]:
        """Write the apache conf for a 404-page redirect rule.

        Returns an error string on failure, None on success.
        """
        r_name_md5 = self._calc_redirect_name_md5(redirect_conf["redirectname"])
        conf_file = "{}/vhost/apache/redirect/{}/{}_{}.conf".format(
            self.panel_path, redirect_conf["sitename"], r_name_md5, redirect_conf["sitename"]
        )
        if redirect_conf["type"] != 1:
            # Disabled rule: remove any existing conf and stop.
            if os.path.exists(conf_file):
                os.remove(conf_file)
            return

        # Prefer the absolute target URL; otherwise use the custom page path.
        _path = redirect_conf["tourl"] if redirect_conf["tourl"] else redirect_conf["topath"]
        # "%{{...}}" escapes the literal apache %{...} inside str.format.
        conf_data = """
#REWRITE-START
<IfModule mod_rewrite.c>
    RewriteEngine on
    RewriteCond %{{REQUEST_FILENAME}} !-f
    RewriteCond %{{REQUEST_FILENAME}} !-d
    RewriteRule . {} [L,R={}]
</IfModule>
#REWRITE-END
""".format(_path, redirect_conf["redirecttype"])

        write_file(conf_file, conf_data)
        # Roll back (delete the new file) if apache rejects the config.
        if self.webserver == "apache":
            error_msg = check_server_config()
            if error_msg is not None:
                if os.path.exists(conf_file):
                    os.remove(conf_file)
                return 'ERROR: 配置出错<br><a style="color:red;">' + error_msg.replace("\n", '<br>') + '</a>'
||||
def remove_redirect(self, get, multiple=None) -> Tuple[bool, str]:
|
||||
try:
|
||||
site_name = get.sitename.strip()
|
||||
redirect_name = get.redirectname.strip()
|
||||
except AttributeError:
|
||||
return False, "Parameter error"
|
||||
target_idx = None
|
||||
have_other_redirect = False
|
||||
target_conf = None
|
||||
for i, conf in enumerate(self.config):
|
||||
if conf["redirectname"] != redirect_name and conf["sitename"] == site_name:
|
||||
have_other_redirect = True
|
||||
if conf["redirectname"] == redirect_name and conf["sitename"] == site_name:
|
||||
target_idx = i
|
||||
target_conf = conf
|
||||
|
||||
if target_idx is None:
|
||||
return False, '没有指定的配置'
|
||||
|
||||
r_md5_name = self._calc_redirect_name_md5(target_conf["redirectname"])
|
||||
ng_conf_file = "%s/vhost/nginx/redirect/%s/%s_%s.conf" % (
|
||||
self.panel_path, site_name, r_md5_name, site_name)
|
||||
if os.path.exists(ng_conf_file):
|
||||
os.remove(ng_conf_file)
|
||||
|
||||
ap_conf_file = "%s/vhost/nginx/apache/%s/%s_%s.conf" % (
|
||||
self.panel_path, site_name, r_md5_name, site_name)
|
||||
if os.path.exists(ap_conf_file):
|
||||
os.remove(ap_conf_file)
|
||||
|
||||
if not have_other_redirect:
|
||||
self._un_set_apache_redirect_include(target_conf)
|
||||
self._un_set_nginx_redirect_include(target_conf)
|
||||
|
||||
del self.config[target_idx]
|
||||
self.save_config()
|
||||
if not multiple:
|
||||
service_reload()
|
||||
|
||||
return True, '删除成功'
|
||||
|
||||
def mutil_remove_redirect(self, get):
    """Remove several redirect rules of one site in a single call.

    ``get.redirectnames`` is a JSON-encoded list of rule names.  Each rule
    is removed via :meth:`remove_redirect`; the web server is reloaded
    once at the end.

    :return: (flag, message) summarizing successes and failures
    """
    try:
        redirect_names = json.loads(get.redirectnames.strip())
        site_name = get.sitename.strip()
    except (AttributeError, json.JSONDecodeError, TypeError):
        return False, "Parameter error"
    del_successfully = []
    del_failed = []
    get_obj = type(get)()
    for redirect_name in redirect_names:
        get_obj.redirectname = redirect_name
        get_obj.sitename = site_name
        try:
            # BUGFIX: pass the per-item request object (get_obj); the old
            # code passed the original `get`, which had no redirectname.
            flag, msg = self.remove_redirect(get_obj, multiple=1)
            # BUGFIX: the old code treated flag=True (success) as a
            # failure AND indexed the `del_failed` list like a dict,
            # which raised TypeError on the first hit.
            if not flag:
                del_failed.append(redirect_name)
                continue
            del_successfully.append(redirect_name)
        except Exception:
            # Best effort: record the failure and keep going.
            del_failed.append(redirect_name)

    service_reload()
    if not del_failed:
        return True, '删除重定向【{}】成功'.format(','.join(del_successfully))
    else:
        return True, '重定向【{}】删除成功,【{}】删除失败'.format(
            ','.join(del_successfully), ','.join(del_failed)
        )
|
||||
|
||||
def get_redirect_list(self, get) -> Tuple[bool, Union[str, List[Dict[str, Any]]]]:
    """List a site's redirect rules, optionally filtered by the
    ``errorpage`` flag.

    Each returned entry is annotated with the path of its generated conf
    file (``redirect_conf_file``) and ``type`` = 1/0 depending on whether
    that file exists.

    :return: (flag, list-of-rule-dicts) or (False, error message)
    """
    try:
        error_page = None
        site_name = get.sitename.strip()
        if "errorpage" in get:
            error_page = int(get.errorpage)
    except (AttributeError, ValueError, TypeError):
        return False, "Parameter error"
    redirect_list = []
    web_server = self.webserver
    if self.webserver == 'openlitespeed':
        # OpenLiteSpeed consumes the apache-style conf files.
        web_server = 'apache'
    for conf in self.config:
        if conf["sitename"] != site_name:
            continue
        # BUGFIX: older entries may lack the 'errorpage' key; indexing it
        # directly raised KeyError when the filter was active.  A missing
        # key is treated as "not an error page" (0).
        if error_page is not None and error_page != int(conf.get('errorpage', 0)):
            continue
        if 'errorpage' in conf and conf['errorpage'] in [1, '1']:
            conf['redirectdomain'] = ['404页面']

        md5_name = self._calc_redirect_name_md5(conf['redirectname'])
        conf["redirect_conf_file"] = "%s/vhost/%s/redirect/%s/%s_%s.conf" % (
            self.panel_path, web_server, site_name, md5_name, site_name)
        conf["type"] = 1 if os.path.isfile(conf["redirect_conf_file"]) else 0
        redirect_list.append(conf)
    return True, redirect_list
|
||||
|
||||
def remove_site_redirect_info(self, site_name):
    """Forget every redirect rule of a site and delete its generated
    nginx/apache conf directories."""
    # Delete back-to-front so remaining indexes stay valid.
    for idx in reversed(range(len(self.config))):
        if self.config[idx]["sitename"] == site_name:
            del self.config[idx]
    self.save_config()

    for server in ('nginx', 'apache'):
        conf_dir = '{}/vhost/{}/redirect/{}'.format(self.panel_path, server, site_name)
        if os.path.exists(conf_dir):
            shutil.rmtree(conf_dir)
|
||||
|
||||
|
||||
class Redirect(RealRedirect):
    """Project-facing wrapper around RealRedirect that adapts its
    (flag, payload) results into json_response envelopes."""

    def __init__(self, config_prefix: str = ""):
        super().__init__(config_prefix)
        self.config_prefix = config_prefix

    def remove_redirect_by_project_name(self, project_name):
        """Drop every redirect rule recorded for the given project."""
        return self.remove_site_redirect_info(project_name)

    def create_project_redirect(self, get):
        """Create a redirect rule and wrap the outcome."""
        ok, message = self.create_redirect(get)
        return json_response(status=ok, msg=message)

    def modify_project_redirect(self, get):
        """Modify a redirect rule and wrap the outcome."""
        ok, message = self.modify_redirect(get)
        return json_response(status=ok, msg=message)

    def remove_project_redirect(self, get):
        """Remove a single redirect rule and wrap the outcome."""
        ok, message = self.remove_redirect(get)
        return json_response(status=ok, msg=message)

    def mutil_remove_project_redirect(self, get):
        """Remove several redirect rules and wrap the outcome."""
        ok, message = self.mutil_remove_redirect(get)
        return json_response(status=ok, msg=message)

    def get_project_redirect_list(self, get):
        """List redirect rules; payload goes to `data` on success, to
        `msg` on failure."""
        ok, payload = self.get_redirect_list(get)
        if ok:
            return json_response(status=ok, data=payload)
        return json_response(status=ok, msg=payload)
|
||||
363
mod/base/web_conf/referer.py
Normal file
363
mod/base/web_conf/referer.py
Normal file
@@ -0,0 +1,363 @@
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from typing import Tuple, Optional, Union, Dict
|
||||
from .util import webserver, check_server_config, DB, \
|
||||
write_file, read_file, GET_CLASS, service_reload, pre_re_key
|
||||
from mod.base import json_response
|
||||
|
||||
|
||||
@dataclass
|
||||
class _RefererConf:
|
||||
name: str
|
||||
fix: str
|
||||
domains: str
|
||||
status: str
|
||||
return_rule: str
|
||||
http_status: str
|
||||
|
||||
def __str__(self):
|
||||
return '{"name"="%s","fix"="%s","domains"="%s","status"="%s","http_status"="%s","return_rule"="%s"}' % (
|
||||
self.name, self.fix, self.domains, self.status, self.http_status, self.return_rule
|
||||
)
|
||||
|
||||
|
||||
class RealReferer:
    """Low-level per-site hotlink (referer) protection: persists
    _RefererConf JSON files and injects/removes the matching rules in the
    nginx/apache vhost configs."""

    # Directory holding the per-site referer config JSON files
    # (hotlink-protection configuration).
    _referer_conf_dir = '/www/server/panel/vhost/config'  # 防盗链配置
    # nginx rule template; %s slots are: (url-suffix regex alternation,
    # valid_referers arguments, action taken on invalid referer).
    _ng_referer_conf_format = r''' #SECURITY-START 防盗链配置
 location ~ .*\.(%s)$ {
 expires 30d;
 access_log /dev/null;
 valid_referers %s;
 if ($invalid_referer){
 %s;
 }
 }
 #SECURITY-END'''
||||
|
||||
def __init__(self, config_prefix: str):
    """Remember the per-site filename prefix and ensure the shared
    referer-config directory exists; the server type resolves lazily."""
    if not os.path.isdir(self._referer_conf_dir):
        os.makedirs(self._referer_conf_dir)
    self.config_prefix: str = config_prefix
    self._webserver = None  # cache for the `webserver` property
||||
|
||||
@property
def webserver(self) -> str:
    """Detected web server type, resolved once and then cached."""
    if self._webserver is None:
        self._webserver = webserver()
    return self._webserver
||||
|
||||
def get_config(self, site_name: str) -> Optional[_RefererConf]:
    """Load the stored referer config for a site.

    Returns None when the file is missing, unparsable, or does not hold
    a JSON object.
    """
    conf_path = "{}/{}{}_door_chain.json".format(
        self._referer_conf_dir, self.config_prefix, site_name)
    try:
        parsed = json.loads(read_file(conf_path))
    except (json.JSONDecodeError, TypeError, ValueError):
        return None
    if not isinstance(parsed, dict):
        return None
    return _RefererConf(**parsed)
||||
|
||||
def save_config(self, site_name: str, data: Union[dict, str, _RefererConf]) -> bool:
    """Persist a referer config for a site as JSON.

    :param data: dict, pre-serialized JSON string, or _RefererConf
    :return: write_file's success flag
    """
    if isinstance(data, dict):
        c = json.dumps(data)
    elif isinstance(data, _RefererConf):
        # BUGFIX: the old json.dumps(str(data)) double-encoded the config
        # into a JSON *string*, so get_config's isinstance(..., dict)
        # check never matched and the saved config could not be read
        # back.  Serialize the dataclass fields as a JSON object instead.
        c = json.dumps(vars(data))
    else:
        c = data

    file_path = "{}/{}{}_door_chain.json".format(self._referer_conf_dir, self.config_prefix, site_name)
    return write_file(file_path, c)
|
||||
|
||||
# Validate the request parameters; on success return a config dataclass,
# otherwise return an error message string.
@staticmethod
def check_args(get: Union[Dict, GET_CLASS]) -> Union[_RefererConf, str]:
    """Validate hotlink-protection parameters from a request object or a
    plain dict.

    :return: a populated _RefererConf on success, an error string otherwise
    """
    res = {}
    if isinstance(get, GET_CLASS):
        try:
            res["status"] = "true" if not hasattr(get, "status") else get.status.strip()
            res["http_status"] = "false" if not hasattr(get, "http_status") else get.http_status.strip()
            res["name"] = get.name.strip()
            res["fix"] = get.fix.strip()
            res["domains"] = get.domains.strip()
            res["return_rule"] = get.return_rule.strip()
        except AttributeError:
            return "Parameter error"
    else:
        try:
            res["status"] = "true" if "status" not in get else get["status"].strip()
            res["http_status"] = "false" if "http_status" not in get else get["http_status"].strip()
            res["name"] = get["name"].strip()
            res["fix"] = get["fix"].strip()
            res["domains"] = get["domains"].strip()
            res["return_rule"] = get["return_rule"].strip()
        except KeyError:
            return "Parameter error"

    rconf = _RefererConf(**res)
    # BUGFIX: the old check tested `return_rule` (a status code or URI,
    # never "true"/"false") instead of `http_status`, and combined the two
    # conditions with `and`, so invalid flag values always slipped through.
    if rconf.status not in ("true", "false") or rconf.http_status not in ("true", "false"):
        return "状态参数只能使用【true,false】"
    # BUGFIX: guard against an empty return_rule before indexing [0].
    if not rconf.return_rule or (
            rconf.return_rule not in ('404', '403', '200', '301', '302', '401')
            and rconf.return_rule[0] != "/"):
        return "响应资源应使用URI路径或HTTP状态码,如:/test.png 或 404"
    if len(rconf.domains) < 3:
        return "防盗链域名不能为空"
    if len(rconf.fix) < 2:
        return 'URL后缀不能为空!'
    return rconf
|
||||
|
||||
def set_referer_security(self, rc: _RefererConf) -> Tuple[bool, str]:
    """Write the referer rules into both servers' vhost configs.

    Only a failure on the currently active server aborts; afterwards the
    server is reloaded and the config persisted.
    """
    ng_error = self._set_nginx_referer_security(rc)
    if self.webserver == "nginx" and ng_error:
        return False, ng_error
    ap_error = self._set_apache_referer_security(rc)
    if self.webserver == "apache" and ap_error:
        return False, ap_error
    service_reload()
    self.save_config(rc.name, rc)
    return True, "设置成功"
|
||||
|
||||
def _set_nginx_referer_security(self, rc: _RefererConf) -> Optional[str]:
    """Insert, replace, or remove the referer location block in the
    site's nginx vhost config.

    Returns an error message string on failure, None on success / no-op.
    On a failed syntax check the previous config is restored.
    """
    ng_file = '/www/server/panel/vhost/nginx/{}{}.conf'.format(self.config_prefix, rc.name)
    ng_conf = read_file(ng_file)
    if not isinstance(ng_conf, str):
        return "nginx配置文件丢失,无法设置"
    # Character span of any existing SECURITY block (None, None if absent).
    start_idx, end_idx = self._get_nginx_referer_security_idx(ng_conf)
    if rc.status == "true":
        # Build the action for invalid referers: internal rewrite to a
        # URI, or a bare HTTP status code.
        if rc.return_rule[0] == "/":
            return_rule = "rewrite /.* {} break".format(rc.return_rule)
        else:
            return_rule = 'return {}'.format(rc.return_rule)

        # valid_referers arguments; "none blocked" additionally allows
        # empty/blocked referers when http_status is enabled.
        valid_args_list = []
        if rc.http_status == "true":
            valid_args_list.extend(("none", "blocked"))
        valid_args_list.extend(map(lambda x: x.strip(), rc.domains.split(",")))
        valid_args = " ".join(valid_args_list)

        # Regex-escape each protected suffix and join into an alternation.
        location_args = "|".join(map(lambda x: pre_re_key(x.strip()), rc.fix.split(",")))
        if start_idx is not None:
            # Replace the existing block in place.
            new_conf = ng_conf[:start_idx] + "\n" + (
                self._ng_referer_conf_format % (location_args, valid_args, return_rule)
            ) + "\n" + ng_conf[end_idx:]
        else:
            # No existing block: prefer inserting right after the
            # redirect include line, else after the #SSL-END marker.
            rep_redirect_include = re.compile(r"\sinclude +.*/redirect/.*\*\.conf;", re.M)
            redirect_include_res = rep_redirect_include.search(ng_conf)
            if redirect_include_res:
                new_conf = ng_conf[:redirect_include_res.end()] + "\n" + (
                    self._ng_referer_conf_format % (location_args, valid_args, return_rule)
                ) + ng_conf[redirect_include_res.end():]
            else:
                if "#SSL-END" not in ng_conf:
                    return "添加配置失败,无法定位SSL相关配置的位置"

                new_conf = ng_conf.replace("#SSL-END", "#SSL-END\n" + self._ng_referer_conf_format % (
                    location_args, valid_args, return_rule))

    else:
        # Disabling: strip the existing block, no-op when there is none.
        if start_idx is None:
            return
        new_conf = ng_conf[:start_idx] + "\n" + ng_conf[end_idx:]

    write_file(ng_file, new_conf)
    if self.webserver == "nginx" and check_server_config() is not None:
        # Syntax check failed: roll back to the original config.
        write_file(ng_file, ng_conf)
        return "配置失败"
||||
|
||||
@staticmethod
def _get_nginx_referer_security_idx(ng_conf: str) -> Tuple[Optional[int], Optional[int]]:
    """Locate an existing referer SECURITY block in an nginx config.

    Returns (start, end) character indices covering the optional
    '#SECURITY-START' comment through the block's matching closing brace
    (plus a trailing '#SECURITY-END' comment line when present), or
    (None, None) when no block exists or the braces are unbalanced.
    """
    # Matches the optional START marker followed by the opening of a
    # `location ~ .*\.(...)$ { ... valid_referers` block.
    rep_security = re.compile(
        r"(\s*#\s*SECURITY-START.*\n)?\s*location\s+~\s+\.\*\\\.\(.*(\|.*)?\)\$\s*\{[^}]*valid_referers"
    )
    res = rep_security.search(ng_conf)
    if res is None:
        return None, None

    start_idx = res.start()
    s_idx = start_idx + ng_conf[start_idx:].find("{") + 1  # just past the opening brace
    l_n = 1  # current brace nesting depth
    max_idx = len(ng_conf)
    # Walk forward matching braces until the location block closes.
    while l_n > 0:
        next_l = ng_conf[s_idx:].find("{")
        next_r = ng_conf[s_idx:].find("}")  # may be absent on a malformed config
        if next_r == -1:
            # Unbalanced braces: give up rather than mis-slice the file.
            return None, None
        if next_l == -1:
            next_l = max_idx

        if next_l < next_r:
            l_n += 1
        else:
            l_n -= 1
        s_idx += min(next_l, next_r) + 1

    # Swallow a trailing '#SECURITY-END' comment line, if present.
    rep_comment = re.search(r"^\s*#\s*SECURITY-END[^\n]*\n", ng_conf[s_idx:])
    if rep_comment is not None:
        end_idx = s_idx + rep_comment.end()
    else:
        end_idx = s_idx

    return start_idx, end_idx
|
||||
|
||||
@staticmethod
|
||||
def _build_apache_referer_security_conf(rc: _RefererConf) -> str:
|
||||
r_conf_list = ["#SECURITY-START 防盗链配置"]
|
||||
cond_format = " RewriteCond %{{HTTP_REFERER}} !{} [NC]"
|
||||
if rc.http_status == "false":
|
||||
r_conf_list.append(cond_format.format("^$"))
|
||||
|
||||
r_conf_list.extend(map(lambda x: cond_format.format(x.strip()), rc.domains.split(",")))
|
||||
|
||||
rule_format = " RewriteRule .({}) {} "
|
||||
if rc.return_rule[0] == "/":
|
||||
r_conf_list.append(rule_format.format(
|
||||
"|".join(map(lambda x: x.strip(), rc.fix.split(","))),
|
||||
rc.return_rule
|
||||
))
|
||||
else:
|
||||
r_conf_list.append(rule_format.format(
|
||||
"|".join(map(lambda x: x.strip(), rc.fix.split(","))),
|
||||
"/{s}.html [R={s},NC,L]".format(s=rc.return_rule)
|
||||
))
|
||||
|
||||
r_conf_list.append(" #SECURITY-END")
|
||||
|
||||
return "\n".join(r_conf_list)
|
||||
|
||||
# Locate insertion points via an anchor regex and splice the referer
# rules in next to each match; `use_start` selects before vs. after.
def _add_apache_referer_security_by_rep_idx(self,
                                            rep: re.Pattern,
                                            use_start: bool,
                                            ap_conf, ap_file, r_conf) -> bool:
    """Insert `r_conf` adjacent to every match of `rep` in `ap_conf` and
    write the result to `ap_file`.

    :param rep: anchor regex marking where the rules belong
    :param use_start: True → insert before each match, False → after
    :return: True on success; False when the anchor never matched or the
        new config fails the apache syntax check (original restored)
    """
    tmp_conf_list = []
    last_idx = 0
    for tmp in rep.finditer(ap_conf):
        tmp_conf_list.append(ap_conf[last_idx:tmp.start()])
        if use_start:
            # Rules go in front of the anchor.
            tmp_conf_list.append("\n" + r_conf + "\n")
            tmp_conf_list.append(tmp.group())
        else:
            # Rules go right after the anchor.
            tmp_conf_list.append(tmp.group())
            tmp_conf_list.append("\n" + r_conf + "\n")
        last_idx = tmp.end()
    if last_idx == 0:
        # Anchor not found anywhere in the config.
        return False

    tmp_conf_list.append(ap_conf[last_idx:])
    _conf = "".join(tmp_conf_list)
    write_file(ap_file, _conf)
    if self.webserver == "apache" and check_server_config() is not None:
        # Syntax check failed: roll back to the original config.
        write_file(ap_file, ap_conf)
        return False
    return True
|
||||
|
||||
def _set_apache_referer_security(self, rc: _RefererConf) -> Optional[str]:
    """Insert, replace, or remove the referer rules in the site's apache
    vhost config.

    Returns an error message string on failure, None on success / no-op.
    On a failed syntax check the previous config is restored.
    """
    ap_file = '/www/server/panel/vhost/apache/{}{}.conf'.format(self.config_prefix, rc.name)
    ap_conf = read_file(ap_file)
    if not isinstance(ap_conf, str):
        # BUGFIX: the old message wrongly said "nginx" for the apache conf.
        return "apache配置文件丢失,无法设置"
    # BUGFIX: the old pattern used `(.|\n)` (exactly ONE character between
    # the markers), so an existing SECURITY block was essentially never
    # matched — it got duplicated instead of replaced, and could not be
    # removed.  Use a non-greedy repetition across lines.
    rep_security = re.compile(r"#\s*SECURITY-START(.|\n)*?#SECURITY-END.*\n")
    res = rep_security.search(ap_conf)
    if rc.status == "true":
        r_conf = self._build_apache_referer_security_conf(rc)
        if res is not None:
            # Replace every existing SECURITY block in place.
            new_conf_list = []
            _idx = 0
            for tmp_res in rep_security.finditer(ap_conf):
                new_conf_list.append(ap_conf[_idx:tmp_res.start()])
                new_conf_list.append("\n" + r_conf + "\n")
                _idx = tmp_res.end()
            new_conf_list.append(ap_conf[_idx:])
            new_conf = "".join(new_conf_list)
            write_file(ap_file, new_conf)
            if self.webserver == "apache" and check_server_config() is not None:
                # Syntax check failed: roll back to the original config.
                write_file(ap_file, ap_conf)
                return "配置修改失败"
            return None

        # No existing block: try the anchors in order of preference —
        # after the redirect include, after CustomLog, before #DENY FILES.
        rep_redirect_include = re.compile(r"IncludeOptional +.*/redirect/.*\*\.conf.*\n", re.M)
        rep_custom_log = re.compile(r"CustomLog .*\n")
        rep_deny_files = re.compile(r"\n\s*#DENY FILES")
        if self._add_apache_referer_security_by_rep_idx(rep_redirect_include, False, ap_conf, ap_file, r_conf):
            return
        if self._add_apache_referer_security_by_rep_idx(rep_custom_log, False, ap_conf, ap_file, r_conf):
            return
        if self._add_apache_referer_security_by_rep_idx(rep_deny_files, True, ap_conf, ap_file, r_conf):
            return
        return "设置添加失败"

    else:
        # Disabling: strip every SECURITY block, no-op when there is none.
        if res is None:
            return

        new_conf_list = []
        _idx = 0
        for tmp_res in rep_security.finditer(ap_conf):
            new_conf_list.append(ap_conf[_idx:tmp_res.start()])
            _idx = tmp_res.end()
        new_conf_list.append(ap_conf[_idx:])
        new_conf = "".join(new_conf_list)
        write_file(ap_file, new_conf)
        if self.webserver == "apache" and check_server_config() is not None:
            # Syntax check failed: roll back to the original config.
            write_file(ap_file, ap_conf)
            return "配置修改失败"
|
||||
|
||||
def get_referer_security(self, site_name) -> Optional[dict]:
    """Return the stored referer config as a plain dict, or None if the
    site has no stored config."""
    r = self.get_config(site_name)
    if r is None:
        return None
    # BUGFIX: the old round-trip json.loads(str(r)) relied on __str__
    # producing valid JSON, which it did not ('"key"="value"' pairs), so
    # this always raised JSONDecodeError.  Read the dataclass fields
    # directly instead.
    return dict(vars(r))
|
||||
|
||||
def remove_site_referer_info(self, site_name):
    """Delete the stored referer config file of a site (no-op when the
    file does not exist)."""
    conf_path = "{}/{}{}_door_chain.json".format(
        self._referer_conf_dir, self.config_prefix, site_name)
    if os.path.exists(conf_path):
        os.remove(conf_path)
|
||||
|
||||
# Read the referer configuration back out of the live server conf files.
# Not implemented for now — of little value.
def _get_referer_security_by_conf(self, site_name):
    # NOTE(review): _get_nginx_referer_security / _get_apache_referer_security
    # are not defined anywhere in this class — calling this raises
    # AttributeError.  Placeholder only; confirm before wiring it up.
    if self.webserver == "nginx":
        self._get_nginx_referer_security()
    else:
        self._get_apache_referer_security()
|
||||
|
||||
|
||||
class Referer:
    """Project-facing facade over RealReferer that returns json_response
    envelopes instead of raw (flag, payload) tuples."""

    def __init__(self, config_prefix: str):
        self.config_prefix: str = config_prefix
        self._r = RealReferer(self.config_prefix)

    def get_referer_security(self, get):
        """Return the site's referer config; when none is stored,
        synthesize a disabled default with the site's domains pre-filled
        from the panel database."""
        try:
            site_name = get.site_name.strip()
        except AttributeError:
            return json_response(status=False, msg="Parameter error")

        stored = self._r.get_referer_security(site_name)
        if stored is not None:
            return json_response(status=True, data=stored)

        site_info = DB("sites").where("name=?", (site_name,)).field('id').find()
        if not isinstance(site_info, dict):
            return json_response(status=False, msg="Site query error")
        domains_info = DB("domain").where("pid=?", (site_info["id"],)).field('name').select()
        if not isinstance(domains_info, list):
            return json_response(status=False, msg="Site query error")

        default_conf = {
            "name": site_name,
            "fix": "jpg,jpeg,gif,png,js,css",
            "domains": ",".join(entry["name"] for entry in domains_info),
            "status": "false",
            "return_rule": "404",
            "http_status": "false",
        }
        return json_response(status=True, data=default_conf)

    def set_referer_security(self, get):
        """Validate the request params, then apply them via RealReferer."""
        checked = self._r.check_args(get)
        if isinstance(checked, str):
            return json_response(status=False, msg=checked)

        ok, message = self._r.set_referer_security(checked)
        return json_response(status=ok, msg=message)
|
||||
103
mod/base/web_conf/server_extension.py
Normal file
103
mod/base/web_conf/server_extension.py
Normal file
@@ -0,0 +1,103 @@
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
|
||||
import public
|
||||
|
||||
class NginxExtension:
    """Manage per-site `include .../extension/<site>/*.conf;` hooks inside
    nginx vhost configs, plus the backing extension directories."""

    _EXTENSION_DIR = "{}/vhost/nginx/extension".format(public.get_panel_path())

    @classmethod
    def set_extension(cls, site_name: str, config_path: str) -> str:
        """Read the vhost config at `config_path` and return it with the
        extension include inserted ("" when the file is empty/missing)."""
        raw = public.readFile(config_path)
        if not raw:
            return ""
        return cls.set_extension_by_config(site_name, raw)

    @classmethod
    def set_extension_by_config(cls, site_name: str, config_data: str) -> str:
        """Return `config_data` with the per-site include line inserted
        after each server block's root/index/server_name directive; also
        ensures the extension directory exists."""
        ext_dir = "{}/{}".format(cls._EXTENSION_DIR, site_name)
        # Replace a stray plain file of the same name, then create the dir.
        if os.path.exists(ext_dir) and not os.path.isdir(ext_dir):
            os.remove(ext_dir)
        if not os.path.exists(ext_dir):
            os.makedirs(ext_dir)

        # Anchor candidates inside a server{} block, tried in order.
        anchors = [
            re.compile(r"(?<!#)server\s*{(([^{\n]*\n)|(\s*#.*\n)){0,20}\s*root\s+/[^\n]*\n"),
            re.compile(r"(?<!#)server\s*{(([^{\n]*\n)|(\s*#.*\n)){0,20}\s*index\s+[^\n]*\n"),
            re.compile(r"(?<!#)server\s*{(([^{\n]*\n)|(\s*#.*\n)){0,20}\s*server_name\s+[^\n]*\n"),
        ]

        include_line = " include {}/*.conf;\n".format(ext_dir)
        for pattern in anchors:
            hits = list(pattern.finditer(config_data))
            if not hits:
                continue
            # Insert back-to-front so earlier match offsets stay valid.
            for hit in reversed(hits):
                config_data = config_data[:hit.end()] + include_line + config_data[hit.end():]
            break

        return config_data

    @classmethod
    def remove_extension(cls, site_name: str, config_path: str):
        """Delete the site's extension directory and return the config
        with the hook stripped (None when the file is empty/missing)."""
        ext_dir = "{}/{}".format(cls._EXTENSION_DIR, site_name)
        if os.path.isdir(ext_dir):
            shutil.rmtree(ext_dir)

        raw = public.readFile(config_path)
        if not raw:
            return None
        return cls.remove_extension_from_config(site_name, raw)

    @staticmethod
    def remove_extension_from_config(site_name: str, config_data: str):
        """Strip every previously inserted extension include line."""
        pattern = re.compile(r"\s*include\s+/.*extension/.*/\*\.conf;[^\n]*\n")
        return pattern.sub("\n", config_data)

    @staticmethod
    def has_extension(conf_data: str) -> bool:
        """True when the config already contains an extension hook."""
        pattern = re.compile(r"\s*include\s+/.*extension/.*/\*\.conf;[^\n]*\n")
        return pattern.search(conf_data) is not None
||||
|
||||
|
||||
class ApacheExtension(NginxExtension):
    """Apache variant of the extension hook: same mechanism with a
    different conf directory, anchor regexes and IncludeOptional syntax."""

    _EXTENSION_DIR = "{}/vhost/apache/extension".format(public.get_panel_path())

    @classmethod
    def set_extension_by_config(cls, site_name: str, config_data: str) -> str:
        """Return `config_data` with the per-site IncludeOptional inserted
        after each VirtualHost's ServerAlias (or DocumentRoot) line; also
        ensures the extension directory exists."""
        ext_dir = "{}/{}".format(cls._EXTENSION_DIR, site_name)
        # Replace a stray plain file of the same name, then create the dir.
        if os.path.exists(ext_dir) and not os.path.isdir(ext_dir):
            os.remove(ext_dir)
        if not os.path.exists(ext_dir):
            os.makedirs(ext_dir)

        # Anchor candidates inside a VirtualHost block, tried in order.
        anchors = [
            re.compile(r"<VirtualHost\s+\S+:\d+>\s(.*\n){0,8}\s*ServerAlias\s+[^\n]*\n"),
            re.compile(r"<VirtualHost\s+\S+:\d+>\s(.*\n){0,6}\s*DocumentRoot\s+[^\n]*\n"),
        ]

        include_line = " IncludeOptional {}/*.conf\n".format(ext_dir)
        for pattern in anchors:
            hits = list(pattern.finditer(config_data))
            if not hits:
                continue
            # Insert back-to-front so earlier match offsets stay valid.
            for hit in reversed(hits):
                config_data = config_data[:hit.end()] + include_line + config_data[hit.end():]
            break

        return config_data

    @staticmethod
    def remove_extension_from_config(site_name: str, config_data: str):
        """Strip every previously inserted IncludeOptional hook."""
        pattern = re.compile(r"\s*IncludeOptional\s+/.*extension/.*/\*\.conf[^\n]*\n")
        return pattern.sub("\n", config_data)

    @staticmethod
    def has_extension(conf_data: str) -> bool:
        """True when the config already contains an extension hook."""
        pattern = re.compile(r"\s*IncludeOptional\s+/.*extension/.*/\*\.conf[^\n]*\n")
        return pattern.search(conf_data) is not None
|
||||
1335
mod/base/web_conf/ssl.py
Normal file
1335
mod/base/web_conf/ssl.py
Normal file
File diff suppressed because it is too large
Load Diff
200
mod/base/web_conf/util.py
Normal file
200
mod/base/web_conf/util.py
Normal file
@@ -0,0 +1,200 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional, Tuple, Callable
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
|
||||
|
||||
def webserver() -> Optional[str]:
    """Detect the installed web server by probing for its control binary.

    :return: 'nginx', 'apache', 'openlitespeed', or None if none found
    """
    candidates = (
        ('nginx', '/www/server/nginx/sbin/nginx'),
        ('apache', '/www/server/apache/bin/apachectl'),
        ('openlitespeed', '/usr/local/lsws/bin/lswsctrl'),
    )
    for name, binary in candidates:
        if os.path.exists(binary):
            return name
    return None
|
||||
|
||||
|
||||
def check_server_config() -> Optional[str]:
    """Run the active web server's config syntax check.

    :return: the check's error output on failure (also logged), None when
        the config is OK or no supported server is installed
    """
    setup_path = "/www/server"
    w_s = webserver()
    if w_s == 'nginx':
        shell_str = (
            "ulimit -n 8192; "
            "{setup_path}/nginx/sbin/nginx -t -c {setup_path}/nginx/conf/nginx.conf"
        ).format(setup_path=setup_path)
        searchStr = 'successful'
    elif w_s == 'apache':
        shell_str = (
            "ulimit -n 8192; "
            "{setup_path}/apache/bin/apachectl -t"
        ).format(setup_path=setup_path)
        searchStr = 'Syntax OK'
    else:
        # openlitespeed (or nothing) installed: no syntax check available.
        return None

    result: Tuple[str, str] = public.ExecShell(shell_str)
    if searchStr not in result[1]:
        public.WriteLog("TYPE_SOFT", 'CONF_CHECK_ERR', (result[1],))
        return result[1]
    return None
|
||||
|
||||
|
||||
def read_file(filename, mode='r') -> Optional[str]:
    """Read a file's entire content.

    :param filename: path to read
    :param mode: open() mode (default: text read)
    :return: the file content, or None when the file is missing or
        cannot be read
    """
    if not os.path.exists(filename):
        return None
    try:
        # `with` guarantees the handle is closed; the old version
        # re-imported os locally and hand-rolled the close in a finally.
        with open(filename, mode=mode) as fp:
            return fp.read()
    except Exception:
        # Best effort, mirroring the original behavior: any read/decode
        # failure yields None rather than propagating.
        return None
|
||||
|
||||
|
||||
def write_file(filename: str, s_body: str, mode='w+') -> bool:
    """Write `s_body` to `filename` (created if absent).

    :return: True on success, False when the write fails twice
    """
    try:
        with open(filename, mode=mode) as fp:
            fp.write(s_body)
        return True
    except:
        pass
    # Retry once forcing UTF-8, for locales whose default encoding
    # cannot represent s_body (mirrors the original fallback).
    try:
        with open(filename, mode=mode, encoding="utf-8") as fp:
            fp.write(s_body)
        return True
    except:
        return False
|
||||
|
||||
|
||||
def debug_api_warp(fn):
    """Decorator for panel API endpoints: log any uncaught exception via
    the panel logger and return an empty dict instead of propagating.

    :param fn: the endpoint callable to wrap
    :return: the wrapped callable
    """
    # Local import keeps this module's top-level import surface unchanged.
    from functools import wraps

    @wraps(fn)  # preserve the endpoint's __name__/__doc__ for routing/debugging
    def inner(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception:
            public.print_log(public.get_error_info())
            return {

            }

    return inner
|
||||
|
||||
|
||||
# 重载Web服务配置
|
||||
def service_reload():
    """Reload the active web server's configuration.

    :return: the (stdout, stderr) tuple of the last shell command run
    """
    setup_path = "/www/server"
    nginx_bin = '{}/nginx/sbin/nginx'.format(setup_path)
    apache_bin = '{}/apache/bin/apachectl'.format(setup_path)
    if os.path.exists(nginx_bin):
        result = public.ExecShell('/etc/init.d/nginx reload')
        # A stale pid file makes reload fail: force a clean restart.
        if 'nginx.pid' in result[1]:
            public.ExecShell('pkill -9 nginx && sleep 1')
            public.ExecShell('/etc/init.d/nginx start')
    elif os.path.exists(apache_bin):
        result = public.ExecShell('/etc/init.d/httpd reload')
    else:
        result = public.ExecShell('rm -f /tmp/lshttpd/*.sock* && /usr/local/lsws/bin/lswsctrl restart')
    return result
|
||||
|
||||
|
||||
# 防正则转译
|
||||
# Escape regex metacharacters (kept hand-rolled: re.escape would also
# escape characters like '-' and '#', changing the output).
def pre_re_key(input_str: str) -> str:
    """Return `input_str` with every regex metacharacter backslash-escaped."""
    specials = '$()*+.[]{}?^|\\'
    return "".join("\\" + ch if ch in specials else ch for ch in input_str)
|
||||
|
||||
|
||||
def get_log_path() -> str:
    """Return the site-log directory: the override stored in
    data/sites_log_path.pl when it points at an existing directory,
    otherwise the configured default `logs_path`."""
    override = public.readFile("{}/data/sites_log_path.pl".format(public.get_panel_path()))
    if isinstance(override, str) and os.path.isdir(override):
        return override
    return public.GetConfigValue('logs_path')
|
||||
|
||||
|
||||
|
||||
# 2024/4/18 09:44 — domain name encoding conversion (IDN → punycode)
def to_puny_code(domain):
    """Best-effort conversion of an internationalized domain name to
    punycode; returns the input unchanged on any failure (missing idna
    package, plain-ASCII domain, bad input).
    """
    try:
        try:
            import idna
        except:
            # NOTE(review): installs a third-party package at runtime via
            # the panel's pip wrapper — side effect worth confirming.
            os.system("btpip install idna -I")
            import idna

        import re
        # NOTE(review): this pattern looks garbled — presumably it was
        # meant to detect non-ASCII/control characters (e.g. a \uXXXX
        # range); as written it matches almost any string. TODO confirm.
        match = re.search(u"[^u\0000-u\001f]+", domain)
        if not match:
            return domain
        try:
            if domain.startswith("*."):
                # Preserve the wildcard label; only encode the rest.
                return "*." + idna.encode(domain[2:]).decode("utf8")
            else:
                return idna.encode(domain).decode("utf8")
        except:
            return domain
    except:
        return domain
|
||||
|
||||
|
||||
# 2024/4/18 17:48 — handling of paths containing Chinese characters
def to_puny_code_path(path):
    """Convert each non-ASCII segment of `path` to punycode via
    to_puny_code(); paths that already exist on disk are returned as-is.
    """
    if sys.version_info[0] == 2: path = path.encode('utf-8')  # legacy py2 support
    if os.path.exists(path): return path
    import re
    # Look for high-byte or CJK characters; plain ASCII paths pass through.
    match = re.search(u"[\x80-\xff]+", path)
    if not match: match = re.search(u"[\u4e00-\u9fa5]+", path)
    if not match: return path
    npath = ''
    for ph in path.split('/'):
        npath += '/' + to_puny_code(ph)
    # Collapse the doubled separator produced by leading/empty segments.
    return npath.replace('//', '/')
|
||||
|
||||
|
||||
|
||||
class _DB:
    # Callable factory: DB("table") yields a db.Sql query object bound to
    # the given table (exposed via the module-level `DB` singleton).

    def __call__(self, table: str):
        import db  # panel-local module; imported lazily at call time
        with db.Sql() as t:
            t.table(table)
        # NOTE(review): `t` is returned after the `with` block has already
        # run __exit__; callers rely on the object remaining usable after
        # that — confirm db.Sql supports post-exit use.
        return t
|
||||
|
||||
|
||||
# Shared singleton: DB("table") opens a query builder on the panel DB.
DB = _DB()

# Request-object class used across the panel's API layer.
GET_CLASS = public.dict_obj

# Delegated panel helper — presumably reports whether the panel should
# listen on IPv6; see public.listen_ipv6.
listen_ipv6: Callable[[], bool] = public.listen_ipv6

# Shell-execution helper; returns a (stdout, stderr) pair.
ExecShell: Callable = public.ExecShell
|
||||
|
||||
|
||||
def use_http2() -> bool:
    """Whether the installed nginx build supports http2.

    True for every recorded version except the legacy 1.8.1 build;
    False as well when the version file is absent/unreadable.
    """
    version = public.readFile('/www/server/nginx/version.pl')
    if not isinstance(version, str):
        return False
    return '1.8.1' not in version
|
||||
Reference in New Issue
Block a user