Initial YakPanel commit
This commit is contained in:
17
mod/project/node/dbutil/__init__.py
Normal file
17
mod/project/node/dbutil/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Database helpers for the node module.

Re-exports the record/DB classes from the sub-modules and makes sure every
backing SQLite database exists before anything else touches it.
"""

from .load_db import LoadSite, HttpNode, TcpNode, NodeDB
from .node_db import Node, ServerNodeDB, ServerMonitorRepo, NodeAPPKey
from .file_transfer_db import FileTransfer, FileTransferDB, FileTransferTask
# from .executor import Script, ScriptGroup, ExecutorDB, ExecutorLog, ExecutorTask
from .node_task_flow import Script, Flow, CommandTask, CommandLog, TransferFile, TransferLog, TaskFlowsDB, \
    TransferTask, FlowTemplates

# Initialise the databases on first import; a failure is logged but must not
# prevent the package from importing.
try:
    NodeDB().init_db()
    ServerNodeDB().init_db()
    FileTransferDB().init_db()
    # ExecutorDB().init_db()
    TaskFlowsDB().init_db()
except Exception:
    import public

    public.print_error()
|
||||
481
mod/project/node/dbutil/executor.py
Normal file
481
mod/project/node/dbutil/executor.py
Normal file
@@ -0,0 +1,481 @@
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Tuple, Any, Union, Type, Generic, TypeVar, TextIO
|
||||
import sqlite3
|
||||
import json
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
import db
|
||||
|
||||
if "/www/server/panel" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel")
|
||||
|
||||
|
||||
@dataclass
class Script:
    """Record mapped onto the ``scripts`` table."""
    name: str
    script_type: str  # "python" or "shell"
    content: str
    id: Optional[int] = None
    description: Optional[str] = None
    group_id: int = 0  # 0 is the virtual "default" group
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None

    @staticmethod
    def check(data: Dict[str, Any]) -> str:
        """Validate a raw payload; return an error message, or "" when valid."""
        if "script_type" not in data or not data["script_type"]:
            return "Script type cannot be empty"
        # Idiom fix: `x not in (...)` instead of `not x in [...]`.
        if data["script_type"] not in ("python", "shell"):
            return "Script type error, please choose Python or Shell"
        if "content" not in data or not data["content"]:
            return "Script content cannot be empty"
        if "name" not in data or not data["name"]:
            return "Script name cannot be empty"
        return ""

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'Script':
        """Build a Script from a DB row / request dict (ISO datetime strings)."""
        return cls(
            id=int(data['id']) if data.get('id') else None,
            name=str(data['name']),
            script_type=str(data['script_type']),
            content=str(data['content']),
            description=str(data['description']) if data.get('description') else None,
            group_id=int(data['group_id']) if data.get('group_id') else 0,
            created_at=datetime.fromisoformat(data['created_at']) if data.get('created_at') else None,
            updated_at=datetime.fromisoformat(data['updated_at']) if data.get('updated_at') else None
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialise to a plain dict (datetimes as ISO strings)."""
        return {
            'id': self.id,
            'name': self.name,
            'script_type': self.script_type,
            'content': self.content,
            'description': self.description,
            'group_id': self.group_id,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None
        }
|
||||
|
||||
|
||||
@dataclass
class ScriptGroup:
    """Record mapped onto the ``script_groups`` table."""
    name: str
    id: Optional[int] = None
    description: Optional[str] = None
    created_at: Optional[datetime] = None

    @staticmethod
    def check(data: Dict[str, Any]) -> str:
        """Return an error message when the payload is invalid, else ""."""
        if data.get("name"):
            return ""
        return "Script group name cannot be empty"

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'ScriptGroup':
        """Build a ScriptGroup from a DB row (ISO datetime strings)."""
        raw_created = data.get('created_at')
        return cls(
            id=int(data['id']) if data.get('id') else None,
            name=str(data['name']),
            description=str(data['description']) if data.get('description') else None,
            created_at=datetime.fromisoformat(raw_created) if raw_created else None
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialise to a plain dict (datetime as an ISO string)."""
        created = self.created_at.isoformat() if self.created_at else None
        return {
            'id': self.id,
            'name': self.name,
            'description': self.description,
            'created_at': created
        }
|
||||
|
||||
|
||||
@dataclass
class ExecutorTask:
    """Record mapped onto the ``executor_tasks`` table."""
    script_id: int
    script_content: str  # snapshot of the script at execution time
    script_type: str
    server_ids: str = ""  # presumably "|id1|id2" style list — see query_tasks' "%|{}%" filter
    id: Optional[int] = None
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
    # Lazily attached execution logs; never None through the elogs property.
    _elogs: Optional[List["ExecutorLog"]] = None

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'ExecutorTask':
        """Build an ExecutorTask from a DB row (ISO datetime strings)."""
        return cls(
            id=int(data['id']) if data.get('id') else None,
            script_id=int(data['script_id']),
            script_content=str(data['script_content']),
            script_type=str(data['script_type']),
            # BUG FIX: server_ids was dropped on load, so tasks read back
            # from the DB always lost their target servers.
            server_ids=str(data['server_ids']) if data.get('server_ids') else "",
            created_at=datetime.fromisoformat(data['created_at']) if data.get('created_at') else None,
            updated_at=datetime.fromisoformat(data['updated_at']) if data.get('updated_at') else None
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialise to a plain dict (datetimes as ISO strings; _elogs excluded)."""
        return {
            'id': self.id,
            'script_id': self.script_id,
            'server_ids': self.server_ids,
            'script_content': self.script_content,
            'script_type': self.script_type,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None
        }

    @property
    def elogs(self) -> List["ExecutorLog"]:
        """Attached execution logs; empty list when none were loaded."""
        if self._elogs is None:
            return []
        return self._elogs

    @elogs.setter
    def elogs(self, elogs: List["ExecutorLog"]):
        self._elogs = elogs
|
||||
|
||||
|
||||
# Directory that stores per-run executor log files; created eagerly so the
# ExecutorLog helpers can assume it exists.
_EXECUTOR_LOG_DIR = public.get_panel_path() + "/logs/executor_log/"
try:
    # exist_ok avoids the old exists()/makedirs() TOCTOU race.
    os.makedirs(_EXECUTOR_LOG_DIR, exist_ok=True)
except OSError:
    # Best effort (was a bare except): a failure here only breaks log
    # writing later, it must not break importing this module.
    pass
|
||||
|
||||
|
||||
@dataclass
class ExecutorLog:
    """Record mapped onto the ``executor_logs`` table."""
    executor_task_id: int
    server_id: int
    ssh_host: str
    id: Optional[int] = None
    status: int = 0  # 0: running  1: success  2: failed  3: exception
    log_name: Optional[str] = None  # file name under _EXECUTOR_LOG_DIR
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
    _log_fp: Optional[TextIO] = None  # lazily opened handle, see log_fp

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'ExecutorLog':
        """Build an ExecutorLog from a DB row (ISO datetime strings)."""
        def _ts(key):
            raw = data.get(key)
            return datetime.fromisoformat(raw) if raw else None

        return cls(
            id=int(data['id']) if data.get('id') else None,
            executor_task_id=int(data['executor_task_id']),
            server_id=int(data['server_id']),
            ssh_host=str(data['ssh_host']),
            status=int(data['status']) if data.get('status', 0) else 0,
            log_name=str(data['log_name']) if data.get('log_name') else None,
            created_at=_ts('created_at'),
            updated_at=_ts('updated_at')
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialise to a plain dict (datetimes as ISO strings)."""
        created = self.created_at.isoformat() if self.created_at else None
        updated = self.updated_at.isoformat() if self.updated_at else None
        return {
            'id': self.id,
            'executor_task_id': self.executor_task_id,
            'server_id': self.server_id,
            'ssh_host': self.ssh_host,
            'status': self.status,
            'log_name': self.log_name,
            'created_at': created,
            'updated_at': updated
        }

    @property
    def log_file(self):
        # NOTE(review): assumes log_name is set — joining None would raise.
        return os.path.join(_EXECUTOR_LOG_DIR, self.log_name)

    @property
    def log_fp(self):
        # Lazily opened; "w+" truncates any previous content on (re)open.
        if self._log_fp is None:
            self._log_fp = open(self.log_file, "w+")
        return self._log_fp

    def create_log(self):
        """Create (or empty) the log file on disk."""
        public.writeFile(self.log_file, "")

    def remove_log(self):
        """Delete the log file if present."""
        if os.path.exists(self.log_file):
            os.remove(self.log_file)

    def get_log(self):
        """Return the full log file content."""
        return public.readFile(self.log_file)

    def write_log(self, log_data: str, is_end_log=False):
        """Write *log_data* and flush; close the handle when *is_end_log*."""
        fp = self.log_fp
        fp.write(log_data)
        fp.flush()
        if is_end_log:
            fp.close()
            self._log_fp = None
|
||||
|
||||
|
||||
_TableType = TypeVar("_TableType", bound=Union[Script, ScriptGroup, ExecutorTask, ExecutorLog])


class _Table(Generic[_TableType]):
    """Generic CRUD helper bound to one table and one record dataclass."""

    table_name: str = ""
    data_cls: Type[_TableType]

    def __init__(self, db_obj: db.Sql):
        self._db = db_obj

    # Single item: returns the new row id or an error string.
    # List of items: returns the number of inserted rows or an error string.
    def create(self,
               data: Union[_TableType, List[_TableType]]) -> Union[int, str]:
        """Insert one record or a batch of records."""
        items = data if isinstance(data, list) else [data]

        if not items:
            raise ValueError("Data cannot be empty")
        if not isinstance(items[0], self.data_cls):
            raise ValueError("Data type error")

        now = datetime.now().isoformat()

        def _row(item):
            # Strip the autoincrement id and stamp missing timestamps.
            row = item.to_dict()
            row.pop("id", None)
            if "created_at" in row and row["created_at"] is None:
                row["created_at"] = now
            if "updated_at" in row and row["updated_at"] is None:
                row["updated_at"] = now
            return row

        rows = [_row(item) for item in items]
        if len(rows) == 1:
            try:
                res = self._db.table(self.table_name).insert(rows[0])
                return res if isinstance(res, int) else str(res)
            except Exception as e:
                return str(e)
        try:
            res = self._db.table(self.table_name).batch_insert(rows)
            return len(items) if isinstance(res, (int, bool)) else str(res)
        except Exception as e:
            return str(e)

    def update(self, data: _TableType) -> str:
        """Update a record by id; returns "" on success, error text otherwise."""
        if not isinstance(data, self.data_cls):
            raise ValueError("Data type error")
        row = data.to_dict()
        row.pop('created_at', None)  # creation time is immutable
        if "updated_at" in row:
            row["updated_at"] = datetime.now().isoformat()
        if "id" not in row:
            raise ValueError("The data ID cannot be empty")
        try:
            self._db.table(self.table_name).where("id=?", (row["id"],)).update(row)
            return ""
        except Exception as e:
            return str(e)

    def get_byid(self, data_id: int) -> Optional[_TableType]:
        """Fetch one record by primary key; None when absent or on error."""
        try:
            row = self._db.table(self.table_name).where("id=?", (data_id,)).find()
        except Exception:
            return None
        return self.data_cls.from_dict(row) if row else None

    def delete(self, data_id: Union[int, List[int]]):
        """Delete one id or a list of ids; returns "" or an error string."""
        if isinstance(data_id, list):
            ids = [int(v) for v in data_id]
        elif isinstance(data_id, int):
            ids = [int(data_id)]
        else:
            return "数据id类型错误"
        placeholders = ",".join(["?"] * len(ids))
        try:
            self._db.table(self.table_name).where(
                "id in ({})".format(placeholders), tuple(ids)
            ).delete()
            return ""
        except Exception as e:
            return str(e)

    def query(self, *args) -> List[_TableType]:
        """Select records matching a where clause; [] on error or no match."""
        try:
            rows = self._db.table(self.table_name).where(*args).select()
        except Exception:
            return []
        if not rows:
            return []
        return [self.data_cls.from_dict(row) for row in rows]

    def query_page(self, *args, page_num: int = 1, limit: int = 10) -> List[_TableType]:
        """Paged select (newest first); [] on error or no match."""
        try:
            skip = limit * (page_num - 1)
            rows = self._db.table(self.table_name).where(*args).limit(limit, skip).order("id DESC").select()
        except Exception:
            public.print_error()
            return []
        if not rows:
            return []
        return [self.data_cls.from_dict(row) for row in rows]

    def count(self, *args) -> int:
        """Number of records matching a where clause; 0 on error."""
        try:
            return self._db.table(self.table_name).where(*args).count()
        except Exception:
            return 0

    def find(self, *args) -> Optional[_TableType]:
        """First record matching a where clause; None when absent or on error."""
        try:
            row = self._db.table(self.table_name).where(*args).find()
        except Exception:
            return None
        return self.data_cls.from_dict(row) if row else None
|
||||
|
||||
|
||||
class _ScriptTable(_Table[Script]):
    """Accessor for the ``scripts`` table."""
    table_name = "scripts"
    data_cls = Script

    def set_group_id(self, group_id: int, *where_args) -> str:
        """Move every script matched by *where_args* into *group_id*.

        Returns "" on success, the error text otherwise.
        """
        try:
            # BUG FIX: where() expects (condition, params); the varargs must
            # be unpacked — passing the tuple as a single argument, as the
            # original did, can never match the sibling query()/find() usage.
            self._db.table(self.table_name).where(*where_args).update({"group_id": group_id})
        except Exception as e:
            return str(e)
        return ""
|
||||
|
||||
|
||||
class _ScriptGroupTable(_Table[ScriptGroup]):
    """Accessor for the ``script_groups`` table."""
    table_name = "script_groups"
    data_cls = ScriptGroup
    # Virtual group with id 0: scripts whose group was never set land here.
    default_group = ScriptGroup(
        id=0,
        name="default",
        description="Default grouping, use this grouping when not set",
        created_at=datetime.now(),
    )

    def all_group(self) -> List[ScriptGroup]:
        """Return the virtual default group followed by every stored group."""
        try:
            rows = self._db.table(self.table_name).select()
        except Exception:
            return []
        if not rows:
            return []
        groups = [self.default_group]
        groups.extend(self.data_cls.from_dict(row) for row in rows)
        return groups
|
||||
|
||||
|
||||
class _ExecutorTaskTable(_Table[ExecutorTask]):
    """Accessor for the ``executor_tasks`` table."""
    table_name = "executor_tasks"
    data_cls = ExecutorTask

    def query_tasks(self,
                    page=1, size=10, node_id: int = None, script_type: str = None, search: str = None
                    ) -> Tuple[int, List[ExecutorTask]]:
        """Paged task query filtered by type, free-text search and node id.

        Returns (total matching rows, page of tasks).
        """
        where_args, parms = [], []
        if script_type and script_type != "all":
            where_args.append("script_type=?")
            parms.append(script_type)
        if search:
            # Match the executed content directly, plus any script whose
            # name/description matches (resolved to ids via the scripts table).
            search_str = "script_content like ?"
            parms.append("%{}%".format(search))

            stable = _ScriptTable(self._db)
            data = stable.query("name like ? or description like ?", ("%{}%".format(search), "%{}%".format(search)))
            if data:
                search_str += " or script_id in ({})".format(",".join(["?"] * len(data)))
                where_args.append("(" + search_str + ")")
                # BUG FIX: the ids must be flattened into the parameter list;
                # appending one tuple bound a single nested parameter against
                # N "?" placeholders.
                parms.extend(item.id for item in data)
            else:
                where_args.append(search_str)

        if node_id:
            where_args.append("server_ids like ?")
            parms.append("%|{}%".format(node_id))

        # public.print_log("search criteria: {}".format(" AND ".join(where_args)), parms)
        count = self.count(
            " AND ".join(where_args),
            (*parms, )
        )

        return count, self.query_page(
            " AND ".join(where_args),
            (*parms, ),
            page_num=page,
            limit=size
        )
|
||||
|
||||
|
||||
class _ExecutorLogTable(_Table[ExecutorLog]):
    """Accessor for the ``executor_logs`` table; inherits all CRUD from _Table."""

    table_name = "executor_logs"
    data_cls = ExecutorLog
|
||||
|
||||
|
||||
class ExecutorDB:
    """Facade bundling the executor tables on a dedicated SQLite file."""

    _DB_FILE = public.get_panel_path() + "/data/db/executor.db"
    _DB_INIT_FILE = os.path.dirname(__file__) + "/executor.sql"

    def __init__(self):
        sql = db.Sql()
        # db.Sql has no public way to point at another file; override the
        # name-mangled attribute so all tables share this database.
        sql._Sql__DB_FILE = self._DB_FILE
        self.db = sql
        self.Script = _ScriptTable(self.db)
        self.ScriptGroup = _ScriptGroupTable(self.db)
        self.ExecutorTask = _ExecutorTaskTable(self.db)
        self.ExecutorLog = _ExecutorLogTable(self.db)

    def init_db(self):
        """Create the schema (idempotent: the .sql uses IF NOT EXISTS)."""
        sql_data = public.readFile(self._DB_INIT_FILE)
        if not os.path.exists(self._DB_FILE) or os.path.getsize(self._DB_FILE) == 0:
            public.writeFile(self._DB_FILE, "")
        # sqlite3 is already imported at module level (the inner re-import was
        # redundant); close the connection even if executescript fails.
        conn = sqlite3.connect(self._DB_FILE)
        try:
            conn.cursor().executescript(sql_data)
            conn.commit()
        finally:
            conn.close()

    def close(self):
        """Close the underlying panel DB handle."""
        self.db.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_trackback):
        self.close()
|
||||
60
mod/project/node/dbutil/executor.sql
Normal file
60
mod/project/node/dbutil/executor.sql
Normal file
@@ -0,0 +1,60 @@
|
||||
|
||||
-- scripts: stored script definitions
CREATE TABLE IF NOT EXISTS scripts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL CHECK(length(name) <= 255),
    script_type TEXT NOT NULL CHECK(length(script_type) <= 255),
    content TEXT NOT NULL,
    description TEXT CHECK(length(description) <= 255),
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    group_id INTEGER NOT NULL DEFAULT 0
);

-- script_groups: user-defined script groupings
CREATE TABLE IF NOT EXISTS script_groups (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL CHECK(length(name) <= 255),
    description TEXT CHECK(length(description) <= 255),
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- executor_tasks: one row per execution request (content snapshotted)
CREATE TABLE IF NOT EXISTS executor_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    server_ids TEXT NOT NULL,
    script_id INTEGER NOT NULL,
    script_content TEXT NOT NULL,
    script_type TEXT NOT NULL CHECK(length(script_type) <= 255),
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- executor_logs: per-server result of one task (status: 0 running,
-- 1 success, 2 failed, 3 exception)
CREATE TABLE IF NOT EXISTS executor_logs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    executor_task_id INTEGER NOT NULL,
    server_id INTEGER NOT NULL,
    ssh_host TEXT NOT NULL,
    status INTEGER NOT NULL DEFAULT 0 CHECK(status IN (0,1,2,3)),
    log_name TEXT CHECK(length(log_name) <= 255),
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);


-- Indexes are created separately to avoid SQLite syntax errors.
-- scripts
CREATE INDEX IF NOT EXISTS idx_scripts_name ON scripts(name);
CREATE INDEX IF NOT EXISTS idx_scripts_script_type ON scripts(script_type);
CREATE INDEX IF NOT EXISTS idx_scripts_group_id ON scripts(group_id);

-- script_groups
CREATE INDEX IF NOT EXISTS idx_script_groups_name ON script_groups(name);

-- executor_tasks
CREATE INDEX IF NOT EXISTS idx_executor_tasks_script_id ON executor_tasks(script_id);

-- executor_logs
CREATE INDEX IF NOT EXISTS idx_executor_logs_task_server ON executor_logs(executor_task_id, server_id);
CREATE INDEX IF NOT EXISTS idx_executor_logs_status ON executor_logs(status);
|
||||
36
mod/project/node/dbutil/file_transfer.sql
Normal file
36
mod/project/node/dbutil/file_transfer.sql
Normal file
@@ -0,0 +1,36 @@
|
||||
-- transfer_tasks: one row per node-to-node transfer job
CREATE TABLE IF NOT EXISTS transfer_tasks
(
    task_id INTEGER PRIMARY KEY AUTOINCREMENT,
    source_node TEXT NOT NULL DEFAULT '{}',      -- JSON: {"address":..., "api_key":..., "name":...}
    target_node TEXT NOT NULL DEFAULT '{}',      -- JSON: {"address":..., "api_key":..., "name":...}
    source_path_list TEXT NOT NULL DEFAULT '[]', -- JSON paths on the source node: [{"path":..., "is_dir":...}]
    target_path TEXT NOT NULL,                   -- destination path on the target node
    task_action TEXT NOT NULL,                   -- upload/download
    status TEXT NOT NULL,                        -- pending/running/completed/failed
    default_mode TEXT NOT NULL,                  -- on conflict: cover = overwrite, ignore = skip, rename
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    started_at TIMESTAMP,
    completed_at TIMESTAMP,
    created_by TEXT NOT NULL,                    -- name of the creating node
    target_task_id INTEGER NOT NULL,
    is_source_node BOOLEAN NOT NULL,             -- this node is the sender
    is_target_node BOOLEAN NOT NULL              -- this node is the receiver
);

-- file_transfers: per-file detail rows belonging to one task
CREATE TABLE IF NOT EXISTS file_transfers
(
    transfer_id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_id INTEGER NOT NULL,
    src_file TEXT NOT NULL,                      -- source file path
    dst_file TEXT NOT NULL,                      -- destination file path
    file_size INTEGER NOT NULL,                  -- bytes
    is_dir INTEGER NOT NULL DEFAULT 0,
    status TEXT NOT NULL,                        -- pending/running/completed/failed
    progress INTEGER DEFAULT 0,                  -- 0-100
    message TEXT NOT NULL DEFAULT '',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    started_at TIMESTAMP,
    completed_at TIMESTAMP
);
|
||||
328
mod/project/node/dbutil/file_transfer_db.py
Normal file
328
mod/project/node/dbutil/file_transfer_db.py
Normal file
@@ -0,0 +1,328 @@
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Tuple
|
||||
import sqlite3
|
||||
import json
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
import db
|
||||
|
||||
if "/www/server/panel" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel")
|
||||
|
||||
|
||||
@dataclass
class FileTransferTask:
    """One transfer job between two panel nodes — maps to ``transfer_tasks``."""
    task_id: Optional[int] = None
    # {"address": "https://…", "api_key": "…", "name": "…"}
    source_node: dict = field(default_factory=dict)
    target_node: dict = field(default_factory=dict)
    # Paths on the source node: [{"path": "/www/wwwroot/aaaa", "is_dir": true}]
    source_path_list: list = field(default_factory=list)
    target_path: str = ""
    task_action: str = ""  # upload/download
    status: str = "pending"  # pending/running/completed/failed
    default_mode: str = "cover"  # on conflict: cover = overwrite, ignore = skip, rename
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    created_by: str = ""  # name of the creating node
    target_task_id: int = 0
    is_source_node: bool = False  # this node is the sender
    is_target_node: bool = False  # this node is the receiver

    @classmethod
    def from_dict(cls, row: dict) -> 'FileTransferTask':
        """Build a task from a DB row; JSON columns may be str or already parsed."""
        def decode_json(value, default):
            # DB rows store JSON text; in-process callers may pass parsed
            # objects. Anything else falls back to the empty default.
            # (Consolidates three copies of this logic, each of which had a
            # redundant `x = x` self-assignment branch.)
            if isinstance(value, str):
                return json.loads(value)
            if isinstance(value, type(default)):
                return value
            return default

        def parse_ts(key):
            raw = row.get(key, "")
            return datetime.fromisoformat(raw) if raw else None

        return cls(
            task_id=row.get("task_id", None),
            source_node=decode_json(row.get("source_node", "{}"), {}),
            target_node=decode_json(row.get("target_node", "{}"), {}),
            source_path_list=decode_json(row.get("source_path_list", "[]"), []),
            target_path=row.get("target_path", ""),
            task_action=row.get("task_action", ""),
            status=row.get("status", ""),
            default_mode=row.get("default_mode", "cover"),
            created_at=parse_ts("created_at"),
            started_at=parse_ts("started_at"),
            completed_at=parse_ts("completed_at"),
            created_by=row.get("created_by", ""),
            target_task_id=row.get("target_task_id", 0),
            is_source_node=row.get("is_source_node", False),
            is_target_node=row.get("is_target_node", False)
        )

    def to_dict(self) -> dict:
        """Serialise to a plain dict (datetimes as ISO strings, JSON fields raw)."""
        return {
            "task_id": self.task_id,
            "source_node": self.source_node,
            "target_node": self.target_node,
            "source_path_list": self.source_path_list,
            "target_path": self.target_path,
            "task_action": self.task_action,
            "status": self.status,
            "default_mode": self.default_mode,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "completed_at": self.completed_at.isoformat() if self.completed_at else None,
            "created_by": self.created_by,
            "target_task_id": self.target_task_id,
            "is_source_node": self.is_source_node,
            "is_target_node": self.is_target_node
        }
|
||||
|
||||
|
||||
@dataclass
class FileTransfer:
    """One file (or directory) inside a task — maps to ``file_transfers``."""
    transfer_id: Optional[int] = None
    task_id: int = 0
    src_file: str = ""
    dst_file: str = ""
    file_size: int = 0
    is_dir: int = 0
    status: str = ""  # pending/running/completed/failed
    progress: int = 0  # 0-100
    message: str = ""
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    @classmethod
    def from_dict(cls, row: dict) -> 'FileTransfer':
        """Build a FileTransfer from a DB row (ISO datetime strings)."""
        def parse_ts(key):
            raw = row.get(key, "")
            return datetime.fromisoformat(raw) if raw else None

        return cls(
            transfer_id=row.get("transfer_id", None),
            task_id=row.get("task_id", 0),
            src_file=row.get("src_file", ""),
            dst_file=row.get("dst_file", ""),
            file_size=row.get("file_size", 0),
            is_dir=row.get("is_dir", 0),
            status=row.get("status", ""),
            progress=row.get("progress", 0),
            message=row.get("message", ""),
            created_at=parse_ts("created_at"),
            started_at=parse_ts("started_at"),
            completed_at=parse_ts("completed_at")
        )

    def to_dict(self) -> dict:
        """Serialise to a plain dict (datetimes as ISO strings)."""
        def fmt(ts):
            return ts.isoformat() if ts else None

        return {
            "transfer_id": self.transfer_id,
            "task_id": self.task_id,
            "src_file": self.src_file,
            "dst_file": self.dst_file,
            "file_size": self.file_size,
            "is_dir": self.is_dir,
            "status": self.status,
            "progress": self.progress,
            "message": self.message,
            "created_at": fmt(self.created_at),
            "started_at": fmt(self.started_at),
            "completed_at": fmt(self.completed_at)
        }
|
||||
|
||||
|
||||
# SQLite 操作类
|
||||
class FileTransferDB:
|
||||
_DB_FILE = public.get_panel_path() + "/data/db/node_file_transfer.db"
|
||||
_DB_INIT_FILE = os.path.dirname(__file__) + "/file_transfer.sql"
|
||||
|
||||
def __init__(self):
|
||||
sql = db.Sql()
|
||||
sql._Sql__DB_FILE = self._DB_FILE
|
||||
self.db = sql
|
||||
|
||||
def init_db(self):
|
||||
sql_data = public.readFile(self._DB_INIT_FILE)
|
||||
if not os.path.exists(self._DB_FILE) or os.path.getsize(self._DB_FILE) == 0:
|
||||
public.writeFile(self._DB_FILE, "")
|
||||
import sqlite3
|
||||
conn = sqlite3.connect(self._DB_FILE)
|
||||
c = conn.cursor()
|
||||
c.executescript(sql_data)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
def close(self):
|
||||
self.db.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_trackback):
|
||||
self.close()
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
def create_task(self, task: FileTransferTask) -> str:
|
||||
task_data = task.to_dict()
|
||||
task_data.pop('task_id', None)
|
||||
task_data.pop('created_at', None)
|
||||
task_data["source_node"] = json.dumps(task_data["source_node"])
|
||||
task_data["target_node"] = json.dumps(task_data["target_node"])
|
||||
task_data["source_path_list"] = json.dumps(task_data["source_path_list"])
|
||||
try:
|
||||
err = self.db.table("transfer_tasks").insert(task_data)
|
||||
if isinstance(err, str):
|
||||
return err
|
||||
elif isinstance(err, int):
|
||||
task.task_id = err
|
||||
return ""
|
||||
except Exception as e:
|
||||
return f"Database operation error: {str(e)}"
|
||||
|
||||
def update_task(self, task: FileTransferTask) -> str:
|
||||
task_data = task.to_dict()
|
||||
task_data.pop('created_at', None)
|
||||
task_data["source_node"] = json.dumps(task_data["source_node"])
|
||||
task_data["target_node"] = json.dumps(task_data["target_node"])
|
||||
task_data["source_path_list"] = json.dumps(task_data["source_path_list"])
|
||||
if not task.task_id:
|
||||
return "task_id is required"
|
||||
try:
|
||||
err = self.db.table("transfer_tasks").where("task_id = ?", task.task_id).update(task_data)
|
||||
if isinstance(err, str):
|
||||
return err
|
||||
return ""
|
||||
except Exception as e:
|
||||
return f"Database operation error: {str(e)}"
|
||||
|
||||
def get_task(self, task_id: int) -> Tuple[Optional[dict], str]:
|
||||
result = self.db.table("transfer_tasks").where("task_id = ?", task_id).find()
|
||||
if isinstance(result, str):
|
||||
return None, result
|
||||
if self.db.ERR_INFO:
|
||||
return None, self.db.ERR_INFO
|
||||
return result, ""
|
||||
|
||||
def get_last_task(self) -> Tuple[Optional[dict], str]:
|
||||
result = self.db.table("transfer_tasks").order("task_id DESC").limit(1).find()
|
||||
if isinstance(result, str):
|
||||
return None, result
|
||||
if self.db.ERR_INFO:
|
||||
return None, self.db.ERR_INFO
|
||||
return result, ""
|
||||
|
||||
def delete_task(self, task_id: int) -> str:
|
||||
result = self.db.table("transfer_tasks").where("task_id = ?", task_id).delete()
|
||||
if isinstance(result, str):
|
||||
return result
|
||||
return ""
|
||||
|
||||
def get_all_tasks(self, offset: int = 0, limit: int = 100) -> List[dict]:
|
||||
results = self.db.table("transfer_tasks").limit(limit, offset).select()
|
||||
if isinstance(results, list):
|
||||
return results
|
||||
return []
|
||||
|
||||
def count_tasks(self) -> int:
|
||||
return self.db.table("transfer_tasks").count()
|
||||
|
||||
def create_file_transfer(self, transfer: FileTransfer) -> str:
|
||||
transfer_data = transfer.to_dict()
|
||||
transfer_data.pop('transfer_id', None)
|
||||
transfer_data.pop('created_at', None)
|
||||
try:
|
||||
err = self.db.table("file_transfers").insert(transfer_data)
|
||||
if isinstance(err, str):
|
||||
return err
|
||||
return ""
|
||||
except Exception as e:
|
||||
return f"Database operation error: {str(e)}"
|
||||
|
||||
def update_file_transfer(self, transfer: FileTransfer) -> str:
|
||||
transfer_data = transfer.to_dict()
|
||||
if not transfer.transfer_id:
|
||||
return "transfer_id is required"
|
||||
try:
|
||||
err = self.db.table("file_transfers").where("transfer_id = ?", transfer.transfer_id).update(transfer_data)
|
||||
if isinstance(err, str):
|
||||
return err
|
||||
return ""
|
||||
except Exception as e:
|
||||
return f"Database operation error: {str(e)}"
|
||||
|
||||
def get_file_transfer(self, transfer_id: int) -> Optional[dict]:
|
||||
result = self.db.table("file_transfers").where("transfer_id = ?", transfer_id).find()
|
||||
if isinstance(result, str):
|
||||
return None
|
||||
if self.db.ERR_INFO:
|
||||
return None
|
||||
return result
|
||||
|
||||
def get_task_file_transfers(self, task_id: int) -> List[dict]:
    """All file_transfers rows belonging to one task; [] on error."""
    rows = self.db.table("file_transfers").where("task_id = ?", task_id).select()
    return rows if isinstance(rows, list) else []
|
||||
|
||||
def batch_create_file_transfers(self, transfers: List[FileTransfer]) -> str:
    """Bulk-insert file transfer records.

    Args:
        transfers: list of FileTransfer objects to persist.

    Returns:
        str: error message; "" on success (also when the list is empty).
    """
    if not transfers:
        return ""
    try:
        rows = []
        for item in transfers:
            row = item.to_dict()
            row.pop('transfer_id', None)  # assigned by the database
            row['created_at'] = datetime.now().isoformat()
            rows.append(row)
        res = self.db.table("file_transfers").batch_insert(rows)
        return res if isinstance(res, str) else ""
    except Exception as e:
        return f"Batch creation of file transfer records failed: {str(e)}"
|
||||
|
||||
# Fetch the most recent task together with all of its file-transfer rows.
def last_task_all_status(self) -> Tuple[Dict, str]:
    """Return {"task": ..., "file_list": ...} for the newest task.

    On error returns ({}, message); when no task exists, ({}, "").
    """
    last_task, err = self.get_last_task()
    if err:
        return {}, err
    if not last_task:
        return {}, ""
    task = FileTransferTask.from_dict(last_task)
    return {
        "task": task.to_dict(),
        "file_list": self.get_task_file_transfers(task.task_id),
    }, ""
|
||||
54
mod/project/node/dbutil/load_balancer.sql
Normal file
54
mod/project/node/dbutil/load_balancer.sql
Normal file
@@ -0,0 +1,54 @@
|
||||
-- load_sites: load-balancer sites
CREATE TABLE IF NOT EXISTS `load_sites`
(
    `load_id`     INTEGER PRIMARY KEY AUTOINCREMENT,  -- load balancer id
    `name`        TEXT NOT NULL UNIQUE,               -- load balancer name
    `site_id`     INTEGER NOT NULL DEFAULT 0,         -- site id
    `site_name`   TEXT NOT NULL ,                     -- site name (the site's primary domain)
    `site_type`   TEXT NOT NULL DEFAULT 'http',       -- http, tcp (http: HTTP load balancing; tcp: tcp/udp load balancing)
    `ps`          TEXT NOT NULL DEFAULT '',
    `http_config` TEXT NOT NULL DEFAULT '{"proxy_next_upstream":"error timeout http_500 http_502 http_503 http_504","http_alg":"sticky_cookie"}',
    `tcp_config`  TEXT NOT NULL DEFAULT '{"proxy_connect_timeout":8,"proxy_timeout":86400,"host":"127.0.0.1","port":80,"type":"tcp"}',
    `created_at`  TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- http_nodes
CREATE TABLE IF NOT EXISTS `http_nodes`
(
    `id`             INTEGER PRIMARY KEY AUTOINCREMENT,
    `load_id`        INTEGER NOT NULL DEFAULT 0,        -- load balancer id
    `node_id`        INTEGER NOT NULL DEFAULT 0,        -- node id
    `node_site_id`   INTEGER NOT NULL DEFAULT 0,        -- site id on the node
    `node_site_name` TEXT NOT NULL DEFAULT '',          -- site name on the node
    `port`           INTEGER NOT NULL DEFAULT 0,        -- port
    `location`       TEXT NOT NULL DEFAULT '/',         -- proxied route; defaults to the root route '/', the only route supported in the current version
    `path`           TEXT NOT NULL DEFAULT '/',         -- access-verification path
    `node_status`    TEXT NOT NULL DEFAULT 'online',    -- node status: online, backup, down
    `weight`         INTEGER NOT NULL DEFAULT 1,        -- weight
    `max_fail`       INTEGER NOT NULL DEFAULT 0,        -- maximum failure count
    `fail_timeout`   INTEGER NOT NULL DEFAULT 0,        -- failure recovery time
    `max_conns`      INTEGER NOT NULL DEFAULT 0,        -- maximum connections
    `ps`             TEXT NOT NULL DEFAULT '',
    `created_at`     TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- tcp_nodes
CREATE TABLE IF NOT EXISTS `tcp_nodes`
(
    `id`           INTEGER PRIMARY KEY AUTOINCREMENT,
    `load_id`      INTEGER NOT NULL DEFAULT 0,          -- load balancer id
    `node_id`      INTEGER NOT NULL DEFAULT 0,          -- node id
    `host`         TEXT NOT NULL,
    `port`         INTEGER NOT NULL DEFAULT 0,
    `node_status`  TEXT NOT NULL DEFAULT 'online',      -- node status: online, backup, down
    `weight`       INTEGER NOT NULL DEFAULT 1,
    `max_fail`     INTEGER NOT NULL DEFAULT 0,
    `fail_timeout` INTEGER NOT NULL DEFAULT 0,
    `ps`           TEXT NOT NULL DEFAULT '',
    `created_at`   TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS `load_sites_name` ON `load_sites` (`name`);
CREATE INDEX IF NOT EXISTS `load_sites_site_type` ON `load_sites` (`site_type`);
CREATE INDEX IF NOT EXISTS `http_nodes_load_id` ON `http_nodes` (`load_id`);
CREATE INDEX IF NOT EXISTS `tcp_nodes_load_id` ON `tcp_nodes` (`load_id`);
|
||||
449
mod/project/node/dbutil/load_db.py
Normal file
449
mod/project/node/dbutil/load_db.py
Normal file
@@ -0,0 +1,449 @@
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Tuple, Optional, List, Union
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
import db
|
||||
|
||||
if "/www/server/panel" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel")
|
||||
|
||||
|
||||
@dataclass
class LoadSite:
    """One load-balancer site (a row in the load_sites table).

    site_type selects which config applies: 'http' uses http_config,
    'tcp' uses tcp_config.
    """
    name: str
    site_name: str
    site_type: str
    ps: str = ''
    http_config: dict = field(default_factory=lambda: {
        "proxy_next_upstream": "error timeout http_500 http_502 http_503 http_504",
        "http_alg": "sticky_cookie",
        "proxy_cache_status": False,
        "cache_time": "1d",
        "cache_suffix": "css,js,jpg,jpeg,gif,png,webp,woff,eot,ttf,svg,ico,css.map,js.map",
    })
    tcp_config: dict = field(default_factory=lambda: {
        "proxy_connect_timeout": 8,
        "proxy_timeout": 86400,
        "host": "127.0.0.1",
        "port": 80,
        "type": "tcp"
    })
    created_at: int = 0
    load_id: int = 0
    site_id: int = 0

    @classmethod
    def bind_http_load(cls, data: dict) -> Tuple[Optional["LoadSite"], str]:
        """Validate *data* and build an http LoadSite.

        Returns (site, "") on success or (None, error_message).
        """
        check_msg = cls.base_check(data)
        if check_msg:
            return None, check_msg
        if not data.get('site_name', None):
            return None, 'site_name is required'
        if not public.is_domain(data['site_name']):
            return None, 'site_name is invalid'
        if not isinstance(data.get('http_config', None), dict):
            return None, 'http_config is required'
        http_config = data['http_config']
        # Compatibility with pre-caching rows: fill in the proxy-cache keys.
        if "proxy_cache_status" not in http_config:
            http_config["proxy_cache_status"] = False
            http_config["cache_time"] = "1d"
            http_config["cache_suffix"] = "css,js,jpg,jpeg,gif,png,webp,woff,eot,ttf,svg,ico,css.map,js.map"
        for k in ['proxy_next_upstream', 'http_alg', "proxy_cache_status", "cache_time", "cache_suffix"]:
            if k not in http_config:
                return None, 'http_config.{} is required'.format(k)
        for i in http_config['proxy_next_upstream'].split():
            if i not in ('error', 'timeout') and not re.match(r'^http_\d{3}$', i):
                return None, 'http_config.proxy_next_upstream is invalid'
        if http_config['http_alg'] not in ('sticky_cookie', 'round_robin', 'least_conn', 'ip_hash'):
            return None, 'http_config.http_alg is invalid'
        if not isinstance(http_config['proxy_cache_status'], bool):
            return None, 'http_config.proxy_cache_status is invalid'
        if not isinstance(http_config['cache_time'], str):
            return None, 'http_config.cache_time is invalid'
        if not re.match(r"^[0-9]+([smhd])$", http_config['cache_time']):
            return None, 'http_config.cache_time is invalid'
        # Normalise the comma-separated suffix list: strip whitespace,
        # drop empty entries, fall back to the default list when nothing
        # usable remains.
        cache_suffix_list = []
        for suffix in http_config['cache_suffix'].split(","):
            tmp_suffix = re.sub(r"\s", "", suffix)
            if not tmp_suffix:
                continue
            cache_suffix_list.append(tmp_suffix)
        real_cache_suffix = ",".join(cache_suffix_list)
        if not real_cache_suffix:
            real_cache_suffix = "css,js,jpg,jpeg,gif,png,webp,woff,eot,ttf,svg,ico,css.map,js.map"
        http_config['cache_suffix'] = real_cache_suffix

        l = LoadSite(data.get('name'), data.get('site_name'), 'http', data.get('ps', ''),
                     http_config=http_config,
                     created_at=data.get('created_at', 0), load_id=data.get('load_id', 0),
                     site_id=data.get('site_id', 0))
        return l, ""

    @classmethod
    def base_check(cls, data) -> str:
        """Shared validation of the load-balancer name; "" when valid."""
        if not data.get('name', None):
            return 'name is required'
        if not re.match(r'^[a-zA-Z0-9][a-zA-Z0-9_]+$', data['name']):
            return 'The name can only contain letters, numbers, underscores, and cannot start with numbers or underscores'
        if not len(data['name']) >= 3:
            return 'The length of the name cannot be less than 3 characters'
        return ""

    @classmethod
    def bind_tcp_load(cls, data: dict) -> Tuple[Optional["LoadSite"], str]:
        """Validate *data* and build a tcp/udp LoadSite.

        Returns (site, "") on success or (None, error_message).
        """
        check_msg = cls.base_check(data)
        if check_msg:
            return None, check_msg
        if not isinstance(data.get('tcp_config', None), dict):
            return None, 'tcp_config is required'
        tcp_config = data['tcp_config']
        for k in ['proxy_connect_timeout', 'proxy_timeout', 'host', 'port', 'type']:
            if not tcp_config.get(k):
                return None, 'tcp_config.{} is required'.format(k)
        if tcp_config['type'] not in ('tcp', 'udp'):
            return None, 'tcp_config.type is invalid'
        # BUG FIX: this check previously combined the two conditions with
        # `and`, so an out-of-range int port was accepted and a non-int
        # port raised TypeError on the chained comparison. Either fault
        # alone must reject the value.
        if not isinstance(tcp_config['port'], int) or not 1 <= tcp_config['port'] <= 65535:
            return None, 'tcp_config.port is invalid'
        if not public.check_ip(tcp_config['host']):
            return None, 'tcp_config.host is invalid'

        l = LoadSite(data.get('name'), data.get('site_name'), 'tcp', ps=data.get('ps', ''),
                     tcp_config=tcp_config,
                     created_at=data.get('created_at', 0), load_id=data.get('load_id', 0),
                     site_id=data.get('site_id', 0))
        return l, ""

    def to_dict(self) -> dict:
        """Serialise to the column layout of the load_sites table."""
        return {
            "name": self.name,
            "site_name": self.site_name,
            "site_type": self.site_type,
            "ps": self.ps,
            "http_config": self.http_config,
            "tcp_config": self.tcp_config,
            "created_at": self.created_at,
            "load_id": self.load_id,
            "site_id": self.site_id
        }
|
||||
|
||||
|
||||
@dataclass
class HttpNode:
    """One upstream server of an HTTP load balancer (table: http_nodes)."""
    node_id: int
    node_site_name: str
    port: int
    location: str = "/"
    path: str = "/"
    node_status: str = "online"  # online, backup, down
    weight: int = 1
    max_fail: int = 3
    fail_timeout: int = 600
    ps: str = ""
    created_at: int = 0
    node_site_id: int = 0
    id: int = 0
    load_id: int = 0

    @classmethod
    def bind(cls, data: dict) -> Tuple[Optional["HttpNode"], str]:
        """Validate *data* and build an HttpNode.

        Returns (node, "") on success or (None, error_message).
        """
        site_name = data.get('node_site_name', None)
        if not isinstance(site_name, str):
            return None, 'node_site_name is required'
        if not public.is_domain(site_name) and not public.check_ip(site_name):
            return None, 'node_site_name is invalid'
        port = data.get('port', None)
        if not isinstance(port, int):
            return None, 'port is required'
        if not 1 <= port <= 65535:
            return None, 'port is invalid'
        if not isinstance(data.get('node_id', None), int):
            return None, 'node_id is required'
        status = data.get('node_status', None)
        if not isinstance(status, str):
            return None, 'node_status is required'
        if status not in ('online', 'backup', 'down'):
            return None, 'node_status is invalid'

        node = cls(
            node_id=data.get('node_id'),
            node_site_name=site_name,
            port=port,
            location="/",  # only the root location is supported for now
            path=data.get('path', "/"),
            node_status=data.get('node_status', "online"),
            weight=data.get('weight', 1),
            max_fail=data.get('max_fail', 3),
            fail_timeout=data.get('fail_timeout', 600),
            ps=data.get('ps', ''),
            created_at=data.get('created_at', 0),
            node_site_id=data.get('node_site_id', 0),
            id=data.get('id', 0),
            load_id=data.get('load_id', 0),
        )
        return node, ""

    def to_dict(self) -> dict:
        """Serialise to the column layout of the http_nodes table."""
        return dict(
            node_id=self.node_id,
            node_site_name=self.node_site_name,
            port=self.port,
            location=self.location,
            path=self.path,
            node_status=self.node_status,
            weight=self.weight,
            max_fail=self.max_fail,
            fail_timeout=self.fail_timeout,
            ps=self.ps,
            created_at=self.created_at,
            node_site_id=self.node_site_id,
            id=self.id,
            load_id=self.load_id,
        )
|
||||
|
||||
|
||||
@dataclass
class TcpNode:
    """One upstream server of a tcp/udp load balancer (table: tcp_nodes)."""
    node_id: int
    host: str
    port: int
    id: int = 0
    load_id: int = 0
    node_status: str = "online"  # online, backup, down
    weight: int = 1
    max_fail: int = 3
    fail_timeout: int = 600
    ps: str = ""
    created_at: int = 0

    @classmethod
    def bind(cls, data: dict) -> Tuple[Optional["TcpNode"], str]:
        """Validate *data* and build a TcpNode.

        Returns (node, "") on success or (None, error_message).
        """
        status = data.get('node_status', None)
        if not isinstance(status, str):
            return None, 'node_status is required'
        if status not in ('online', 'backup', 'down'):
            return None, 'node_status is invalid'
        if not isinstance(data.get('host', None), str):
            return None, 'host is required'
        if not isinstance(data.get('node_id', None), int):
            return None, 'node_id is required'
        port = data.get('port', None)
        if not isinstance(port, int):
            return None, 'port is required'
        if not 1 <= port <= 65535:
            return None, 'port is invalid'
        node = cls(
            node_id=data.get('node_id'),
            host=data.get('host'),
            port=port,
            id=data.get('id', 0),
            load_id=data.get('load_id', 0),
            node_status=data.get('node_status', "online"),
            weight=data.get('weight', 1),
            max_fail=data.get('max_fail', 3),
            fail_timeout=data.get('fail_timeout', 600),
            ps=data.get('ps', ''),
            created_at=data.get('created_at', 0),
        )
        return node, ""

    def to_dict(self) -> dict:
        """Serialise to the column layout of the tcp_nodes table."""
        return dict(
            node_id=self.node_id,
            host=self.host,
            port=self.port,
            id=self.id,
            load_id=self.load_id,
            node_status=self.node_status,
            weight=self.weight,
            max_fail=self.max_fail,
            fail_timeout=self.fail_timeout,
            ps=self.ps,
            created_at=self.created_at,
        )
|
||||
|
||||
|
||||
class NodeDB:
    """sqlite access layer for the load-balancer database (load_sites,
    http_nodes, tcp_nodes).

    Error-handling convention: the db layer returns a string on failure,
    so callers check `isinstance(result, str)` and/or `self.db.ERR_INFO`.
    """
    # Database file lives alongside the other panel databases.
    _DB_FILE = public.get_panel_path() + "/data/db/node_load_balance.db"
    # Schema bootstrap script shipped next to this module.
    _DB_INIT_FILE = os.path.dirname(__file__) + "/load_balancer.sql"

    def __init__(self):
        # db.Sql is hard-wired to the panel's default database; redirect
        # this instance to the load-balancer db via the name-mangled
        # private attribute.
        sql = db.Sql()
        sql._Sql__DB_FILE = self._DB_FILE
        self.db = sql

    def init_db(self):
        """Create the database file and schema when absent or empty."""
        sql_data = public.readFile(self._DB_INIT_FILE)
        # NOTE(review): schema is only executed for a missing/empty file;
        # the DDL itself is idempotent (CREATE ... IF NOT EXISTS).
        if not os.path.exists(self._DB_FILE) or os.path.getsize(self._DB_FILE) == 0:
            public.writeFile(self._DB_FILE, "")
            import sqlite3
            conn = sqlite3.connect(self._DB_FILE)
            c = conn.cursor()
            c.executescript(sql_data)
            conn.commit()
            conn.close()

    def close(self):
        # Release the underlying sqlite connection held by the db layer.
        self.db.close()

    def __enter__(self):
        # Context-manager support: `with NodeDB() as ndb: ...`
        return self

    def __exit__(self, exc_type, exc_value, exc_trackback):
        # Always close on context exit, regardless of exceptions.
        self.close()

    def __del__(self):
        self.close()

    def update_load_key(self, load_id: int, load_data: dict) -> str:
        """Partial update of a load_sites row; returns "" or an error."""
        if not isinstance(load_id, int):
            return "load_id is required"
        if not isinstance(load_data, dict):
            return "load_data is required"
        err = self.db.table("load_sites").where("load_id = ?", load_id).update(load_data)
        if isinstance(err, str):
            return err
        return ""

    def name_exist(self, name: str) -> bool:
        # True when a load balancer with this unique name already exists.
        return self.db.table("load_sites").where("name = ?", name).count() > 0

    def load_site_name_exist(self, name: str) -> bool:
        # True when some load balancer is already bound to this site name.
        return self.db.table("load_sites").where("site_name = ?", name).count() > 0

    def load_id_exist(self, load_id: int) -> bool:
        return self.db.table("load_sites").where("load_id = ?", load_id).count() > 0

    def loads_count(self, site_type: str, query: str = "") -> int:
        """Count load_sites rows of one type; `query` filters on the ps remark.

        Any site_type other than "http" is treated as "tcp".
        """
        if site_type == "http":
            if not query:
                return self.db.table("load_sites").where("site_type = ?", "http").count()
            return self.db.table("load_sites").where(
                "site_type = ? AND ps like ?", ("http", "%" + query + "%")).count()
        else:
            if not query:
                return self.db.table("load_sites").where("site_type = ?", "tcp").count()
            return self.db.table("load_sites").where(
                "site_type = ? AND ps like ?", ("tcp", "%" + query + "%")).count()

    def loads_list(self, site_type: str, offset: int, limit: int, query: str = ""):
        """Paged listing of load_sites.

        site_type "all" ignores the type filter; anything other than
        "http" is treated as "tcp". `query` filters on the ps remark.
        """
        if site_type == "all":
            if query:
                return self.db.table("load_sites").where("ps like ?", "%" + query + "%").limit(limit, offset).select()
            return self.db.table("load_sites").limit(limit, offset).select()
        if site_type == "http":
            if not query:
                return self.db.table("load_sites").where("site_type = ?", "http").limit(limit, offset).select()
            return self.db.table("load_sites").where(
                "site_type = ? AND ps like ?", ("http", "%" + query + "%")).limit(limit, offset).select()
        else:
            if not query:
                return self.db.table("load_sites").where("site_type = ?", "tcp").limit(limit, offset).select()
            return self.db.table("load_sites").where(
                "site_type = ? AND ps like ?", ("tcp", "%" + query + "%")).limit(limit, offset).select()

    def create_load(self, site_type: str, load: LoadSite, nodes: List[Union[HttpNode, TcpNode]]) -> str:
        """Insert a load_sites row plus its upstream node rows.

        On success `load.load_id` is set to the new row id. Returns ""
        on success, otherwise an error message. NOTE(review): not
        transactional — a failure mid-way can leave partial node rows.
        """
        load_data = load.to_dict()
        # load_id / created_at are assigned by the database.
        load_data.pop('load_id')
        load_data.pop('created_at')
        # Config dicts are stored as JSON text columns.
        load_data["http_config"] = json.dumps(load.http_config)
        load_data["tcp_config"] = json.dumps(load.tcp_config)
        try:
            err = self.db.table("load_sites").insert(load_data)
            if isinstance(err, str):
                return err
            # A non-string result is the new row id.
            load.load_id = err

            for node in nodes:
                node_data = node.to_dict()
                node_data.pop('id')
                node_data.pop('created_at')
                node_data['load_id'] = load.load_id
                if site_type == "http" and isinstance(node, HttpNode):
                    err = self.db.table("http_nodes").insert(node_data)
                else:
                    err = self.db.table("tcp_nodes").insert(node_data)
                if isinstance(err, str):
                    return err
        except Exception as e:
            return "数据库操作错误:" + str(e)

        return ""

    def update_load(self, site_type: str, load: LoadSite, nodes: List[Union[HttpNode, TcpNode]]) -> str:
        """Update a load_sites row and reconcile its node rows.

        Existing nodes (matched by id) are updated, unknown ones are
        inserted, and previously-stored nodes absent from `nodes` are
        deleted. Returns "" on success, otherwise an error message.
        """
        load_data = load.to_dict()
        if not load.load_id:
            return "load_id is required"
        load_data.pop('created_at')
        load_data.pop('load_id')
        load_data["http_config"] = json.dumps(load.http_config)
        load_data["tcp_config"] = json.dumps(load.tcp_config)

        try:
            err = self.db.table("load_sites").where("load_id = ?", load.load_id).update(load_data)
            if isinstance(err, str):
                return err
        except Exception as e:
            return "数据库操作错误:" + str(e)

        # Index the currently stored nodes by id so we can diff against
        # the incoming list.
        old_nodes, err = self.get_nodes(load.load_id, site_type)
        if err:
            return err
        old_nodes_map = {}
        for old_node in old_nodes:
            old_nodes_map[old_node['id']] = old_node

        try:
            for node in nodes:
                node_data = node.to_dict()
                node_data.pop('id')
                node_data.pop('created_at')
                node_data['load_id'] = load.load_id
                if node.id in old_nodes_map:
                    # Known node: update in place and mark as seen.
                    if site_type == "http" and isinstance(node, HttpNode):
                        err = self.db.table("http_nodes").where("id = ?", node.id).update(node_data)
                    else:
                        err = self.db.table("tcp_nodes").where("id = ?", node.id).update(node_data)
                    if isinstance(err, str):
                        return err
                    old_nodes_map.pop(node.id)
                else:
                    # New node: insert.
                    if site_type == "http" and isinstance(node, HttpNode):
                        err = self.db.table("http_nodes").insert(node_data)
                    else:
                        err = self.db.table("tcp_nodes").insert(node_data)
                    if isinstance(err, str):
                        return err
            # Whatever is left in the map no longer exists upstream: delete.
            for node_id in old_nodes_map:
                if site_type == "http":
                    err = self.db.table("http_nodes").where("id = ?", node_id).delete()
                else:
                    err = self.db.table("tcp_nodes").where("id = ?", node_id).delete()
                if isinstance(err, str):
                    return err
        except Exception as e:
            return "数据库操作错误:" + str(e)
        return ""

    def get_nodes(self, load_id: int, site_type: str) -> Tuple[List[dict], str]:
        """All node rows of one load balancer; returns (rows, error)."""
        if site_type == "http":
            nodes: List[dict] = self.db.table("http_nodes").where("load_id = ?", load_id).select()
        else:
            nodes: List[dict] = self.db.table("tcp_nodes").where("load_id = ?", load_id).select()
        if isinstance(nodes, str):
            return [], nodes
        if not nodes and self.db.ERR_INFO:
            return [], self.db.ERR_INFO
        return nodes, ""

    def get_load(self, load_id: int) -> Tuple[Optional[dict], str]:
        """One load_sites row; returns (row, "") or (None, error)."""
        load_data = self.db.table("load_sites").where("load_id = ?", load_id).find()
        if isinstance(load_data, str):
            return None, load_data
        if self.db.ERR_INFO:
            return None, self.db.ERR_INFO
        if len(load_data) == 0:
            return None, "未查询到该负载配置"
        return load_data, ""

    def delete(self, load_id: int) -> str:
        """Delete a load balancer and its node rows; "" when done/absent."""
        load_data = self.db.table("load_sites").where("load_id = ?", load_id).find()
        if isinstance(load_data, str):
            return load_data
        if self.db.ERR_INFO:
            return self.db.ERR_INFO
        if len(load_data) == 0:
            # Nothing to delete — treated as success.
            return ""

        # Remove the node rows of the matching type first, then the site.
        if load_data["site_type"] == "http":
            err = self.db.table("http_nodes").where("load_id = ?", load_id).delete()
        else:
            err = self.db.table("tcp_nodes").where("load_id = ?", load_id).delete()
        if isinstance(err, str):
            return err
        err = self.db.table("load_sites").where("load_id = ?", load_id).delete()
        if isinstance(err, str):
            return err
        return ""
|
||||
28
mod/project/node/dbutil/node.sql
Normal file
28
mod/project/node/dbutil/node.sql
Normal file
@@ -0,0 +1,28 @@
|
||||
CREATE TABLE IF NOT EXISTS `node`
(
    `id`          INTEGER PRIMARY KEY AUTOINCREMENT,
    `address`     VARCHAR,                -- node address, e.g. https://xxx:xx/
    `category_id` INTEGER,                -- category
    `remarks`     VARCHAR,                -- node name
    `api_key`     VARCHAR,                -- api key
    `create_time` INTEGER DEFAULT (0),    -- creation time
    `server_ip`   TEXT,                   -- server ip
    `status`      INTEGER,                -- 0: offline 1: online
    `error`       TEXT DEFAULT '{}',
    `error_num`   INTEGER DEFAULT 0,
    `app_key`     TEXT,                   -- app key
    `ssh_conf`    TEXT NOT NULL DEFAULT '{}',
    `ssh_test`    INTEGER DEFAULT 0,      -- whether the ssh key test has been run, 0: not tested 1: tested
    `lpver`       TEXT DEFAULT ''         -- 1panel version; when the target panel is 1panel, records whether it is v1 or v2
);

CREATE TABLE IF NOT EXISTS `category`
(
    `id`          INTEGER PRIMARY KEY AUTOINCREMENT,
    `name`        VARCHAR,
    `create_time` INTEGER DEFAULT (0)
);

-- Seed the built-in local node exactly once.
INSERT INTO `node` (app_key, api_key, remarks, server_ip)
SELECT 'local', 'local', 'Local node', '127.0.0.1'
WHERE NOT EXISTS (SELECT 1 FROM `node` WHERE app_key = 'local' AND api_key = 'local');
|
||||
462
mod/project/node/dbutil/node_db.py
Normal file
462
mod/project/node/dbutil/node_db.py
Normal file
@@ -0,0 +1,462 @@
|
||||
import base64
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
import time
|
||||
import sys
|
||||
from urllib.parse import urlparse
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Tuple, Optional, List, Union, Dict
|
||||
|
||||
if "/www/server/panel/class" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel/class")
|
||||
|
||||
import public
|
||||
import db
|
||||
|
||||
if "/www/server/panel" not in sys.path:
|
||||
sys.path.insert(0, "/www/server/panel")
|
||||
|
||||
|
||||
@dataclass
class NodeAPPKey:
    """Decoded node app-key quadruple, serialisable back to base64."""
    origin: str
    request_token: str
    app_key: str
    app_token: str

    def to_string(self) -> str:
        """Pack the four fields as 'a|b|c|d' and return the base64 form."""
        joined = "|".join((self.origin, self.request_token, self.app_key, self.app_token))
        return base64.b64encode(joined.encode()).decode("utf-8")
|
||||
|
||||
|
||||
@dataclass
class Node:
    """A managed panel node (row in the `node` table)."""
    remarks: str                    # display name, must be unique
    id: int = 0
    address: str = ""               # panel URL, or the SSH host for ssh-only nodes
    category_id: int = 0
    api_key: str = ""
    create_time: int = 0
    server_ip: str = ""
    status: int = 1                 # 0: offline, 1: online
    error: dict = field(default_factory=dict)
    error_num: int = 0
    app_key: str = ""
    ssh_conf: dict = field(default_factory=dict)
    lpver: str = ""                 # 1panel version marker (v1/v2) where applicable

    @classmethod
    def from_dict(cls, data: dict) -> Tuple[Optional["Node"], str]:
        """Validate *data* and build a Node; returns (node, '') or (None, err).

        At least one of api_key / app_key / ssh_conf must be supplied.
        Mutates *data* in place (strips remarks, may rewrite address).
        """
        if not isinstance(data.get('remarks', None), str):
            return None, 'remarks is required'
        if not data["remarks"].strip():
            return None, 'remarks is required'
        data["remarks"] = data["remarks"].strip()

        api_key = data.get('api_key', '')
        app_key = data.get('app_key', '')
        ssh_conf: dict = data.get('ssh_conf', {})
        if not api_key and not app_key and not ssh_conf:
            return None, 'api_key or app_key or ssh_conf is required'

        if app_key:
            # app_key embeds the panel origin and overrides any given address.
            app = cls.parse_app_key(app_key)
            if not app:
                return None, 'App_key format error'
            data["address"] = app.origin
            url = urlparse(data["address"], allow_fragments=False)
            if not url.scheme or not url.netloc:
                return None, 'address is invalid'

        if api_key:
            if not isinstance(data.get('address', None), str):
                return None, 'address is required'
            url = urlparse(data["address"], allow_fragments=False)
            if not url.scheme or not url.netloc:
                return None, 'address is invalid'

        if ssh_conf:
            for key in ("host", "port"):
                if key not in ssh_conf:
                    return None, 'ssh_conf is invalid'
            # Fill optional SSH fields with safe defaults.
            ssh_conf.setdefault("username", "root")
            ssh_conf.setdefault("password", "")
            ssh_conf.setdefault("pkey", "")
            ssh_conf.setdefault("pkey_passwd", "")

        if ssh_conf and not data.get("address", None):
            # SSH-only nodes use the SSH host as their address.
            data["address"] = ssh_conf["host"]

        n = Node(
            data["remarks"], id=data.get('id', 0), address=data.get("address"), category_id=int(data.get('category_id', 0)),
            api_key=api_key, create_time=data.get('create_time', 0), server_ip=data.get('server_ip', ''),
            status=data.get('status', 1), error=data.get('error', {}), error_num=data.get('error_num', 0),
            app_key=app_key, ssh_conf=ssh_conf, lpver=data.get('lpver', '')
        )
        return n, ''

    def to_dict(self) -> dict:
        """Serialise to the column layout of the node table."""
        return {
            "remarks": self.remarks,
            "id": self.id,
            "address": self.address,
            "category_id": self.category_id,
            "api_key": self.api_key,
            "create_time": self.create_time,
            "server_ip": self.server_ip,
            "status": self.status,
            "error": self.error,
            "error_num": self.error_num,
            "app_key": self.app_key,
            "ssh_conf": self.ssh_conf,
            "lpver": self.lpver
        }

    def parse_server_ip(self):
        """Resolve the node's address to an IP string; "" on failure."""
        import socket
        from urllib.parse import urlparse
        if not self.address.startswith("http"):
            host = self.address  # ssh-only nodes store the bare host here
        else:
            host = urlparse(self.address).hostname
        if isinstance(host, str) and public.check_ip(host):
            return host
        try:
            return socket.gethostbyname(host)
        except socket.gaierror as e:
            public.print_log(f"Error: {e}")
            return ""

    @staticmethod
    def parse_app_key(app_key: str) -> Optional["NodeAPPKey"]:
        """Decode a base64 'origin|request_token|app_key|app_token' blob.

        Returns None for anything that does not decode/split cleanly.
        """
        try:
            data = base64.b64decode(app_key).decode("utf-8")
            origin, request_token, app_key, app_token = data.split("|")
            # Origins like scheme://host:port split into three colon-separated
            # pieces; anything beyond that is truncated.
            origin_arr = origin.split(":")
            if len(origin_arr) > 3:
                origin = ":".join(origin_arr[:3])
            return NodeAPPKey(origin, request_token, app_key, app_token)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            return None
|
||||
|
||||
|
||||
class ServerNodeDB:
|
||||
_DB_FILE = public.get_panel_path() + "/data/db/node.db"
|
||||
_DB_INIT_FILE = os.path.dirname(__file__) + "/node.sql"
|
||||
|
||||
def __init__(self):
    # db.Sql is hard-wired to the panel's default database file; redirect
    # this instance to the node database via the name-mangled private
    # attribute of the Sql class.
    sql = db.Sql()
    sql._Sql__DB_FILE = self._DB_FILE
    self.db = sql
|
||||
|
||||
def init_db(self):
    """Create/upgrade the node database schema.

    Runs the bundled node.sql script (idempotent CREATE ... IF NOT
    EXISTS statements plus the local-node seed row), then applies the
    in-place migration that adds the ssh_test column to old databases.
    """
    sql_data = public.readFile(self._DB_INIT_FILE)
    import sqlite3
    conn = sqlite3.connect(self._DB_FILE)
    try:
        cur = conn.cursor()
        cur.executescript(sql_data)
        cur.execute("PRAGMA table_info(node)")
        existing_cols = {row[1] for row in cur.fetchall()}
        # Migration: databases created before the ssh_test flag existed.
        if "ssh_test" not in existing_cols:
            cur.execute("ALTER TABLE node ADD COLUMN ssh_test INTEGER DEFAULT (0)")
        conn.commit()
    finally:
        # Close the connection even when the script/migration fails.
        conn.close()
|
||||
|
||||
def close(self):
    # Release the underlying sqlite connection held by the db layer.
    self.db.close()
|
||||
|
||||
def __enter__(self):
    # Context-manager support: `with ServerNodeDB() as ndb: ...`
    return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_trackback):
    # Always close on context exit, regardless of exceptions.
    self.close()
|
||||
|
||||
def __del__(self):
    """Best-effort cleanup when the object is garbage-collected."""
    # Guard: if __init__ raised before self.db was assigned, calling
    # self.close() here would raise AttributeError inside the finalizer.
    if getattr(self, "db", None) is not None:
        self.close()
|
||||
|
||||
def is_local_node(self, node_id: int):
    # The local (built-in) node is the seeded row whose app_key and
    # api_key are both the literal string 'local' (see node.sql).
    return self.db.table('node').where("id=? AND app_key = 'local' AND api_key = 'local'", (node_id,)).count() > 0
|
||||
|
||||
def get_local_node(self):
    """Return the seeded local-node row, or a synthetic default dict."""
    row = self.db.table('node').where("app_key = 'local' AND api_key = 'local'", ()).find()
    if isinstance(row, dict):
        return row
    # Fallback mirrors the node.sql seed row so callers always get a dict.
    fallback = {
        "id": 0,
        "address": "",
        "category_id": 0,
        "remarks": "Local node",
        "api_key": "local",
        "create_time": time.strftime('%Y-%m-%d %H:%M:%S'),
        "server_ip": "127.0.0.1",
        "status": 0,
        "error": 0,
        "error_num": 0,
        "app_key": "local",
        "ssh_conf": "{}",
        "lpver": "",
    }
    return fallback
|
||||
|
||||
def create_node(self, node: Node) -> str:
    """Insert a new node row; sets node.id on success.

    Returns "" on success, otherwise an error message.
    """
    payload = node.to_dict()
    payload.pop("id")      # assigned by sqlite
    payload["create_time"] = time.strftime('%Y-%m-%d %H:%M:%S')
    payload.pop("error")   # column default '{}' applies
    payload["status"] = 1
    payload["ssh_conf"] = json.dumps(payload["ssh_conf"])

    if node.category_id > 0 and not self.category_exites(node.category_id):
        return "Classification does not exist"

    if self.db.table('node').where('remarks=?', (node.remarks,)).count() > 0:
        return "The node with this name already exists"
    try:
        new_id = self.db.table('node').insert(payload)
    except Exception as e:
        return str(e)
    if isinstance(new_id, int):
        node.id = new_id
        return ""
    # The db layer returns a string (or something else) on failure.
    return new_id if isinstance(new_id, str) else str(new_id)
|
||||
|
||||
def update_node(self, node: Node, with_out_fields: Optional[List[str]] = None) -> str:
    """Update an existing node row.

    Args:
        node: carries the new values; node.id selects the row.
        with_out_fields: optional list of column names to exclude from
            the update.

    Returns "" on success, otherwise an error message.

    BUG FIX: the default for with_out_fields used to be the `Node` class
    object itself, which only worked because isinstance(Node, list) is
    False; None is the intended sentinel and behaves identically for
    callers that omit the argument.
    """
    if self.is_local_node(node.id):
        return "Cannot modify local nodes"
    if not self.node_id_exites(node.id):
        return "Node does not exist"
    node_data = node.to_dict()
    # id selects the row; create_time is immutable after insert.
    node_data.pop("create_time")
    node_data.pop("id")
    node_data["ssh_conf"] = json.dumps(node_data["ssh_conf"])
    node_data["error"] = json.dumps(node_data["error"])
    if with_out_fields and isinstance(with_out_fields, list):
        for f in with_out_fields:
            node_data.pop(f, None)

    # An orphaned category silently resets to "uncategorised".
    if node.category_id > 0 and not self.category_exites(node.category_id):
        node.category_id = 0
        node_data["category_id"] = 0
    try:
        res = self.db.table('node').where('id=?', (node.id,)).update(node_data)
        if isinstance(res, str):
            return res
    except Exception as e:
        return str(e)

    return ""
|
||||
|
||||
def set_node_ssh_conf(self, node_id: int, ssh_conf: dict, ssh_test: int = 0):
    """Persist the serialised SSH config for a node; optionally flag the
    node as having passed an SSH connectivity test."""
    fields = {"ssh_conf": json.dumps(ssh_conf)}
    if ssh_test:
        fields["ssh_test"] = 1
    self.db.table('node').where('id=?', (node_id,)).update(fields)
|
||||
|
||||
def remove_node_ssh_conf(self, node_id: int):
    """Reset a node's stored SSH config to an empty JSON object."""
    empty_conf = {"ssh_conf": "{}"}
    self.db.table('node').where('id=?', (node_id,)).update(empty_conf)
|
||||
|
||||
def delete_node(self, node_id: int) -> str:
    """Delete the node row with *node_id*.

    The local node can never be deleted. Returns "" on success,
    otherwise an error message.
    """
    if self.is_local_node(node_id):
        return "Cannot delete local node"
    if not self.node_id_exites(node_id):
        return "Node does not exist"
    try:
        result = self.db.table('node').where('id=?', (node_id,)).delete()
    except Exception as e:
        return str(e)
    # The db layer reports failures as strings.
    return result if isinstance(result, str) else ""
|
||||
|
||||
def find_node(self, api_key: str = "", app_key: str = "") -> Optional[dict]:
    """Look up a single node row by its api_key and/or app_key.

    Returns the row dict, or None when no key is supplied or no row matches.

    BUG FIX: the query used a single `api_key=?` placeholder but bound two
    parameters (api_key, app_key), a placeholder/argument mismatch that
    silently ignored `app_key`. Build the condition from whichever keys
    were actually supplied.
    """
    conditions = []
    args = []
    if api_key:
        conditions.append('api_key=?')
        args.append(api_key)
    if app_key:
        conditions.append('app_key=?')
        args.append(app_key)
    if not conditions:
        # No lookup key given — nothing sensible to match.
        return None
    res = self.db.table('node').where(' and '.join(conditions), tuple(args)).find()
    return res if isinstance(res, dict) else None
|
||||
|
||||
def get_node_list(self,
                  search: str = "",
                  category_id: int = -1,
                  offset: int = 0,
                  limit: int = 10) -> Tuple[List[Dict], str]:
    """Return one page of node rows plus an error message ("" on success).

    *search* filters on the remarks column (SQL LIKE); *category_id* >= 0
    filters on classification, while -1 means "all classifications".
    """
    try:
        clauses, params = [], []
        if search:
            clauses.append("remarks like ?")
            params.append('%{}%'.format(search))
        if category_id >= 0:
            clauses.append("category_id=?")
            params.append(category_id)

        query = self.db.table('node')
        if clauses:
            query = query.where(" and ".join(clauses), params)
        rows = query.order('id desc').limit(limit, offset).select()

        if self.db.ERR_INFO:
            return [], self.db.ERR_INFO
        if not isinstance(rows, list):
            # The db layer returns an error description instead of rows.
            return [], str(rows)
        return rows, ""
    except Exception as e:
        return [], str(e)
|
||||
|
||||
def query_node_list(self, *args) -> List[Dict]:
    """Run an arbitrary where() query against the node table."""
    node_table = self.db.table('node')
    return node_table.where(*args).select()
|
||||
|
||||
def category_exites(self, category_id: int) -> bool:
    """True if a category row with *category_id* exists."""
    matches = self.db.table('category').where('id=?', (category_id,)).count()
    return matches > 0
|
||||
|
||||
def node_id_exites(self, node_id: int) -> bool:
    """True if a node row with *node_id* exists."""
    matches = self.db.table('node').where('id=?', (node_id,)).count()
    return matches > 0
|
||||
|
||||
def category_map(self) -> Dict:
    """Map category id -> name, including the implicit default group (0)."""
    mapping = {0: "Default classification"}
    rows = self.db.table('category').field('id,name').select()
    if isinstance(rows, list):
        mapping.update({row["id"]: row["name"] for row in rows})
    return mapping
|
||||
|
||||
def node_map(self) -> Dict:
    """Map node id -> remarks (display name)."""
    rows = self.db.table('node').field('id,remarks').select()
    if not isinstance(rows, list):
        # The db layer returned an error value instead of rows.
        return {}
    return {row["id"]: row["remarks"] for row in rows}
|
||||
|
||||
def create_category(self, name: str) -> str:
    """Create a node classification; returns "" or an error message."""
    # Classification names must be unique.
    if self.db.table('category').where('name=?', (name,)).count() > 0:
        return "The classification for this name already exists"
    row = {"name": name, "create_time": time.strftime('%Y-%m-%d %H:%M:%S')}
    try:
        res = self.db.table('category').insert(row)
    except Exception as e:
        return str(e)
    return res if isinstance(res, str) else ""
|
||||
|
||||
def delete_category(self, category_id: int):
    """Delete a classification; its nodes fall back to the default group."""
    node_table = self.db.table('node')
    node_table.where('category_id=?', (category_id,)).update({"category_id": 0})
    self.db.table('category').where('id=?', (category_id,)).delete()
|
||||
|
||||
def bind_category_to_node(self, node_id: List[int], category_id: int) -> str:
    """Assign *category_id* to every node in *node_id*.

    Returns "" on success, otherwise an error message.
    """
    if not node_id:
        return "Node ID cannot be empty"
    if category_id > 0 and not self.category_exites(category_id):
        return "Classification does not exist"

    placeholders = ",".join("?" * len(node_id))
    try:
        err = self.db.table('node').where(
            'id in ({})'.format(placeholders), tuple(node_id)
        ).update({"category_id": category_id})
    except Exception as e:
        return str(e)
    return err if isinstance(err, str) else ""
|
||||
|
||||
def node_count(self, search, category_id) -> int:
    """Count nodes matching the same filters as get_node_list.

    Args:
        search: remarks substring filter ("" disables it).
        category_id: classification filter; negative means all.

    Returns:
        The matching row count, or 0 on any query error.
    """
    try:
        clauses, params = [], []
        if search:
            clauses.append("remarks like ?")
            params.append('%{}%'.format(search))
        if category_id >= 0:
            clauses.append("category_id=?")
            params.append(category_id)
        query = self.db.table('node')
        if clauses:
            query = query.where(" and ".join(clauses), params)
        return query.order('id desc').count()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return 0
|
||||
|
||||
def get_node_by_id(self, node_id: int) -> Optional[Dict]:
    """Fetch a single node row by id.

    Returns:
        The row dict, or None when the row is missing or the query errors.
    """
    try:
        row = self.db.table('node').where('id=?', (node_id,)).find()
        if self.db.ERR_INFO:
            return None
        return row if isinstance(row, dict) else None
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return None
|
||||
|
||||
class ServerMonitorRepo:
    """File-backed cache of per-server monitoring status.

    Each server's latest status snapshot is stored as JSON under
    ``_REPO_DIR``; ``wait_reboot_<ip>`` marker files record reboots that
    are still pending.
    """

    # Cache directory lives under the panel data path.
    _REPO_DIR = public.get_panel_path() + "/data/mod_node_status_cache/"

    def __init__(self):
        # exist_ok avoids a FileExistsError race when two processes
        # create the cache directory at the same time.
        os.makedirs(self._REPO_DIR, exist_ok=True)

    def set_wait_reboot(self, server_ip: str, start: bool):
        """Create (start=True) or clear the reboot-wait marker for a server."""
        wait_file = os.path.join(self._REPO_DIR, "wait_reboot_{}".format(server_ip))
        if start:
            return public.writeFile(wait_file, "wait_reboot")
        if os.path.exists(wait_file):
            os.remove(wait_file)

    def is_reboot_wait(self, server_ip: str):
        """True while a reboot-wait marker exists and is still fresh.

        A wait older than ~10 minutes (610 seconds) is treated as timed out.
        """
        wait_file = os.path.join(self._REPO_DIR, "wait_reboot_{}".format(server_ip))
        return os.path.exists(wait_file) and os.path.getmtime(wait_file) > time.time() - 610

    @staticmethod
    def get_local_server_status():
        """Return the local panel's own status via the system module."""
        from system import system
        return system().GetNetWork(None)

    def get_server_status(self, server_id: int) -> Optional[Dict]:
        """Return the cached status snapshot for *server_id*, or None.

        Entries older than 5 minutes are deleted and treated as missing.
        """
        cache_file = os.path.join(self._REPO_DIR, "server_{}.json".format(server_id))
        if not os.path.exists(cache_file):
            return None

        mtime = os.path.getmtime(cache_file)
        if time.time() - mtime > 60 * 5:
            os.remove(cache_file)
            return None
        try:
            data = public.readFile(cache_file)
            if isinstance(data, str):
                return json.loads(data)
        except Exception:
            # Narrowed from a bare `except:`; a corrupt cache file simply
            # reads as a miss.
            return None
        # readFile returned a non-string (e.g. False on failure): cache miss.
        return None

    def save_server_status(self, server_id: int, data: Dict) -> str:
        """Write the status snapshot for *server_id*; "" or error message."""
        cache_file = os.path.join(self._REPO_DIR, "server_{}.json".format(server_id))
        try:
            public.writeFile(cache_file, json.dumps(data))
            return ""
        except Exception as e:
            return str(e)

    def remove_cache(self, server_id: int):
        """Drop the cached status snapshot for *server_id*, if any."""
        cache_file = os.path.join(self._REPO_DIR, "server_{}.json".format(server_id))
        if os.path.exists(cache_file):
            os.remove(cache_file)
|
||||
1149
mod/project/node/dbutil/node_task_flow.py
Normal file
1149
mod/project/node/dbutil/node_task_flow.py
Normal file
File diff suppressed because it is too large
Load Diff
144
mod/project/node/dbutil/node_task_flow.sql
Normal file
144
mod/project/node/dbutil/node_task_flow.sql
Normal file
@@ -0,0 +1,144 @@
|
||||
-- Scripts table: reusable script bodies referenced by command tasks.
CREATE TABLE IF NOT EXISTS scripts
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL CHECK (length(name) <= 255),
    script_type TEXT NOT NULL CHECK (length(script_type) <= 255),
    content TEXT NOT NULL,
    description TEXT,
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

-- Task flows: one row per multi-step flow run across a set of servers.
CREATE TABLE IF NOT EXISTS flows
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    server_ids TEXT NOT NULL, -- list of server ids (serialised)
    step_count INTEGER NOT NULL,
    strategy TEXT NOT NULL, -- per-task handling strategy, JSON field
    status TEXT NOT NULL, -- overall status: waiting, running, complete, error
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

-- Command tasks: one script execution step inside a flow.
CREATE TABLE IF NOT EXISTS command_tasks
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    flow_id INTEGER NOT NULL,
    name TEXT NOT NULL CHECK (length(name) <= 255),
    step_index INTEGER NOT NULL,
    script_id INTEGER NOT NULL,
    script_content TEXT NOT NULL,
    script_type TEXT NOT NULL CHECK (length(script_type) <= 255),
    status INTEGER NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3)), -- 0: waiting, 1: running, 2: success, 3: failed
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

-- Command task logs: one row per (task, target server) execution.
CREATE TABLE IF NOT EXISTS command_logs
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    command_task_id INTEGER NOT NULL,
    server_id INTEGER NOT NULL,
    ssh_host TEXT NOT NULL,
    status INTEGER NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3, 4)), -- 0: waiting, 1: running, 2: success, 3: failed, 4: exception
    log_name TEXT NOT NULL CHECK (length(log_name) <= 255)
);

-- Transfer tasks: one file-transfer step inside a flow.
CREATE TABLE IF NOT EXISTS transfer_tasks
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL CHECK (length(name) <= 255),
    flow_id INTEGER NOT NULL, -- 0 when this machine is not the data-source node
    step_index INTEGER NOT NULL,
    src_node TEXT NOT NULL, -- data-source node, JSON field
    src_node_task_id INTEGER NOT NULL, -- 0 when this machine is the source node, otherwise the transfer_tasks.id on the target machine
    dst_nodes TEXT NOT NULL, -- target nodes (multiple), JSON field
    message TEXT NOT NULL DEFAULT '', -- connection error message for the target nodes
    path_list TEXT NOT NULL DEFAULT '[]', -- paths on the source node, e.g. [{"path":"/www/wwwroots", "is_dir":true}]
    status INTEGER NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3)), -- 0: waiting, 1: running, 2: success, 3: failed
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

-- Transfer file list: individual files resolved from a transfer task.
CREATE TABLE IF NOT EXISTS transfer_files
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    flow_id INTEGER NOT NULL,
    transfer_task_id INTEGER NOT NULL,
    src_file TEXT NOT NULL, -- source file
    dst_file TEXT NOT NULL, -- destination file
    file_size INTEGER NOT NULL, -- file size
    is_dir INTEGER NOT NULL DEFAULT 0
);


-- Transfer logs (NOTE: original header comment duplicated "transfer file
-- list"): per-file, per-destination transfer progress records.
CREATE TABLE IF NOT EXISTS transfer_logs
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    flow_id INTEGER NOT NULL,
    transfer_task_id INTEGER NOT NULL,
    transfer_file_id INTEGER NOT NULL,
    dst_node_idx INTEGER NOT NULL, -- target node index into transfer_tasks.dst_nodes
    status INTEGER NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3, 4)), -- 0: waiting, 1: running, 2: success, 3: failed, 4: skipped
    progress INTEGER DEFAULT 0, -- 0-100
    message TEXT NOT NULL DEFAULT '',
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    started_at INTEGER,
    completed_at INTEGER
);

-- Flow templates: reusable flow definitions built by the frontend.
CREATE TABLE IF NOT EXISTS flow_templates
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL CHECK (length(name) <= 255),
    key_words TEXT NOT NULL DEFAULT '', -- search keywords; the content searched is the sub-task names
    description TEXT NOT NULL DEFAULT '', -- template description
    content TEXT NOT NULL, -- JSON field built by the frontend: the actual flow content
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

CREATE INDEX IF NOT EXISTS idx_scripts_name ON scripts (name);
CREATE INDEX IF NOT EXISTS idx_scripts_description ON scripts (description);

CREATE INDEX IF NOT EXISTS idx_flow_server_ids ON flows (server_ids);

-- command_tasks indexes
CREATE INDEX IF NOT EXISTS idx_command_tasks_flow_id ON command_tasks (flow_id);
CREATE INDEX IF NOT EXISTS idx_command_tasks_script_id ON command_tasks (script_id);

-- command_logs indexes
CREATE INDEX IF NOT EXISTS idx_command_logs_task_id ON command_logs (command_task_id);
CREATE INDEX IF NOT EXISTS idx_command_logs_server_id ON command_logs (server_id);
-- command_logs status lookups
CREATE INDEX IF NOT EXISTS idx_command_logs_status ON command_logs (command_task_id, status);

-- transfer_tasks indexes
CREATE INDEX IF NOT EXISTS idx_transfer_tasks_flow_id ON transfer_tasks (flow_id);
CREATE INDEX IF NOT EXISTS idx_transfer_tasks_src_node_task_id ON transfer_tasks (src_node_task_id);

-- transfer_files indexes
CREATE INDEX IF NOT EXISTS idx_transfer_files_task_id ON transfer_files (transfer_task_id);

-- transfer_logs indexes
CREATE INDEX IF NOT EXISTS idx_transfer_logs_flow_id ON transfer_logs (flow_id);
CREATE INDEX IF NOT EXISTS idx_transfer_logs_task_id ON transfer_logs (transfer_task_id);
CREATE INDEX IF NOT EXISTS idx_transfer_logs_file_id ON transfer_logs (transfer_file_id);
-- transfer_logs status lookups
CREATE INDEX IF NOT EXISTS idx_transfer_logs_status ON transfer_logs (transfer_file_id, status);

-- flow_templates indexes
CREATE INDEX IF NOT EXISTS idx_flow_templates_name ON flow_templates (name);
CREATE INDEX IF NOT EXISTS idx_flow_templates_key_words ON flow_templates (key_words);
|
||||
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user