2026-04-07 02:04:22 +05:30
|
|
|
"""YakPanel - Site service"""
|
|
|
|
|
import os
|
|
|
|
|
import re
|
2026-04-07 10:03:25 +05:30
|
|
|
from datetime import datetime, timezone
|
2026-04-07 02:04:22 +05:30
|
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
|
|
|
from sqlalchemy import select
|
|
|
|
|
|
|
|
|
|
from app.models.site import Site, Domain
|
|
|
|
|
from app.models.redirect import SiteRedirect
|
2026-04-07 10:41:22 +05:30
|
|
|
from app.core.config import get_runtime_config, get_settings
|
2026-04-07 11:42:19 +05:30
|
|
|
from app.core.utils import path_safe_check, write_file, read_file, exec_shell_sync, nginx_reload_all_known
|
2026-04-07 02:04:22 +05:30
|
|
|
|
|
|
|
|
|
|
|
|
|
# Accepts 1-8 dot-separated labels (each up to 100 chars, "*" allowed for
# wildcards) followed by a one- or two-part TLD, e.g. "example.com" or
# "*.example.co.uk". NOTE(review): a ":port" suffix does not match this
# pattern, although other code paths split ports off — confirm intent.
DOMAIN_REGEX = re.compile(r"^([\w\-\*]{1,100}\.){1,8}([\w\-]{1,24}|[\w\-]{1,24}\.[\w\-]{1,24})$")

# Certbot's live directory: one subdirectory per certificate lineage,
# each containing fullchain.pem / privkey.pem symlinks.
LETSENCRYPT_LIVE = "/etc/letsencrypt/live"

# Certificates with at most this many days left are reported as "expiring".
SSL_EXPIRING_DAYS = 14

# Cache of fullchain path -> (file mtime, SAN name set); avoids re-running
# openssl for every request while staying fresh when the cert is renewed.
_SAN_CACHE: dict[str, tuple[float, frozenset[str]]] = {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _normalize_hostname(h: str) -> str:
|
|
|
|
|
return (h or "").strip().lower().split(":")[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _iter_le_pairs_sorted() -> list[tuple[str, str]]:
    """List (fullchain, privkey) path pairs under LETSENCRYPT_LIVE, sorted by entry name.

    Returns an empty list when the live directory is absent or unreadable.
    """
    if not os.path.isdir(LETSENCRYPT_LIVE):
        return []
    try:
        entries = sorted(os.listdir(LETSENCRYPT_LIVE))
    except OSError:
        return []
    pairs: list[tuple[str, str]] = []
    for entry in entries:
        # Skip hidden entries and anything that could escape the live dir.
        if entry.startswith(".") or ".." in entry:
            continue
        fullchain = os.path.join(LETSENCRYPT_LIVE, entry, "fullchain.pem")
        privkey = os.path.join(LETSENCRYPT_LIVE, entry, "privkey.pem")
        # Only yield complete lineages: both cert chain and key must exist.
        if os.path.isfile(fullchain) and os.path.isfile(privkey):
            pairs.append((fullchain, privkey))
    return pairs
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _cert_san_names(fullchain: str) -> frozenset[str]:
|
|
|
|
|
try:
|
|
|
|
|
st = os.stat(fullchain)
|
|
|
|
|
mtime = st.st_mtime
|
|
|
|
|
except OSError:
|
|
|
|
|
return frozenset()
|
|
|
|
|
hit = _SAN_CACHE.get(fullchain)
|
|
|
|
|
if hit is not None and hit[0] == mtime:
|
|
|
|
|
return hit[1]
|
|
|
|
|
out, _err = exec_shell_sync(f'openssl x509 -in "{fullchain}" -noout -text', timeout=8)
|
|
|
|
|
names: set[str] = set()
|
|
|
|
|
if out:
|
|
|
|
|
for m in re.finditer(r"DNS:([^,\s\n]+)", out, flags=re.IGNORECASE):
|
|
|
|
|
names.add(m.group(1).strip().lower())
|
|
|
|
|
froz = frozenset(names)
|
|
|
|
|
_SAN_CACHE[fullchain] = (mtime, froz)
|
|
|
|
|
return froz
|
2026-04-07 10:12:30 +05:30
|
|
|
|
|
|
|
|
|
2026-04-07 10:41:22 +05:30
|
|
|
def _nginx_site_template_path() -> str | None:
|
|
|
|
|
"""
|
|
|
|
|
Resolve webserver/templates/nginx_site.conf.
|
|
|
|
|
Order: YAKPANEL_NGINX_TEMPLATE env, repo root (parent of backend/), Settings.panel_path.
|
|
|
|
|
"""
|
|
|
|
|
candidates: list[str] = []
|
|
|
|
|
env_override = (os.environ.get("YAKPANEL_NGINX_TEMPLATE") or "").strip()
|
|
|
|
|
if env_override:
|
|
|
|
|
candidates.append(env_override)
|
|
|
|
|
# site_service.py -> services -> app -> backend -> YakPanel-server (repo root)
|
|
|
|
|
here = os.path.abspath(__file__)
|
|
|
|
|
repo_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(here))))
|
|
|
|
|
candidates.append(os.path.join(repo_root, "webserver", "templates", "nginx_site.conf"))
|
|
|
|
|
try:
|
|
|
|
|
s = get_settings()
|
|
|
|
|
pp = (s.panel_path or "").strip()
|
|
|
|
|
if pp:
|
|
|
|
|
candidates.append(os.path.join(os.path.abspath(pp), "webserver", "templates", "nginx_site.conf"))
|
|
|
|
|
sp = (s.setup_path or "").strip()
|
|
|
|
|
if sp:
|
|
|
|
|
candidates.append(
|
|
|
|
|
os.path.join(os.path.abspath(sp), "YakPanel-server", "webserver", "templates", "nginx_site.conf")
|
|
|
|
|
)
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
for path in candidates:
|
|
|
|
|
if path and os.path.isfile(path):
|
|
|
|
|
return path
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 10:12:30 +05:30
|
|
|
def _backup_count(site_name: str, backup_dir: str) -> int:
|
|
|
|
|
if not backup_dir or not os.path.isdir(backup_dir):
|
|
|
|
|
return 0
|
|
|
|
|
prefix = f"{site_name}_"
|
|
|
|
|
n = 0
|
|
|
|
|
try:
|
|
|
|
|
for f in os.listdir(backup_dir):
|
|
|
|
|
if f.startswith(prefix) and f.endswith(".tar.gz"):
|
|
|
|
|
n += 1
|
|
|
|
|
except OSError:
|
|
|
|
|
return 0
|
|
|
|
|
return n
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_cert_not_after(cert_path: str) -> datetime | None:
|
|
|
|
|
if not os.path.isfile(cert_path):
|
|
|
|
|
return None
|
|
|
|
|
out, _err = exec_shell_sync(f'openssl x509 -in "{cert_path}" -noout -enddate', timeout=5)
|
|
|
|
|
if not out or "notAfter=" not in out:
|
|
|
|
|
return None
|
|
|
|
|
val = out.strip().split("=", 1)[1].strip()
|
|
|
|
|
try:
|
|
|
|
|
dt = datetime.strptime(val, "%b %d %H:%M:%S %Y GMT")
|
|
|
|
|
return dt.replace(tzinfo=timezone.utc)
|
|
|
|
|
except ValueError:
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _best_ssl_for_hostnames(hostnames: list[str]) -> dict:
    """Pick the LE cert (live/ or SAN) that covers site hostnames with longest validity.

    Returns {"status": "none"|"expired"|"expiring"|"active",
             "days_left": int | None, "cert_name": str | None}.
    """
    # Shared result for every "no usable certificate" exit path.
    none = {"status": "none", "days_left": None, "cert_name": None}
    # Normalize + de-duplicate hostnames, preserving first-seen order and
    # rejecting anything that could traverse directories.
    seen: set[str] = set()
    want_list: list[str] = []
    for host in hostnames:
        n = _normalize_hostname(host)
        if n and ".." not in n and n not in seen:
            seen.add(n)
            want_list.append(n)
    if not want_list:
        return none
    want = set(want_list)
    try:
        if not os.path.isdir(LETSENCRYPT_LIVE):
            return none
        best_days: int | None = None
        best_name: str | None = None
        for fc, _pk in _iter_le_pairs_sorted():
            # The live/<name>/ directory name is the lineage's primary name.
            live_name = os.path.basename(os.path.dirname(fc)).lower()
            if live_name in want:
                match_names = {live_name}
            else:
                # Fall back to the certificate's SAN list (multi-domain certs).
                match_names = want & _cert_san_names(fc)
            if not match_names:
                continue
            end = _parse_cert_not_after(fc)
            if end is None:
                continue
            now = datetime.now(timezone.utc)
            # Floor-divide so partially elapsed days count as already spent.
            days = int((end - now).total_seconds() // 86400)
            # min() gives a deterministic display name when several hosts match.
            pick = min(match_names)
            # Keep the certificate with the most remaining validity.
            if best_days is None or days > best_days:
                best_days = days
                best_name = pick
        if best_days is None:
            return none
        if best_days < 0:
            status = "expired"
        elif best_days <= SSL_EXPIRING_DAYS:
            status = "expiring"
        else:
            status = "active"
        return {"status": status, "days_left": best_days, "cert_name": best_name}
    except OSError:
        return none
|
|
|
|
|
|
2026-04-07 02:04:22 +05:30
|
|
|
|
2026-04-07 10:23:05 +05:30
|
|
|
def _letsencrypt_paths(hostname: str) -> tuple[str, str] | None:
|
|
|
|
|
"""Return (fullchain, privkey) if Let's Encrypt files exist for this hostname."""
|
|
|
|
|
h = (hostname or "").strip().lower().split(":")[0]
|
|
|
|
|
if not h or ".." in h:
|
|
|
|
|
return None
|
|
|
|
|
base = os.path.join(LETSENCRYPT_LIVE, h)
|
|
|
|
|
fc = os.path.join(base, "fullchain.pem")
|
|
|
|
|
pk = os.path.join(base, "privkey.pem")
|
|
|
|
|
if os.path.isfile(fc) and os.path.isfile(pk):
|
|
|
|
|
return fc, pk
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 11:42:19 +05:30
|
|
|
def _letsencrypt_paths_any(hostnames: list[str]) -> tuple[str, str] | None:
|
|
|
|
|
"""First matching LE cert: exact live/<host>/, then live dir name, then SAN match."""
|
|
|
|
|
seen: set[str] = set()
|
|
|
|
|
want_ordered: list[str] = []
|
|
|
|
|
for h in hostnames:
|
|
|
|
|
n = _normalize_hostname(h)
|
|
|
|
|
if n and ".." not in n and n not in seen:
|
|
|
|
|
seen.add(n)
|
|
|
|
|
want_ordered.append(n)
|
|
|
|
|
if not want_ordered:
|
|
|
|
|
return None
|
|
|
|
|
want = set(want_ordered)
|
|
|
|
|
for n in want_ordered:
|
|
|
|
|
p = _letsencrypt_paths(n)
|
|
|
|
|
if p:
|
|
|
|
|
return p
|
|
|
|
|
for fc, pk in _iter_le_pairs_sorted():
|
|
|
|
|
live_name = os.path.basename(os.path.dirname(fc)).lower()
|
|
|
|
|
if live_name in want:
|
|
|
|
|
return fc, pk
|
|
|
|
|
if want & _cert_san_names(fc):
|
|
|
|
|
return fc, pk
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 13:23:35 +05:30
|
|
|
def _build_php_deny_execute_block(enabled: int) -> str:
|
|
|
|
|
if not enabled:
|
|
|
|
|
return ""
|
|
|
|
|
return (
|
|
|
|
|
r" location ~* ^/uploads/.*\.(php|phar|phtml|php5)$ {" + "\n"
|
|
|
|
|
r" deny all;" + "\n"
|
|
|
|
|
r" }" + "\n"
|
|
|
|
|
r" location ~* ^/storage/.*\.(php|phar|phtml|php5)$ {" + "\n"
|
|
|
|
|
r" deny all;" + "\n"
|
|
|
|
|
r" }" + "\n"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _build_main_app_block(proxy_upstream: str, proxy_websocket: int, php_version: str) -> str:
|
|
|
|
|
pu = (proxy_upstream or "").strip()
|
|
|
|
|
pv = php_version or "74"
|
|
|
|
|
if pu:
|
|
|
|
|
ws_lines = ""
|
|
|
|
|
if proxy_websocket:
|
|
|
|
|
ws_lines = (
|
|
|
|
|
" proxy_set_header Upgrade $http_upgrade;\n"
|
|
|
|
|
' proxy_set_header Connection "upgrade";\n'
|
|
|
|
|
)
|
|
|
|
|
return (
|
|
|
|
|
f" location / {{\n"
|
|
|
|
|
f" proxy_pass {pu};\n"
|
|
|
|
|
f" proxy_http_version 1.1;\n"
|
|
|
|
|
f" proxy_set_header Host $host;\n"
|
|
|
|
|
f" proxy_set_header X-Real-IP $remote_addr;\n"
|
|
|
|
|
f" proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n"
|
|
|
|
|
f" proxy_set_header X-Forwarded-Proto $scheme;\n"
|
|
|
|
|
f"{ws_lines}"
|
|
|
|
|
f" proxy_read_timeout 3600s;\n"
|
|
|
|
|
f" }}\n"
|
|
|
|
|
)
|
2026-04-07 10:23:05 +05:30
|
|
|
return (
|
|
|
|
|
r" location ~ .*\.(gif|jpg|jpeg|png|bmp|swf)$ {" + "\n"
|
|
|
|
|
f" expires 30d;\n"
|
|
|
|
|
f" access_log off;\n"
|
|
|
|
|
f" }}\n"
|
|
|
|
|
r" location ~ .*\.(js|css)?$ {" + "\n"
|
|
|
|
|
f" expires 12h;\n"
|
|
|
|
|
f" access_log off;\n"
|
|
|
|
|
f" }}\n"
|
|
|
|
|
r" location ~ \.php$ {" + "\n"
|
|
|
|
|
f" fastcgi_pass unix:/tmp/php-cgi-{pv}.sock;\n"
|
|
|
|
|
f" fastcgi_index index.php;\n"
|
|
|
|
|
f" include fastcgi.conf;\n"
|
|
|
|
|
f" }}\n"
|
2026-04-07 13:23:35 +05:30
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _build_dir_auth_block(
|
|
|
|
|
dir_path: str,
|
|
|
|
|
user_file: str,
|
|
|
|
|
proxy_upstream: str,
|
|
|
|
|
root_path: str,
|
|
|
|
|
) -> str:
|
|
|
|
|
dp = (dir_path or "").strip()
|
|
|
|
|
uf = (user_file or "").strip()
|
|
|
|
|
if not dp or not uf or ".." in dp or ".." in uf:
|
|
|
|
|
return ""
|
|
|
|
|
if not dp.startswith("/"):
|
|
|
|
|
dp = "/" + dp
|
|
|
|
|
qf = uf.replace("\\", "\\\\").replace('"', '\\"')
|
|
|
|
|
qr = root_path.replace("\\", "\\\\")
|
|
|
|
|
pu = (proxy_upstream or "").strip()
|
|
|
|
|
if pu:
|
|
|
|
|
puc = pu.rstrip("/")
|
|
|
|
|
return (
|
|
|
|
|
f" location ^~ {dp} {{\n"
|
|
|
|
|
f' auth_basic "YakPanel";\n'
|
|
|
|
|
f' auth_basic_user_file "{qf}";\n'
|
|
|
|
|
f" proxy_pass {puc};\n"
|
|
|
|
|
f" proxy_http_version 1.1;\n"
|
|
|
|
|
f" proxy_set_header Host $host;\n"
|
|
|
|
|
f" proxy_set_header X-Real-IP $remote_addr;\n"
|
|
|
|
|
f" proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n"
|
|
|
|
|
f" proxy_set_header X-Forwarded-Proto $scheme;\n"
|
|
|
|
|
f" }}\n"
|
|
|
|
|
)
|
|
|
|
|
return (
|
|
|
|
|
f" location ^~ {dp} {{\n"
|
|
|
|
|
f' auth_basic "YakPanel";\n'
|
|
|
|
|
f' auth_basic_user_file "{qf}";\n'
|
|
|
|
|
f" root {qr};\n"
|
|
|
|
|
f" try_files $uri $uri/ =404;\n"
|
|
|
|
|
f" }}\n"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _build_location_bundle(
    root_path: str,
    redirects: list[tuple[str, str, int]] | None,
    proxy_upstream: str,
    proxy_websocket: int,
    dir_auth_path: str,
    dir_auth_user_file: str,
    php_deny_execute: int,
    php_version: str,
) -> str:
    """Assemble the shared location directives used by both HTTP and HTTPS server blocks.

    Order: ACME challenge, exact-match redirects, directory auth, PHP-execute
    hardening, then the main app (proxy or static/PHP) block.
    """
    # ACME challenges must always be served from disk so certbot renewals work.
    acme = (
        " location ^~ /.well-known/acme-challenge/ {\n"
        f" root {root_path};\n"
        ' default_type "text/plain";\n'
        " allow all;\n"
        " access_log off;\n"
        " }\n"
    )
    rules = [
        f" location = {src} {{ return {code} {tgt}; }}"
        for src, tgt, code in (redirects or [])
        if src and tgt
    ]
    redirect_block = "\n" + "\n".join(rules) if rules else ""
    return (
        acme
        + redirect_block
        + "\n"
        + _build_dir_auth_block(dir_auth_path, dir_auth_user_file, proxy_upstream, root_path)
        + _build_php_deny_execute_block(php_deny_execute)
        + _build_main_app_block(proxy_upstream, proxy_websocket, php_version)
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _build_ssl_server_block(
    server_names: str,
    root_path: str,
    logs_path: str,
    site_name: str,
    php_version: str,
    fullchain: str,
    privkey: str,
    redirects: list[tuple[str, str, int]] | None,
    proxy_upstream: str = "",
    proxy_websocket: int = 0,
    dir_auth_path: str = "",
    dir_auth_user_file: str = "",
    php_deny_execute: int = 0,
) -> str:
    """Second server {} for HTTPS when LE certs exist."""

    def esc(path: str) -> str:
        # Escape backslashes/quotes so cert paths survive nginx quoted strings.
        return path.replace("\\", "\\\\").replace('"', '\\"')

    locations = _build_location_bundle(
        root_path,
        redirects,
        proxy_upstream,
        proxy_websocket,
        dir_auth_path,
        dir_auth_user_file,
        php_deny_execute,
        php_version,
    )
    header = (
        "server {\n"
        " listen 443 ssl;\n"
        f" server_name {server_names};\n"
        f' ssl_certificate "{esc(fullchain)}";\n'
        f' ssl_certificate_key "{esc(privkey)}";\n'
        " index index.php index.html index.htm default.php default.htm default.html;\n"
        f" root {root_path};\n"
        " error_page 404 /404.html;\n"
        " error_page 502 /502.html;\n"
    )
    footer = (
        f" access_log {logs_path}/{site_name}.log;\n"
        f" error_log {logs_path}/{site_name}.error.log;\n"
        "}\n"
    )
    return header + locations + footer
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 02:04:22 +05:30
|
|
|
def _render_vhost(
    template: str,
    server_names: str,
    root_path: str,
    logs_path: str,
    site_name: str,
    php_version: str,
    force_https: int,
    redirects: list[tuple[str, str, int]] | None = None,
    le_hostnames: list[str] | None = None,
    proxy_upstream: str = "",
    proxy_websocket: int = 0,
    dir_auth_path: str = "",
    dir_auth_user_file: str = "",
    php_deny_execute: int = 0,
) -> str:
    """Render nginx vhost template. redirects: [(source, target, code), ...]

    Substitutes {PLACEHOLDER} tokens in the template; when a Let's Encrypt
    certificate covers any of le_hostnames, an extra HTTPS server {} is
    appended via {SSL_SERVER_BLOCK}.
    """
    # HTTP->HTTPS redirect, but never for ACME challenges (would break renewals).
    if force_https:
        force_block = (
            ' if ($request_uri !~ "^/.well-known/acme-challenge/") {\n'
            " return 301 https://$host$request_uri;\n"
            " }"
        )
    else:
        force_block = ""
    # Hostnames used for cert lookup; fall back to splitting server_names.
    hosts = le_hostnames if le_hostnames is not None else [p for p in server_names.split() if p]
    ssl_block = ""
    le = _letsencrypt_paths_any(hosts)
    if le:
        fc, pk = le
        # Certs found: emit an HTTPS server {} alongside the HTTP one.
        ssl_block = _build_ssl_server_block(
            server_names,
            root_path,
            logs_path,
            site_name,
            php_version,
            fc,
            pk,
            redirects,
            proxy_upstream,
            proxy_websocket,
            dir_auth_path,
            dir_auth_user_file,
            php_deny_execute,
        )
    # Shared location directives for the plain-HTTP server block.
    bundle = _build_location_bundle(
        root_path,
        redirects,
        proxy_upstream,
        proxy_websocket,
        dir_auth_path,
        dir_auth_user_file,
        php_deny_execute,
        php_version,
    )
    # Simple string substitution — the template uses literal {NAME} markers.
    content = template.replace("{SERVER_NAMES}", server_names)
    content = content.replace("{ROOT_PATH}", root_path)
    content = content.replace("{LOGS_PATH}", logs_path)
    content = content.replace("{SITE_NAME}", site_name)
    content = content.replace("{PHP_VERSION}", php_version or "74")
    content = content.replace("{FORCE_HTTPS_BLOCK}", force_block)
    content = content.replace("{LOCATION_BUNDLE}", bundle)
    content = content.replace("{SSL_SERVER_BLOCK}", ssl_block)
    return content
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def domain_format(domains: list[str]) -> str | None:
|
|
|
|
|
"""Validate domain format. Returns first invalid domain or None."""
|
|
|
|
|
for d in domains:
|
|
|
|
|
if not DOMAIN_REGEX.match(d):
|
|
|
|
|
return d
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def domain_exists(db: AsyncSession, domains: list[str], exclude_site_id: int | None = None) -> str | None:
    """Check if domain already exists. Returns first existing domain or None.

    Entries may carry a ":port" suffix; the port defaults to "80".
    exclude_site_id skips rows belonging to that site (used when editing).
    """
    for entry in domains:
        pieces = entry.split(":")
        host = pieces[0]
        port = pieces[1] if len(pieces) > 1 else "80"
        query = select(Domain).where(Domain.name == host, Domain.port == port)
        if exclude_site_id is not None:
            # Don't report a site's own domains as duplicates.
            query = query.where(Domain.pid != exclude_site_id)
        row = await db.execute(query)
        if row.scalar_one_or_none():
            return entry
    return None
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 13:23:35 +05:30
|
|
|
def _vhost_kwargs_from_site(site: Site) -> dict:
    """Collect the optional proxy/auth/PHP vhost kwargs from a Site row.

    Missing or NULL columns fall back to ""/0 so older schemas keep working.
    """

    def text(attr: str) -> str:
        return getattr(site, attr, None) or ""

    def flag(attr: str) -> int:
        return int(getattr(site, attr, 0) or 0)

    return {
        "proxy_upstream": text("proxy_upstream"),
        "proxy_websocket": flag("proxy_websocket"),
        "dir_auth_path": text("dir_auth_path"),
        "dir_auth_user_file": text("dir_auth_user_file"),
        "php_deny_execute": flag("php_deny_execute"),
    }
|
|
|
|
|
|
|
|
|
|
|
2026-04-07 02:04:22 +05:30
|
|
|
async def create_site(
    db: AsyncSession,
    name: str,
    path: str,
    domains: list[str],
    project_type: str = "PHP",
    ps: str = "",
    php_version: str = "74",
    force_https: int = 0,
    proxy_upstream: str = "",
    proxy_websocket: int = 0,
    dir_auth_path: str = "",
    dir_auth_user_file: str = "",
    php_deny_execute: int = 0,
) -> dict:
    """Create a new site with vhost config.

    Validates name/path/domains, creates the Site and Domain rows, makes the
    docroot under www_root/<name>, renders the nginx vhost (when the template
    can be resolved), reloads nginx, then commits.

    Returns:
        {"status": bool, "msg": str} plus "id" on success; status is False
        only for validation failures (a failed nginx reload is reported in
        msg but the site is still created).
    """
    if not path_safe_check(name) or not path_safe_check(path):
        return {"status": False, "msg": "Invalid site name or path"}

    invalid = await domain_format(domains)
    if invalid:
        return {"status": False, "msg": f"Invalid domain format: {invalid}"}

    existing = await domain_exists(db, domains)
    if existing:
        return {"status": False, "msg": f"Domain already exists: {existing}"}

    cfg = get_runtime_config()
    setup_path = cfg["setup_path"]
    www_root = cfg["www_root"]
    www_logs = cfg["www_logs"]
    vhost_path = os.path.join(setup_path, "panel", "vhost", "nginx")

    # Docroot lives under www_root/<name> regardless of the requested path.
    # exist_ok avoids the check-then-create race of the old exists() guard.
    site_path = os.path.join(www_root, name)
    os.makedirs(site_path, 0o755, exist_ok=True)

    site = Site(
        name=name,
        path=site_path,
        ps=ps,
        project_type=project_type,
        php_version=php_version or "74",
        force_https=force_https or 0,
        proxy_upstream=(proxy_upstream or "")[:512],
        proxy_websocket=1 if proxy_websocket else 0,
        dir_auth_path=(dir_auth_path or "")[:256],
        dir_auth_user_file=(dir_auth_user_file or "")[:512],
        php_deny_execute=1 if php_deny_execute else 0,
    )
    db.add(site)
    await db.flush()  # flush so site.id is available for the Domain rows

    for d in domains:
        parts = d.split(":")
        domain_name, port = parts[0], parts[1] if len(parts) > 1 else "80"
        db.add(Domain(pid=site.id, name=domain_name, port=port))

    await db.flush()

    # Generate the nginx vhost. Use the shared resolver (env override, repo
    # root, settings fallbacks) instead of a second hard-coded lookup, so
    # create_site and update_site agree on where the template lives.
    conf_path = os.path.join(vhost_path, f"{name}.conf")
    template_path = _nginx_site_template_path()
    if template_path:
        template = read_file(template_path) or ""
        server_names = " ".join(d.split(":")[0] for d in domains)
        le_hosts = [d.split(":")[0] for d in domains]
        vk = _vhost_kwargs_from_site(site)
        content = _render_vhost(
            template,
            server_names,
            site_path,
            www_logs,
            name,
            php_version or "74",
            force_https or 0,
            [],  # a brand-new site has no redirects yet
            le_hosts,
            **vk,
        )
        write_file(conf_path, content)

    reload_ok, reload_err = nginx_reload_all_known()

    await db.commit()
    if reload_ok:
        return {"status": True, "msg": "Site created", "id": site.id}
    # Site persisted either way; surface the reload failure to the caller.
    return {
        "status": True,
        "msg": f"Site created but nginx reload failed (HTTPS may not work): {reload_err}",
        "id": site.id,
    }
|
2026-04-07 02:04:22 +05:30
|
|
|
|
|
|
|
|
|
|
|
|
|
async def list_sites(db: AsyncSession) -> list[dict]:
    """List all sites with domain count, primary domain, backup count, SSL summary."""
    cfg = get_runtime_config()
    backup_dir = cfg.get("backup_path") or ""
    rows = (await db.execute(select(Site).order_by(Site.id))).scalars().all()
    summaries: list[dict] = []
    for site in rows:
        dres = await db.execute(select(Domain).where(Domain.pid == site.id).order_by(Domain.id))
        doms = dres.scalars().all()
        # Show ":port" only for non-default ports.
        display = [d.name if d.port == "80" else f"{d.name}:{d.port}" for d in doms]
        hosts = [d.name for d in doms]
        summaries.append({
            "id": site.id,
            "name": site.name,
            "path": site.path,
            "status": site.status,
            "ps": site.ps,
            "project_type": site.project_type,
            "domain_count": len(doms),
            "addtime": site.addtime.isoformat() if site.addtime else None,
            "php_version": getattr(site, "php_version", None) or "74",
            "primary_domain": hosts[0] if hosts else "",
            "domains": display,
            "backup_count": _backup_count(site.name, backup_dir),
            "ssl": _best_ssl_for_hostnames(hosts),
        })
    return summaries
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def delete_site(db: AsyncSession, site_id: int) -> dict:
    """Delete a site and its vhost config."""
    row = await db.execute(select(Site).where(Site.id == site_id))
    site = row.scalar_one_or_none()
    if site is None:
        return {"status": False, "msg": "Site not found"}

    # Remove dependent rows first, then the site itself.
    await db.execute(Domain.__table__.delete().where(Domain.pid == site_id))
    await db.execute(SiteRedirect.__table__.delete().where(SiteRedirect.site_id == site_id))
    await db.delete(site)

    # Drop the nginx vhost file if it exists.
    cfg = get_runtime_config()
    conf_path = os.path.join(cfg["setup_path"], "panel", "vhost", "nginx", f"{site.name}.conf")
    if os.path.exists(conf_path):
        os.remove(conf_path)

    reload_ok, reload_err = nginx_reload_all_known()

    await db.commit()
    if not reload_ok:
        # Deletion stands even when the reload fails; tell the caller.
        return {"status": True, "msg": f"Site deleted but nginx reload failed: {reload_err}"}
    return {"status": True, "msg": "Site deleted"}
|
2026-04-07 02:04:22 +05:30
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_site_count(db: AsyncSession) -> int:
    """Get total site count."""
    from sqlalchemy import func

    count = (await db.execute(select(func.count()).select_from(Site))).scalar()
    return count or 0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_site_with_domains(db: AsyncSession, site_id: int) -> dict | None:
    """Get site with domain list for editing. None when the site is unknown."""
    site = (await db.execute(select(Site).where(Site.id == site_id))).scalar_one_or_none()
    if site is None:
        return None
    doms = (await db.execute(select(Domain).where(Domain.pid == site.id))).scalars().all()
    # Show ":port" only for non-default ports.
    display = [d.name if d.port == "80" else f"{d.name}:{d.port}" for d in doms]
    info = {
        "id": site.id,
        "name": site.name,
        "path": site.path,
        "status": site.status,
        "ps": site.ps,
        "project_type": site.project_type,
        "php_version": getattr(site, "php_version", None) or "74",
        "force_https": getattr(site, "force_https", 0) or 0,
    }
    # Optional proxy/auth columns (shared defaults with vhost rendering).
    info.update(_vhost_kwargs_from_site(site))
    info["domains"] = display
    return info
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def update_site(
    db: AsyncSession,
    site_id: int,
    path: str | None = None,
    domains: list[str] | None = None,
    ps: str | None = None,
    php_version: str | None = None,
    force_https: int | None = None,
    proxy_upstream: str | None = None,
    proxy_websocket: int | None = None,
    dir_auth_path: str | None = None,
    dir_auth_user_file: str | None = None,
    php_deny_execute: int | None = None,
) -> dict:
    """Update site domains, path, or note.

    Parameters left as None are ignored; any non-None value is applied.
    Changing a field that feeds the nginx template regenerates the vhost
    and reloads nginx before committing.

    Returns:
        {"status": bool, "msg": str}; False on validation failure or when
        the nginx test/reload fails after the vhost was rewritten.
    """
    result = await db.execute(select(Site).where(Site.id == site_id))
    site = result.scalar_one_or_none()
    if not site:
        return {"status": False, "msg": "Site not found"}

    if domains is not None:
        # Validate first, then replace the site's entire domain set.
        invalid = await domain_format(domains)
        if invalid:
            return {"status": False, "msg": f"Invalid domain format: {invalid}"}
        existing = await domain_exists(db, domains, exclude_site_id=site_id)
        if existing:
            return {"status": False, "msg": f"Domain already exists: {existing}"}
        await db.execute(Domain.__table__.delete().where(Domain.pid == site_id))
        for d in domains:
            parts = d.split(":")
            domain_name, port = parts[0], parts[1] if len(parts) > 1 else "80"
            db.add(Domain(pid=site.id, name=domain_name, port=port))

    # An unsafe path is silently skipped rather than rejected.
    if path is not None and path_safe_check(path):
        site.path = path

    if ps is not None:
        site.ps = ps
    if php_version is not None:
        site.php_version = php_version or "74"
    if force_https is not None:
        site.force_https = 1 if force_https else 0
    if proxy_upstream is not None:
        site.proxy_upstream = (proxy_upstream or "")[:512]
    if proxy_websocket is not None:
        site.proxy_websocket = 1 if proxy_websocket else 0
    if dir_auth_path is not None:
        site.dir_auth_path = (dir_auth_path or "")[:256]
    if dir_auth_user_file is not None:
        site.dir_auth_user_file = (dir_auth_user_file or "")[:512]
    if php_deny_execute is not None:
        site.php_deny_execute = 1 if php_deny_execute else 0

    await db.flush()

    # Regenerate the vhost only when a template-relevant field was supplied.
    regen = (
        domains is not None
        or php_version is not None
        or force_https is not None
        or proxy_upstream is not None
        or proxy_websocket is not None
        or dir_auth_path is not None
        or dir_auth_user_file is not None
        or php_deny_execute is not None
    )
    if regen:
        cfg = get_runtime_config()
        vhost_path = os.path.join(cfg["setup_path"], "panel", "vhost", "nginx")
        conf_path = os.path.join(vhost_path, f"{site.name}.conf")
        template_path = _nginx_site_template_path()
        if template_path:
            template = read_file(template_path) or ""
            # Re-read the (possibly just replaced) domain rows for rendering.
            domain_result = await db.execute(select(Domain).where(Domain.pid == site.id))
            domain_rows = domain_result.scalars().all()
            domain_list = [f"{d.name}:{d.port}" if d.port != "80" else d.name for d in domain_rows]
            server_names = " ".join(d.split(":")[0] for d in domain_list) if domain_list else site.name
            php_ver = getattr(site, "php_version", None) or "74"
            fhttps = getattr(site, "force_https", 0) or 0
            redir_result = await db.execute(select(SiteRedirect).where(SiteRedirect.site_id == site.id))
            redirects = [(r.source, r.target, r.code or 301) for r in redir_result.scalars().all()]
            le_hosts = [d.name for d in domain_rows]
            vk = _vhost_kwargs_from_site(site)
            content = _render_vhost(
                template,
                server_names,
                site.path,
                cfg["www_logs"],
                site.name,
                php_ver,
                fhttps,
                redirects,
                le_hosts,
                **vk,
            )
            write_file(conf_path, content)
            reload_ok, reload_err = nginx_reload_all_known()
            if not reload_ok:
                # Commit anyway so the DB matches the vhost already on disk.
                await db.commit()
                return {"status": False, "msg": f"Vhost updated but nginx test/reload failed: {reload_err}"}

    await db.commit()
    return {"status": True, "msg": "Site updated"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _vhost_path(site_name: str) -> tuple[str, str]:
    """Return (conf_path, disabled_path) for site vhost."""
    cfg = get_runtime_config()
    base = os.path.join(cfg["setup_path"], "panel", "vhost", "nginx")
    filename = f"{site_name}.conf"
    # Disabled configs are parked in a subdirectory nginx does not include.
    return os.path.join(base, filename), os.path.join(base, "disabled", filename)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def set_site_status(db: AsyncSession, site_id: int, status: int) -> dict:
    """Enable (1) or disable (0) site by moving vhost config.

    Moves the nginx vhost file between the active directory and the
    "disabled" directory, persists the new status on the Site row, then
    tests/reloads nginx.

    Args:
        db: Async database session.
        site_id: Primary key of the site to toggle.
        status: 1 to enable, any other value to disable.

    Returns:
        A ``{"status": bool, "msg": str}`` result dict, matching the
        convention used by the other service functions in this module.
    """
    result = await db.execute(select(Site).where(Site.id == site_id))
    site = result.scalar_one_or_none()
    if not site:
        return {"status": False, "msg": "Site not found"}

    conf_path, disabled_path = _vhost_path(site.name)
    disabled_dir = os.path.dirname(disabled_path)

    # Move the file before touching the DB so a failed rename never leaves
    # the stored status out of sync with what nginx actually serves.
    try:
        if status == 1:  # enable: restore the conf from disabled/, if parked there
            if os.path.isfile(disabled_path):
                os.makedirs(os.path.dirname(conf_path), exist_ok=True)
                os.rename(disabled_path, conf_path)
        else:  # disable: park the active conf under disabled/
            if os.path.isfile(conf_path):
                os.makedirs(disabled_dir, exist_ok=True)
                os.rename(conf_path, disabled_path)
    except OSError as e:
        # Filesystem problems (permissions, cross-device, readonly mount)
        # are reported as a normal failure instead of a raw traceback.
        return {"status": False, "msg": f"Failed to move vhost config: {e}"}

    site.status = status
    await db.commit()

    reload_ok, reload_err = nginx_reload_all_known()
    if not reload_ok:
        return {
            "status": False,
            "msg": f"Site {'enabled' if status == 1 else 'disabled'} but nginx test/reload failed: {reload_err}",
        }

    return {"status": True, "msg": "Site " + ("enabled" if status == 1 else "disabled")}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def regenerate_site_vhost(db: AsyncSession, site_id: int) -> dict:
    """Regenerate nginx vhost for a site (e.g. after redirect changes or before LE validation).

    Renders the vhost from the nginx template using the site's current
    domains, redirects, PHP version and force-HTTPS flag, writes it to the
    active path (or the disabled path when the site is disabled), then
    tests/reloads nginx.

    Args:
        db: Async database session.
        site_id: Primary key of the site to regenerate.

    Returns:
        A ``{"status": bool, "msg": str}`` result dict.
    """
    result = await db.execute(select(Site).where(Site.id == site_id))
    site = result.scalar_one_or_none()
    if not site:
        return {"status": False, "msg": "Site not found"}

    cfg = get_runtime_config()
    conf_path, disabled_path = _vhost_path(site.name)
    if site.status == 1:
        write_path = conf_path
    else:
        # A disabled site must never get a fresh conf in the active dir:
        # nginx would pick it up on the reload below and silently
        # re-enable the site. Always write under disabled/ instead.
        os.makedirs(os.path.dirname(disabled_path), exist_ok=True)
        write_path = disabled_path

    template_path = _nginx_site_template_path()
    if not template_path:
        return {
            "status": False,
            "msg": "Template not found (nginx_site.conf). Expected under panel webserver/templates/ "
            "or set env YAKPANEL_NGINX_TEMPLATE to the full path. Check Settings.panel_path matches the install directory.",
        }

    template = read_file(template_path) or ""

    domain_result = await db.execute(select(Domain).where(Domain.pid == site.id))
    domain_rows = domain_result.scalars().all()
    # "name:port" form is kept only for non-default ports; server_name always
    # uses the bare hostname part.
    domain_list = [f"{d.name}:{d.port}" if d.port != "80" else d.name for d in domain_rows]
    server_names = " ".join(d.split(":")[0] for d in domain_list) if domain_list else site.name
    php_ver = getattr(site, "php_version", None) or "74"
    fhttps = getattr(site, "force_https", 0) or 0
    redir_result = await db.execute(select(SiteRedirect).where(SiteRedirect.site_id == site.id))
    redirects = [(r.source, r.target, r.code or 301) for r in redir_result.scalars().all()]
    le_hosts = [d.name for d in domain_rows]
    vk = _vhost_kwargs_from_site(site)
    content = _render_vhost(
        template,
        server_names,
        site.path,
        cfg["www_logs"],
        site.name,
        php_ver,
        fhttps,
        redirects,
        le_hosts,
        **vk,
    )
    write_file(write_path, content)

    reload_ok, reload_err = nginx_reload_all_known()
    if not reload_ok:
        return {"status": False, "msg": f"Vhost written but nginx test/reload failed: {reload_err}"}

    return {"status": True, "msg": "Vhost regenerated"}
|