Compare commits

..

34 Commits

Author SHA1 Message Date
Niranjan
ef59e6b403 new changes 2026-04-07 21:35:20 +05:30
Niranjan
8e166c8bde new changes 2026-04-07 20:57:48 +05:30
Niranjan
0732a2eba6 new changes 2026-04-07 20:53:04 +05:30
Niranjan
376ea41951 new changes 2026-04-07 20:41:08 +05:30
Niranjan
31fe556bb0 new changes 2026-04-07 20:29:49 +05:30
Niranjan
8fe63c7cf4 new changes 2026-04-07 14:09:55 +05:30
Niranjan
18777560d5 new changes 2026-04-07 13:58:51 +05:30
Niranjan
7b394d6446 new changes 2026-04-07 13:30:25 +05:30
Niranjan
6dea3b4307 new changes 2026-04-07 13:23:35 +05:30
Niranjan
df015e4d5a new changes 2026-04-07 12:00:10 +05:30
Niranjan
f1b0a88dff new changes 2026-04-07 11:50:53 +05:30
Niranjan
10236a4cd4 new changes 2026-04-07 11:42:19 +05:30
Niranjan
cc45fac342 new changes 2026-04-07 10:47:27 +05:30
Niranjan
8a08a95a17 new changes 2026-04-07 10:41:22 +05:30
Niranjan
88424b8836 new changes 2026-04-07 10:35:44 +05:30
Niranjan
097087519b new changes 2026-04-07 10:29:29 +05:30
Niranjan
09d0e2e033 new changes 2026-04-07 10:23:05 +05:30
Niranjan
8a3e3ce04b new changes 2026-04-07 10:12:30 +05:30
Niranjan
8965233e8c new changes 2026-04-07 10:03:25 +05:30
Niranjan
5e86cc7e40 new changes 2026-04-07 09:46:22 +05:30
Niranjan
b679cc3bb5 new changes 2026-04-07 05:26:12 +05:30
Niranjan
60e8d457c4 new changes 2026-04-07 05:17:40 +05:30
Niranjan
1aba57e5ad new changes 2026-04-07 05:12:03 +05:30
Niranjan
a18bba15f2 new changes 2026-04-07 05:05:28 +05:30
Niranjan
7c070224bd new changes 2026-04-07 04:28:40 +05:30
Niranjan
464ff188ad new changes 2026-04-07 04:24:16 +05:30
Niranjan
8bba285f56 new changes 2026-04-07 04:20:13 +05:30
Niranjan
493b4f6556 new changes 2026-04-07 04:11:59 +05:30
Niranjan
e2f2d5cc38 new changes 2026-04-07 04:03:46 +05:30
Niranjan
73148d2b09 new changes 2026-04-07 03:57:44 +05:30
Niranjan
3872d90ee7 new changes 2026-04-07 03:52:42 +05:30
Niranjan
d737072438 new changes 2026-04-07 03:49:12 +05:30
Niranjan
b7c5a19eca new changes 2026-04-07 03:45:37 +05:30
Niranjan
03e73a2c4c new changes 2026-04-07 03:40:06 +05:30
30135 changed files with 3255484 additions and 3063 deletions

32
.env.example Normal file
View File

@@ -0,0 +1,32 @@
POSTGRES_USER=yakpanel
POSTGRES_PASSWORD=yakpanel_dev_password
POSTGRES_DB=yakpanel
# Host port (container still uses 5432). 15432 avoids clashes with local PostgreSQL.
POSTGRES_PORT=15432
# Host port (container still uses 6379). 16379 avoids clashes with local Redis.
REDIS_PORT=16379
# Host ports for NATS (14222/18222 avoid common local installs).
NATS_PORT=14222
NATS_MONITOR_PORT=18222
MINIO_ROOT_USER=yakpanel
MINIO_ROOT_PASSWORD=yakpanel_minio_password
MINIO_PORT=19000
MINIO_CONSOLE_PORT=19001
# Host port for API scaffold (container still uses 8080).
API_PORT=18080
# Web control panel — React UI + FastAPI (nginx proxies /api -> backend)
PANEL_UI_PORT=3080
# Optional: comma-separated origins for direct browser access to API
PANEL_CORS_EXTRA_ORIGINS=
# Observability stack
PROMETHEUS_PORT=19090
GRAFANA_PORT=13000
GRAFANA_ADMIN_USER=admin
GRAFANA_ADMIN_PASSWORD=yakpanel_grafana_admin
CADVISOR_PORT=18081

29
Makefile Normal file
View File

@@ -0,0 +1,29 @@
SHELL := /bin/bash
COMPOSE := bash scripts/docker-compose.sh

.PHONY: up down logs ps init doctor migrate

# Create .env from the example without clobbering an existing one,
# then normalize the host-port variables it declares.
init:
	cp -n .env.example .env || true
	bash scripts/ensure-env-ports.sh
	@echo "Initialized .env (kept existing values if present)"

# Build images and start every service in the background.
up:
	$(COMPOSE) up -d --build

# Stop and remove the stack's containers.
down:
	$(COMPOSE) down

# Follow recent logs from all services.
logs:
	$(COMPOSE) logs -f --tail=100

# Show container status.
ps:
	$(COMPOSE) ps

# Sanity-check that Docker and the compose plugin are installed.
doctor:
	@docker --version
	@docker compose version

# Run the one-off database migration container.
migrate:
	$(COMPOSE) run --rm db-migrate

View File

@@ -9,8 +9,8 @@
<br/>
<div align="center">
[![BTWAF](https://img.shields.io/badge/YakPanel-YakPanel-blue)](https://github.com/YakPanel/YakPanel)
[![social](https://img.shields.io/github/stars/YakPanel/YakPanel?style=social)](https://github.com/YakPanel/YakPanel)
[![YakPanel](https://img.shields.io/badge/YakPanel-yakpanel--core-blue)](https://source.yakpanel.com/admin/yakpanel-core)
[![Source](https://img.shields.io/badge/Source-Gitea-green)](https://source.yakpanel.com/admin/yakpanel-core)
</div>
<p align="center">
@@ -91,3 +91,35 @@ Dir usage analysis
**Note: after the deployment is complete, please immediately change the username and password in the panel settings and add the installation entry**
## YakPanel 2026 Dev Scaffold (Ubuntu)
Use this for the new multi-service architecture scaffold in this repository.
Source: [https://source.yakpanel.com/admin/yakpanel-core](https://source.yakpanel.com/admin/yakpanel-core) · clone with `git clone https://source.yakpanel.com/admin/yakpanel-core.git`
```bash
cd yakpanel-core
chmod +x scripts/install-ubuntu.sh scripts/bootstrap-dev.sh
./scripts/install-ubuntu.sh
```
One-click installer:
```bash
cd yakpanel-core
chmod +x one-click-installer.sh
./one-click-installer.sh
```
Or run manually:
```bash
make init
make up
make ps
```
Reference: `docs/ubuntu-dev-install.md`
The **full web control panel** (sites, files, SSL, Docker, monitor, etc.) is the `YakPanel-server` stack: after `make up`, open `http://localhost:3080/` unless you changed `PANEL_UI_PORT` in `.env`. Default login: `admin` / `admin`. Observability is included: Prometheus (`PROMETHEUS_PORT`) and Grafana (`GRAFANA_PORT`).

View File

@@ -27,6 +27,9 @@ class CreateBackupPlanRequest(BaseModel):
target_id: int
schedule: str # cron, e.g. "0 2 * * *"
enabled: bool = True
s3_bucket: str = ""
s3_endpoint: str = ""
s3_key_prefix: str = ""
@router.get("/plans")
@@ -45,6 +48,9 @@ async def backup_plans_list(
"target_id": r.target_id,
"schedule": r.schedule,
"enabled": r.enabled,
"s3_bucket": getattr(r, "s3_bucket", None) or "",
"s3_endpoint": getattr(r, "s3_endpoint", None) or "",
"s3_key_prefix": getattr(r, "s3_key_prefix", None) or "",
}
for r in rows
]
@@ -79,6 +85,9 @@ async def backup_plan_create(
target_id=body.target_id,
schedule=body.schedule,
enabled=body.enabled,
s3_bucket=(body.s3_bucket or "")[:256],
s3_endpoint=(body.s3_endpoint or "")[:512],
s3_key_prefix=(body.s3_key_prefix or "")[:256],
)
db.add(plan)
await db.commit()
@@ -107,6 +116,9 @@ async def backup_plan_update(
plan.target_id = body.target_id
plan.schedule = body.schedule
plan.enabled = body.enabled
plan.s3_bucket = (body.s3_bucket or "")[:256]
plan.s3_endpoint = (body.s3_endpoint or "")[:512]
plan.s3_key_prefix = (body.s3_key_prefix or "")[:256]
await db.commit()
return {"status": True, "msg": "Updated"}
@@ -143,6 +155,27 @@ def _run_site_backup(site: Site) -> tuple[bool, str, str | None]:
return False, str(e), None
def _maybe_upload_s3(local_file: str, plan: BackupPlan) -> tuple[bool, str]:
"""Copy backup file to S3-compatible bucket if plan.s3_bucket set. Uses AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY."""
bucket = (getattr(plan, "s3_bucket", None) or "").strip()
if not bucket or not os.path.isfile(local_file):
return True, ""
try:
import boto3
except ImportError:
return False, "boto3 not installed (pip install boto3)"
ep = (getattr(plan, "s3_endpoint", None) or "").strip() or None
prefix = (getattr(plan, "s3_key_prefix", None) or "").strip().strip("/")
key_base = os.path.basename(local_file)
key = f"{prefix}/{key_base}" if prefix else key_base
try:
client = boto3.client("s3", endpoint_url=ep)
client.upload_file(local_file, bucket, key)
return True, f"s3://{bucket}/{key}"
except Exception as e:
return False, str(e)
def _run_database_backup(dbo: Database) -> tuple[bool, str, str | None]:
"""Run database backup (sync). Returns (ok, msg, filename)."""
cfg = get_runtime_config()
@@ -164,15 +197,17 @@ async def backup_run_scheduled(
"""Run all due backup plans. Call this from cron (e.g. every hour) or manually."""
from datetime import datetime as dt
now = dt.utcnow()
cfg = get_runtime_config()
result = await db.execute(select(BackupPlan).where(BackupPlan.enabled == True))
plans = result.scalars().all()
results = []
for plan in plans:
ok = False
msg = ""
try:
prev_run = croniter(plan.schedule, now).get_prev(dt)
# Run if we're within 15 minutes after the scheduled time
secs_since = (now - prev_run).total_seconds()
if secs_since > 900 or secs_since < 0: # Not within 15 min window
if secs_since > 900 or secs_since < 0:
continue
except Exception:
continue
@@ -183,6 +218,11 @@ async def backup_run_scheduled(
results.append({"plan": plan.name, "status": "skipped", "msg": "Site not found or path invalid"})
continue
ok, msg, filename = _run_site_backup(site)
if ok and filename:
full = os.path.join(cfg["backup_path"], filename)
u_ok, u_msg = _maybe_upload_s3(full, plan)
if u_msg:
msg = f"{msg}; {u_msg}" if u_ok else f"{msg}; S3 failed: {u_msg}"
if ok:
send_email(
subject=f"YakPanel - Scheduled backup: {plan.name}",
@@ -195,6 +235,11 @@ async def backup_run_scheduled(
results.append({"plan": plan.name, "status": "skipped", "msg": "Database not found"})
continue
ok, msg, filename = _run_database_backup(dbo)
if ok and filename:
full = os.path.join(cfg["backup_path"], "database", filename)
u_ok, u_msg = _maybe_upload_s3(full, plan)
if u_msg:
msg = f"{msg}; {u_msg}" if u_ok else f"{msg}; S3 failed: {u_msg}"
if ok:
send_email(
subject=f"YakPanel - Scheduled backup: {plan.name}",

View File

@@ -1,6 +1,9 @@
"""YakPanel - Crontab API"""
import json
import tempfile
import os
from pathlib import Path
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
@@ -14,6 +17,20 @@ from app.models.crontab import Crontab
router = APIRouter(prefix="/crontab", tags=["crontab"])
_CRON_TEMPLATES = Path(__file__).resolve().parent.parent / "data" / "cron_templates.json"
@router.get("/templates")
async def crontab_templates(current_user: User = Depends(get_current_user)):
"""YakPanel starter cron templates (edit before apply; no external branding)."""
if not _CRON_TEMPLATES.is_file():
return {"templates": []}
try:
data = json.loads(_CRON_TEMPLATES.read_text(encoding="utf-8"))
return {"templates": data if isinstance(data, list) else []}
except (json.JSONDecodeError, OSError):
return {"templates": []}
class CreateCrontabRequest(BaseModel):
name: str = ""

View File

@@ -1,4 +1,6 @@
"""YakPanel - Dashboard API"""
import os
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
@@ -10,6 +12,24 @@ from app.models.user import User
router = APIRouter(prefix="/dashboard", tags=["dashboard"])
def _root_inode_usage() -> dict:
"""Inode use on filesystem root (Linux). Returns percent or null if unavailable."""
try:
sv = os.statvfs("/")
except (AttributeError, OSError):
return {"percent": None, "used": None, "total": None}
total = int(sv.f_files)
free = int(sv.f_ffree)
if total <= 0:
return {"percent": None, "used": None, "total": None}
used = max(0, total - free)
return {
"percent": round(100.0 * used / total, 1),
"used": used,
"total": total,
}
@router.get("/stats")
async def get_stats(
current_user: User = Depends(get_current_user),
@@ -18,10 +38,10 @@ async def get_stats(
"""Get dashboard statistics"""
import psutil
from app.services.site_service import get_site_count
from app.services.site_service import get_site_count, ssl_alert_summary
from app.models.ftp import Ftp
from app.models.database import Database
from sqlalchemy import select, func
site_count = await get_site_count(db)
ftp_result = await db.execute(select(func.count()).select_from(Ftp))
ftp_count = ftp_result.scalar() or 0
@@ -32,11 +52,14 @@ async def get_stats(
cpu_percent = psutil.cpu_percent(interval=1)
memory = psutil.virtual_memory()
disk = psutil.disk_usage("/")
inodes = _root_inode_usage()
ssl_alerts = await ssl_alert_summary(db)
return {
"site_count": site_count,
"ftp_count": ftp_count,
"database_count": database_count,
"ssl_alerts": ssl_alerts,
"system": {
"cpu_percent": cpu_percent,
"memory_percent": memory.percent,
@@ -45,5 +68,8 @@ async def get_stats(
"disk_percent": disk.percent,
"disk_used_gb": round(disk.used / 1024 / 1024 / 1024, 2),
"disk_total_gb": round(disk.total / 1024 / 1024 / 1024, 2),
"inode_percent": inodes["percent"],
"inode_used": inodes["used"],
"inode_total": inodes["total"],
},
}

View File

@@ -1,8 +1,13 @@
"""YakPanel - File manager API"""
import os
import shutil
import stat
import zipfile
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form
from fastapi.responses import FileResponse
from pydantic import BaseModel
from pydantic import BaseModel, Field
from app.core.config import get_runtime_config
from app.core.utils import read_file, write_file, path_safe_check
@@ -13,31 +18,83 @@ router = APIRouter(prefix="/files", tags=["files"])
def _resolve_path(path: str) -> str:
"""Resolve and validate path within allowed roots (cross-platform)"""
cfg = get_runtime_config()
www_root = os.path.abspath(cfg["www_root"])
setup_path = os.path.abspath(cfg["setup_path"])
allowed = [www_root, setup_path]
if os.name != "nt":
allowed.append(os.path.abspath("/www"))
"""
Resolve API path to an OS path.
On Linux/macOS: path is an absolute POSIX path from filesystem root (/) so admins
can browse the whole server (typical expectation for a full-server admin file manager).
On Windows (dev): paths stay sandboxed under www_root / setup_path.
"""
if ".." in path:
raise HTTPException(status_code=401, detail="Path traversal not allowed")
norm_path = path.strip().replace("\\", "/").strip("/")
# Root or www_root-style path
if not norm_path or norm_path in ("www", "www/wwwroot", "wwwroot"):
full = www_root
elif norm_path.startswith("www/wwwroot/"):
full = os.path.abspath(os.path.join(www_root, norm_path[12:]))
else:
full = os.path.abspath(os.path.join(www_root, norm_path))
if not any(
full == r or (full + os.sep).startswith(r + os.sep)
for r in allowed
):
raise HTTPException(status_code=403, detail="Path not allowed")
raise HTTPException(status_code=400, detail="Path traversal not allowed")
raw = path.strip().replace("\\", "/")
if os.name == "nt":
cfg = get_runtime_config()
www_root = os.path.abspath(cfg["www_root"])
setup_path = os.path.abspath(cfg["setup_path"])
allowed = [www_root, setup_path]
norm_path = raw.strip("/")
if not norm_path or norm_path in ("www", "www/wwwroot", "wwwroot"):
full = www_root
elif norm_path.startswith("www/wwwroot/"):
full = os.path.abspath(os.path.join(www_root, norm_path[12:]))
else:
full = os.path.abspath(os.path.join(www_root, norm_path))
if not any(
full == r or (full + os.sep).startswith(r + os.sep)
for r in allowed
):
raise HTTPException(status_code=403, detail="Path not allowed")
return full
# POSIX: absolute paths from /
if not raw or raw == "/":
return "/"
if not raw.startswith("/"):
raw = "/" + raw
full = os.path.normpath(raw)
if not full.startswith("/"):
raise HTTPException(status_code=400, detail="Invalid path")
return full
def _stat_entry(path: str, name: str) -> dict | None:
item_path = os.path.join(path, name)
try:
st = os.stat(item_path, follow_symlinks=False)
except OSError:
return None
is_dir = os.path.isdir(item_path)
owner = str(st.st_uid)
group = str(st.st_gid)
try:
import pwd
import grp
owner = pwd.getpwuid(st.st_uid).pw_name
group = grp.getgrgid(st.st_gid).gr_name
except (ImportError, KeyError, OSError):
pass
try:
sym = stat.filemode(st.st_mode)
except Exception:
sym = ""
return {
"name": name,
"is_dir": is_dir,
"size": st.st_size if not is_dir else 0,
"mtime": datetime.fromtimestamp(st.st_mtime, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S"),
"mtime_ts": int(st.st_mtime),
"mode": format(st.st_mode & 0o777, "o"),
"mode_symbolic": sym,
"owner": owner,
"group": group,
}
@router.get("/list")
async def files_list(
path: str = "/",
@@ -49,20 +106,102 @@ async def files_list(
except HTTPException:
raise
if not os.path.isdir(full):
raise HTTPException(status_code=401, detail="Not a directory")
raise HTTPException(status_code=404, detail="Not a directory")
items = []
for name in os.listdir(full):
item_path = os.path.join(full, name)
try:
stat = os.stat(item_path)
items.append({
"name": name,
"is_dir": os.path.isdir(item_path),
"size": stat.st_size if os.path.isfile(item_path) else 0,
})
except OSError:
pass
return {"path": path, "items": items}
try:
names = os.listdir(full)
except PermissionError:
raise HTTPException(status_code=403, detail="Permission denied")
except FileNotFoundError:
raise HTTPException(status_code=404, detail="Not found")
except OSError as e:
raise HTTPException(status_code=500, detail=str(e))
for name in sorted(names, key=str.lower):
row = _stat_entry(full, name)
if row:
items.append(row)
display_path = full if full == "/" else full.rstrip("/")
return {"path": display_path, "items": items}
@router.get("/dir-size")
async def files_dir_size(
path: str,
current_user: User = Depends(get_current_user),
):
"""Return total byte size of directory tree (may be slow on large trees)."""
try:
full = _resolve_path(path)
except HTTPException:
raise
if not os.path.isdir(full):
raise HTTPException(status_code=400, detail="Not a directory")
total = 0
try:
for root, _dirs, files in os.walk(full):
for fn in files:
fp = os.path.join(root, fn)
try:
total += os.path.getsize(fp)
except OSError:
pass
except PermissionError:
raise HTTPException(status_code=403, detail="Permission denied")
return {"size": total}
@router.get("/search")
async def files_search(
q: str,
path: str = "/",
max_results: int = 200,
current_user: User = Depends(get_current_user),
):
"""Find files/folders whose name contains q (case-insensitive), walking from path."""
if not q or not q.strip():
return {"path": path, "results": []}
try:
root = _resolve_path(path)
except HTTPException:
raise
if not os.path.isdir(root):
raise HTTPException(status_code=400, detail="Not a directory")
qn = q.strip().lower()
results: list[dict] = []
skip_prefixes = tuple()
if os.name != "nt" and root in ("/", "//"):
skip_prefixes = ("/proc", "/sys", "/dev")
def should_skip(p: str) -> bool:
ap = os.path.abspath(p)
return any(ap == sp or ap.startswith(sp + os.sep) for sp in skip_prefixes)
try:
for dirpath, dirnames, filenames in os.walk(root, topdown=True):
if len(results) >= max_results:
break
if should_skip(dirpath):
dirnames[:] = []
continue
try:
for dn in list(dirnames):
if len(results) >= max_results:
break
if qn in dn.lower():
rel = os.path.join(dirpath, dn)
results.append({"path": rel.replace("\\", "/"), "name": dn, "is_dir": True})
for fn in filenames:
if len(results) >= max_results:
break
if qn in fn.lower():
rel = os.path.join(dirpath, fn)
results.append({"path": rel.replace("\\", "/"), "name": fn, "is_dir": False})
except OSError:
continue
except PermissionError:
raise HTTPException(status_code=403, detail="Permission denied")
return {"path": path, "query": q, "results": results[:max_results]}
@router.get("/read")
@@ -238,3 +377,211 @@ async def files_write(
if not write_file(full, body.content):
raise HTTPException(status_code=500, detail="Failed to write file")
return {"status": True, "msg": "Saved"}
class TouchRequest(BaseModel):
    """Request body for POST /files/touch."""
    path: str  # directory in which to create the file
    name: str  # bare file name (no path separators)
class ChmodRequest(BaseModel):
    """Request body for POST /files/chmod."""
    file_path: str  # file or directory to change
    mode: str = Field(description="Octal mode e.g. 0644 or 755")
    recursive: bool = False  # apply to a directory's entire tree
class CopyRequest(BaseModel):
    """Request body for POST /files/copy."""
    path: str  # directory holding the source entry
    name: str  # source file/dir name inside path
    dest_path: str  # destination directory
    dest_name: str | None = None  # optional new name (defaults to name)
class MoveRequest(BaseModel):
    """Request body for POST /files/move."""
    path: str  # directory holding the source entry
    name: str  # source file/dir name inside path
    dest_path: str  # destination directory
    dest_name: str | None = None  # optional new name (defaults to name)
class CompressRequest(BaseModel):
    """Request body for POST /files/compress."""
    path: str  # directory in which the archive is created
    names: list[str]  # entries (files/dirs) inside path to include
    archive_name: str  # zip file name (".zip" appended if missing)
@router.post("/touch")
async def files_touch(
body: TouchRequest,
current_user: User = Depends(get_current_user),
):
"""Create an empty file in directory path."""
try:
parent = _resolve_path(body.path)
except HTTPException:
raise
if not body.name or ".." in body.name or "/" in body.name or "\\" in body.name:
raise HTTPException(status_code=400, detail="Invalid name")
if not path_safe_check(body.name):
raise HTTPException(status_code=400, detail="Invalid name")
full = os.path.join(parent, body.name)
if os.path.exists(full):
raise HTTPException(status_code=400, detail="Already exists")
try:
open(full, "a", encoding="utf-8").close()
except OSError as e:
raise HTTPException(status_code=500, detail=str(e))
return {"status": True, "msg": "Created"}
@router.post("/chmod")
async def files_chmod(
body: ChmodRequest,
current_user: User = Depends(get_current_user),
):
"""chmod a file or directory. mode is octal string (644, 0755)."""
try:
full = _resolve_path(body.file_path)
except HTTPException:
raise
if not os.path.exists(full):
raise HTTPException(status_code=404, detail="Not found")
try:
mode = int(body.mode.strip(), 8)
except ValueError:
raise HTTPException(status_code=400, detail="Invalid mode")
if mode < 0 or mode > 0o7777:
raise HTTPException(status_code=400, detail="Invalid mode")
def _chmod_one(p: str) -> None:
os.chmod(p, mode)
try:
if body.recursive and os.path.isdir(full):
for root, dirs, files in os.walk(full):
for d in dirs:
_chmod_one(os.path.join(root, d))
for f in files:
_chmod_one(os.path.join(root, f))
_chmod_one(full)
else:
_chmod_one(full)
except PermissionError:
raise HTTPException(status_code=403, detail="Permission denied")
except OSError as e:
raise HTTPException(status_code=500, detail=str(e))
return {"status": True, "msg": "Permissions updated"}
@router.post("/copy")
async def files_copy(
body: CopyRequest,
current_user: User = Depends(get_current_user),
):
"""Copy file or directory into dest_path (optionally new name)."""
try:
src_parent = _resolve_path(body.path)
dest_parent = _resolve_path(body.dest_path)
except HTTPException:
raise
if not body.name or ".." in body.name:
raise HTTPException(status_code=400, detail="Invalid name")
src = os.path.join(src_parent, body.name)
dest_name = body.dest_name or body.name
if ".." in dest_name or "/" in dest_name or "\\" in dest_name:
raise HTTPException(status_code=400, detail="Invalid destination name")
dest = os.path.join(dest_parent, dest_name)
if not os.path.exists(src):
raise HTTPException(status_code=404, detail="Source not found")
if os.path.exists(dest):
raise HTTPException(status_code=400, detail="Destination already exists")
if not os.path.isdir(dest_parent):
raise HTTPException(status_code=400, detail="Destination parent must be a directory")
try:
if os.path.isdir(src):
shutil.copytree(src, dest, symlinks=True)
else:
shutil.copy2(src, dest)
except OSError as e:
raise HTTPException(status_code=500, detail=str(e))
return {"status": True, "msg": "Copied"}
@router.post("/move")
async def files_move(
body: MoveRequest,
current_user: User = Depends(get_current_user),
):
"""Move (rename) file or directory to another folder."""
try:
src_parent = _resolve_path(body.path)
dest_parent = _resolve_path(body.dest_path)
except HTTPException:
raise
if not body.name or ".." in body.name:
raise HTTPException(status_code=400, detail="Invalid name")
src = os.path.join(src_parent, body.name)
dest_name = body.dest_name or body.name
if ".." in dest_name or "/" in dest_name or "\\" in dest_name:
raise HTTPException(status_code=400, detail="Invalid destination name")
dest = os.path.join(dest_parent, dest_name)
if not os.path.exists(src):
raise HTTPException(status_code=404, detail="Source not found")
if os.path.exists(dest):
raise HTTPException(status_code=400, detail="Destination already exists")
if not os.path.isdir(dest_parent):
raise HTTPException(status_code=400, detail="Destination parent must be a directory")
try:
shutil.move(src, dest)
except OSError as e:
raise HTTPException(status_code=500, detail=str(e))
return {"status": True, "msg": "Moved"}
@router.post("/compress")
async def files_compress(
body: CompressRequest,
current_user: User = Depends(get_current_user),
):
"""Create a zip in path containing named files/folders."""
try:
parent = _resolve_path(body.path)
except HTTPException:
raise
if not os.path.isdir(parent):
raise HTTPException(status_code=400, detail="Not a directory")
if not body.names:
raise HTTPException(status_code=400, detail="Nothing to compress")
name = (body.archive_name or "archive").strip()
if not name.lower().endswith(".zip"):
name += ".zip"
if ".." in name or "/" in name or "\\" in name:
raise HTTPException(status_code=400, detail="Invalid archive name")
zip_path = os.path.join(parent, name)
if os.path.exists(zip_path):
raise HTTPException(status_code=400, detail="Archive already exists")
try:
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
for entry in body.names:
if not entry or ".." in entry or "/" in entry or "\\" in entry:
continue
src = os.path.join(parent, entry)
if not os.path.exists(src):
continue
if os.path.isfile(src):
zf.write(src, arcname=entry.replace("\\", "/"))
elif os.path.isdir(src):
for root, _dirs, files in os.walk(src):
for f in files:
fp = os.path.join(root, f)
zn = os.path.relpath(fp, parent).replace("\\", "/")
zf.write(fp, zn)
except OSError as e:
if os.path.exists(zip_path):
try:
os.remove(zip_path)
except OSError:
pass
raise HTTPException(status_code=500, detail=str(e))
return {"status": True, "msg": "Compressed", "archive": name}

View File

@@ -20,6 +20,38 @@ class CreateFirewallRuleRequest(BaseModel):
ps: str = ""
@router.get("/status")
async def firewall_backend_status(current_user: User = Depends(get_current_user)):
"""UFW and firewalld presence/state for the Security UI (read-only)."""
ufw_out, _ = exec_shell_sync("ufw status 2>/dev/null", timeout=5)
ufw_text = (ufw_out or "").strip()
ufw_detected = bool(ufw_text) and "Status:" in ufw_text
ufw_active: bool | None = None
if ufw_detected:
if "Status: active" in ufw_text:
ufw_active = True
elif "Status: inactive" in ufw_text:
ufw_active = False
fw_state_out, _ = exec_shell_sync("firewall-cmd --state 2>/dev/null", timeout=5)
fw_line = (fw_state_out or "").strip().lower()
firewalld_running = fw_line == "running"
firewalld_detected = fw_line in ("running", "not running")
return {
"ufw": {
"detected": ufw_detected,
"active": ufw_active,
"summary_line": ufw_text.split("\n")[0] if ufw_text else "",
},
"firewalld": {
"detected": firewalld_detected,
"running": firewalld_running,
"state": fw_line or None,
},
}
@router.get("/list")
async def firewall_list(
current_user: User = Depends(get_current_user),

View File

@@ -1,5 +1,6 @@
"""YakPanel - FTP API"""
from fastapi import APIRouter, Depends, HTTPException
import os
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from pydantic import BaseModel
@@ -103,6 +104,25 @@ async def ftp_delete(
return {"status": True, "msg": "FTP account deleted"}
@router.get("/logs")
async def ftp_logs(
lines: int = Query(default=200, ge=1, le=5000),
current_user: User = Depends(get_current_user),
):
"""Tail common Pure-FTPd log paths if readable (non-destructive)."""
candidates = [
"/var/log/pure-ftpd/pure-ftpd.log",
"/var/log/pureftpd.log",
"/var/log/messages",
]
for path in candidates:
if os.path.isfile(path):
out, err = exec_shell_sync(f'tail -n {int(lines)} "{path}" 2>/dev/null', timeout=15)
text = (out or "") + (err or "")
return {"path": path, "content": text[-800000:] or "(empty)"}
return {"path": None, "content": "No known FTP log file found on this server."}
@router.get("/count")
async def ftp_count(
current_user: User = Depends(get_current_user),

View File

@@ -13,7 +13,7 @@ router = APIRouter(prefix="/logs", tags=["logs"])
def _resolve_log_path(path: str) -> str:
"""Resolve path within www_logs only"""
if ".." in path:
raise HTTPException(status_code=401, detail="Path traversal not allowed")
raise HTTPException(status_code=400, detail="Path traversal not allowed")
cfg = get_runtime_config()
logs_root = os.path.abspath(cfg["www_logs"])
path = path.strip().replace("\\", "/").lstrip("/")

View File

@@ -13,7 +13,6 @@ from collections import defaultdict
from typing import Annotated, Any, Literal, Optional, Union
from urllib.parse import urlparse
import asyncssh
from fastapi import APIRouter, HTTPException, Request, WebSocket
from pydantic import BaseModel, Field, field_validator
@@ -198,6 +197,8 @@ async def create_job(body: CreateJobRequest, request: Request):
auth_payload = body.auth
async def runner() -> None:
import asyncssh
async with _jobs_lock:
job = _jobs.get(job_id)
if not job:

View File

@@ -0,0 +1,78 @@
"""YakPanel - read-only security checklist (local server probes)."""
import os
import re
from fastapi import APIRouter, Depends
from app.api.auth import get_current_user
from app.models.user import User
from app.core.utils import read_file, exec_shell_sync
router = APIRouter(prefix="/security", tags=["security"])
@router.get("/checklist")
async def security_checklist(current_user: User = Depends(get_current_user)):
"""Non-destructive hints: SSH config, firewall helper, fail2ban. Not a full audit."""
items: list[dict] = []
sshd = "/etc/ssh/sshd_config"
body = read_file(sshd) if os.path.isfile(sshd) else None
if isinstance(body, str) and body:
if re.search(r"^\s*PasswordAuthentication\s+no\s*$", body, re.MULTILINE | re.IGNORECASE):
items.append({
"id": "ssh_password_auth",
"ok": True,
"title": "SSH password auth",
"detail": "PasswordAuthentication appears set to no (prefer key-based login).",
})
elif re.search(r"^\s*PasswordAuthentication\s+yes", body, re.MULTILINE | re.IGNORECASE):
items.append({
"id": "ssh_password_auth",
"ok": False,
"title": "SSH password auth",
"detail": "PasswordAuthentication is yes — consider disabling and using SSH keys.",
})
else:
items.append({
"id": "ssh_password_auth",
"ok": None,
"title": "SSH password auth",
"detail": "Could not find an explicit PasswordAuthentication line (defaults depend on distro).",
})
else:
items.append({
"id": "ssh_password_auth",
"ok": None,
"title": "SSH password auth",
"detail": "/etc/ssh/sshd_config not readable from the panel process.",
})
ufw_out, _ = exec_shell_sync("ufw status 2>/dev/null", timeout=5)
ufw = ufw_out or ""
if "Status: active" in ufw:
items.append({"id": "ufw", "ok": True, "title": "UFW firewall", "detail": "UFW reports active."})
elif "Status: inactive" in ufw:
items.append({
"id": "ufw",
"ok": None,
"title": "UFW firewall",
"detail": "UFW installed but inactive — enable if this host is public.",
})
else:
items.append({
"id": "ufw",
"ok": None,
"title": "UFW firewall",
"detail": "UFW not detected (OK if you use firewalld/iptables only).",
})
f2_out, _ = exec_shell_sync("systemctl is-active fail2ban 2>/dev/null", timeout=5)
f2_active = (f2_out or "").strip() == "active"
items.append({
"id": "fail2ban",
"ok": f2_active,
"title": "fail2ban",
"detail": "fail2ban is active." if f2_active else "fail2ban not active (optional hardening).",
})
return {"items": items, "disclaimer": "YakPanel reads local settings only; this is not a compliance scan."}

View File

@@ -29,6 +29,11 @@ class CreateSiteRequest(BaseModel):
ps: str = ""
php_version: str = "74"
force_https: bool = False
proxy_upstream: str = ""
proxy_websocket: bool = False
dir_auth_path: str = ""
dir_auth_user_file: str = ""
php_deny_execute: bool = False
class UpdateSiteRequest(BaseModel):
@@ -37,6 +42,11 @@ class UpdateSiteRequest(BaseModel):
ps: str | None = None
php_version: str | None = None
force_https: bool | None = None
proxy_upstream: str | None = None
proxy_websocket: bool | None = None
dir_auth_path: str | None = None
dir_auth_user_file: str | None = None
php_deny_execute: bool | None = None
@router.get("/list")
@@ -66,12 +76,49 @@ async def site_create(
ps=body.ps,
php_version=body.php_version or "74",
force_https=1 if body.force_https else 0,
proxy_upstream=(body.proxy_upstream or "").strip(),
proxy_websocket=1 if body.proxy_websocket else 0,
dir_auth_path=(body.dir_auth_path or "").strip(),
dir_auth_user_file=(body.dir_auth_user_file or "").strip(),
php_deny_execute=1 if body.php_deny_execute else 0,
)
if not result["status"]:
raise HTTPException(status_code=400, detail=result["msg"])
return result
class SiteBatchRequest(BaseModel):
    """Request body for POST /sites/batch."""
    action: str  # one of: "enable", "disable", "delete"
    ids: list[int]  # site ids to apply the action to
@router.post("/batch")
async def site_batch(
    body: SiteBatchRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Bulk enable, disable, or delete sites by id."""
    if body.action not in ("enable", "disable", "delete"):
        raise HTTPException(status_code=400, detail="action must be enable, disable, or delete")
    if not body.ids:
        raise HTTPException(status_code=400, detail="ids required")
    results: list[dict] = []
    for sid in body.ids:
        # "delete" removes the site entirely; enable/disable just flip its status flag.
        if body.action == "delete":
            outcome = await delete_site(db, sid)
        else:
            outcome = await set_site_status(db, sid, 1 if body.action == "enable" else 0)
        results.append(
            {
                "id": sid,
                "ok": bool(outcome.get("status")),
                "msg": outcome.get("msg", ""),
            }
        )
    return {"results": results}
@router.get("/{site_id}")
async def site_get(
site_id: int,
@@ -94,12 +141,22 @@ async def site_update(
):
"""Update site domains, path, or note"""
result = await update_site(
db, site_id,
db,
site_id,
path=body.path,
domains=body.domains,
ps=body.ps,
php_version=body.php_version,
force_https=None if body.force_https is None else (1 if body.force_https else 0),
proxy_upstream=body.proxy_upstream,
proxy_websocket=None
if body.proxy_websocket is None
else (1 if body.proxy_websocket else 0),
dir_auth_path=body.dir_auth_path,
dir_auth_user_file=body.dir_auth_user_file,
php_deny_execute=None
if body.php_deny_execute is None
else (1 if body.php_deny_execute else 0),
)
if not result["status"]:
raise HTTPException(status_code=400, detail=result["msg"])

View File

@@ -1,54 +1,342 @@
"""YakPanel - App Store / Software API"""
from __future__ import annotations
import asyncio
import os
import shlex
import shutil
import subprocess
from typing import Literal
from fastapi import APIRouter, Depends, HTTPException
from app.core.utils import exec_shell_sync
from app.core.utils import environment_with_system_path, exec_shell_sync
from app.api.auth import get_current_user
from app.models.user import User
router = APIRouter(prefix="/soft", tags=["soft"])
# Curated list of common server software (Debian/Ubuntu package names)
SOFTWARE_LIST = [
{"id": "nginx", "name": "Nginx", "desc": "Web server", "pkg": "nginx"},
{"id": "mysql-server", "name": "MySQL Server", "desc": "Database server", "pkg": "mysql-server"},
{"id": "mariadb-server", "name": "MariaDB", "desc": "Database server", "pkg": "mariadb-server"},
{"id": "php", "name": "PHP", "desc": "PHP runtime", "pkg": "php"},
{"id": "php-fpm", "name": "PHP-FPM", "desc": "PHP FastCGI", "pkg": "php-fpm"},
{"id": "redis-server", "name": "Redis", "desc": "In-memory cache", "pkg": "redis-server"},
{"id": "postgresql", "name": "PostgreSQL", "desc": "Database server", "pkg": "postgresql"},
{"id": "mongodb", "name": "MongoDB", "desc": "NoSQL database", "pkg": "mongodb"},
{"id": "certbot", "name": "Certbot", "desc": "Let's Encrypt SSL", "pkg": "certbot"},
{"id": "docker", "name": "Docker", "desc": "Container runtime", "pkg": "docker.io"},
{"id": "nodejs", "name": "Node.js", "desc": "JavaScript runtime", "pkg": "nodejs"},
{"id": "npm", "name": "npm", "desc": "Node package manager", "pkg": "npm"},
{"id": "git", "name": "Git", "desc": "Version control", "pkg": "git"},
{"id": "python3", "name": "Python 3", "desc": "Python runtime", "pkg": "python3"},
PmKind = Literal["apt", "dnf", "yum", "microdnf", "apk", "none"]
# Per distro: apt = Debian/Ubuntu, rpm = RHEL/Fedora/Alma/Rocky, apk = Alpine (best-effort)
SOFTWARE_LIST: list[dict[str, str]] = [
{
"id": "nginx",
"name": "Nginx",
"desc": "Web server",
"apt": "nginx",
"rpm": "nginx",
"apk": "nginx",
},
{
"id": "mysql-server",
"name": "MySQL Server",
"desc": "Database server",
"apt": "mysql-server",
"rpm": "mysql-server",
"apk": "mysql",
},
{
"id": "mariadb-server",
"name": "MariaDB",
"desc": "Database server",
"apt": "mariadb-server",
"rpm": "mariadb-server",
"apk": "mariadb",
},
{
"id": "php",
"name": "PHP",
"desc": "PHP runtime",
"apt": "php",
"rpm": "php",
"apk": "php",
},
{
"id": "php-fpm",
"name": "PHP-FPM",
"desc": "PHP FastCGI",
"apt": "php-fpm",
"rpm": "php-fpm",
"apk": "php-fpm",
},
{
"id": "redis-server",
"name": "Redis",
"desc": "In-memory cache",
"apt": "redis-server",
"rpm": "redis",
"apk": "redis",
},
{
"id": "postgresql",
"name": "PostgreSQL",
"desc": "Database server",
"apt": "postgresql",
"rpm": "postgresql-server",
"apk": "postgresql",
},
{
"id": "mongodb",
"name": "MongoDB",
"desc": "NoSQL database",
"apt": "mongodb",
"rpm": "mongodb-server",
"apk": "mongodb",
},
{
"id": "certbot",
"name": "Certbot",
"desc": "Let's Encrypt SSL",
"apt": "certbot",
"rpm": "certbot",
"apk": "certbot",
},
{
"id": "docker",
"name": "Docker",
"desc": "Container runtime",
"apt": "docker.io",
"rpm": "docker",
"apk": "docker",
},
{
"id": "nodejs",
"name": "Node.js",
"desc": "JavaScript runtime",
"apt": "nodejs",
"rpm": "nodejs",
"apk": "nodejs",
},
{
"id": "npm",
"name": "npm",
"desc": "Node package manager",
"apt": "npm",
"rpm": "npm",
"apk": "npm",
},
{
"id": "git",
"name": "Git",
"desc": "Version control",
"apt": "git",
"rpm": "git",
"apk": "git",
},
{
"id": "python3",
"name": "Python 3",
"desc": "Python runtime",
"apt": "python3",
"rpm": "python3",
"apk": "python3",
},
]
def _check_installed(pkg: str) -> tuple[bool, str]:
"""Check if package is installed. Returns (installed, version_or_error)."""
out, err = exec_shell_sync(f"dpkg -l {pkg} 2>/dev/null | grep ^ii", timeout=5)
def _resolve_command(name: str) -> str | None:
"""Locate an executable; systemd often provides a venv-only PATH, so scan standard dirs too."""
std = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
merged = f"{os.environ.get('PATH', '').strip()}:{std}".strip(":")
found = shutil.which(name, path=merged)
if found:
return found
for d in ("/usr/bin", "/usr/sbin", "/bin", "/sbin", "/usr/local/bin", "/usr/local/sbin"):
cand = os.path.join(d, name)
if os.path.isfile(cand) and os.access(cand, os.X_OK):
return cand
return None
def _detect_package_manager() -> PmKind:
    """Return the first available package manager, preferring apt over the rpm family over apk."""
    probes: tuple[tuple[PmKind, str], ...] = (
        ("apt", "apt-get"),
        ("dnf", "dnf"),
        ("yum", "yum"),
        ("microdnf", "microdnf"),
        ("apk", "apk"),
    )
    for manager, binary in probes:
        if _resolve_command(binary):
            return manager
    return "none"
def _package_name(entry: dict[str, str], pm: PmKind) -> str:
if pm == "apt":
return entry["apt"]
if pm in ("dnf", "yum", "microdnf"):
return entry["rpm"]
if pm == "apk":
return entry.get("apk") or entry["apt"]
return entry["apt"]
def _apt_env() -> dict[str, str]:
    """Environment for apt commands: system PATH plus non-interactive frontends."""
    env = environment_with_system_path()
    # setdefault so operator-provided overrides in the process environment win.
    for key, value in (("DEBIAN_FRONTEND", "noninteractive"), ("APT_LISTCHANGES_FRONTEND", "none")):
        env.setdefault(key, value)
    return env
def _run_shell(script: str, timeout: int, env: dict[str, str] | None = None) -> None:
    """Run a shell script; raise HTTPException(500) with stderr/stdout detail on failure."""
    completed = subprocess.run(
        script,
        shell=True,
        capture_output=True,
        text=True,
        timeout=timeout,
        env=environment_with_system_path(env),
    )
    if completed.returncode != 0:
        stdout_text = (completed.stdout or "").strip()
        stderr_text = (completed.stderr or "").strip()
        # Prefer stderr; cap the detail so huge package-manager logs don't flood the API.
        detail = stderr_text or stdout_text or f"Command failed (exit {completed.returncode})"
        raise HTTPException(status_code=500, detail=detail[:4000])
def _check_installed_apt(pkg: str) -> tuple[bool, str]:
    """Check dpkg for an installed package.

    Returns (installed, version). The version string may be empty when the
    dpkg line cannot be parsed.
    """
    out, _ = exec_shell_sync(f"dpkg -l {shlex.quote(pkg)} 2>/dev/null | grep ^ii", timeout=5)
    line = out.strip()
    if not line:
        return False, ""
    # dpkg -l line format: "ii  <name>  <version>  <arch>  <description>"
    parts = line.split()
    version = parts[2] if len(parts) >= 3 else ""
    # BUGFIX: grep ^ii matched, so the package IS installed even when the
    # version column cannot be parsed; previously this fell through and
    # reported the package as not installed.
    return True, version
def _check_installed_rpm(pkg: str) -> tuple[bool, str]:
    """Query the rpm database. Returns (installed, EVR version string or "")."""
    rpm_bin = _resolve_command("rpm") or "rpm"
    try:
        completed = subprocess.run(
            [rpm_bin, "-q", "--queryformat", "%{EVR}", pkg],
            capture_output=True,
            text=True,
            timeout=5,
            env=environment_with_system_path(),
        )
    except (FileNotFoundError, subprocess.TimeoutExpired):
        # No rpm binary (or it hung): treat as not installed.
        return False, ""
    if completed.returncode != 0:
        return False, ""
    return True, (completed.stdout or "").strip()
def _check_installed_apk(pkg: str) -> tuple[bool, str]:
    """Check apk for an installed package. Returns (installed, first output line or "")."""
    apk_bin = _resolve_command("apk") or "apk"
    try:
        completed = subprocess.run(
            [apk_bin, "info", "-e", pkg],
            capture_output=True,
            text=True,
            timeout=5,
            env=environment_with_system_path(),
        )
    except (FileNotFoundError, subprocess.TimeoutExpired):
        return False, ""
    if completed.returncode != 0:
        return False, ""
    output = (completed.stdout or "").strip()
    if not output:
        # Exit 0 with no output still means the package exists.
        return True, ""
    return True, output.split("\n")[0].strip()
def _check_installed(pm: PmKind, pkg: str) -> tuple[bool, str]:
    """Dispatch the install check to the backend for this package manager."""
    checkers = {
        "apt": _check_installed_apt,
        "dnf": _check_installed_rpm,
        "yum": _check_installed_rpm,
        "microdnf": _check_installed_rpm,
        "apk": _check_installed_apk,
    }
    checker = checkers.get(pm)
    # Unknown manager ("none"): nothing can be verified.
    return checker(pkg) if checker else (False, "")
def _install_script(pm: PmKind, pkg: str) -> tuple[str, int, dict[str, str] | None]:
    """Build (shell script, timeout seconds, env overrides) to install *pkg* with *pm*.

    Raises HTTPException(501) when the expected package-manager binary is missing.
    """
    quoted_pkg = shlex.quote(pkg)
    if pm == "apt":
        apt_get = _resolve_command("apt-get")
        if not apt_get:
            raise HTTPException(status_code=501, detail="apt-get not found on this system.")
        apt_exe = shlex.quote(apt_get)
        # apt needs a refreshed index and a non-interactive environment.
        return f"{apt_exe} update -qq && {apt_exe} install -y {quoted_pkg}", 600, _apt_env()
    if pm in ("dnf", "yum", "microdnf"):
        # The rpm-family managers share the binary name with the pm identifier.
        binary = _resolve_command(pm)
        if binary:
            return f"{shlex.quote(binary)} install -y {quoted_pkg}", 600, None
    if pm == "apk":
        apk = _resolve_command("apk")
        if apk:
            apk_exe = shlex.quote(apk)
            return f"{apk_exe} update && {apk_exe} add {quoted_pkg}", 600, None
    raise HTTPException(
        status_code=501,
        detail="No supported package manager found (need apt-get, dnf, yum, microdnf, or apk).",
    )
def _uninstall_script(pm: PmKind, pkg: str) -> tuple[str, int, dict[str, str] | None]:
    """Build (shell script, timeout seconds, env overrides) to remove *pkg* with *pm*.

    Raises HTTPException(501) when the expected package-manager binary is missing.
    """
    quoted_pkg = shlex.quote(pkg)
    if pm == "apt":
        apt_get = _resolve_command("apt-get")
        if not apt_get:
            raise HTTPException(status_code=501, detail="apt-get not found on this system.")
        return f"{shlex.quote(apt_get)} remove -y {quoted_pkg}", 180, _apt_env()
    if pm in ("dnf", "yum", "microdnf"):
        # The rpm-family managers share the binary name with the pm identifier.
        binary = _resolve_command(pm)
        if binary:
            return f"{shlex.quote(binary)} remove -y {quoted_pkg}", 180, None
    if pm == "apk":
        apk = _resolve_command("apk")
        if apk:
            # apk removals are quick; shorter timeout than the apt/rpm family.
            return f"{shlex.quote(apk)} del {quoted_pkg}", 120, None
    raise HTTPException(
        status_code=501,
        detail="No supported package manager found (need apt-get, dnf, yum, microdnf, or apk).",
    )
@router.get("/list")
async def soft_list(current_user: User = Depends(get_current_user)):
"""List software with install status"""
result = []
"""List software with install status for this OS."""
pm = _detect_package_manager()
manager_label = pm if pm != "none" else "unknown"
result: list[dict] = []
for s in SOFTWARE_LIST:
installed, version = _check_installed(s["pkg"])
result.append({
**s,
"installed": installed,
"version": version if installed else "",
})
return {"software": result}
pkg = _package_name(s, pm) if pm != "none" else s["apt"]
installed, version = _check_installed(pm, pkg) if pm != "none" else (False, "")
result.append(
{
"id": s["id"],
"name": s["name"],
"desc": s["desc"],
"pkg": pkg,
"installed": installed,
"version": version if installed else "",
"package_manager": manager_label,
}
)
return {"software": result, "package_manager": manager_label}
@router.post("/install/{pkg_id}")
@@ -56,13 +344,14 @@ async def soft_install(
pkg_id: str,
current_user: User = Depends(get_current_user),
):
"""Install package via apt (requires root)"""
pkg = next((s["pkg"] for s in SOFTWARE_LIST if s["id"] == pkg_id), None)
if not pkg:
"""Install package via system package manager (root privileges required)."""
pm = _detect_package_manager()
entry = next((s for s in SOFTWARE_LIST if s["id"] == pkg_id), None)
if not entry:
raise HTTPException(status_code=404, detail="Package not found")
out, err = exec_shell_sync(f"apt-get update && apt-get install -y {pkg}", timeout=300)
if err and "error" in err.lower() and "E: " in err:
raise HTTPException(status_code=500, detail=err.strip() or out.strip())
pkg = _package_name(entry, pm)
script, timeout, env = _install_script(pm, pkg)
await asyncio.to_thread(_run_shell, script, timeout, env)
return {"status": True, "msg": "Installed"}
@@ -71,11 +360,12 @@ async def soft_uninstall(
pkg_id: str,
current_user: User = Depends(get_current_user),
):
"""Uninstall package via apt"""
pkg = next((s["pkg"] for s in SOFTWARE_LIST if s["id"] == pkg_id), None)
if not pkg:
"""Uninstall package via system package manager."""
pm = _detect_package_manager()
entry = next((s for s in SOFTWARE_LIST if s["id"] == pkg_id), None)
if not entry:
raise HTTPException(status_code=404, detail="Package not found")
out, err = exec_shell_sync(f"apt-get remove -y {pkg}", timeout=120)
if err and "error" in err.lower() and "E: " in err:
raise HTTPException(status_code=500, detail=err.strip() or out.strip())
pkg = _package_name(entry, pm)
script, timeout, env = _uninstall_script(pm, pkg)
await asyncio.to_thread(_run_shell, script, timeout, env)
return {"status": True, "msg": "Uninstalled"}

View File

@@ -1,19 +1,148 @@
"""YakPanel - SSL/Domains API - Let's Encrypt via certbot"""
import os
import re
import shutil
import socket
import subprocess
import sys
import tempfile
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from pydantic import BaseModel
from pydantic import BaseModel, Field
from typing import Optional, Literal
from app.core.database import get_db
from app.core.config import get_runtime_config
from app.core.utils import exec_shell_sync
from app.core.utils import environment_with_system_path, exec_shell_sync, read_file, nginx_reload_all_known, nginx_binary_candidates
from app.api.auth import get_current_user
from app.models.user import User
from app.models.site import Site, Domain
from app.services.site_service import regenerate_site_vhost
router = APIRouter(prefix="/ssl", tags=["ssl"])
# Well-known certbot install locations probed when a PATH lookup fails
# (distro package, pip install, and snap install respectively).
_CERTBOT_PATH_CANDIDATES = (
    "/usr/bin/certbot",
    "/usr/local/bin/certbot",
    "/snap/bin/certbot",
)
def _certbot_command() -> list[str] | None:
    """Resolve argv prefix to run certbot: [binary] or [python, -m, certbot].

    Probe order: the panel's own interpreter (covers `pip install certbot`),
    then stand-alone certbot binaries (PATH hit first, then well-known paths),
    then any system python with the certbot module. Returns None when no
    probe reports a working certbot. Refactor: the triplicated subprocess
    probe boilerplate is collapsed into one inner helper, and the `exe`
    variable is no longer shadowed between the interpreter and binary loops.
    """
    env = environment_with_system_path()
    path_var = env.get("PATH", "")

    def probe(argv: list[str], timeout: int) -> bool:
        # A probe succeeds only when `<argv> --version` exits 0.
        try:
            r = subprocess.run(
                argv + ["--version"],
                capture_output=True,
                text=True,
                timeout=timeout,
                env=env,
            )
        except (FileNotFoundError, OSError, subprocess.TimeoutExpired):
            return False
        return r.returncode == 0

    # 1) Prefer the interpreter running the panel.
    panel_python = getattr(sys, "executable", None) or ""
    if panel_python and os.path.isfile(panel_python):
        if probe([panel_python, "-m", "certbot"], timeout=20):
            return [panel_python, "-m", "certbot"]

    # 2) Stand-alone certbot binaries: PATH hit first, then well-known paths.
    candidates: list[str] = []
    which_hit = shutil.which("certbot", path=path_var)
    if which_hit and os.path.isfile(which_hit):
        candidates.append(which_hit)
    for p in _CERTBOT_PATH_CANDIDATES:
        if p not in candidates and os.path.isfile(p):
            candidates.append(p)
    for binary in candidates:
        if probe([binary], timeout=15):
            return [binary]

    # 3) Any system python that can import the certbot module.
    for py_name in ("python3", "python"):
        py = shutil.which(py_name, path=path_var)
        if not py or not os.path.isfile(py):
            continue
        if probe([py, "-m", "certbot"], timeout=20):
            return [py, "-m", "certbot"]
    return None
def _certbot_missing_message() -> str:
return (
"certbot is not installed or not reachable from the panel process. "
"On the server, run one of: apt install certbot | dnf install certbot | yum install certbot | snap install certbot. "
"Alternatively: pip install certbot (panel can use python3 -m certbot). "
"If certbot is already installed, ensure /usr/bin is on PATH for the YakPanel service."
)
async def _le_hostnames_for_domain_row(db: AsyncSession, dom_row: Optional[Domain], primary: str) -> list[str]:
    """All distinct hostnames for the site (for -d flags). Falls back to primary."""
    if not dom_row:
        return [primary] if primary else []
    query = select(Domain).where(Domain.pid == dom_row.pid).order_by(Domain.id)
    rows = (await db.execute(query)).scalars().all()
    hostnames: list[str] = []
    seen_lower: set[str] = set()
    for row in rows:
        name = (row.name or "").strip()
        if not name:
            continue
        lowered = name.lower()
        if lowered not in seen_lower:
            seen_lower.add(lowered)
            hostnames.append(name)
    # The requested primary leads the list even when it is absent from the DB.
    if primary and primary.lower() not in seen_lower:
        hostnames.insert(0, primary)
    return hostnames or ([primary] if primary else [])
def _reload_panel_and_common_nginx() -> tuple[bool, str]:
    """Reload nginx so new vhost (ACME path) is live before certbot HTTP-01.

    Thin wrapper over nginx_reload_all_known; returns its (ok, error_text) pair.
    """
    return nginx_reload_all_known(timeout=60)
def _localhost_accepts_tcp(port: int, timeout: float = 2.0) -> bool:
"""True if something accepts a TCP connection on this machine (checks IPv4 loopback)."""
try:
with socket.create_connection(("127.0.0.1", port), timeout=timeout):
return True
except OSError:
return False
def _ss_reports_listen_443() -> bool | None:
    """Parse ss/netstat output; None if the probe could not run."""
    listing, _ = exec_shell_sync("ss -tln 2>/dev/null || netstat -tln 2>/dev/null", timeout=5)
    if not listing or not listing.strip():
        # Neither tool produced output — probe inconclusive.
        return None
    return re.search(r":443\b", listing) is not None
@router.get("/domains")
async def ssl_domains(
@@ -44,12 +173,225 @@ class RequestCertRequest(BaseModel):
email: str
class SiteSslApplyRequest(BaseModel):
    """Apply Let's Encrypt for one site; domains must belong to that site."""
    site_id: int  # target site primary key
    domains: list[str] = Field(..., min_length=1)  # subset of the site's hostnames to certify
    method: Literal["file", "dns_cloudflare"]  # HTTP-01 webroot vs Cloudflare DNS-01
    email: str  # ACME account email
    api_token: str = ""  # Cloudflare API token; only used when method == "dns_cloudflare"
def _normalize_site_ssl_domains(raw_list: list[str], dom_rows: list[Domain]) -> tuple[list[str], str | None]:
"""
Map requested names to DB hostnames for this site.
Returns (canonical_hostnames, error_message).
"""
if not dom_rows:
return [], "Site has no domains configured"
name_map: dict[str, str] = {}
for d in dom_rows:
n = (d.name or "").strip()
if n:
name_map[n.lower()] = n
seen: set[str] = set()
out: list[str] = []
for raw in raw_list:
key = (raw or "").split(":")[0].strip().lower()
if not key or ".." in key:
continue
canon = name_map.get(key)
if not canon:
continue
lk = canon.lower()
if lk not in seen:
seen.add(lk)
out.append(canon)
if not out:
return [], "Select at least one valid domain name for this site"
return out, None
@router.post("/site-apply")
async def ssl_site_apply(
    body: SiteSslApplyRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Per-site SSL: choose subset of site domains and file (HTTP-01) or Cloudflare DNS-01 validation.
    """
    # --- Load the site and validate the requested domain subset against its DB rows.
    site_result = await db.execute(select(Site).where(Site.id == body.site_id))
    site = site_result.scalar_one_or_none()
    if not site:
        raise HTTPException(status_code=404, detail="Site not found")
    dom_result = await db.execute(select(Domain).where(Domain.pid == site.id).order_by(Domain.id))
    dom_rows = list(dom_result.scalars().all())
    hostnames, err = _normalize_site_ssl_domains(body.domains, dom_rows)
    if err:
        raise HTTPException(status_code=400, detail=err)
    email = (body.email or "").strip()
    if not email:
        raise HTTPException(status_code=400, detail="Email is required")
    # --- Regenerate + reload nginx first so the vhost (and ACME path) is live
    #     before certbot attempts validation.
    dom_row = dom_rows[0]
    regen_pre = await regenerate_site_vhost(db, dom_row.pid)
    if not regen_pre.get("status"):
        raise HTTPException(
            status_code=500,
            detail="Cannot refresh nginx vhost before certificate: " + str(regen_pre.get("msg", "")),
        )
    ok_ngx, err_ngx = _reload_panel_and_common_nginx()
    if not ok_ngx:
        raise HTTPException(
            status_code=500,
            detail="Nginx test/reload failed (fix config, then retry): " + err_ngx,
        )
    prefix = _certbot_command()
    if not prefix:
        raise HTTPException(status_code=500, detail=_certbot_missing_message())
    if body.method == "file":
        # --- HTTP-01: the webroot must stay inside the panel-managed roots.
        cfg = get_runtime_config()
        allowed = [os.path.abspath(cfg["www_root"]), os.path.abspath(cfg["setup_path"])]
        webroot_abs = os.path.abspath((site.path or "").strip() or ".")
        if ".." in (site.path or ""):
            raise HTTPException(status_code=400, detail="Invalid site path")
        if not any(webroot_abs.startswith(a + os.sep) or webroot_abs == a for a in allowed):
            raise HTTPException(status_code=400, detail="Site path must be under www_root or setup_path")
        webroot_norm = webroot_abs.rstrip(os.sep)
        challenge_dir = os.path.join(webroot_norm, ".well-known", "acme-challenge")
        try:
            os.makedirs(challenge_dir, mode=0o755, exist_ok=True)
        except OSError as e:
            raise HTTPException(status_code=500, detail=f"Cannot create ACME webroot directory: {e}") from e
        # Build two candidate invocations: webroot first, nginx plugin as fallback.
        base_flags = ["--non-interactive", "--agree-tos", "--email", email, "--no-eff-email"]
        cmd_webroot = prefix + ["certonly", "--webroot", "-w", webroot_norm, *base_flags]
        for h in hostnames:
            cmd_webroot.extend(["-d", h])
        cmd_webroot.extend(["--preferred-challenges", "http"])
        cmd_nginx = prefix + ["certonly", "--nginx", *base_flags]
        for h in hostnames:
            cmd_nginx.extend(["-d", h])
        env = environment_with_system_path()
        proc: subprocess.CompletedProcess[str] | None = None
        last_err = ""
        for cmd, label in ((cmd_webroot, "webroot"), (cmd_nginx, "nginx")):
            try:
                proc = subprocess.run(cmd, capture_output=True, text=True, timeout=300, env=env)
            except FileNotFoundError:
                raise HTTPException(status_code=500, detail=_certbot_missing_message()) from None
            except subprocess.TimeoutExpired:
                raise HTTPException(status_code=500, detail="certbot timed out (300s)") from None
            if proc.returncode == 0:
                break
            chunk = (proc.stderr or proc.stdout or "").strip() or f"exit {proc.returncode}"
            last_err = f"[{label}] {chunk}"
        if proc is None or proc.returncode != 0:
            msg = last_err or "certbot failed"
            hint = (
                " Check DNS A/AAAA for every selected name points here; port 80 must reach nginx for this site. "
                "CDN or redirects block file validation — use DNS verification instead."
            )
            raise HTTPException(status_code=500, detail=(msg + hint)[:8000])
        # Certificate issued: rewrite the vhost so the 443 block picks it up.
        regen = await regenerate_site_vhost(db, site.id)
        if not regen.get("status"):
            return {
                "status": True,
                "msg": "Certificate issued but nginx vhost update failed: " + str(regen.get("msg", "")),
                "output": (proc.stdout or "")[-2000:],
            }
        return {
            "status": True,
            "msg": "Certificate issued and nginx updated",
            "output": (proc.stdout or "")[-2000:],
        }
    # dns_cloudflare
    token = (body.api_token or "").strip()
    if not token:
        raise HTTPException(status_code=400, detail="Cloudflare API token required for DNS verification")
    # Write the token to a 0600 temp file for certbot's --dns-cloudflare-credentials.
    cred_lines = f"dns_cloudflare_api_token = {token}\n"
    fd, cred_path = tempfile.mkstemp(suffix=".ini", prefix="yakpanel_cf_")
    try:
        os.write(fd, cred_lines.encode())
        os.close(fd)
        os.chmod(cred_path, 0o600)
    except OSError as e:
        try:
            os.close(fd)
        except OSError:
            pass
        raise HTTPException(status_code=500, detail=f"Cannot write credentials temp file: {e}") from e
    base_flags = [
        "--non-interactive",
        "--agree-tos",
        "--email",
        email,
        "--no-eff-email",
        "--dns-cloudflare",
        "--dns-cloudflare-credentials",
        cred_path,
    ]
    cmd = prefix + ["certonly"] + base_flags
    for h in hostnames:
        cmd.extend(["-d", h])
    env = environment_with_system_path()
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=600, env=env)
    except (FileNotFoundError, subprocess.TimeoutExpired) as e:
        try:
            os.unlink(cred_path)
        except OSError:
            pass
        raise HTTPException(status_code=500, detail=str(e)) from e
    finally:
        # The credentials file must never outlive the request (finally also
        # covers the except path; the second unlink is a harmless ENOENT).
        try:
            os.unlink(cred_path)
        except OSError:
            pass
    if proc.returncode != 0:
        err = (proc.stderr or proc.stdout or "").strip() or f"exit {proc.returncode}"
        raise HTTPException(
            status_code=500,
            detail="certbot DNS failed (install certbot-dns-cloudflare if missing). " + err[:6000],
        )
    regen = await regenerate_site_vhost(db, site.id)
    if not regen.get("status"):
        return {
            "status": True,
            "msg": "Certificate issued but vhost regen failed: " + str(regen.get("msg", "")),
            "output": (proc.stdout or "")[-2000:],
        }
    return {
        "status": True,
        "msg": "Certificate issued via Cloudflare DNS-01",
        "output": (proc.stdout or "")[-2000:],
    }
@router.post("/request")
async def ssl_request_cert(
body: RequestCertRequest,
current_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
"""Request Let's Encrypt certificate via certbot (webroot challenge)"""
"""Request Let's Encrypt certificate via certbot (webroot challenge)."""
if not body.domain or not body.webroot or not body.email:
raise HTTPException(status_code=400, detail="domain, webroot and email required")
if ".." in body.domain or ".." in body.webroot:
@@ -59,14 +401,392 @@ async def ssl_request_cert(
webroot_abs = os.path.abspath(body.webroot)
if not any(webroot_abs.startswith(a + os.sep) or webroot_abs == a for a in allowed):
raise HTTPException(status_code=400, detail="Webroot must be under www_root or setup_path")
cmd = (
f'certbot certonly --webroot -w "{body.webroot}" -d "{body.domain}" '
f'--non-interactive --agree-tos --email "{body.email}"'
dom = body.domain.split(":")[0].strip()
webroot_norm = webroot_abs.rstrip(os.sep)
result_dom = await db.execute(select(Domain).where(Domain.name == dom).limit(1))
dom_row = result_dom.scalar_one_or_none()
if dom_row:
regen_pre = await regenerate_site_vhost(db, dom_row.pid)
if not regen_pre.get("status"):
raise HTTPException(
status_code=500,
detail="Cannot refresh nginx vhost before certificate request: " + str(regen_pre.get("msg", "")),
)
ok_ngx, err_ngx = _reload_panel_and_common_nginx()
if not ok_ngx:
raise HTTPException(
status_code=500,
detail="Nginx test/reload failed before certificate request (fix config, then retry): " + err_ngx,
)
challenge_dir = os.path.join(webroot_norm, ".well-known", "acme-challenge")
try:
os.makedirs(challenge_dir, mode=0o755, exist_ok=True)
except OSError as e:
raise HTTPException(status_code=500, detail=f"Cannot create ACME webroot directory: {e}") from e
prefix = _certbot_command()
if not prefix:
raise HTTPException(status_code=500, detail=_certbot_missing_message())
hostnames = await _le_hostnames_for_domain_row(db, dom_row, dom)
base_flags = [
"--non-interactive",
"--agree-tos",
"--email",
body.email,
"--no-eff-email",
]
cmd_webroot = prefix + ["certonly", "--webroot", "-w", webroot_norm, *base_flags]
for h in hostnames:
cmd_webroot.extend(["-d", h])
cmd_webroot.extend(["--preferred-challenges", "http"])
cmd_nginx = prefix + ["certonly", "--nginx", *base_flags]
for h in hostnames:
cmd_nginx.extend(["-d", h])
env = environment_with_system_path()
proc: subprocess.CompletedProcess[str] | None = None
last_err = ""
for cmd, label in ((cmd_webroot, "webroot"), (cmd_nginx, "nginx")):
try:
proc = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=300,
env=env,
)
except FileNotFoundError:
raise HTTPException(status_code=500, detail=_certbot_missing_message()) from None
except subprocess.TimeoutExpired:
raise HTTPException(status_code=500, detail="certbot timed out (300s)") from None
if proc.returncode == 0:
break
chunk = (proc.stderr or proc.stdout or "").strip() or f"exit {proc.returncode}"
last_err = f"[{label}] {chunk}"
if proc is None or proc.returncode != 0:
msg = last_err or "certbot failed"
hint = (
" Webroot and nginx plugins both failed. Check: "
"DNS A/AAAA for every -d name points to this server; port 80 reaches the nginx that serves these hosts; "
"site is enabled; install python3-certbot-nginx if the nginx method reports a missing plugin. "
"If you use a CDN proxy, pause it or use DNS validation instead."
)
raise HTTPException(status_code=500, detail=(msg + hint)[:8000])
row = dom_row
if row:
regen = await regenerate_site_vhost(db, row.pid)
if not regen.get("status"):
return {
"status": True,
"msg": "Certificate issued but nginx vhost update failed: " + str(regen.get("msg", "")),
"output": (proc.stdout or "")[-2000:],
}
return {
"status": True,
"msg": "Certificate issued and nginx updated",
"output": (proc.stdout or "")[-2000:],
}
@router.get("/diagnostics")
async def ssl_diagnostics(current_user: User = Depends(get_current_user)):
"""
Help debug HTTP vs HTTPS: compares panel-written vhosts with what nginx -T actually loads.
ERR_CONNECTION_REFUSED on 443 usually means no listen 443 in the active nginx, or a firewall.
"""
cfg = get_runtime_config()
setup_abs = os.path.abspath((cfg.get("setup_path") or "").strip() or ".")
vhost_dir = os.path.join(setup_abs, "panel", "vhost", "nginx")
include_snippet = "include " + vhost_dir.replace(os.sep, "/") + "/*.conf;"
vhost_summaries: list[dict] = []
if os.path.isdir(vhost_dir):
try:
names = sorted(os.listdir(vhost_dir))
except OSError:
names = []
for fn in names:
if not fn.endswith(".conf") or fn.startswith("."):
continue
fp = os.path.join(vhost_dir, fn)
if not os.path.isfile(fp):
continue
body = read_file(fp) or ""
vhost_summaries.append({
"file": fn,
"has_listen_80": bool(re.search(r"\blisten\s+80\b", body)),
"has_listen_443": bool(re.search(r"\blisten\s+.*443", body)),
"has_ssl_directives": "ssl_certificate" in body,
})
any_vhost_443 = any(
v.get("has_listen_443") and v.get("has_ssl_directives") for v in vhost_summaries
)
out, err = exec_shell_sync(cmd, timeout=120)
if err and "error" in err.lower() and "successfully" not in err.lower():
raise HTTPException(status_code=500, detail=err.strip() or out.strip())
return {"status": True, "msg": "Certificate requested", "output": out}
effective_listen_443 = False
panel_include_in_effective_config = False
nginx_t_errors: list[str] = []
norm_vhost = vhost_dir.replace(os.sep, "/")
env = environment_with_system_path()
for ngx in nginx_binary_candidates():
try:
r = subprocess.run(
[ngx, "-T"],
capture_output=True,
text=True,
timeout=25,
env=env,
)
except (FileNotFoundError, OSError, subprocess.TimeoutExpired) as e:
nginx_t_errors.append(f"{ngx}: {e}")
continue
dump = (r.stdout or "") + (r.stderr or "")
if r.returncode != 0:
nginx_t_errors.append(f"{ngx}: " + (dump.strip()[:800] or f"-T exit {r.returncode}"))
continue
if re.search(r"\blisten\s+.*443", dump):
effective_listen_443 = True
if norm_vhost in dump or "panel/vhost/nginx" in dump:
panel_include_in_effective_config = True
hints: list[str] = []
if not os.path.isdir(vhost_dir):
hints.append(f"The panel vhost directory is missing ({vhost_dir}). Create a website in YakPanel first.")
elif not vhost_summaries:
hints.append("There are no .conf files under the panel nginx vhost directory.")
le_live = "/etc/letsencrypt/live"
le_present = False
if os.path.isdir(le_live):
try:
le_present = any(
n and not n.startswith(".")
for n in os.listdir(le_live)
)
except OSError:
le_present = False
if le_present and vhost_summaries and not any_vhost_443:
hints.append(
"Let's Encrypt certs exist on this server but panel vhosts do not include an HTTPS (listen 443 ssl) block. "
"Regenerate the vhost: edit the site and save, or use Request SSL again."
)
if any_vhost_443 and not effective_listen_443:
hints.append(
"Your panel .conf files define HTTPS, but nginx -T does not show any listen 443 — the daemon that handles traffic is not loading YakPanel vhosts. "
"Add the include line below inside http { } for that nginx (e.g. /etc/nginx/nginx.conf), then nginx -t && reload."
)
elif vhost_summaries and not panel_include_in_effective_config:
hints.append(
"If http://domain shows the default 'Welcome to nginx' page, stock nginx is answering and likely does not include YakPanel vhosts. "
"Add the include below (or symlink this directory into /etc/nginx/conf.d/)."
)
localhost_443_open = _localhost_accepts_tcp(443)
ss_443 = _ss_reports_listen_443()
if not localhost_443_open and not effective_listen_443:
hints.append(
"This server is not accepting TCP on 127.0.0.1:443 — nothing is listening on 443 yet. "
"Fix nginx (listen 443 ssl + include panel vhosts) first; opening only the cloud firewall will not fix ERR_CONNECTION_REFUSED until nginx binds 443."
)
elif effective_listen_443 and localhost_443_open:
hints.append(
"Nginx loads HTTPS and 127.0.0.1:443 accepts connections on this host. "
"If browsers off this machine still see connection refused, allow inbound TCP 443: "
"sudo ufw allow 443/tcp && sudo ufw reload (or firewalld), and your VPS Security Group / provider firewall."
)
elif effective_listen_443 and not localhost_443_open:
hints.append(
"nginx -T reports listen 443, but connecting to 127.0.0.1:443 failed — check nginx error.log; nginx may have failed to bind (permission or address already in use)."
)
elif localhost_443_open and not effective_listen_443:
hints.append(
"127.0.0.1:443 accepts TCP, but nginx -T from panel binaries did not show listen 443 — another process may own 443; check ss -tlnp and which nginx serves port 80."
)
debian_sites = os.path.isdir("/etc/nginx/sites-available")
rhel_conf = os.path.isdir("/etc/nginx/conf.d")
layout = "unknown"
if debian_sites:
layout = "debian_sites_available"
elif rhel_conf:
layout = "rhel_conf_d"
drop_deb = "/etc/nginx/sites-available/yakpanel-vhosts.conf"
drop_rhel = "/etc/nginx/conf.d/yakpanel-vhosts.conf"
nginx_wizard = {
"detected_layout": layout,
"include_snippet": include_snippet,
"dropin_file_suggested": drop_deb if debian_sites else drop_rhel,
"debian": {
"sites_available_file": drop_deb,
"sites_enabled_symlink": "/etc/nginx/sites-enabled/yakpanel-vhosts.conf",
"steps": [
f"printf '%s\\n' '{include_snippet}' | sudo tee {drop_deb}",
f"sudo ln -sf {drop_deb} /etc/nginx/sites-enabled/yakpanel-vhosts.conf",
"sudo nginx -t && sudo systemctl reload nginx",
],
},
"rhel": {
"conf_d_file": drop_rhel,
"steps": [
f"printf '%s\\n' '{include_snippet}' | sudo tee {drop_rhel}",
"sudo nginx -t && sudo systemctl reload nginx",
],
},
"note": "Run the steps for your distro as root. The include line must appear inside the main http { } context (conf.d files do automatically).",
}
return {
"vhost_dir": vhost_dir,
"include_snippet": include_snippet,
"nginx_wizard": nginx_wizard,
"vhosts": vhost_summaries,
"any_vhost_listen_ssl": any_vhost_443,
"nginx_effective_listen_443": effective_listen_443,
"panel_vhost_path_in_nginx_t": panel_include_in_effective_config,
"nginx_t_probe_errors": nginx_t_errors,
"localhost_443_accepts_tcp": localhost_443_open,
"ss_reports_443_listen": ss_443,
"hints": hints,
}
class DnsCertCloudflareRequest(BaseModel):
    """Request body for issuing a Let's Encrypt cert via the DNS-01 Cloudflare plugin."""
    # Domain to issue for; a ":port" suffix is stripped by the endpoint.
    domain: str
    # Contact e-mail forwarded to certbot --email (ACME account registration).
    email: str
    # Cloudflare API token with DNS-edit permission for the zone; written to a
    # 0600 temp file for certbot, never passed on the command line.
    api_token: str
class DnsManualInstructionsRequest(BaseModel):
    """Request body for fetching manual DNS-01 TXT-record instructions."""
    # Domain the user wants a certificate for; ":port" suffix is stripped.
    domain: str
@router.post("/dns-request/cloudflare")
async def ssl_dns_cloudflare_cert(
    body: DnsCertCloudflareRequest,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Request Let's Encrypt certificate using DNS-01 via Cloudflare (requires certbot-dns-cloudflare).

    Flow: validate input, refresh the site's nginx vhost (if the domain is
    managed), run certbot with a short-lived 0600 credentials file, then
    regenerate the vhost so the new cert is wired in.
    Raises HTTPException 400 on bad input, 500 on any tooling failure.
    """
    dom = (body.domain or "").split(":")[0].strip()
    if not dom or ".." in dom or not body.email or not body.api_token:
        raise HTTPException(status_code=400, detail="domain, email, and api_token required")
    result_dom = await db.execute(select(Domain).where(Domain.name == dom).limit(1))
    dom_row = result_dom.scalar_one_or_none()
    if dom_row:
        # Refresh the vhost first so nginx config matches current site state
        # before certbot runs.
        regen_pre = await regenerate_site_vhost(db, dom_row.pid)
        if not regen_pre.get("status"):
            raise HTTPException(
                status_code=500,
                detail="Cannot refresh nginx vhost: " + str(regen_pre.get("msg", "")),
            )
        ok_ngx, err_ngx = _reload_panel_and_common_nginx()
        if not ok_ngx:
            raise HTTPException(
                status_code=500,
                detail="Nginx reload failed: " + err_ngx,
            )
    prefix = _certbot_command()
    if not prefix:
        raise HTTPException(status_code=500, detail=_certbot_missing_message())
    hostnames = await _le_hostnames_for_domain_row(db, dom_row, dom)
    if not hostnames:
        hostnames = [dom]
    # certbot-dns-cloudflare reads the token from a credentials file; keep it
    # 0600 and delete it in every code path below.
    cred_lines = f'dns_cloudflare_api_token = {body.api_token.strip()}\n'
    fd, cred_path = tempfile.mkstemp(suffix=".ini", prefix="yakpanel_cf_")
    try:
        os.write(fd, cred_lines.encode())
        os.close(fd)
        os.chmod(cred_path, 0o600)
    except OSError as e:
        try:
            os.close(fd)
        except OSError:
            pass
        # Fix: don't leave a partially written secrets file on disk when the
        # write/chmod fails.
        try:
            os.unlink(cred_path)
        except OSError:
            pass
        raise HTTPException(status_code=500, detail=f"Cannot write credentials temp file: {e}") from e
    base_flags = [
        "--non-interactive",
        "--agree-tos",
        "--email",
        body.email.strip(),
        "--no-eff-email",
        "--dns-cloudflare",
        "--dns-cloudflare-credentials",
        cred_path,
    ]
    cmd = prefix + ["certonly"] + base_flags
    for h in hostnames:
        cmd.extend(["-d", h])
    env = environment_with_system_path()
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=600, env=env)
    except (FileNotFoundError, subprocess.TimeoutExpired) as e:
        # Cleanup happens in finally; no need to duplicate the unlink here.
        raise HTTPException(status_code=500, detail=str(e)) from e
    finally:
        # Always remove the credentials file, success or failure.
        try:
            os.unlink(cred_path)
        except OSError:
            pass
    if proc.returncode != 0:
        err = (proc.stderr or proc.stdout or "").strip() or f"exit {proc.returncode}"
        raise HTTPException(
            status_code=500,
            detail="certbot DNS failed. Install certbot-dns-cloudflare (pip or OS package) if missing. " + err[:6000],
        )
    if dom_row:
        regen = await regenerate_site_vhost(db, dom_row.pid)
        if not regen.get("status"):
            return {
                "status": True,
                "msg": "Certificate issued but vhost regen failed: " + str(regen.get("msg", "")),
                "output": (proc.stdout or "")[-2000:],
            }
    return {
        "status": True,
        "msg": "Certificate issued via Cloudflare DNS-01",
        "output": (proc.stdout or "")[-2000:],
    }
@router.post("/dns-request/manual-instructions")
async def ssl_dns_manual_instructions(
    body: DnsManualInstructionsRequest,
    current_user: User = Depends(get_current_user),
):
    """Return TXT record host for ACME DNS-01 (user creates record then runs certbot --manual)."""
    hostname = (body.domain or "").split(":")[0].strip()
    # Reject empty names and path-traversal-looking input up front.
    if not hostname or ".." in hostname:
        raise HTTPException(status_code=400, detail="Invalid domain")
    example_cmd = (
        f"sudo certbot certonly --manual --preferred-challenges dns --email you@example.com "
        f"--agree-tos -d {hostname}"
    )
    return {
        "txt_record_name": f"_acme-challenge.{hostname}",
        "certbot_example": example_cmd,
        "note": "Certbot will display the exact TXT value to create. After DNS propagates, continue in the terminal.",
    }
@router.get("/certificates")

View File

@@ -107,6 +107,16 @@ def _run_migrations(conn):
"ALTER TABLE sites ADD COLUMN force_https INTEGER DEFAULT 0"
)
)
if "proxy_upstream" not in cols:
conn.execute(sqlalchemy.text("ALTER TABLE sites ADD COLUMN proxy_upstream VARCHAR(512) DEFAULT ''"))
if "proxy_websocket" not in cols:
conn.execute(sqlalchemy.text("ALTER TABLE sites ADD COLUMN proxy_websocket INTEGER DEFAULT 0"))
if "dir_auth_path" not in cols:
conn.execute(sqlalchemy.text("ALTER TABLE sites ADD COLUMN dir_auth_path VARCHAR(256) DEFAULT ''"))
if "dir_auth_user_file" not in cols:
conn.execute(sqlalchemy.text("ALTER TABLE sites ADD COLUMN dir_auth_user_file VARCHAR(512) DEFAULT ''"))
if "php_deny_execute" not in cols:
conn.execute(sqlalchemy.text("ALTER TABLE sites ADD COLUMN php_deny_execute INTEGER DEFAULT 0"))
except Exception:
pass
# Create backup_plans if not exists (create_all handles new installs)
@@ -123,6 +133,18 @@ def _run_migrations(conn):
"""))
except Exception:
pass
try:
r = conn.execute(sqlalchemy.text("PRAGMA table_info(backup_plans)"))
bcols = [row[1] for row in r.fetchall()]
if bcols:
if "s3_bucket" not in bcols:
conn.execute(sqlalchemy.text("ALTER TABLE backup_plans ADD COLUMN s3_bucket VARCHAR(256) DEFAULT ''"))
if "s3_endpoint" not in bcols:
conn.execute(sqlalchemy.text("ALTER TABLE backup_plans ADD COLUMN s3_endpoint VARCHAR(512) DEFAULT ''"))
if "s3_key_prefix" not in bcols:
conn.execute(sqlalchemy.text("ALTER TABLE backup_plans ADD COLUMN s3_key_prefix VARCHAR(256) DEFAULT ''"))
except Exception:
pass
# Create custom_plugins if not exists
try:
conn.execute(sqlalchemy.text("""

View File

@@ -6,7 +6,8 @@ from passlib.context import CryptContext
from app.core.config import get_settings
settings = get_settings()
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# bcrypt_sha256: SHA-256 pre-hash then bcrypt (no 72-byte limit); bcrypt: verify legacy hashes
pwd_context = CryptContext(schemes=["bcrypt_sha256", "bcrypt"], deprecated="auto")
def verify_password(plain_password: str, hashed_password: str) -> bool:
@@ -15,7 +16,7 @@ def verify_password(plain_password: str, hashed_password: str) -> bool:
def get_password_hash(password: str) -> str:
"""Hash a password"""
"""Hash a password (uses bcrypt_sha256; bcrypt only supports 72 raw bytes)."""
return pwd_context.hash(password)

View File

@@ -9,6 +9,24 @@ from typing import Tuple, Optional
regex_safe_path = re.compile(r"^[\w\s./\-]*$")
# systemd often sets PATH to venv-only; subprocess shells then miss /usr/bin (dnf, apt-get, …).
_SYSTEM_PATH = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"


def ensure_system_path(env: dict[str, str]) -> None:
    """Append standard locations to PATH if /usr/bin is missing."""
    current = (env.get("PATH") or "").strip()
    if "/usr/bin" not in current:
        # Preserve whatever was there and append the system directories.
        env["PATH"] = f"{current}:{_SYSTEM_PATH}" if current else _SYSTEM_PATH
def environment_with_system_path(base: Optional[dict[str, str]] = None) -> dict[str, str]:
    """Copy of process env (or base) with PATH guaranteed to include system bin dirs."""
    # Never mutate the caller's mapping or os.environ itself.
    merged = os.environ.copy() if base is None else dict(base)
    ensure_system_path(merged)
    return merged
def md5(strings: str | bytes) -> str:
"""Generate MD5 hash"""
@@ -78,6 +96,7 @@ async def exec_shell(
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
cwd=cwd,
env=environment_with_system_path(),
)
try:
stdout, stderr = await asyncio.wait_for(
@@ -102,6 +121,7 @@ def exec_shell_sync(cmd: str, timeout: Optional[float] = None, cwd: Optional[str
capture_output=True,
timeout=timeout or 300,
cwd=cwd,
env=environment_with_system_path(),
)
out = result.stdout.decode("utf-8", errors="replace") if result.stdout else ""
err = result.stderr.decode("utf-8", errors="replace") if result.stderr else ""
@@ -110,3 +130,86 @@ def exec_shell_sync(cmd: str, timeout: Optional[float] = None, cwd: Optional[str
return "", "Timed out"
except Exception as e:
return "", str(e)
def nginx_test_and_reload(nginx_bin: str, timeout: float = 60.0) -> Tuple[bool, str]:
    """Run ``nginx -t`` then ``nginx -s reload``. Returns (success, error_message)."""
    # A missing binary means "nothing to manage", not a failure.
    if not nginx_bin or not os.path.isfile(nginx_bin):
        return True, ""
    env = environment_with_system_path()
    steps = (
        ([nginx_bin, "-t"], "nginx -t"),
        ([nginx_bin, "-s", "reload"], "nginx -s reload"),
    )
    for argv, label in steps:
        try:
            completed = subprocess.run(
                argv,
                capture_output=True,
                text=True,
                timeout=timeout,
                env=env,
            )
        except (FileNotFoundError, OSError, subprocess.TimeoutExpired) as exc:
            return False, str(exc)
        if completed.returncode != 0:
            detail = (completed.stderr or completed.stdout or "").strip()
            return False, detail or f"{label} exited {completed.returncode}"
    return True, ""
def nginx_binary_candidates() -> list[str]:
    """Nginx binaries to operate on: panel-bundled first, then common system paths (deduped by realpath)."""
    from app.core.config import get_runtime_config

    cfg = get_runtime_config()

    def _dedup_key(path: str) -> str:
        # Resolve symlinks so /usr/sbin/nginx and its target count once.
        try:
            return os.path.realpath(path)
        except OSError:
            return path

    panel_bin = os.path.join(cfg.get("setup_path") or "", "nginx", "sbin", "nginx")
    ordered_candidates = [
        panel_bin,
        "/usr/sbin/nginx",
        "/usr/bin/nginx",
        "/usr/local/nginx/sbin/nginx",
    ]
    found: list[str] = []
    keys: set[str] = set()
    for candidate in ordered_candidates:
        if not os.path.isfile(candidate):
            continue
        key = _dedup_key(candidate)
        if key in keys:
            continue
        found.append(candidate)
        keys.add(key)
    return found
def nginx_reload_all_known(timeout: float = 60.0) -> Tuple[bool, str]:
    """
    Test and reload panel nginx (setup_path/nginx/sbin/nginx) and distinct system nginx
    binaries so vhost changes apply regardless of which daemon serves sites.
    """
    targets = nginx_binary_candidates()
    if not targets:
        # No nginx installed at all: treat as success (nothing to reload).
        return True, ""
    failures: list[str] = []
    succeeded = False
    for binary in targets:
        ok, detail = nginx_test_and_reload(binary, timeout=timeout)
        if ok:
            succeeded = True
        else:
            failures.append(f"{binary}: {detail}")
    # One working daemon is enough; report errors only when every one failed.
    if succeeded:
        return True, ""
    return False, "; ".join(failures) if failures else "nginx reload failed for all candidates"

View File

@@ -0,0 +1,30 @@
[
{
"id": "disk_space",
"name": "Disk free space alert",
"schedule": "0 */6 * * *",
"execstr": "df -h / | tail -1 | awk '{if (int($5) > 90) print \"Disk usage over 90% on / — \" $0; exit 0}'",
"description": "Print a line if root filesystem use exceeds 90% (extend with mail/curl as needed)."
},
{
"id": "yakpanel_backup",
"name": "Run YakPanel scheduled backups",
"schedule": "15 * * * *",
"execstr": "curl -fsS -H \"Authorization: Bearer YOUR_TOKEN\" http://127.0.0.1:8889/api/v1/backup/run-scheduled || true",
"description": "Example: call the panel backup API hourly (set token and port; prefer localhost + firewall)."
},
{
"id": "clear_tmp",
"name": "Clean old temp files",
"schedule": "0 3 * * *",
"execstr": "find /tmp -type f -atime +7 -delete 2>/dev/null || true",
"description": "Remove files in /tmp not accessed in 7 days."
},
{
"id": "php_fpm_ping",
"name": "PHP-FPM socket check",
"schedule": "*/10 * * * *",
"execstr": "test -S /tmp/php-cgi-74.sock && exit 0 || echo \"php-fpm 74 socket missing\"",
"description": "Adjust php version/socket path for your stack."
}
]

Binary file not shown.

View File

@@ -29,6 +29,7 @@ from app.api import (
node,
service,
public_installer,
security,
)
@@ -87,6 +88,7 @@ app.include_router(config.router, prefix="/api/v1")
app.include_router(user.router, prefix="/api/v1")
app.include_router(logs.router, prefix="/api/v1")
app.include_router(public_installer.router, prefix="/api/v1")
app.include_router(security.router, prefix="/api/v1")
@app.get("/")

View File

@@ -13,3 +13,7 @@ class BackupPlan(Base):
target_id: Mapped[int] = mapped_column(Integer, nullable=False) # site_id or database_id
schedule: Mapped[str] = mapped_column(String(64), nullable=False) # cron expression, e.g. "0 2 * * *" = daily 2am
enabled: Mapped[bool] = mapped_column(Boolean, default=True)
# Optional S3-compatible copy after local backup (uses AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY env).
s3_bucket: Mapped[str] = mapped_column(String(256), default="")
s3_endpoint: Mapped[str] = mapped_column(String(512), default="")
s3_key_prefix: Mapped[str] = mapped_column(String(256), default="")

View File

@@ -16,6 +16,14 @@ class Site(Base):
project_type: Mapped[str] = mapped_column(String(32), default="PHP")
php_version: Mapped[str] = mapped_column(String(16), default="74") # 74, 80, 81, 82
force_https: Mapped[int] = mapped_column(Integer, default=0) # 0=off, 1=redirect HTTP to HTTPS
# Reverse proxy: when proxy_upstream is non-empty, vhost uses proxy_pass instead of PHP root.
proxy_upstream: Mapped[str] = mapped_column(String(512), default="") # e.g. http://127.0.0.1:3000
proxy_websocket: Mapped[int] = mapped_column(Integer, default=0) # 1 = Upgrade headers for WS
# HTTP basic auth for a path prefix (nginx auth_basic). user_file = htpasswd path on server.
dir_auth_path: Mapped[str] = mapped_column(String(256), default="")
dir_auth_user_file: Mapped[str] = mapped_column(String(512), default="")
# Block execution of PHP under common upload paths (nginx deny).
php_deny_execute: Mapped[int] = mapped_column(Integer, default=0)
addtime: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

View File

@@ -1,17 +1,373 @@
"""YakPanel - Site service"""
import os
import re
from datetime import datetime, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.models.site import Site, Domain
from app.models.redirect import SiteRedirect
from app.core.config import get_runtime_config
from app.core.utils import path_safe_check, write_file, read_file, exec_shell_sync
from app.core.config import get_runtime_config, get_settings
from app.core.utils import path_safe_check, write_file, read_file, exec_shell_sync, nginx_reload_all_known
DOMAIN_REGEX = re.compile(r"^([\w\-\*]{1,100}\.){1,8}([\w\-]{1,24}|[\w\-]{1,24}\.[\w\-]{1,24})$")
LETSENCRYPT_LIVE = "/etc/letsencrypt/live"
SSL_EXPIRING_DAYS = 14
_SAN_CACHE: dict[str, tuple[float, frozenset[str]]] = {}
def _normalize_hostname(h: str) -> str:
return (h or "").strip().lower().split(":")[0]
def _iter_le_pairs_sorted() -> list[tuple[str, str]]:
    """(fullchain, privkey) path pairs under the LE live dir, sorted by cert directory name."""
    if not os.path.isdir(LETSENCRYPT_LIVE):
        return []
    try:
        entries = sorted(os.listdir(LETSENCRYPT_LIVE))
    except OSError:
        return []
    pairs: list[tuple[str, str]] = []
    for name in entries:
        # Skip hidden entries and anything resembling path traversal.
        if name.startswith(".") or ".." in name:
            continue
        chain = os.path.join(LETSENCRYPT_LIVE, name, "fullchain.pem")
        key = os.path.join(LETSENCRYPT_LIVE, name, "privkey.pem")
        if os.path.isfile(chain) and os.path.isfile(key):
            pairs.append((chain, key))
    return pairs
def _cert_san_names(fullchain: str) -> frozenset[str]:
    """Lowercased DNS SAN entries parsed from fullchain.pem via ``openssl x509 -text``.

    Results are memoized in _SAN_CACHE keyed by path and invalidated when the
    file's mtime changes. Returns an empty frozenset when the file is missing
    or openssl produced no output.
    """
    try:
        st = os.stat(fullchain)
        mtime = st.st_mtime
    except OSError:
        return frozenset()
    hit = _SAN_CACHE.get(fullchain)
    if hit is not None and hit[0] == mtime:
        # Cache hit: file unchanged since last parse.
        return hit[1]
    # NOTE(review): fullchain is interpolated into a shell command; callers
    # appear to pass only paths under /etc/letsencrypt/live — confirm before
    # accepting user-supplied paths here.
    out, _err = exec_shell_sync(f'openssl x509 -in "{fullchain}" -noout -text', timeout=8)
    names: set[str] = set()
    if out:
        # "DNS:example.com" tokens come from the X509v3 SAN extension dump.
        for m in re.finditer(r"DNS:([^,\s\n]+)", out, flags=re.IGNORECASE):
            names.add(m.group(1).strip().lower())
    froz = frozenset(names)
    _SAN_CACHE[fullchain] = (mtime, froz)
    return froz
def _nginx_site_template_path() -> str | None:
"""
Resolve webserver/templates/nginx_site.conf.
Order: YAKPANEL_NGINX_TEMPLATE env, repo root (parent of backend/), Settings.panel_path.
"""
candidates: list[str] = []
env_override = (os.environ.get("YAKPANEL_NGINX_TEMPLATE") or "").strip()
if env_override:
candidates.append(env_override)
# site_service.py -> services -> app -> backend -> YakPanel-server (repo root)
here = os.path.abspath(__file__)
repo_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(here))))
candidates.append(os.path.join(repo_root, "webserver", "templates", "nginx_site.conf"))
try:
s = get_settings()
pp = (s.panel_path or "").strip()
if pp:
candidates.append(os.path.join(os.path.abspath(pp), "webserver", "templates", "nginx_site.conf"))
sp = (s.setup_path or "").strip()
if sp:
candidates.append(
os.path.join(os.path.abspath(sp), "YakPanel-server", "webserver", "templates", "nginx_site.conf")
)
except Exception:
pass
for path in candidates:
if path and os.path.isfile(path):
return path
return None
def _backup_count(site_name: str, backup_dir: str) -> int:
if not backup_dir or not os.path.isdir(backup_dir):
return 0
prefix = f"{site_name}_"
n = 0
try:
for f in os.listdir(backup_dir):
if f.startswith(prefix) and f.endswith(".tar.gz"):
n += 1
except OSError:
return 0
return n
def _parse_cert_not_after(cert_path: str) -> datetime | None:
if not os.path.isfile(cert_path):
return None
out, _err = exec_shell_sync(f'openssl x509 -in "{cert_path}" -noout -enddate', timeout=5)
if not out or "notAfter=" not in out:
return None
val = out.strip().split("=", 1)[1].strip()
try:
dt = datetime.strptime(val, "%b %d %H:%M:%S %Y GMT")
return dt.replace(tzinfo=timezone.utc)
except ValueError:
return None
def _best_ssl_for_hostnames(hostnames: list[str]) -> dict:
    """Pick the LE cert (live/ or SAN) that covers site hostnames with longest validity.

    Returns {"status": "none"|"expired"|"expiring"|"active",
             "days_left": int | None, "cert_name": str | None}.
    """
    none = {"status": "none", "days_left": None, "cert_name": None}
    # Normalize hosts, drop empties/traversal, keep first-seen order.
    seen: set[str] = set()
    want_list: list[str] = []
    for host in hostnames:
        n = _normalize_hostname(host)
        if n and ".." not in n and n not in seen:
            seen.add(n)
            want_list.append(n)
    if not want_list:
        return none
    want = set(want_list)
    try:
        if not os.path.isdir(LETSENCRYPT_LIVE):
            return none
        best_days: int | None = None
        best_name: str | None = None
        for fc, _pk in _iter_le_pairs_sorted():
            live_name = os.path.basename(os.path.dirname(fc)).lower()
            # Exact live-dir name match wins; otherwise fall back to SAN coverage.
            if live_name in want:
                match_names = {live_name}
            else:
                match_names = want & _cert_san_names(fc)
            if not match_names:
                continue
            end = _parse_cert_not_after(fc)
            if end is None:
                continue
            now = datetime.now(timezone.utc)
            # Floor-divide so a cert expiring later today reports 0 days left.
            days = int((end - now).total_seconds() // 86400)
            # min() makes the reported name deterministic when several hosts match.
            pick = min(match_names)
            if best_days is None or days > best_days:
                best_days = days
                best_name = pick
        if best_days is None:
            return none
        if best_days < 0:
            status = "expired"
        elif best_days <= SSL_EXPIRING_DAYS:
            status = "expiring"
        else:
            status = "active"
        return {"status": status, "days_left": best_days, "cert_name": best_name}
    except OSError:
        return none
def _letsencrypt_paths(hostname: str) -> tuple[str, str] | None:
"""Return (fullchain, privkey) if Let's Encrypt files exist for this hostname."""
h = (hostname or "").strip().lower().split(":")[0]
if not h or ".." in h:
return None
base = os.path.join(LETSENCRYPT_LIVE, h)
fc = os.path.join(base, "fullchain.pem")
pk = os.path.join(base, "privkey.pem")
if os.path.isfile(fc) and os.path.isfile(pk):
return fc, pk
return None
def _letsencrypt_paths_any(hostnames: list[str]) -> tuple[str, str] | None:
"""First matching LE cert: exact live/<host>/, then live dir name, then SAN match."""
seen: set[str] = set()
want_ordered: list[str] = []
for h in hostnames:
n = _normalize_hostname(h)
if n and ".." not in n and n not in seen:
seen.add(n)
want_ordered.append(n)
if not want_ordered:
return None
want = set(want_ordered)
for n in want_ordered:
p = _letsencrypt_paths(n)
if p:
return p
for fc, pk in _iter_le_pairs_sorted():
live_name = os.path.basename(os.path.dirname(fc)).lower()
if live_name in want:
return fc, pk
if want & _cert_san_names(fc):
return fc, pk
return None
def _build_php_deny_execute_block(enabled: int) -> str:
if not enabled:
return ""
return (
r" location ~* ^/uploads/.*\.(php|phar|phtml|php5)$ {" + "\n"
r" deny all;" + "\n"
r" }" + "\n"
r" location ~* ^/storage/.*\.(php|phar|phtml|php5)$ {" + "\n"
r" deny all;" + "\n"
r" }" + "\n"
)
def _build_main_app_block(proxy_upstream: str, proxy_websocket: int, php_version: str) -> str:
pu = (proxy_upstream or "").strip()
pv = php_version or "74"
if pu:
ws_lines = ""
if proxy_websocket:
ws_lines = (
" proxy_set_header Upgrade $http_upgrade;\n"
' proxy_set_header Connection "upgrade";\n'
)
return (
f" location / {{\n"
f" proxy_pass {pu};\n"
f" proxy_http_version 1.1;\n"
f" proxy_set_header Host $host;\n"
f" proxy_set_header X-Real-IP $remote_addr;\n"
f" proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n"
f" proxy_set_header X-Forwarded-Proto $scheme;\n"
f"{ws_lines}"
f" proxy_read_timeout 3600s;\n"
f" }}\n"
)
return (
r" location ~ .*\.(gif|jpg|jpeg|png|bmp|swf)$ {" + "\n"
f" expires 30d;\n"
f" access_log off;\n"
f" }}\n"
r" location ~ .*\.(js|css)?$ {" + "\n"
f" expires 12h;\n"
f" access_log off;\n"
f" }}\n"
r" location ~ \.php$ {" + "\n"
f" fastcgi_pass unix:/tmp/php-cgi-{pv}.sock;\n"
f" fastcgi_index index.php;\n"
f" include fastcgi.conf;\n"
f" }}\n"
)
def _build_dir_auth_block(
dir_path: str,
user_file: str,
proxy_upstream: str,
root_path: str,
) -> str:
dp = (dir_path or "").strip()
uf = (user_file or "").strip()
if not dp or not uf or ".." in dp or ".." in uf:
return ""
if not dp.startswith("/"):
dp = "/" + dp
qf = uf.replace("\\", "\\\\").replace('"', '\\"')
qr = root_path.replace("\\", "\\\\")
pu = (proxy_upstream or "").strip()
if pu:
puc = pu.rstrip("/")
return (
f" location ^~ {dp} {{\n"
f' auth_basic "YakPanel";\n'
f' auth_basic_user_file "{qf}";\n'
f" proxy_pass {puc};\n"
f" proxy_http_version 1.1;\n"
f" proxy_set_header Host $host;\n"
f" proxy_set_header X-Real-IP $remote_addr;\n"
f" proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n"
f" proxy_set_header X-Forwarded-Proto $scheme;\n"
f" }}\n"
)
return (
f" location ^~ {dp} {{\n"
f' auth_basic "YakPanel";\n'
f' auth_basic_user_file "{qf}";\n'
f" root {qr};\n"
f" try_files $uri $uri/ =404;\n"
f" }}\n"
)
def _build_location_bundle(
    root_path: str,
    redirects: list[tuple[str, str, int]] | None,
    proxy_upstream: str,
    proxy_websocket: int,
    dir_auth_path: str,
    dir_auth_user_file: str,
    php_deny_execute: int,
    php_version: str,
) -> str:
    """Assemble the shared location blocks: ACME, redirects, dir auth, PHP deny, main app."""
    # The ACME HTTP-01 challenge path must stay reachable even when other
    # rules (force-HTTPS, proxying) are active.
    acme_block = (
        " location ^~ /.well-known/acme-challenge/ {\n"
        f" root {root_path};\n"
        ' default_type "text/plain";\n'
        " allow all;\n"
        " access_log off;\n"
        " }\n"
    )
    rules = [
        f" location = {src} {{ return {code} {tgt}; }}"
        for src, tgt, code in (redirects or [])
        if src and tgt
    ]
    redirect_part = ("\n" + "\n".join(rules)) if rules else ""
    pieces = [
        acme_block,
        redirect_part,
        "\n",
        _build_dir_auth_block(dir_auth_path, dir_auth_user_file, proxy_upstream, root_path),
        _build_php_deny_execute_block(php_deny_execute),
        _build_main_app_block(proxy_upstream, proxy_websocket, php_version),
    ]
    return "".join(pieces)
def _build_ssl_server_block(
    server_names: str,
    root_path: str,
    logs_path: str,
    site_name: str,
    php_version: str,
    fullchain: str,
    privkey: str,
    redirects: list[tuple[str, str, int]] | None,
    proxy_upstream: str = "",
    proxy_websocket: int = 0,
    dir_auth_path: str = "",
    dir_auth_user_file: str = "",
    php_deny_execute: int = 0,
) -> str:
    """Second server {} for HTTPS when LE certs exist.

    fullchain/privkey are absolute PEM paths; the remaining parameters mirror
    the HTTP server so both ports serve identical location rules.
    """
    # Escape backslashes and quotes so paths are safe inside quoted nginx values.
    q_fc = fullchain.replace("\\", "\\\\").replace('"', '\\"')
    q_pk = privkey.replace("\\", "\\\\").replace('"', '\\"')
    # Reuse the shared location set (ACME, redirects, auth, deny, main app).
    bundle = _build_location_bundle(
        root_path,
        redirects,
        proxy_upstream,
        proxy_websocket,
        dir_auth_path,
        dir_auth_user_file,
        php_deny_execute,
        php_version,
    )
    return (
        f"server {{\n"
        f" listen 443 ssl;\n"
        f" server_name {server_names};\n"
        f' ssl_certificate "{q_fc}";\n'
        f' ssl_certificate_key "{q_pk}";\n'
        f" index index.php index.html index.htm default.php default.htm default.html;\n"
        f" root {root_path};\n"
        f" error_page 404 /404.html;\n"
        f" error_page 502 /502.html;\n"
        f"{bundle}"
        f" access_log {logs_path}/{site_name}.log;\n"
        f" error_log {logs_path}/{site_name}.error.log;\n"
        f"}}\n"
    )
def _render_vhost(
template: str,
@@ -22,21 +378,60 @@ def _render_vhost(
php_version: str,
force_https: int,
redirects: list[tuple[str, str, int]] | None = None,
le_hostnames: list[str] | None = None,
proxy_upstream: str = "",
proxy_websocket: int = 0,
dir_auth_path: str = "",
dir_auth_user_file: str = "",
php_deny_execute: int = 0,
) -> str:
"""Render nginx vhost template. redirects: [(source, target, code), ...]"""
force_block = "return 301 https://$host$request_uri;" if force_https else ""
redirect_lines = []
for src, tgt, code in (redirects or []):
if src and tgt:
redirect_lines.append(f" location = {src} {{ return {code} {tgt}; }}")
redirect_block = "\n".join(redirect_lines) if redirect_lines else ""
if force_https:
force_block = (
' if ($request_uri !~ "^/.well-known/acme-challenge/") {\n'
" return 301 https://$host$request_uri;\n"
" }"
)
else:
force_block = ""
hosts = le_hostnames if le_hostnames is not None else [p for p in server_names.split() if p]
ssl_block = ""
le = _letsencrypt_paths_any(hosts)
if le:
fc, pk = le
ssl_block = _build_ssl_server_block(
server_names,
root_path,
logs_path,
site_name,
php_version,
fc,
pk,
redirects,
proxy_upstream,
proxy_websocket,
dir_auth_path,
dir_auth_user_file,
php_deny_execute,
)
bundle = _build_location_bundle(
root_path,
redirects,
proxy_upstream,
proxy_websocket,
dir_auth_path,
dir_auth_user_file,
php_deny_execute,
php_version,
)
content = template.replace("{SERVER_NAMES}", server_names)
content = content.replace("{ROOT_PATH}", root_path)
content = content.replace("{LOGS_PATH}", logs_path)
content = content.replace("{SITE_NAME}", site_name)
content = content.replace("{PHP_VERSION}", php_version or "74")
content = content.replace("{FORCE_HTTPS_BLOCK}", force_block)
content = content.replace("{REDIRECTS_BLOCK}", redirect_block)
content = content.replace("{LOCATION_BUNDLE}", bundle)
content = content.replace("{SSL_SERVER_BLOCK}", ssl_block)
return content
@@ -62,6 +457,16 @@ async def domain_exists(db: AsyncSession, domains: list[str], exclude_site_id: i
return None
def _vhost_kwargs_from_site(site: Site) -> dict:
    """Extract the optional vhost feature fields from a Site row, defaulting safely."""
    def _text(attr: str) -> str:
        # Missing attribute or None collapses to "".
        return getattr(site, attr, None) or ""

    def _flag(attr: str) -> int:
        # Missing/None/falsy collapses to 0; truthy values normalize via int().
        return int(getattr(site, attr, 0) or 0)

    return {
        "proxy_upstream": _text("proxy_upstream"),
        "proxy_websocket": _flag("proxy_websocket"),
        "dir_auth_path": _text("dir_auth_path"),
        "dir_auth_user_file": _text("dir_auth_user_file"),
        "php_deny_execute": _flag("php_deny_execute"),
    }
async def create_site(
db: AsyncSession,
name: str,
@@ -71,6 +476,11 @@ async def create_site(
ps: str = "",
php_version: str = "74",
force_https: int = 0,
proxy_upstream: str = "",
proxy_websocket: int = 0,
dir_auth_path: str = "",
dir_auth_user_file: str = "",
php_deny_execute: int = 0,
) -> dict:
"""Create a new site with vhost config."""
if not path_safe_check(name) or not path_safe_check(path):
@@ -94,7 +504,19 @@ async def create_site(
if not os.path.exists(site_path):
os.makedirs(site_path, 0o755)
site = Site(name=name, path=site_path, ps=ps, project_type=project_type, php_version=php_version or "74", force_https=force_https or 0)
site = Site(
name=name,
path=site_path,
ps=ps,
project_type=project_type,
php_version=php_version or "74",
force_https=force_https or 0,
proxy_upstream=(proxy_upstream or "")[:512],
proxy_websocket=1 if proxy_websocket else 0,
dir_auth_path=(dir_auth_path or "")[:256],
dir_auth_user_file=(dir_auth_user_file or "")[:512],
php_deny_execute=1 if php_deny_execute else 0,
)
db.add(site)
await db.flush()
@@ -113,26 +535,48 @@ async def create_site(
if os.path.exists(template_path):
template = read_file(template_path) or ""
server_names = " ".join(d.split(":")[0] for d in domains)
content = _render_vhost(template, server_names, site_path, www_logs, name, php_version or "74", force_https or 0, [])
le_hosts = [d.split(":")[0] for d in domains]
vk = _vhost_kwargs_from_site(site)
content = _render_vhost(
template,
server_names,
site_path,
www_logs,
name,
php_version or "74",
force_https or 0,
[],
le_hosts,
**vk,
)
write_file(conf_path, content)
# Reload Nginx if available
nginx_bin = os.path.join(setup_path, "nginx", "sbin", "nginx")
if os.path.exists(nginx_bin):
exec_shell_sync(f"{nginx_bin} -t && {nginx_bin} -s reload")
reload_ok, reload_err = nginx_reload_all_known()
await db.commit()
return {"status": True, "msg": "Site created", "id": site.id}
if reload_ok:
return {"status": True, "msg": "Site created", "id": site.id}
return {
"status": True,
"msg": f"Site created but nginx reload failed (HTTPS may not work): {reload_err}",
"id": site.id,
}
async def list_sites(db: AsyncSession) -> list[dict]:
"""List all sites with domain count."""
"""List all sites with domain count, primary domain, backup count, SSL summary."""
cfg = get_runtime_config()
backup_dir = cfg.get("backup_path") or ""
result = await db.execute(select(Site).order_by(Site.id))
sites = result.scalars().all()
out = []
for s in sites:
domain_result = await db.execute(select(Domain).where(Domain.pid == s.id))
domains = domain_result.scalars().all()
domain_result = await db.execute(select(Domain).where(Domain.pid == s.id).order_by(Domain.id))
domain_rows = domain_result.scalars().all()
domain_list = [f"{d.name}:{d.port}" if d.port != "80" else d.name for d in domain_rows]
hostnames = [d.name for d in domain_rows]
primary = hostnames[0] if hostnames else ""
php_ver = getattr(s, "php_version", None) or "74"
out.append({
"id": s.id,
"name": s.name,
@@ -140,8 +584,13 @@ async def list_sites(db: AsyncSession) -> list[dict]:
"status": s.status,
"ps": s.ps,
"project_type": s.project_type,
"domain_count": len(domains),
"domain_count": len(domain_rows),
"addtime": s.addtime.isoformat() if s.addtime else None,
"php_version": php_ver,
"primary_domain": primary,
"domains": domain_list,
"backup_count": _backup_count(s.name, backup_dir),
"ssl": _best_ssl_for_hostnames(hostnames),
})
return out
@@ -162,12 +611,12 @@ async def delete_site(db: AsyncSession, site_id: int) -> dict:
if os.path.exists(conf_path):
os.remove(conf_path)
nginx_bin = os.path.join(cfg["setup_path"], "nginx", "sbin", "nginx")
if os.path.exists(nginx_bin):
exec_shell_sync(f"{nginx_bin} -s reload")
reload_ok, reload_err = nginx_reload_all_known()
await db.commit()
return {"status": True, "msg": "Site deleted"}
if reload_ok:
return {"status": True, "msg": "Site deleted"}
return {"status": True, "msg": f"Site deleted but nginx reload failed: {reload_err}"}
async def get_site_count(db: AsyncSession) -> int:
@@ -177,6 +626,34 @@ async def get_site_count(db: AsyncSession) -> int:
return result.scalar() or 0
async def ssl_alert_summary(db: AsyncSession) -> dict:
    """Sites with LE certs expiring soon or expired (for dashboard banners)."""
    site_rows = (await db.execute(select(Site).order_by(Site.id))).scalars().all()
    expired: list[dict] = []
    expiring: list[dict] = []
    for site in site_rows:
        dq = await db.execute(select(Domain).where(Domain.pid == site.id).order_by(Domain.id))
        hosts = [d.name for d in dq.scalars().all()]
        if not hosts:
            # A site without domains can't have a matching certificate.
            continue
        info = _best_ssl_for_hostnames(hosts)
        status = info["status"]
        if status not in ("expired", "expiring"):
            continue
        entry = {
            "site": site.name,
            "primary": hosts[0],
            "days_left": info.get("days_left"),
        }
        (expired if status == "expired" else expiring).append(entry)
    return {"expired": expired, "expiring": expiring}
async def get_site_with_domains(db: AsyncSession, site_id: int) -> dict | None:
"""Get site with domain list for editing."""
result = await db.execute(select(Site).where(Site.id == site_id))
@@ -195,6 +672,11 @@ async def get_site_with_domains(db: AsyncSession, site_id: int) -> dict | None:
"project_type": site.project_type,
"php_version": getattr(site, "php_version", None) or "74",
"force_https": getattr(site, "force_https", 0) or 0,
"proxy_upstream": getattr(site, "proxy_upstream", None) or "",
"proxy_websocket": int(getattr(site, "proxy_websocket", 0) or 0),
"dir_auth_path": getattr(site, "dir_auth_path", None) or "",
"dir_auth_user_file": getattr(site, "dir_auth_user_file", None) or "",
"php_deny_execute": int(getattr(site, "php_deny_execute", 0) or 0),
"domains": domain_list,
}
@@ -207,6 +689,11 @@ async def update_site(
ps: str | None = None,
php_version: str | None = None,
force_https: int | None = None,
proxy_upstream: str | None = None,
proxy_websocket: int | None = None,
dir_auth_path: str | None = None,
dir_auth_user_file: str | None = None,
php_deny_execute: int | None = None,
) -> dict:
"""Update site domains, path, or note."""
result = await db.execute(select(Site).where(Site.id == site_id))
@@ -236,17 +723,35 @@ async def update_site(
site.php_version = php_version or "74"
if force_https is not None:
site.force_https = 1 if force_https else 0
if proxy_upstream is not None:
site.proxy_upstream = (proxy_upstream or "")[:512]
if proxy_websocket is not None:
site.proxy_websocket = 1 if proxy_websocket else 0
if dir_auth_path is not None:
site.dir_auth_path = (dir_auth_path or "")[:256]
if dir_auth_user_file is not None:
site.dir_auth_user_file = (dir_auth_user_file or "")[:512]
if php_deny_execute is not None:
site.php_deny_execute = 1 if php_deny_execute else 0
await db.flush()
# Regenerate Nginx vhost if domains, php_version, or force_https changed
if domains is not None or php_version is not None or force_https is not None:
regen = (
domains is not None
or php_version is not None
or force_https is not None
or proxy_upstream is not None
or proxy_websocket is not None
or dir_auth_path is not None
or dir_auth_user_file is not None
or php_deny_execute is not None
)
if regen:
cfg = get_runtime_config()
vhost_path = os.path.join(cfg["setup_path"], "panel", "vhost", "nginx")
conf_path = os.path.join(vhost_path, f"{site.name}.conf")
panel_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
template_path = os.path.join(panel_root, "webserver", "templates", "nginx_site.conf")
if os.path.exists(template_path):
template_path = _nginx_site_template_path()
if template_path:
template = read_file(template_path) or ""
domain_result = await db.execute(select(Domain).where(Domain.pid == site.id))
domain_rows = domain_result.scalars().all()
@@ -256,11 +761,25 @@ async def update_site(
fhttps = getattr(site, "force_https", 0) or 0
redir_result = await db.execute(select(SiteRedirect).where(SiteRedirect.site_id == site.id))
redirects = [(r.source, r.target, r.code or 301) for r in redir_result.scalars().all()]
content = _render_vhost(template, server_names, site.path, cfg["www_logs"], site.name, php_ver, fhttps, redirects)
le_hosts = [d.name for d in domain_rows]
vk = _vhost_kwargs_from_site(site)
content = _render_vhost(
template,
server_names,
site.path,
cfg["www_logs"],
site.name,
php_ver,
fhttps,
redirects,
le_hosts,
**vk,
)
write_file(conf_path, content)
nginx_bin = os.path.join(cfg["setup_path"], "nginx", "sbin", "nginx")
if os.path.exists(nginx_bin):
exec_shell_sync(f"{nginx_bin} -t && {nginx_bin} -s reload")
reload_ok, reload_err = nginx_reload_all_known()
if not reload_ok:
await db.commit()
return {"status": False, "msg": f"Vhost updated but nginx test/reload failed: {reload_err}"}
await db.commit()
return {"status": True, "msg": "Site updated"}
@@ -299,28 +818,35 @@ async def set_site_status(db: AsyncSession, site_id: int, status: int) -> dict:
site.status = status
await db.commit()
nginx_bin = os.path.join(get_runtime_config()["setup_path"], "nginx", "sbin", "nginx")
if os.path.exists(nginx_bin):
exec_shell_sync(f"{nginx_bin} -t && {nginx_bin} -s reload")
reload_ok, reload_err = nginx_reload_all_known()
if not reload_ok:
return {
"status": False,
"msg": f"Site {'enabled' if status == 1 else 'disabled'} but nginx test/reload failed: {reload_err}",
}
return {"status": True, "msg": "Site " + ("enabled" if status == 1 else "disabled")}
async def regenerate_site_vhost(db: AsyncSession, site_id: int) -> dict:
"""Regenerate nginx vhost for a site (e.g. after redirect changes)."""
"""Regenerate nginx vhost for a site (e.g. after redirect changes or before LE validation)."""
result = await db.execute(select(Site).where(Site.id == site_id))
site = result.scalar_one_or_none()
if not site:
return {"status": False, "msg": "Site not found"}
cfg = get_runtime_config()
vhost_path = os.path.join(cfg["setup_path"], "panel", "vhost", "nginx")
conf_path = os.path.join(vhost_path, f"{site.name}.conf")
if site.status != 1:
return {"status": True, "msg": "Site disabled, vhost not active"}
panel_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
template_path = os.path.join(panel_root, "webserver", "templates", "nginx_site.conf")
if not os.path.exists(template_path):
return {"status": False, "msg": "Template not found"}
conf_path, disabled_path = _vhost_path(site.name)
if site.status == 1:
write_path = conf_path
else:
write_path = disabled_path if os.path.isfile(disabled_path) else conf_path
template_path = _nginx_site_template_path()
if not template_path:
return {
"status": False,
"msg": "Template not found (nginx_site.conf). Expected under panel webserver/templates/ "
"or set env YAKPANEL_NGINX_TEMPLATE to the full path. Check Settings.panel_path matches the install directory.",
}
template = read_file(template_path) or ""
domain_result = await db.execute(select(Domain).where(Domain.pid == site.id))
domain_rows = domain_result.scalars().all()
@@ -330,9 +856,22 @@ async def regenerate_site_vhost(db: AsyncSession, site_id: int) -> dict:
fhttps = getattr(site, "force_https", 0) or 0
redir_result = await db.execute(select(SiteRedirect).where(SiteRedirect.site_id == site.id))
redirects = [(r.source, r.target, r.code or 301) for r in redir_result.scalars().all()]
content = _render_vhost(template, server_names, site.path, cfg["www_logs"], site.name, php_ver, fhttps, redirects)
write_file(conf_path, content)
nginx_bin = os.path.join(cfg["setup_path"], "nginx", "sbin", "nginx")
if os.path.exists(nginx_bin):
exec_shell_sync(f"{nginx_bin} -t && {nginx_bin} -s reload")
le_hosts = [d.name for d in domain_rows]
vk = _vhost_kwargs_from_site(site)
content = _render_vhost(
template,
server_names,
site.path,
cfg["www_logs"],
site.name,
php_ver,
fhttps,
redirects,
le_hosts,
**vk,
)
write_file(write_path, content)
reload_ok, reload_err = nginx_reload_all_known()
if not reload_ok:
return {"status": False, "msg": f"Vhost written but nginx test/reload failed: {reload_err}"}
return {"status": True, "msg": "Vhost regenerated"}

View File

@@ -12,12 +12,20 @@ asyncpg>=0.29.0
# Auth
python-jose[cryptography]>=3.3.0
passlib[bcrypt]>=1.7.4
# passlib 1.7.4 breaks against bcrypt>=4.1 (ValueError in bcrypt self-test / 72-byte rules)
bcrypt>=4.0.1,<4.1
python-dotenv>=1.0.0
# Redis & Celery
redis>=5.0.0
celery>=5.3.0
# Let's Encrypt (optional if system certbot/snap not used; enables python -m certbot from panel venv)
certbot>=3.0.0
certbot-nginx>=3.0.0
certbot-dns-cloudflare>=3.0.0
boto3>=1.34.0
# Utils
psutil>=5.9.0
croniter>=2.0.0

View File

@@ -14,12 +14,19 @@ from app.core.security import get_password_hash
from app.models.user import User
async def seed():
async def seed(*, reset_password: bool = False):
await init_db()
async with AsyncSessionLocal() as db:
result = await db.execute(select(User).where(User.username == "admin"))
if result.scalar_one_or_none():
print("Admin user already exists")
existing = result.scalar_one_or_none()
if existing:
if reset_password:
existing.password = get_password_hash("admin")
existing.is_active = True
await db.commit()
print("Admin password reset: username=admin, password=admin")
else:
print("Admin user already exists (use --reset-password to force admin/admin)")
return
admin = User(
username="admin",
@@ -32,4 +39,5 @@ async def seed():
if __name__ == "__main__":
asyncio.run(seed())
reset = "--reset-password" in sys.argv
asyncio.run(seed(reset_password=reset))

View File

@@ -0,0 +1,26 @@
# YakPanel feature parity checklist (clean-room)
Internal checklist against common hosting-panel capabilities used as a product roadmap only. No third-party panel code is shipped.
| Area | Status | YakPanel location |
|------|--------|-------------------|
| Sites, domains, redirects | Done | `api/site.py`, `site_service.py` |
| Nginx vhost + SSL (HTTP-01) | Done | `webserver/templates/nginx_site.conf`, `ssl.py` |
| SSL diagnostics + port 443 probe | Done | `GET /ssl/diagnostics` |
| Nginx include wizard (drop-in hints) | Done | `GET /ssl/diagnostics``nginx_wizard` |
| Reverse proxy site mode | Done | `Site.proxy_upstream`, vhost `proxy_pass` |
| WebSocket proxy hint | Done | `Site.proxy_websocket` |
| Directory HTTP basic auth | Done | `Site.dir_auth_path`, `dir_auth_user_file` |
| Disable PHP execution (uploads) | Done | `Site.php_deny_execute` |
| DNS-01 Let's Encrypt (Cloudflare / manual TXT) | Done | `POST /ssl/dns-request/*` |
| Security checklist (read-only probes) | Done | `GET /security/checklist` |
| FTP logs (tail) | Done | `GET /ftp/logs` |
| Cron job templates (YakPanel JSON) | Done | `GET /crontab/templates`, `data/cron_templates.json` |
| Backup plans + optional S3 upload | Done | `backup.py`, `BackupPlan` S3 fields, `boto3` optional |
| Dashboard SSL expiry / inode alerts | Done | `GET /dashboard/stats``ssl_alerts`, `system.inode_*` |
| Firewall UFW + firewalld status in UI | Done | `GET /firewall/status`, `FirewallPage.tsx` |
| Database / FTP / firewall rules engine | Partial (pre-existing) | respective `api/*.py` |
| Mail server | Not planned | — |
| WordPress one-click | Not planned | plugin later |
_Last updated: parity pass (this implementation)._

View File

@@ -0,0 +1 @@
import{j as t}from"./index-Cvh4tLHo.js";function o({variant:l="danger",children:r,className:a="",dismissible:e,onDismiss:s}){return t.jsxs("div",{className:`alert alert-${l} ${e?"alert-dismissible fade show":""} ${a}`.trim(),role:"alert",children:[r,e?t.jsx("button",{type:"button",className:"btn-close","aria-label":"Close",onClick:s}):null]})}export{o as A};

View File

@@ -0,0 +1 @@
import{j as a}from"./index-Cvh4tLHo.js";function i({children:n,variant:r="primary",size:t,type:o="button",disabled:m,className:s="",...u}){return a.jsx("button",{type:o,disabled:m,className:`btn btn-${r}${t?` btn-${t}`:""} ${s}`.trim(),...u,children:n})}export{i as A};

View File

@@ -0,0 +1 @@
import{j as e}from"./index-Cvh4tLHo.js";function c({title:s,iconClass:a,children:i,headerExtra:r,className:d="",bodyClassName:l=""}){return e.jsxs("div",{className:`card flex-fill ${d}`.trim(),children:[(s||r)&&e.jsxs("div",{className:"card-header border-0 pb-0 d-flex align-items-center justify-content-between flex-wrap gap-2",children:[e.jsxs("h4",{className:"mb-0 d-flex align-items-center gap-2",children:[a?e.jsx("i",{className:a,"aria-hidden":!0}):null,s]}),r]}),e.jsx("div",{className:`card-body ${l}`.trim(),children:i})]})}export{c as A};

View File

@@ -0,0 +1 @@
import{j as t}from"./index-Cvh4tLHo.js";function l({children:r,className:s="",responsive:a=!0}){const e=t.jsx("table",{className:`table table-hover ${s}`.trim(),children:r});return a?t.jsx("div",{className:"table-responsive",children:e}):e}export{l as A};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r as i,g as m,j as s}from"./index-Cvh4tLHo.js";import{A as n}from"./AdminAlert-Bt3L8_zJ.js";import{A as o}from"./AdminCard-BYkisaPa.js";import{P as d}from"./PageHeader-D6k34vvM.js";function p(){const[e,r]=i.useState(null),[a,l]=i.useState("");return i.useEffect(()=>{m().then(r).catch(c=>l(c.message))},[]),a?s.jsxs(s.Fragment,{children:[s.jsx(d,{}),s.jsx(n,{variant:"danger",children:a})]}):e?s.jsxs(s.Fragment,{children:[s.jsx(d,{}),s.jsxs("div",{className:"row g-3 mb-4",children:[s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-world",title:"Websites",value:e.site_count})}),s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-folder-share",title:"FTP Accounts",value:e.ftp_count})}),s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-database",title:"Databases",value:e.database_count})})]}),s.jsxs("div",{className:"row g-3",children:[s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-cpu",title:"CPU",value:`${e.system.cpu_percent}%`})}),s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-device-desktop",title:"Memory",value:`${e.system.memory_percent}%`,subtitle:`${e.system.memory_used_mb} / ${e.system.memory_total_mb} MB`})}),s.jsx("div",{className:"col-md-4 d-flex",children:s.jsx(t,{iconClass:"ti ti-database-export",title:"Disk",value:`${e.system.disk_percent}%`,subtitle:`${e.system.disk_used_gb} / ${e.system.disk_total_gb} GB`})})]})]}):s.jsxs(s.Fragment,{children:[s.jsx(d,{}),s.jsx("div",{className:"placeholder-glow",children:s.jsx("span",{className:"placeholder col-12 rounded",style:{height:"8rem"}})})]})}function t({iconClass:e,title:r,value:a,subtitle:l}){return s.jsxs(o,{className:"border-0 shadow-sm",bodyClassName:"d-flex align-items-center gap-3",children:[s.jsx("span",{className:"avatar avatar-md bg-primary-transparent text-primary rounded-circle d-flex align-items-center justify-content-center 
flex-shrink-0",children:s.jsx("i",{className:`${e} fs-4`,"aria-hidden":!0})}),s.jsxs("div",{children:[s.jsx("p",{className:"text-muted mb-0 small",children:r}),s.jsx("p",{className:"fs-4 fw-semibold mb-0",children:a}),l?s.jsx("p",{className:"text-muted small mb-0",children:l}):null]})]})}export{p as DashboardPage};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r as t,j as e,M as o}from"./index-Cvh4tLHo.js";import{M as l}from"./Modal-CCihVZTY.js";import{A as R}from"./AdminAlert-Bt3L8_zJ.js";import{A as m}from"./AdminButton-BKglG8kI.js";import{P as N}from"./PageHeader-D6k34vvM.js";function k(){const[d,y]=t.useState([]),[c,b]=t.useState([]),[v,u]=t.useState(!0),[x,h]=t.useState(""),[i,p]=t.useState(null),[a,n]=t.useState(null),[j,f]=t.useState(""),g=()=>{u(!0),Promise.all([o("/ssl/domains"),o("/ssl/certificates")]).then(([s,r])=>{y(s),b(r.certificates||[])}).catch(s=>h(s.message)).finally(()=>u(!1))};t.useEffect(()=>{g()},[]);const S=s=>{s.preventDefault(),a&&(p(a.name),o("/ssl/request",{method:"POST",body:JSON.stringify({domain:a.name,webroot:a.site_path,email:j})}).then(()=>{n(null),g()}).catch(r=>h(r.message)).finally(()=>p(null)))},q=s=>c.some(r=>r.name===s||r.name.startsWith(s+" "));return v?e.jsxs(e.Fragment,{children:[e.jsx(N,{title:"Domains & SSL"}),e.jsx("p",{className:"text-secondary",children:"Loading…"})]}):e.jsxs(e.Fragment,{children:[e.jsx(N,{title:"Domains & SSL"}),x?e.jsx(R,{className:"mb-3",children:x}):null,e.jsx("div",{className:"alert alert-warning small mb-4",children:"Request Let's Encrypt certificates for your site domains. Requires certbot and nginx configured for the domain."}),e.jsxs("div",{className:"row g-4",children:[e.jsx("div",{className:"col-lg-6",children:e.jsxs("div",{className:"card h-100",children:[e.jsx("div",{className:"card-header",children:"Domains (from sites)"}),e.jsx("div",{className:"list-group list-group-flush overflow-auto",style:{maxHeight:"20rem"},children:d.length===0?e.jsx("div",{className:"list-group-item text-secondary text-center py-4",children:"No domains. 
Add a site first."}):d.map(s=>e.jsxs("div",{className:"list-group-item d-flex align-items-center justify-content-between gap-2 flex-wrap",children:[e.jsxs("div",{className:"small",children:[e.jsx("span",{className:"font-monospace",children:s.name}),s.port!=="80"?e.jsxs("span",{className:"text-secondary ms-1",children:[":",s.port]}):null,e.jsxs("span",{className:"text-secondary ms-2",children:["(",s.site_name,")"]})]}),e.jsx("div",{children:q(s.name)?e.jsxs("span",{className:"text-success small",children:[e.jsx("i",{className:"ti ti-shield-check me-1","aria-hidden":!0}),"Cert"]}):e.jsx(m,{variant:"outline-primary",size:"sm",disabled:!!i,onClick:()=>{n(s),f("")},children:i===s.name?e.jsx("span",{className:"spinner-border spinner-border-sm",role:"status"}):"Request SSL"})})]},s.id))})]})}),e.jsx("div",{className:"col-lg-6",children:e.jsxs("div",{className:"card h-100",children:[e.jsx("div",{className:"card-header",children:"Certificates"}),e.jsx("div",{className:"list-group list-group-flush overflow-auto",style:{maxHeight:"20rem"},children:c.length===0?e.jsx("div",{className:"list-group-item text-secondary text-center py-4",children:"No certificates yet"}):c.map(s=>e.jsxs("div",{className:"list-group-item d-flex align-items-center gap-2",children:[e.jsx("i",{className:"ti ti-shield-check text-success flex-shrink-0","aria-hidden":!0}),e.jsx("span",{className:"font-monospace small text-break",children:s.name})]},s.name))})]})})]}),e.jsxs(l,{show:!!a,onHide:()=>n(null),centered:!0,children:[e.jsx(l.Header,{closeButton:!0,children:e.jsxs(l.Title,{children:["Request SSL for ",a==null?void 0:a.name]})}),a?e.jsxs("form",{onSubmit:S,children:[e.jsxs(l.Body,{children:[e.jsxs("div",{className:"mb-3",children:[e.jsx("label",{className:"form-label",children:"Domain"}),e.jsx("input",{type:"text",value:a.name,readOnly:!0,className:"form-control-plaintext border rounded px-3 py-2 
bg-body-secondary"})]}),e.jsxs("div",{className:"mb-3",children:[e.jsx("label",{className:"form-label",children:"Webroot (site path)"}),e.jsx("input",{type:"text",value:a.site_path,readOnly:!0,className:"form-control-plaintext border rounded px-3 py-2 bg-body-secondary"})]}),e.jsxs("div",{className:"mb-0",children:[e.jsx("label",{className:"form-label",children:"Email (for Let's Encrypt)"}),e.jsx("input",{type:"email",value:j,onChange:s=>f(s.target.value),placeholder:"admin@example.com",className:"form-control",required:!0})]})]}),e.jsxs(l.Footer,{children:[e.jsx(m,{type:"button",variant:"secondary",onClick:()=>n(null),children:"Cancel"}),e.jsx(m,{type:"submit",variant:"primary",disabled:!!i,children:i?"Requesting…":"Request"})]})]}):null]})]})}export{k as DomainsPage};

View File

@@ -0,0 +1 @@
import{j as t}from"./index-Cvh4tLHo.js";function m({iconClass:s="ti ti-inbox",title:a,description:e,action:i}){return t.jsxs("div",{className:"text-center py-5 text-muted",children:[t.jsx("i",{className:`${s} display-4 d-block mb-3`,"aria-hidden":!0}),t.jsx("h5",{className:"text-body",children:a}),e?t.jsx("p",{className:"mb-3",children:e}):null,i]})}export{m as E};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r as t,j as e,M as p,_ as k}from"./index-Cvh4tLHo.js";import{M as r}from"./Modal-CCihVZTY.js";import{A as y}from"./AdminAlert-Bt3L8_zJ.js";import{A as c}from"./AdminButton-BKglG8kI.js";import{A as D}from"./AdminTable-BQ5Lf7EC.js";import{E as H}from"./EmptyState-D6lCh4WN.js";import{P as g}from"./PageHeader-D6k34vvM.js";function W(){const[o,v]=t.useState([]),[A,x]=t.useState(!0),[u,d]=t.useState(""),[w,a]=t.useState(!1),[j,f]=t.useState(!1),[N,m]=t.useState(""),[h,b]=t.useState(!1),n=()=>{x(!0),p("/firewall/list").then(v).catch(s=>d(s.message)).finally(()=>x(!1))};t.useEffect(()=>{n()},[]);const S=s=>{s.preventDefault();const l=s.currentTarget,i=l.elements.namedItem("port").value.trim(),E=l.elements.namedItem("protocol").value,F=l.elements.namedItem("action").value,R=l.elements.namedItem("ps").value.trim();if(!i){m("Port is required");return}f(!0),m(""),p("/firewall/create",{method:"POST",body:JSON.stringify({port:i,protocol:E,action:F,ps:R})}).then(()=>{a(!1),l.reset(),n()}).catch(T=>m(T.message)).finally(()=>f(!1))},C=(s,l)=>{confirm(`Delete rule for port ${l}?`)&&p(`/firewall/${s}`,{method:"DELETE"}).then(n).catch(i=>d(i.message))},P=()=>{b(!0),k().then(()=>n()).catch(s=>d(s.message)).finally(()=>b(!1))};return A?e.jsxs(e.Fragment,{children:[e.jsx(g,{title:"Security / Firewall"}),e.jsx("p",{className:"text-secondary",children:"Loading…"})]}):e.jsxs(e.Fragment,{children:[e.jsx(g,{title:"Security / Firewall",actions:e.jsxs("div",{className:"d-flex flex-wrap gap-2",children:[e.jsxs(c,{variant:"success",disabled:h||o.length===0,onClick:P,children:[h?e.jsx("span",{className:"spinner-border spinner-border-sm me-1",role:"status"}):e.jsx("i",{className:"ti ti-bolt me-1","aria-hidden":!0}),h?"Applying…":"Apply to UFW"]}),e.jsxs(c,{variant:"primary",onClick:()=>a(!0),children:[e.jsx("i",{className:"ti ti-plus me-1","aria-hidden":!0}),"Add Rule"]})]})}),u?e.jsx(y,{className:"mb-3",children:u}):null,e.jsxs("div",{className:"alert alert-warning small 
mb-3",children:['Rules are stored in the panel. Click "Apply to UFW" to run ',e.jsx("code",{className:"font-monospace",children:"ufw allow/deny"})," for each rule."]}),e.jsxs(r,{show:w,onHide:()=>a(!1),centered:!0,children:[e.jsx(r.Header,{closeButton:!0,children:e.jsx(r.Title,{children:"Add Firewall Rule"})}),e.jsxs("form",{onSubmit:S,children:[e.jsxs(r.Body,{children:[N?e.jsx(y,{className:"mb-3",children:N}):null,e.jsxs("div",{className:"mb-3",children:[e.jsx("label",{className:"form-label",children:"Port"}),e.jsx("input",{name:"port",type:"text",placeholder:"80 or 80-90 or 80,443",className:"form-control",required:!0})]}),e.jsxs("div",{className:"mb-3",children:[e.jsx("label",{className:"form-label",children:"Protocol"}),e.jsxs("select",{name:"protocol",className:"form-select",children:[e.jsx("option",{value:"tcp",children:"TCP"}),e.jsx("option",{value:"udp",children:"UDP"})]})]}),e.jsxs("div",{className:"mb-3",children:[e.jsx("label",{className:"form-label",children:"Action"}),e.jsxs("select",{name:"action",className:"form-select",children:[e.jsx("option",{value:"accept",children:"Accept"}),e.jsx("option",{value:"drop",children:"Drop"}),e.jsx("option",{value:"reject",children:"Reject"})]})]}),e.jsxs("div",{className:"mb-0",children:[e.jsx("label",{className:"form-label",children:"Note 
(optional)"}),e.jsx("input",{name:"ps",type:"text",placeholder:"HTTP",className:"form-control"})]})]}),e.jsxs(r.Footer,{children:[e.jsx(c,{type:"button",variant:"secondary",onClick:()=>a(!1),children:"Cancel"}),e.jsx(c,{type:"submit",variant:"primary",disabled:j,children:j?"Adding…":"Add"})]})]})]}),e.jsx("div",{className:"card",children:e.jsxs(D,{children:[e.jsx("thead",{children:e.jsxs("tr",{children:[e.jsx("th",{children:"Port"}),e.jsx("th",{children:"Protocol"}),e.jsx("th",{children:"Action"}),e.jsx("th",{children:"Note"}),e.jsx("th",{className:"text-end",children:"Actions"})]})}),e.jsx("tbody",{children:o.length===0?e.jsx("tr",{children:e.jsx("td",{colSpan:5,className:"p-0",children:e.jsx(H,{title:"No rules",description:'Click "Add Rule" to create one.'})})}):o.map(s=>e.jsxs("tr",{children:[e.jsx("td",{className:"font-monospace",children:s.port}),e.jsx("td",{children:s.protocol}),e.jsx("td",{children:s.action}),e.jsx("td",{children:s.ps||"—"}),e.jsx("td",{className:"text-end",children:e.jsx("button",{type:"button",className:"btn btn-link btn-sm text-danger p-1",title:"Delete",onClick:()=>C(s.id,s.port),children:e.jsx("i",{className:"ti ti-trash","aria-hidden":!0})})})]},s.id))})]})})]})}export{W as FirewallPage};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r as t,Y as p,j as e,Z as C}from"./index-Cvh4tLHo.js";import{A as P}from"./AdminAlert-Bt3L8_zJ.js";import{A as g}from"./AdminButton-BKglG8kI.js";import{P as E}from"./PageHeader-D6k34vvM.js";function _(a){return a<1024?a+" B":a<1024*1024?(a/1024).toFixed(1)+" KB":(a/1024/1024).toFixed(1)+" MB"}function D(){const[a,j]=t.useState("/"),[m,N]=t.useState([]),[v,x]=t.useState(!0),[h,i]=t.useState(""),[l,y]=t.useState(null),[b,u]=t.useState(""),[o,r]=t.useState(!1),[c,w]=t.useState(500),d=s=>{x(!0),i(""),C(s).then(n=>{j(n.path),N(n.items.sort((f,F)=>f.is_dir===F.is_dir?0:f.is_dir?-1:1))}).catch(n=>i(n.message)).finally(()=>x(!1))};t.useEffect(()=>{d(a)},[]),t.useEffect(()=>{l&&(r(!0),p(l,c).then(s=>u(s.content)).catch(s=>i(s.message)).finally(()=>r(!1)))},[l,c]);const k=s=>{if(s.is_dir){const n=a==="/"?"/"+s.name:a+"/"+s.name;d(n)}else y(a==="/"?s.name:a+"/"+s.name)},L=()=>{const s=a.replace(/\/$/,"").split("/").filter(Boolean);if(s.length<=1)return;s.pop();const n=s.length===0?"/":"/"+s.join("/");d(n)},S=()=>{l&&(r(!0),p(l,c).then(s=>u(s.content)).catch(s=>i(s.message)).finally(()=>r(!1)))},B=a.split("/").filter(Boolean).length>0;return e.jsxs(e.Fragment,{children:[e.jsx(E,{title:"Logs"}),e.jsxs("div",{className:"d-flex flex-wrap align-items-center gap-2 mb-3",children:[e.jsxs(g,{variant:"secondary",size:"sm",onClick:L,disabled:!B,children:[e.jsx("i",{className:"ti ti-arrow-left me-1","aria-hidden":!0}),"Back"]}),e.jsxs("code",{className:"small bg-body-secondary px-2 py-1 rounded text-break",children:["Path: ",a||"/"]})]}),h?e.jsx(P,{className:"mb-3",children:h}):null,e.jsxs("div",{className:"row g-3",children:[e.jsx("div",{className:"col-lg-6",children:e.jsxs("div",{className:"card h-100",children:[e.jsx("div",{className:"card-header small fw-medium",children:"Log files"}),v?e.jsx("div",{className:"card-body text-center py-5",children:e.jsx("span",{className:"spinner-border text-secondary",role:"status"})}):e.jsx("div",{className:"list-group list-group-flush 
overflow-auto",style:{maxHeight:500},children:m.length===0?e.jsx("div",{className:"list-group-item text-secondary text-center py-4",children:"Empty directory"}):m.map(s=>e.jsxs("button",{type:"button",className:"list-group-item list-group-item-action d-flex gap-2 align-items-center",onClick:()=>k(s),children:[e.jsx("i",{className:`ti flex-shrink-0 ${s.is_dir?"ti-folder text-warning":"ti-file text-secondary"}`,"aria-hidden":!0}),e.jsx("span",{className:"text-truncate",children:s.name}),s.is_dir?null:e.jsx("span",{className:"small text-secondary ms-auto flex-shrink-0",children:_(s.size)})]},s.name))})]})}),e.jsx("div",{className:"col-lg-6",children:e.jsxs("div",{className:"card h-100 d-flex flex-column",style:{minHeight:400},children:[e.jsxs("div",{className:"card-header d-flex align-items-center justify-content-between gap-2 flex-wrap",children:[e.jsx("span",{className:"small fw-medium text-truncate",children:l||"Select a log file"}),l?e.jsxs("div",{className:"d-flex align-items-center gap-2 flex-shrink-0",children:[e.jsx("label",{className:"small text-secondary mb-0",children:"Lines:"}),e.jsxs("select",{value:c,onChange:s=>w(Number(s.target.value)),className:"form-select form-select-sm",style:{width:"auto"},children:[e.jsx("option",{value:100,children:"100"}),e.jsx("option",{value:500,children:"500"}),e.jsx("option",{value:1e3,children:"1000"}),e.jsx("option",{value:5e3,children:"5000"}),e.jsx("option",{value:1e4,children:"10000"})]}),e.jsx(g,{variant:"light",size:"sm",onClick:S,disabled:o,title:"Refresh",children:o?e.jsx("span",{className:"spinner-border spinner-border-sm",role:"status"}):e.jsx("i",{className:"ti ti-refresh","aria-hidden":!0})})]}):null]}),e.jsx("div",{className:"card-body flex-grow-1 overflow-auto",children:l?o?e.jsx("div",{className:"text-center py-5",children:e.jsx("span",{className:"spinner-border text-secondary",role:"status"})}):e.jsx("pre",{className:"font-monospace small mb-0 
text-break",style:{whiteSpace:"pre-wrap"},children:b||"(empty)"}):e.jsx("p",{className:"text-secondary small mb-0",children:"Click a log file to view"})})]})})]})]})}export{D as LogsPage};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r,j as s,M as b,T as g,U as v}from"./index-Cvh4tLHo.js";import{A as N}from"./AdminAlert-Bt3L8_zJ.js";import{A as y}from"./AdminTable-BQ5Lf7EC.js";import{P as o}from"./PageHeader-D6k34vvM.js";function M(){const[e,d]=r.useState(null),[c,l]=r.useState([]),[t,n]=r.useState(null),[i,p]=r.useState("");return r.useEffect(()=>{const a=()=>{b("/monitor/system").then(d).catch(m=>p(m.message))},h=()=>{g(50).then(m=>l(m.processes)).catch(()=>l([]))},j=()=>{v().then(n).catch(()=>n(null))};a(),h(),j();const u=setInterval(()=>{a(),h(),j()},3e3);return()=>clearInterval(u)},[]),i&&!e?s.jsxs(s.Fragment,{children:[s.jsx(o,{title:"Monitor"}),s.jsx(N,{children:i})]}):e?s.jsxs(s.Fragment,{children:[s.jsx(o,{title:"Monitor"}),i?s.jsx(N,{className:"mb-3",children:i}):null,s.jsx("p",{className:"small text-secondary mb-3",children:"Refreshes every 3 seconds"}),s.jsxs("div",{className:"row g-3 mb-3",children:[s.jsx("div",{className:"col-md-4",children:s.jsx(x,{iconClass:"ti ti-cpu",title:"CPU",value:`${e.cpu_percent}%`,subtitle:"Usage",percent:e.cpu_percent})}),s.jsx("div",{className:"col-md-4",children:s.jsx(x,{iconClass:"ti ti-device-sd-card",title:"Memory",value:`${e.memory_used_mb} / ${e.memory_total_mb} MB`,subtitle:`${e.memory_percent}% used`,percent:e.memory_percent})}),s.jsx("div",{className:"col-md-4",children:s.jsx(x,{iconClass:"ti ti-database",title:"Disk",value:`${e.disk_used_gb} / ${e.disk_total_gb} GB`,subtitle:`${e.disk_percent}% used`,percent:e.disk_percent})})]}),t?s.jsx("div",{className:"card mb-3",children:s.jsxs("div",{className:"card-body",children:[s.jsxs("div",{className:"d-flex align-items-center gap-2 mb-3",children:[s.jsx("i",{className:"ti ti-network fs-5","aria-hidden":!0}),s.jsx("span",{className:"fw-medium",children:"Network I/O"})]}),s.jsxs("div",{className:"row g-3 small",children:[s.jsxs("div",{className:"col-6",children:[s.jsx("span",{className:"text-secondary d-block",children:"Sent"}),s.jsxs("span",{className:"font-monospace 
fw-medium",children:[t.bytes_sent_mb," MB"]})]}),s.jsxs("div",{className:"col-6",children:[s.jsx("span",{className:"text-secondary d-block",children:"Received"}),s.jsxs("span",{className:"font-monospace fw-medium",children:[t.bytes_recv_mb," MB"]})]})]})]})}):null,s.jsxs("div",{className:"card",children:[s.jsxs("div",{className:"card-header d-flex align-items-center gap-2",children:[s.jsx("i",{className:"ti ti-cpu","aria-hidden":!0}),s.jsx("span",{className:"fw-medium",children:"Top Processes (by CPU)"})]}),s.jsx("div",{className:"table-responsive",style:{maxHeight:"20rem"},children:s.jsxs(y,{responsive:!1,children:[s.jsx("thead",{className:"sticky-top bg-body-secondary",children:s.jsxs("tr",{children:[s.jsx("th",{className:"small",children:"PID"}),s.jsx("th",{className:"small",children:"Name"}),s.jsx("th",{className:"small",children:"User"}),s.jsx("th",{className:"small text-end",children:"CPU %"}),s.jsx("th",{className:"small text-end",children:"Mem %"}),s.jsx("th",{className:"small",children:"Status"})]})}),s.jsx("tbody",{children:c.length===0?s.jsx("tr",{children:s.jsx("td",{colSpan:6,className:"text-center text-secondary small py-3",children:"No process data"})}):c.map(a=>s.jsxs("tr",{className:"small",children:[s.jsx("td",{className:"font-monospace",children:a.pid}),s.jsx("td",{className:"text-truncate",style:{maxWidth:120},title:a.name,children:a.name}),s.jsx("td",{children:a.username}),s.jsxs("td",{className:"text-end font-monospace",children:[a.cpu_percent,"%"]}),s.jsxs("td",{className:"text-end font-monospace",children:[a.memory_percent,"%"]}),s.jsx("td",{className:"text-secondary",children:a.status})]},a.pid))})]})})]}),s.jsxs("div",{className:"alert alert-warning small mt-3 mb-0",children:[s.jsxs("div",{className:"d-flex align-items-center gap-2 fw-medium mb-1",children:[s.jsx("i",{className:"ti ti-activity","aria-hidden":!0}),"Live monitoring"]}),"System metrics, processes, and network stats are polled every 3 
seconds."]})]}):s.jsxs(s.Fragment,{children:[s.jsx(o,{title:"Monitor"}),s.jsx("p",{className:"text-secondary",children:"Loading…"})]})}function x({iconClass:e,title:d,value:c,subtitle:l,percent:t}){const n=t>90?"bg-danger":t>70?"bg-warning":"bg-primary";return s.jsx("div",{className:"card h-100",children:s.jsxs("div",{className:"card-body",children:[s.jsxs("div",{className:"d-flex align-items-center gap-3 mb-3",children:[s.jsx("div",{className:"p-3 rounded bg-primary-subtle text-primary",children:s.jsx("i",{className:`${e} fs-2`,"aria-hidden":!0})}),s.jsxs("div",{children:[s.jsx("p",{className:"small text-secondary mb-0",children:d}),s.jsx("p",{className:"h5 mb-0",children:c}),s.jsx("p",{className:"small text-secondary mb-0",children:l})]})]}),s.jsx("div",{className:"progress",style:{height:6},children:s.jsx("div",{className:`progress-bar ${n}`,role:"progressbar",style:{width:`${Math.min(t,100)}%`}})})]})})}export{M as MonitorPage};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{aA as c,j as e,L as m}from"./index-Cvh4tLHo.js";const d={"/":"Dashboard","/site":"Website","/ftp":"FTP","/database":"Databases","/docker":"Docker","/control":"Monitor","/firewall":"Security","/files":"Files","/node":"Node","/logs":"Logs","/ssl_domain":"Domains","/xterm":"Terminal","/crontab":"Cron","/soft":"App Store","/config":"Settings","/services":"Services","/plugins":"Plugins","/backup-plans":"Backup Plans","/users":"Users","/login":"Login","/install":"Remote install"};function b(t){return d[t]||"YakPanel"}function p({title:t,breadcrumbs:i,actions:n}){const{pathname:o}=c(),r=t??b(o),s=i??[{label:"Home",path:"/"},{label:r}];return e.jsx("div",{className:"page-header mb-4",children:e.jsxs("div",{className:"row align-items-center",children:[e.jsxs("div",{className:"col-md-6",children:[e.jsx("h3",{className:"page-title",children:r}),e.jsx("nav",{"aria-label":"breadcrumb",children:e.jsx("ol",{className:"breadcrumb mb-0",children:s.map((a,l)=>e.jsx("li",{className:`breadcrumb-item${l===s.length-1?" active":""}`,...l===s.length-1?{"aria-current":"page"}:{},children:a.path&&l<s.length-1?e.jsx(m,{to:a.path,children:a.label}):a.label},`${a.label}-${l}`))})})]}),n?e.jsx("div",{className:"col-md-6 d-flex justify-content-md-end mt-2 mt-md-0",children:n}):null]})})}export{p as P};

View File

@@ -0,0 +1 @@
import{r as a,j as e,M as R,a4 as U,a5 as w}from"./index-Cvh4tLHo.js";import{M as t}from"./Modal-CCihVZTY.js";import{A as f}from"./AdminAlert-Bt3L8_zJ.js";import{A as r}from"./AdminButton-BKglG8kI.js";import{P as p}from"./PageHeader-D6k34vvM.js";function F(){const[b,N]=a.useState([]),[v,c]=a.useState(!0),[o,m]=a.useState(""),[y,l]=a.useState(!1),[u,h]=a.useState(""),[x,j]=a.useState(!1),[g,i]=a.useState(""),d=()=>R("/plugin/list").then(s=>N(s.plugins||[])).catch(s=>m(s.message));a.useEffect(()=>{c(!0),d().finally(()=>c(!1))},[]);const A=s=>{s.preventDefault();const n=u.trim();n&&(j(!0),i(""),U(n).then(()=>{l(!1),h(""),d()}).catch(S=>i(S.message)).finally(()=>j(!1)))},P=s=>{confirm("Remove this plugin?")&&w(s).then(d).catch(n=>m(n.message))};return v?e.jsxs(e.Fragment,{children:[e.jsx(p,{title:"Plugins",actions:e.jsxs(r,{variant:"primary",disabled:!0,children:[e.jsx("i",{className:"ti ti-plus me-1","aria-hidden":!0}),"Add from URL"]})}),e.jsx("p",{className:"text-secondary",children:"Loading…"})]}):e.jsxs(e.Fragment,{children:[e.jsx(p,{title:"Plugins",actions:e.jsxs(r,{variant:"primary",onClick:()=>l(!0),children:[e.jsx("i",{className:"ti ti-plus me-1","aria-hidden":!0}),"Add from URL"]})}),o?e.jsx(f,{className:"mb-3",children:o}):null,e.jsxs("div",{className:"alert alert-secondary small mb-4",children:["Built-in extensions and third-party plugins. 
Add plugins from a JSON manifest URL (must include ",e.jsx("code",{children:"id"}),","," ",e.jsx("code",{children:"name"}),", and optionally ",e.jsx("code",{children:"version"}),", ",e.jsx("code",{children:"desc"}),")."]}),e.jsxs(t,{show:y,onHide:()=>{l(!1),i("")},centered:!0,children:[e.jsx(t.Header,{closeButton:!0,children:e.jsx(t.Title,{children:"Add Plugin from URL"})}),e.jsxs("form",{onSubmit:A,children:[e.jsxs(t.Body,{children:[g?e.jsx(f,{className:"mb-3",children:g}):null,e.jsx("label",{className:"form-label",children:"Manifest URL"}),e.jsx("input",{value:u,onChange:s=>h(s.target.value),placeholder:"https://example.com/plugin.json",className:"form-control",required:!0})]}),e.jsxs(t.Footer,{children:[e.jsx(r,{type:"button",variant:"secondary",onClick:()=>{l(!1),i("")},children:"Cancel"}),e.jsx(r,{type:"submit",variant:"primary",disabled:x,children:x?"Adding…":"Add"})]})]})]}),e.jsx("div",{className:"row g-3",children:b.map(s=>e.jsx("div",{className:"col-md-6 col-xl-4",children:e.jsx("div",{className:"card h-100",children:e.jsxs("div",{className:"card-body d-flex gap-3",children:[e.jsx("i",{className:"ti ti-puzzle text-primary fs-2 flex-shrink-0","aria-hidden":!0}),e.jsxs("div",{className:"min-w-0 flex-grow-1",children:[e.jsxs("div",{className:"d-flex flex-wrap align-items-center gap-2 mb-1",children:[e.jsx("h3",{className:"h6 mb-0",children:s.name}),s.enabled?e.jsxs("span",{className:"badge bg-success-subtle text-success small",children:[e.jsx("i",{className:"ti ti-check me-1","aria-hidden":!0}),"Enabled"]}):null,s.builtin?null:e.jsx("button",{type:"button",className:"btn btn-link btn-sm text-danger p-0 ms-auto",title:"Remove",onClick:()=>P(s.id),children:e.jsx("i",{className:"ti ti-trash","aria-hidden":!0})})]}),e.jsx("p",{className:"small text-secondary mb-1",children:s.desc}),e.jsxs("p",{className:"small text-muted mb-0",children:["v",s.version,s.builtin?" (built-in)":""]})]})]})})},s.id))})]})}export{F as PluginsPage};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r,j as e,M as l}from"./index-Cvh4tLHo.js";import{A as S}from"./AdminAlert-Bt3L8_zJ.js";import{A as g}from"./AdminButton-BKglG8kI.js";import{A as y}from"./AdminTable-BQ5Lf7EC.js";import{P as x}from"./PageHeader-D6k34vvM.js";function u({show:d}){return d?e.jsx("span",{className:"spinner-border spinner-border-sm",role:"status"}):null}function R(){const[d,j]=r.useState([]),[p,o]=r.useState(!0),[m,c]=r.useState(""),[n,s]=r.useState(null),i=()=>{o(!0),l("/service/list").then(t=>j(t.services||[])).catch(t=>c(t.message)).finally(()=>o(!1))};r.useEffect(()=>{i()},[]);const b=t=>{s(t),l(`/service/${t}/start`,{method:"POST"}).then(i).catch(a=>c(a.message)).finally(()=>s(null))},f=t=>{s(t),l(`/service/${t}/stop`,{method:"POST"}).then(i).catch(a=>c(a.message)).finally(()=>s(null))},v=t=>{s(t),l(`/service/${t}/restart`,{method:"POST"}).then(i).catch(a=>c(a.message)).finally(()=>s(null))},h=t=>t==="active"||t==="activating";return p?e.jsxs(e.Fragment,{children:[e.jsx(x,{title:"Services"}),e.jsx("p",{className:"text-secondary",children:"Loading…"})]}):e.jsxs(e.Fragment,{children:[e.jsx(x,{title:"Services",actions:e.jsxs(g,{variant:"secondary",size:"sm",onClick:i,children:[e.jsx("i",{className:"ti ti-rotate-clockwise me-1","aria-hidden":!0}),"Refresh"]})}),m?e.jsx(S,{className:"mb-3",children:m}):null,e.jsx("div",{className:"alert alert-secondary small mb-3",children:"Control system services via systemctl. 
Requires panel to run with sufficient privileges."}),e.jsx("div",{className:"card",children:e.jsxs(y,{children:[e.jsx("thead",{children:e.jsxs("tr",{children:[e.jsx("th",{children:"Service"}),e.jsx("th",{children:"Unit"}),e.jsx("th",{children:"Status"}),e.jsx("th",{className:"text-end",children:"Actions"})]})}),e.jsx("tbody",{children:d.map(t=>e.jsxs("tr",{children:[e.jsx("td",{children:t.name}),e.jsx("td",{className:"font-monospace small",children:t.unit}),e.jsx("td",{children:e.jsx("span",{className:h(t.status)?"text-success":"text-secondary",children:t.status})}),e.jsx("td",{className:"text-end",children:e.jsx("span",{className:"d-inline-flex gap-1 justify-content-end",children:h(t.status)?e.jsxs(e.Fragment,{children:[e.jsx("button",{type:"button",className:"btn btn-link btn-sm text-warning p-1",title:"Restart",disabled:!!n,onClick:()=>v(t.id),children:n===t.id?e.jsx(u,{show:!0}):e.jsx("i",{className:"ti ti-rotate-clockwise","aria-hidden":!0})}),e.jsx("button",{type:"button",className:"btn btn-link btn-sm text-danger p-1",title:"Stop",disabled:!!n,onClick:()=>f(t.id),children:e.jsx("i",{className:"ti ti-square","aria-hidden":!0})})]}):e.jsx("button",{type:"button",className:"btn btn-link btn-sm text-success p-1",title:"Start",disabled:!!n,onClick:()=>b(t.id),children:n===t.id?e.jsx(u,{show:!0}):e.jsx("i",{className:"ti ti-player-play","aria-hidden":!0})})})})]},t.id))})]})})]})}export{R as ServicesPage};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
import{r as t,j as e,M as c}from"./index-Cvh4tLHo.js";import{A as b}from"./AdminAlert-Bt3L8_zJ.js";import{A as v}from"./AdminButton-BKglG8kI.js";import{P as m}from"./PageHeader-D6k34vvM.js";function A(){const[x,u]=t.useState([]),[p,d]=t.useState(!0),[o,a]=t.useState(""),[n,l]=t.useState(null),[f,h]=t.useState(""),r=()=>{d(!0),c("/soft/list").then(s=>{u(s.software||[]),h(s.package_manager||"")}).catch(s=>a(s.message)).finally(()=>d(!1))};t.useEffect(()=>{r()},[]);const j=s=>{l(s),a(""),c(`/soft/install/${s}`,{method:"POST"}).then(()=>r()).catch(i=>a(i.message)).finally(()=>l(null))},g=(s,i)=>{confirm(`Uninstall ${i}?`)&&(l(s),a(""),c(`/soft/uninstall/${s}`,{method:"POST"}).then(()=>r()).catch(N=>a(N.message)).finally(()=>l(null)))};return p?e.jsxs(e.Fragment,{children:[e.jsx(m,{title:"App Store"}),e.jsx("div",{className:"d-flex justify-content-center py-5",children:e.jsx("div",{className:"spinner-border text-primary",role:"status",children:e.jsx("span",{className:"visually-hidden",children:"Loading…"})})})]}):e.jsxs(e.Fragment,{children:[e.jsx(m,{title:"App Store",actions:e.jsxs(v,{variant:"secondary",onClick:r,children:[e.jsx("i",{className:"ti ti-refresh me-1"}),"Refresh"]})}),o?e.jsx(b,{variant:"danger",children:o}):null,e.jsxs("div",{className:"alert alert-warning",role:"note",children:["Installs use your server package manager (",f||"unknown","). Panel must run as root (or equivalent). 
Supported: apt, dnf/yum/microdnf, apk."]}),e.jsx("div",{className:"row g-3",children:x.map(s=>e.jsx("div",{className:"col-md-6 col-xl-4 d-flex",children:e.jsx("div",{className:"card flex-fill shadow-sm",children:e.jsxs("div",{className:"card-body d-flex flex-column",children:[e.jsxs("div",{className:"d-flex align-items-start justify-content-between gap-2 mb-2",children:[e.jsxs("div",{className:"d-flex align-items-start gap-2",children:[e.jsx("span",{className:"avatar avatar-md bg-primary-transparent text-primary rounded flex-shrink-0",children:e.jsx("i",{className:"ti ti-package fs-5","aria-hidden":!0})}),e.jsxs("div",{children:[e.jsx("h5",{className:"card-title mb-1",children:s.name}),e.jsx("p",{className:"text-muted small mb-0",children:s.desc})]})]}),s.installed?e.jsxs("span",{className:"text-success small text-nowrap",children:[e.jsx("i",{className:"ti ti-circle-check me-1"}),s.version||"Installed"]}):e.jsxs("span",{className:"text-muted small text-nowrap",children:[e.jsx("i",{className:"ti ti-x me-1"}),"Not installed"]})]}),e.jsx("div",{className:"mt-auto pt-3",children:s.installed?e.jsxs("button",{type:"button",onClick:()=>g(s.id,s.name),disabled:n===s.id,className:"btn btn-outline-danger btn-sm w-100 d-inline-flex align-items-center justify-content-center gap-2",children:[n===s.id?e.jsx("span",{className:"spinner-border spinner-border-sm",role:"status"}):null,"Uninstall"]}):e.jsxs("button",{type:"button",onClick:()=>j(s.id),disabled:n===s.id,className:"btn btn-primary btn-sm w-100 d-inline-flex align-items-center justify-content-center gap-2",children:[n===s.id?e.jsx("span",{className:"spinner-border spinner-border-sm",role:"status"}):null,"Install"]})})]})})},s.id))})]})}export{A as SoftPage};

Some files were not shown because too many files have changed in this diff Show More