2025-09-04 18:49:55 +02:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
"""
|
|
|
|
Container Management API Server
|
|
|
|
Single-file Python 3.12+ application using aiohttp and docker compose v2 CLI
|
|
|
|
"""
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
import base64
|
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import traceback
|
|
|
|
import uuid
|
|
|
|
import weakref
|
|
|
|
import zipfile
|
|
|
|
from datetime import datetime
|
|
|
|
from pathlib import Path
|
|
|
|
from typing import Any, Dict, List, Optional, Tuple
|
|
|
|
|
|
|
|
import aiofiles
|
|
|
|
from aiohttp import web, WSMsgType
|
|
|
|
from dotenv import load_dotenv
|
|
|
|
from ruamel.yaml import YAML
|
|
|
|
from zoneinfo import ZoneInfo
|
|
|
|
|
|
|
|
# Global configuration
|
|
|
|
TIMEZONE = ZoneInfo("Europe/Amsterdam")
|
|
|
|
COMPOSE_FILE = "./containers.yml"
|
|
|
|
MOUNTS_DIR = "./mounts"
|
|
|
|
LOGS_DIR = "./logs"
|
|
|
|
DEFAULT_USER_UID = "1000:1000"
|
|
|
|
|
|
|
|
# Global semaphore for containers.yml writes
|
|
|
|
COMPOSE_WRITE_LOCK = asyncio.Semaphore(1)
|
|
|
|
|
|
|
|
# Error identifiers
|
|
|
|
ERROR_IDS = {
|
|
|
|
"AUTH": "auth_error",
|
|
|
|
"VALIDATION": "validation_error",
|
|
|
|
"NOT_FOUND": "not_found",
|
|
|
|
"CONFLICT": "conflict",
|
|
|
|
"COMPOSE": "compose_error",
|
|
|
|
"IO": "io_error",
|
|
|
|
"TIMEOUT": "timeout",
|
|
|
|
"STATE": "state_error"
|
|
|
|
}
|
|
|
|
|
|
|
|
# Status mapping from Docker to API
|
|
|
|
STATUS_MAP = {
|
|
|
|
"running": "running",
|
|
|
|
"paused": "paused",
|
|
|
|
"exited": "exited",
|
|
|
|
"restarting": "restarting",
|
|
|
|
"dead": "dead",
|
|
|
|
"removing": "removing",
|
|
|
|
"created": "created"
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
class Config:
    """Application configuration read from the environment (supports a .env file)."""

    def __init__(self):
        # Pull values from a .env file into the process environment first.
        load_dotenv()

        self.APP_USER = os.getenv("APP_USER")
        self.APP_PASS = os.getenv("APP_PASS")
        self.DEFAULT_USER_UID = os.getenv("DEFAULT_USER_UID", DEFAULT_USER_UID)

        # Both credentials are mandatory; refuse to start without them.
        if not (self.APP_USER and self.APP_PASS):
            raise ValueError("APP_USER and APP_PASS must be set in .env file")
|
|
|
|
|
|
|
|
|
|
|
|
class ErrorObject:
    """Factory for the standard JSON error payload used by all endpoints."""

    @staticmethod
    def create(error_id: str, code: str, message: str, status: int,
               request_id: str, details: Optional[Dict] = None,
               exception: Optional[Exception] = None) -> Dict:
        """Build an error dict; optional details/stacktrace are appended when given."""
        extras = {}
        if details:
            extras["details"] = details
        if exception:
            # format_exc() renders the traceback of the exception currently handled.
            extras["stacktrace"] = traceback.format_exc()

        return {
            "error": error_id,
            "code": code,
            "message": message,
            "status": status,
            "request_id": request_id,
            "timestamp": datetime.now(TIMEZONE).isoformat(),
            **extras,
        }
|
|
|
|
|
|
|
|
|
|
|
|
class ActionLogger:
    """Append-only JSON Lines logger recording every API action."""

    def __init__(self):
        # Ensure the log directory exists before the first write.
        os.makedirs(LOGS_DIR, exist_ok=True)
        self.log_file = Path(LOGS_DIR) / "actions.jsonl"

    async def log(self, level: str, route: str, method: str, request_id: str,
                  user: str, status: int, duration_ms: float,
                  cuid: Optional[str] = None, details: Optional[Dict] = None,
                  note: Optional[str] = None, error: Optional[Dict] = None):
        """Append one JSONL record; optional fields are omitted when falsy."""
        entry = {
            "ts": datetime.now(TIMEZONE).isoformat(),
            "level": level,
            "route": route,
            "method": method,
            "request_id": request_id,
            "user": user,
            "status": status,
            "duration_ms": duration_ms,
        }
        # Only attach the optional fields that were actually supplied.
        optional = {"cuid": cuid, "details": details, "note": note, "error": error}
        entry.update({key: value for key, value in optional.items() if value})

        async with aiofiles.open(self.log_file, "a") as f:
            await f.write(json.dumps(entry) + "\n")
|
|
|
|
|
|
|
|
|
|
|
|
class ComposeManager:
    """Manages docker compose operations.

    All persistent state lives in the compose file (COMPOSE_FILE); runtime
    state is queried and mutated exclusively through the docker compose v2 CLI.
    """

    def __init__(self, config: "Config", logger: "ActionLogger"):
        self.config = config
        self.logger = logger
        # Round-trip YAML keeps quoting and formatting stable across rewrites.
        self.yaml = YAML()
        self.yaml.preserve_quotes = True
        self.yaml.indent(mapping=2, sequence=4, offset=2)

        # Initialize compose file if it doesn't exist.
        if not Path(COMPOSE_FILE).exists():
            self._init_compose_file()

    def _init_compose_file(self):
        """Create an initial compose file with no services."""
        initial = {
            "version": "3.9",
            "services": {}
        }
        with open(COMPOSE_FILE, "w") as f:
            self.yaml.dump(initial, f)

    async def read_compose(self) -> Dict:
        """Read and parse the compose file."""
        async with aiofiles.open(COMPOSE_FILE, "r") as f:
            content = await f.read()
        return self.yaml.load(content)

    async def write_compose(self, data: Dict):
        """Write the compose file atomically (temp file + fsync + rename).

        Serialized through COMPOSE_WRITE_LOCK so concurrent requests cannot
        interleave partial writes.
        """
        async with COMPOSE_WRITE_LOCK:
            temp_file = COMPOSE_FILE + ".tmp"
            with open(temp_file, "w") as f:
                self.yaml.dump(data, f)
                f.flush()
                os.fsync(f.fileno())

            # os.replace is atomic on POSIX: readers see old or new, never a mix.
            os.replace(temp_file, COMPOSE_FILE)

    async def run_compose(self, args: List[str], timeout: int = 300,
                          stdin_data: Optional[bytes] = None) -> Dict:
        """Execute a docker compose command.

        Returns {"returncode", "stdout", "stderr"}. Raises asyncio.TimeoutError
        after killing the child when the command exceeds `timeout` seconds.
        """
        cmd = ["docker", "compose", "-f", COMPOSE_FILE] + args

        proc = None
        try:
            proc = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                stdin=asyncio.subprocess.PIPE if stdin_data else None
            )

            stdout, stderr = await asyncio.wait_for(
                proc.communicate(stdin_data),
                timeout=timeout
            )

            return {
                "returncode": proc.returncode,
                "stdout": stdout.decode("utf-8"),
                "stderr": stderr.decode("utf-8")
            }

        except asyncio.TimeoutError:
            # Don't leak the child process when the command hangs.
            if proc and proc.returncode is None:
                proc.kill()
                await proc.wait()
            raise

    def _build_service_spec(self, cuid: str, image: str, env: Dict,
                            tags: List[str], resources: Dict, ports: List[Dict]) -> Dict:
        """Build the compose service specification for one container."""
        # Merge caller environment with the identifying variables.
        service_env = env.copy()
        service_env["CONTAINER_UID"] = cuid
        if tags:
            service_env["TAGS"] = ",".join(tags)

        spec = {
            "image": image,
            "container_name": cuid,
            "environment": service_env,
            "working_dir": "/app",
            "command": ["python", "-u", "/app/boot.py"],
            "volumes": [f"./mounts/{cuid}:/app:rw"],
            "restart": "on-failure",
            # Hardening: unprivileged, fixed uid, no capabilities, no priv escalation.
            "privileged": False,
            "user": self.config.DEFAULT_USER_UID,
            "cap_drop": ["ALL"],
            "security_opt": [
                "no-new-privileges:true"
            ]
        }

        # Resource limits at service root (compose v2 honors these without swarm).
        spec["cpus"] = str(resources.get("cpus", 0.5))
        spec["mem_limit"] = resources.get("memory", "2048m")
        spec["pids_limit"] = resources.get("pids", 1024)

        # Port mappings as "host:container/protocol" strings.
        if ports:
            spec["ports"] = [
                f"{p['host']}:{p['container']}/{p.get('protocol', 'tcp')}"
                for p in ports
            ]

        return spec

    async def create_container(self, image: str, env: Dict, tags: List[str],
                               resources: Dict, ports: List[Dict]) -> str:
        """Create a new container and start it; returns the generated cuid."""
        cuid = "c" + str(uuid.uuid4())

        # Create the per-container mount directory.
        mount_dir = Path(MOUNTS_DIR) / cuid
        mount_dir.mkdir(parents=True, exist_ok=True)

        # Match the in-container uid:gid (requires the server to run privileged
        # enough for chown — TODO confirm deployment runs as root).
        os.chown(mount_dir, 1000, 1000)

        # Register the service in the compose file.
        compose_data = await self.read_compose()
        compose_data["services"][cuid] = self._build_service_spec(
            cuid, image, env, tags, resources, ports
        )
        await self.write_compose(compose_data)

        # Start the container.
        await self.run_compose(["up", "-d", cuid])

        return cuid

    async def update_container(self, cuid: str, env: Optional[Dict] = None,
                               tags: Optional[List[str]] = None,
                               resources: Optional[Dict] = None,
                               image: Optional[str] = None) -> bool:
        """Update container configuration and re-deploy.

        env is merged (None values delete keys), tags replace, resources merge,
        image replaces. Returns False when the cuid is unknown.
        """
        compose_data = await self.read_compose()

        if cuid not in compose_data["services"]:
            return False

        service = compose_data["services"][cuid]

        # Update environment (merge; None removes a key).
        if env is not None:
            current_env = service.get("environment", {})
            for key, value in env.items():
                if value is None and key in current_env:
                    del current_env[key]
                else:
                    current_env[key] = value
            service["environment"] = current_env

        # Update tags (replace).
        if tags is not None:
            service["environment"]["TAGS"] = ",".join(tags)

        # Update resources (merge with existing values as defaults).
        if resources is not None:
            service["cpus"] = str(resources.get("cpus", service.get("cpus", 0.5)))
            service["mem_limit"] = resources.get("memory", service.get("mem_limit", "2048m"))
            service["pids_limit"] = resources.get("pids", service.get("pids_limit", 1024))

        # Update image.
        if image is not None:
            service["image"] = image

        await self.write_compose(compose_data)
        # Re-deploy so the new configuration takes effect.
        await self.run_compose(["up", "-d", cuid])
        return True

    async def update_ports(self, cuid: str, ports: List[Dict]) -> bool:
        """Replace the container's port mappings and re-deploy.

        An empty list removes all published ports. Returns False when the
        cuid is unknown.
        """
        compose_data = await self.read_compose()

        if cuid not in compose_data["services"]:
            return False

        service = compose_data["services"][cuid]

        if ports:
            service["ports"] = [
                f"{p['host']}:{p['container']}/{p.get('protocol', 'tcp')}"
                for p in ports
            ]
        else:
            service.pop("ports", None)

        await self.write_compose(compose_data)
        await self.run_compose(["up", "-d", cuid])
        return True

    async def delete_container(self, cuid: str) -> bool:
        """Stop and remove a container, its service entry, and its mount dir."""
        compose_data = await self.read_compose()

        if cuid not in compose_data["services"]:
            return False

        # Stop and remove the running container.
        await self.run_compose(["stop", cuid])
        await self.run_compose(["rm", "-f", cuid])

        # Remove the service from the compose file.
        del compose_data["services"][cuid]
        await self.write_compose(compose_data)

        # Delete the mount directory and everything in it.
        mount_dir = Path(MOUNTS_DIR) / cuid
        if mount_dir.exists():
            shutil.rmtree(mount_dir)

        return True

    @staticmethod
    def _parse_ps_output(stdout: str) -> Optional[Dict]:
        """Parse `docker compose ps --format json` output into one record.

        Compose v2 has emitted a single object, a JSON array, or (in newer
        releases) one JSON object per line — handle all three shapes.
        """
        try:
            data = json.loads(stdout)
        except json.JSONDecodeError:
            # JSON-lines: return the first parseable line.
            for line in stdout.splitlines():
                line = line.strip()
                if not line:
                    continue
                try:
                    parsed = json.loads(line)
                except json.JSONDecodeError:
                    continue
                if isinstance(parsed, dict):
                    return parsed
            return None
        if isinstance(data, list):
            return data[0] if data else None
        return data if isinstance(data, dict) else None

    async def get_container_info(self, cuid: str) -> Optional[Dict]:
        """Return merged static (compose) and runtime (ps) info for a container.

        Returns None when the cuid is not a known service. Runtime fields
        (status/created_at/started_at) are omitted when `ps` yields nothing.
        """
        compose_data = await self.read_compose()

        if cuid not in compose_data["services"]:
            return None

        service = compose_data["services"][cuid]

        # Get runtime status.
        result = await self.run_compose(["ps", "--format", "json", cuid])
        status_info = {}
        if result["returncode"] == 0 and result["stdout"]:
            container = self._parse_ps_output(result["stdout"])
            if container:
                status_info = {
                    "status": STATUS_MAP.get(container.get("State", "").lower(), "unknown"),
                    "created_at": container.get("CreatedAt", ""),
                    "started_at": container.get("StartedAt", "")
                }

        # Parse "host:container/proto" strings back into port objects.
        ports = []
        for port_str in service.get("ports", []):
            parts = port_str.split(":")
            proto_split = parts[-1].split("/")
            try:
                ports.append({
                    "host": int(parts[0]),
                    "container": int(proto_split[0]),
                    "protocol": proto_split[1] if len(proto_split) > 1 else "tcp"
                })
            except (ValueError, IndexError):
                # Skip malformed entries rather than failing the whole lookup.
                continue

        # Extract tags from the synthetic TAGS env variable.
        tags = []
        if "TAGS" in service.get("environment", {}):
            tags = service["environment"]["TAGS"].split(",")

        return {
            "cuid": cuid,
            "image": service.get("image", ""),
            # Hide the synthetic variables injected by _build_service_spec.
            "env": {k: v for k, v in service.get("environment", {}).items()
                    if k not in ["CONTAINER_UID", "TAGS"]},
            "tags": tags,
            "resources": {
                "cpus": float(service.get("cpus", 0.5)),
                "memory": service.get("mem_limit", "2048m"),
                "pids": service.get("pids_limit", 1024)
            },
            "ports": ports,
            **status_info
        }

    async def list_containers(self, status_filter: Optional[List[str]] = None,
                              cursor: Optional[str] = None,
                              limit: int = 20) -> Tuple[List[Dict], Optional[str]]:
        """List containers with optional status filtering and offset pagination.

        Returns (containers, next_cursor); next_cursor is None on the last page.
        """
        compose_data = await self.read_compose()
        all_cuids = list(compose_data["services"].keys())

        # Cursor is a simple integer offset into the service list.
        start_idx = 0
        if cursor:
            try:
                start_idx = int(cursor)
            except ValueError:
                start_idx = 0

        end_idx = start_idx + limit
        cuids_slice = all_cuids[start_idx:end_idx]

        containers = []
        for cuid in cuids_slice:
            info = await self.get_container_info(cuid)
            if info:
                # Filter is applied after slicing, so a page can come back short.
                if status_filter and info.get("status") not in status_filter:
                    continue
                containers.append(info)

        next_cursor = None
        if end_idx < len(all_cuids):
            next_cursor = str(end_idx)

        return containers, next_cursor
|
|
|
|
|
|
|
|
|
|
|
|
class WebSocketSession:
    """Bridges a WebSocket client to a `docker compose exec` process.

    Streams the process stdout/stderr to the socket as JSON messages and
    forwards client stdin, resize and signal requests to the process.
    """

    def __init__(self, cuid: str, ws: "web.WebSocketResponse",
                 compose_manager: "ComposeManager"):
        self.cuid = cuid
        self.ws = ws
        self.compose_manager = compose_manager
        self.proc = None    # asyncio subprocess for the exec session
        self.tasks = []     # background reader tasks (stdout/stderr pumps)

    async def start(self, cols: Optional[int] = None, rows: Optional[int] = None):
        """Start the exec session.

        Returns False (after sending an error frame) when /app/boot.py is
        missing from the container mount, True once the session is running.
        """
        # boot.py is the fixed entry point; refuse to start without it.
        boot_path = Path(MOUNTS_DIR) / self.cuid / "boot.py"
        if not boot_path.exists():
            error = ErrorObject.create(
                ERROR_IDS["VALIDATION"],
                "BOOT_MISSING",
                "/app/boot.py not found in container mount",
                422,
                str(uuid.uuid4()),
                {"cuid": self.cuid}
            )
            await self.ws.send_str(json.dumps({
                "type": "error",
                "error": error
            }))
            return False

        # Ensure the container is running before exec'ing into it.
        result = await self.compose_manager.run_compose(["ps", "-q", self.cuid])
        if not result["stdout"].strip():
            await self.compose_manager.run_compose(["up", "-d", self.cuid])

        # Advertise the client's terminal size via COLUMNS/LINES.
        env = os.environ.copy()
        if cols:
            env["COLUMNS"] = str(cols)
        if rows:
            env["LINES"] = str(rows)

        cmd = [
            "docker", "compose", "-f", COMPOSE_FILE,
            "exec", "-i", "-T", self.cuid,
            "sh", "-lc", "exec python -u /app/boot.py"
        ]

        self.proc = await asyncio.create_subprocess_exec(
            *cmd,
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            env=env
        )

        # Pump both output streams to the WebSocket in the background.
        self.tasks.append(asyncio.create_task(self._read_stdout()))
        self.tasks.append(asyncio.create_task(self._read_stderr()))

        return True

    async def _pump_stream(self, stream, msg_type: str):
        """Relay one process output stream to the WebSocket until EOF or exit."""
        while self.proc and self.proc.returncode is None:
            try:
                data = await stream.read(1024)
                if not data:
                    break

                # Prefer UTF-8 text; fall back to base64 for binary chunks.
                try:
                    payload = {
                        "type": msg_type,
                        "data": data.decode("utf-8"),
                        "encoding": "utf8"
                    }
                except UnicodeDecodeError:
                    payload = {
                        "type": msg_type,
                        "data": base64.b64encode(data).decode("ascii"),
                        "encoding": "base64"
                    }
                await self.ws.send_str(json.dumps(payload))
            except Exception:
                # Socket or stream failure: stop pumping, close() handles cleanup.
                break

    async def _read_stdout(self):
        """Read stdout and send to WebSocket."""
        await self._pump_stream(self.proc.stdout, "stdout")

    async def _read_stderr(self):
        """Read stderr and send to WebSocket."""
        await self._pump_stream(self.proc.stderr, "stderr")

    async def write_stdin(self, data: str, encoding: str = "utf8"):
        """Write client data to the process stdin ("utf8" or "base64")."""
        if not self.proc or self.proc.returncode is not None:
            return

        if encoding == "base64":
            data_bytes = base64.b64decode(data)
        else:
            data_bytes = data.encode("utf-8")

        try:
            self.proc.stdin.write(data_bytes)
            await self.proc.stdin.drain()
        except Exception:
            # Best effort: process may have exited between the check and the write.
            pass

    async def resize(self, cols: int, rows: int):
        """Send an ANSI resize sequence to the session.

        NOTE(review): this writes ESC[8;rows;cols t to stdin; it only works if
        the program in the container interprets it — confirm with boot.py.
        """
        if not self.proc or self.proc.returncode is not None:
            return

        resize_seq = f"\x1b[8;{rows};{cols}t"
        try:
            self.proc.stdin.write(resize_seq.encode())
            await self.proc.stdin.drain()
        except Exception:
            pass

    async def signal(self, sig_name: str):
        """Handle a client signal: INT writes Ctrl-C; TERM/KILL kill the container."""
        if sig_name == "INT":
            # Send Ctrl-C to the process' stdin.
            if self.proc and self.proc.returncode is None:
                try:
                    self.proc.stdin.write(b"\x03")
                    await self.proc.stdin.drain()
                except Exception:
                    pass
        elif sig_name in ["TERM", "KILL"]:
            # Forward the signal to the whole container, then tear down.
            await self.compose_manager.run_compose(["kill", "-s", sig_name, self.cuid])
            await self.close()

    async def close(self):
        """Clean up the session: cancel pumps, kill the process, report exit."""
        for task in self.tasks:
            if not task.done():
                task.cancel()

        if self.proc and self.proc.returncode is None:
            self.proc.kill()
            await self.proc.wait()

        # Tell the client the final exit code.
        if self.proc:
            await self.ws.send_str(json.dumps({
                "type": "exit",
                "code": self.proc.returncode or 0
            }))
|
|
|
|
|
|
|
|
|
|
|
|
class Application:
|
|
|
|
"""Main application class"""
|
|
|
|
|
|
|
|
    def __init__(self):
        """Wire up configuration, logging, compose management and the web app."""
        # Raises ValueError at startup when APP_USER/APP_PASS are unset.
        self.config = Config()
        self.logger = ActionLogger()
        self.compose_manager = ComposeManager(self.config, self.logger)
        self.app = web.Application()
        # Weak references so finished WebSocket sessions can be garbage collected.
        self.ws_sessions = weakref.WeakSet()

        # Setup routes
        self._setup_routes()

        # Setup middleware
        self._setup_middleware()
|
|
|
|
|
|
|
|
def _setup_routes(self):
|
|
|
|
"""Setup HTTP routes"""
|
|
|
|
self.app.router.add_routes([
|
|
|
|
# Health check
|
|
|
|
web.get("/healthz", self.healthz),
|
|
|
|
|
|
|
|
# Container management
|
|
|
|
web.post("/containers", self.create_container),
|
|
|
|
web.get("/containers", self.list_containers),
|
|
|
|
web.get("/containers/{cuid}", self.get_container),
|
|
|
|
web.patch("/containers/{cuid}", self.update_container),
|
|
|
|
web.delete("/containers/{cuid}", self.delete_container),
|
|
|
|
|
|
|
|
# Lifecycle
|
|
|
|
web.post("/containers/{cuid}/start", self.start_container),
|
|
|
|
web.post("/containers/{cuid}/stop", self.stop_container),
|
|
|
|
web.post("/containers/{cuid}/pause", self.pause_container),
|
|
|
|
web.post("/containers/{cuid}/unpause", self.unpause_container),
|
|
|
|
web.post("/containers/{cuid}/restart", self.restart_container),
|
|
|
|
|
|
|
|
# Ports
|
|
|
|
web.patch("/containers/{cuid}/ports", self.update_ports),
|
|
|
|
|
|
|
|
# Files
|
|
|
|
web.post("/containers/{cuid}/upload-zip", self.upload_zip),
|
|
|
|
web.get("/containers/{cuid}/download", self.download_file),
|
|
|
|
web.get("/containers/{cuid}/download-zip", self.download_zip),
|
|
|
|
|
|
|
|
# Status
|
|
|
|
web.get("/containers/{cuid}/status", self.get_status),
|
|
|
|
|
|
|
|
# WebSocket
|
|
|
|
web.get("/ws/{cuid}", self.websocket_handler)
|
|
|
|
])
|
|
|
|
|
|
|
|
def _setup_middleware(self):
|
|
|
|
"""Setup middleware"""
|
|
|
|
|
|
|
|
@web.middleware
|
|
|
|
async def auth_middleware(request, handler):
|
|
|
|
# Skip auth for healthz
|
|
|
|
if request.path == "/healthz":
|
|
|
|
return await handler(request)
|
|
|
|
|
|
|
|
# Check Basic Auth
|
|
|
|
auth_header = request.headers.get("Authorization", "")
|
|
|
|
if not auth_header.startswith("Basic "):
|
|
|
|
return web.Response(
|
|
|
|
text=json.dumps(ErrorObject.create(
|
|
|
|
ERROR_IDS["AUTH"],
|
|
|
|
"MISSING_AUTH",
|
|
|
|
"Authentication required",
|
|
|
|
401,
|
|
|
|
str(uuid.uuid4())
|
|
|
|
)),
|
|
|
|
status=401,
|
|
|
|
headers={"WWW-Authenticate": 'Basic realm="app"'},
|
|
|
|
content_type="application/json"
|
|
|
|
)
|
|
|
|
|
|
|
|
try:
|
|
|
|
encoded = auth_header[6:]
|
|
|
|
decoded = base64.b64decode(encoded).decode("utf-8")
|
|
|
|
username, password = decoded.split(":", 1)
|
2025-09-05 05:10:16 +02:00
|
|
|
print("ZZZZZZZZZ", username,password)
|
2025-09-04 18:49:55 +02:00
|
|
|
if username != self.config.APP_USER or password != self.config.APP_PASS:
|
|
|
|
raise ValueError("Invalid credentials")
|
|
|
|
|
|
|
|
request["user"] = username
|
|
|
|
request["request_id"] = str(uuid.uuid4())
|
2025-09-05 05:10:16 +02:00
|
|
|
print("GING GOED!")
|
2025-09-04 18:49:55 +02:00
|
|
|
# Process request
|
|
|
|
start_time = datetime.now()
|
|
|
|
response = await handler(request)
|
|
|
|
duration_ms = (datetime.now() - start_time).total_seconds() * 1000
|
|
|
|
|
|
|
|
# Log action
|
|
|
|
await self.logger.log(
|
|
|
|
"INFO",
|
|
|
|
request.path,
|
|
|
|
request.method,
|
|
|
|
request["request_id"],
|
|
|
|
username,
|
|
|
|
response.status,
|
|
|
|
duration_ms
|
|
|
|
)
|
|
|
|
|
|
|
|
return response
|
|
|
|
|
|
|
|
except Exception as e:
|
2025-09-05 05:10:16 +02:00
|
|
|
help(e)
|
|
|
|
print("XXXXXXXXXXX",e,flush=True)
|
2025-09-04 18:49:55 +02:00
|
|
|
return web.Response(
|
|
|
|
text=json.dumps(ErrorObject.create(
|
|
|
|
ERROR_IDS["AUTH"],
|
2025-09-05 05:10:16 +02:00
|
|
|
"INVALID_AUTG",
|
2025-09-04 18:49:55 +02:00
|
|
|
"Invalid credentials",
|
|
|
|
401,
|
|
|
|
str(uuid.uuid4())
|
|
|
|
)),
|
|
|
|
status=401,
|
|
|
|
headers={"WWW-Authenticate": 'Basic realm="app"'},
|
|
|
|
content_type="application/json"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.app.middlewares.append(auth_middleware)
|
|
|
|
|
|
|
|
async def healthz(self, request):
|
|
|
|
"""Health check endpoint"""
|
|
|
|
# Check compose availability
|
|
|
|
result = await self.compose_manager.run_compose(["version"], timeout=5)
|
|
|
|
|
|
|
|
if result["returncode"] != 0:
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["COMPOSE"],
|
|
|
|
"COMPOSE_UNAVAILABLE",
|
|
|
|
"Docker Compose not available",
|
|
|
|
500,
|
|
|
|
str(uuid.uuid4())
|
|
|
|
),
|
|
|
|
status=500
|
|
|
|
)
|
|
|
|
|
|
|
|
# Parse version
|
|
|
|
version = "unknown"
|
|
|
|
if "version" in result["stdout"].lower():
|
|
|
|
lines = result["stdout"].strip().split("\n")
|
|
|
|
for line in lines:
|
|
|
|
if "version" in line.lower():
|
|
|
|
version = line.split()[-1]
|
|
|
|
break
|
|
|
|
|
|
|
|
return web.json_response({
|
|
|
|
"status": "ok",
|
|
|
|
"compose_version": version,
|
|
|
|
"uptime_s": 0 # Would need to track app start time
|
|
|
|
})
|
|
|
|
|
|
|
|
async def create_container(self, request):
|
|
|
|
"""Create new container"""
|
|
|
|
try:
|
|
|
|
data = await request.json()
|
|
|
|
|
|
|
|
# Validate required fields
|
|
|
|
if "image" not in data:
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["VALIDATION"],
|
|
|
|
"MISSING_IMAGE",
|
|
|
|
"Image is required",
|
|
|
|
400,
|
|
|
|
request["request_id"]
|
|
|
|
),
|
|
|
|
status=400
|
|
|
|
)
|
|
|
|
|
|
|
|
# Create container
|
|
|
|
cuid = await self.compose_manager.create_container(
|
|
|
|
image=data["image"],
|
|
|
|
env=data.get("env", {}),
|
|
|
|
tags=data.get("tags", []),
|
|
|
|
resources=data.get("resources", {}),
|
|
|
|
ports=data.get("ports", [])
|
|
|
|
)
|
|
|
|
|
|
|
|
# Get container info
|
|
|
|
info = await self.compose_manager.get_container_info(cuid)
|
|
|
|
|
|
|
|
return web.json_response(info, status=201)
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
await self.logger.log(
|
|
|
|
"ERROR",
|
|
|
|
request.path,
|
|
|
|
request.method,
|
|
|
|
request["request_id"],
|
|
|
|
request["user"],
|
|
|
|
500,
|
|
|
|
0,
|
|
|
|
error=ErrorObject.create(
|
|
|
|
ERROR_IDS["COMPOSE"],
|
|
|
|
"CREATE_FAILED",
|
|
|
|
str(e),
|
|
|
|
500,
|
|
|
|
request["request_id"],
|
|
|
|
exception=e
|
|
|
|
)
|
|
|
|
)
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["COMPOSE"],
|
|
|
|
"CREATE_FAILED",
|
|
|
|
str(e),
|
|
|
|
500,
|
|
|
|
request["request_id"]
|
|
|
|
),
|
|
|
|
status=500
|
|
|
|
)
|
|
|
|
|
|
|
|
async def list_containers(self, request):
|
|
|
|
"""List containers"""
|
|
|
|
# Parse query params
|
|
|
|
status_filter = None
|
|
|
|
if "status" in request.query:
|
|
|
|
status_filter = request.query["status"].split(",")
|
|
|
|
|
|
|
|
cursor = request.query.get("cursor")
|
|
|
|
limit = int(request.query.get("limit", 20))
|
|
|
|
|
|
|
|
# Get containers
|
|
|
|
containers, next_cursor = await self.compose_manager.list_containers(
|
|
|
|
status_filter, cursor, limit
|
|
|
|
)
|
|
|
|
|
|
|
|
response = {"containers": containers}
|
|
|
|
if next_cursor:
|
|
|
|
response["next_cursor"] = next_cursor
|
|
|
|
|
|
|
|
return web.json_response(response)
|
|
|
|
|
|
|
|
async def get_container(self, request):
|
|
|
|
"""Get container details"""
|
|
|
|
cuid = request.match_info["cuid"]
|
|
|
|
|
|
|
|
info = await self.compose_manager.get_container_info(cuid)
|
|
|
|
if not info:
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["NOT_FOUND"],
|
|
|
|
"CONTAINER_NOT_FOUND",
|
|
|
|
f"Container {cuid} not found",
|
|
|
|
404,
|
|
|
|
request["request_id"]
|
|
|
|
),
|
|
|
|
status=404
|
|
|
|
)
|
|
|
|
|
|
|
|
return web.json_response(info)
|
|
|
|
|
|
|
|
async def update_container(self, request):
|
|
|
|
"""Update container configuration"""
|
|
|
|
cuid = request.match_info["cuid"]
|
|
|
|
|
|
|
|
try:
|
|
|
|
data = await request.json()
|
|
|
|
|
|
|
|
success = await self.compose_manager.update_container(
|
|
|
|
cuid,
|
|
|
|
env=data.get("env"),
|
|
|
|
tags=data.get("tags"),
|
|
|
|
resources=data.get("resources"),
|
|
|
|
image=data.get("image")
|
|
|
|
)
|
|
|
|
|
|
|
|
if not success:
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["NOT_FOUND"],
|
|
|
|
"CONTAINER_NOT_FOUND",
|
|
|
|
f"Container {cuid} not found",
|
|
|
|
404,
|
|
|
|
request["request_id"]
|
|
|
|
),
|
|
|
|
status=404
|
|
|
|
)
|
|
|
|
|
|
|
|
# Get updated info
|
|
|
|
info = await self.compose_manager.get_container_info(cuid)
|
|
|
|
return web.json_response(info)
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
return web.json_response(
|
|
|
|
ErrorObject.create(
|
|
|
|
ERROR_IDS["COMPOSE"],
|
|
|
|
"UPDATE_FAILED",
|
|
|
|
str(e),
|
|
|
|
500,
|
|
|
|
request["request_id"]
|
|
|
|
),
|
|
|
|
status=500
|
|
|
|
)
|
|
|
|
|
|
|
|
async def delete_container(self, request):
    """Delete a container: 204 No Content on success, 404 when unknown."""
    cuid = request.match_info["cuid"]

    removed = await self.compose_manager.delete_container(cuid)
    if removed:
        return web.Response(status=204)

    return web.json_response(
        ErrorObject.create(
            ERROR_IDS["NOT_FOUND"],
            "CONTAINER_NOT_FOUND",
            f"Container {cuid} not found",
            404,
            request["request_id"],
        ),
        status=404,
    )
async def start_container(self, request):
    """Start the container named by the `cuid` path parameter."""
    target = request.match_info["cuid"]
    # NOTE(review): the compose exit code is not checked here; failures are
    # silently reported as "started" — confirm whether that is intended.
    await self.compose_manager.run_compose(["start", target])
    return web.json_response({"status": "started"})
async def stop_container(self, request):
    """Stop the container named by the `cuid` path parameter."""
    target = request.match_info["cuid"]
    # NOTE(review): compose exit code is not inspected — confirm best-effort
    # semantics are intended.
    await self.compose_manager.run_compose(["stop", target])
    return web.json_response({"status": "stopped"})
async def pause_container(self, request):
    """Pause the container named by the `cuid` path parameter."""
    target = request.match_info["cuid"]
    # NOTE(review): compose exit code is not inspected — confirm best-effort
    # semantics are intended.
    await self.compose_manager.run_compose(["pause", target])
    return web.json_response({"status": "paused"})
async def unpause_container(self, request):
    """Unpause the container named by the `cuid` path parameter."""
    target = request.match_info["cuid"]
    # NOTE(review): compose exit code is not inspected — confirm best-effort
    # semantics are intended.
    await self.compose_manager.run_compose(["unpause", target])
    return web.json_response({"status": "unpaused"})
async def restart_container(self, request):
    """Restart the container named by the `cuid` path parameter."""
    target = request.match_info["cuid"]
    # NOTE(review): compose exit code is not inspected — confirm best-effort
    # semantics are intended.
    await self.compose_manager.run_compose(["restart", target])
    return web.json_response({"status": "restarted"})
async def update_ports(self, request):
    """Replace a container's port mappings from the JSON body's `ports` key.

    Responses: 200 on success, 400 when `ports` is absent, 404 for an
    unknown container, 500 on compose/parsing failure.
    """
    cuid = request.match_info["cuid"]

    try:
        payload = await request.json()

        # Guard: the request body must carry a "ports" field.
        if "ports" not in payload:
            return web.json_response(
                ErrorObject.create(
                    ERROR_IDS["VALIDATION"],
                    "MISSING_PORTS",
                    "Ports field is required",
                    400,
                    request["request_id"],
                ),
                status=400,
            )

        applied = await self.compose_manager.update_ports(cuid, payload["ports"])
        if applied:
            return web.json_response({"status": "updated"})

        return web.json_response(
            ErrorObject.create(
                ERROR_IDS["NOT_FOUND"],
                "CONTAINER_NOT_FOUND",
                f"Container {cuid} not found",
                404,
                request["request_id"],
            ),
            status=404,
        )

    except Exception as exc:
        # Covers malformed JSON bodies as well as compose-level failures.
        return web.json_response(
            ErrorObject.create(
                ERROR_IDS["COMPOSE"],
                "UPDATE_FAILED",
                str(exc),
                500,
                request["request_id"],
            ),
            status=500,
        )
async def upload_zip(self, request):
    """Upload a ZIP file and extract it into the container's mount directory.

    The request body is the raw ZIP archive. Extraction is protected against
    zip-slip: absolute members, members containing "..", and members that
    resolve outside the mount directory are silently skipped.

    Responses: 200 on success, 404 when the mount directory does not exist,
    500 on any I/O or archive error.
    """
    # BUG FIX: `io` was never imported at module level, so io.BytesIO below
    # raised NameError whenever this file was imported as a module (the only
    # module-scope `import io` lives inside the __main__ guard). Import it
    # locally so the handler is self-sufficient.
    import io

    cuid = request.match_info["cuid"]
    mount_dir = Path(MOUNTS_DIR) / cuid

    if not mount_dir.exists():
        return web.json_response(
            ErrorObject.create(
                ERROR_IDS["NOT_FOUND"],
                "MOUNT_NOT_FOUND",
                f"Mount directory for {cuid} not found",
                404,
                request["request_id"]
            ),
            status=404
        )

    try:
        # Read the whole body; the archive is processed in memory.
        body = await request.read()

        # Extract with zip-slip protection.
        with zipfile.ZipFile(io.BytesIO(body), "r") as zf:
            for member in zf.namelist():
                # Reject absolute paths and parent-directory traversal.
                member_path = Path(member)
                if member_path.is_absolute() or ".." in member_path.parts:
                    continue  # Skip dangerous paths

                target_path = mount_dir / member_path

                # Belt-and-braces: the resolved target must stay inside the mount.
                if not target_path.resolve().is_relative_to(mount_dir.resolve()):
                    continue  # Skip escaping paths

                # Directories end with "/" in ZIP member names.
                if member.endswith("/"):
                    target_path.mkdir(parents=True, exist_ok=True)
                else:
                    target_path.parent.mkdir(parents=True, exist_ok=True)
                    with open(target_path, "wb") as f:
                        f.write(zf.read(member))

        return web.json_response({"status": "uploaded"})

    except Exception as e:
        # Bad archives (zipfile.BadZipFile) and filesystem errors end up here.
        return web.json_response(
            ErrorObject.create(
                ERROR_IDS["IO"],
                "UPLOAD_FAILED",
                str(e),
                500,
                request["request_id"]
            ),
            status=500
        )
async def download_file(self, request):
    """Stream a single file out of a container's mount directory.

    Query params:
        path: file path relative to the mount root (required).

    Responses: streamed octet-stream on success, 400 for a missing or
    escaping path, 404 when the file does not exist.
    """
    cuid = request.match_info["cuid"]
    raw_path = request.query.get("path", "")

    # Guard: a path is mandatory.
    if not raw_path:
        error = ErrorObject.create(
            ERROR_IDS["VALIDATION"],
            "MISSING_PATH",
            "Path parameter is required",
            400,
            request["request_id"],
        )
        return web.json_response(error, status=400)

    # Normalize away any leading slash so the join stays relative.
    raw_path = raw_path.lstrip("/")

    mount_dir = Path(MOUNTS_DIR) / cuid
    target_path = (mount_dir / raw_path).resolve()

    # Reject paths that resolve outside the mount directory (path traversal).
    if not target_path.is_relative_to(mount_dir.resolve()):
        error = ErrorObject.create(
            ERROR_IDS["VALIDATION"],
            "INVALID_PATH",
            "Path escapes mount directory",
            400,
            request["request_id"],
        )
        return web.json_response(error, status=400)

    if not (target_path.exists() and target_path.is_file()):
        error = ErrorObject.create(
            ERROR_IDS["NOT_FOUND"],
            "FILE_NOT_FOUND",
            f"File not found: {raw_path}",
            404,
            request["request_id"],
        )
        return web.json_response(error, status=404)

    # Stream the file in chunks so large files never sit fully in memory.
    response = web.StreamResponse()
    response.headers["Content-Type"] = "application/octet-stream"
    response.headers["Content-Disposition"] = f'attachment; filename="{target_path.name}"'
    await response.prepare(request)

    async with aiofiles.open(target_path, "rb") as src:
        while chunk := await src.read(8192):
            await response.write(chunk)

    await response.write_eof()
    return response
async def download_zip(self, request):
    """Download a directory from a container's mount as a ZIP archive.

    Query params:
        path: directory relative to the mount root; defaults to the mount
              root itself when empty.

    Responses: streamed ZIP on success, 400 for an escaping path, 404 when
    the directory does not exist.
    """
    import io

    cuid = request.match_info["cuid"]
    raw_path = request.query.get("path", "").lstrip("/")

    mount_dir = Path(MOUNTS_DIR) / cuid
    target_dir = (mount_dir / raw_path).resolve() if raw_path else mount_dir.resolve()

    # Reject paths that resolve outside the mount directory (path traversal).
    if not target_dir.is_relative_to(mount_dir.resolve()):
        error = ErrorObject.create(
            ERROR_IDS["VALIDATION"],
            "INVALID_PATH",
            "Path escapes mount directory",
            400,
            request["request_id"],
        )
        return web.json_response(error, status=400)

    if not (target_dir.exists() and target_dir.is_dir()):
        error = ErrorObject.create(
            ERROR_IDS["NOT_FOUND"],
            "DIR_NOT_FOUND",
            f"Directory not found: {raw_path}",
            404,
            request["request_id"],
        )
        return web.json_response(error, status=404)

    # Build the archive fully in memory.
    # NOTE(review): for very large mounts this buffers the whole ZIP in RAM;
    # confirm mounts stay small enough for that to be acceptable.
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as archive:
        for entry in target_dir.rglob("*"):
            if entry.is_file():
                archive.write(entry, entry.relative_to(target_dir))

    # Stream the finished archive to the client.
    response = web.StreamResponse()
    response.headers["Content-Type"] = "application/zip"
    response.headers["Content-Disposition"] = f'attachment; filename="{cuid}.zip"'
    await response.prepare(request)

    zip_buffer.seek(0)
    await response.write(zip_buffer.read())
    await response.write_eof()

    return response
async def get_status(self, request):
    """Return a status summary for one container.

    Combines cached container info with live `docker compose ps` output.
    Responses: 200 with the summary, 404 when the container is unknown.
    """
    cuid = request.match_info["cuid"]

    info = await self.compose_manager.get_container_info(cuid)
    if not info:
        return web.json_response(
            ErrorObject.create(
                ERROR_IDS["NOT_FOUND"],
                "CONTAINER_NOT_FOUND",
                f"Container {cuid} not found",
                404,
                request["request_id"]
            ),
            status=404
        )

    # Ask compose for live status details.
    result = await self.compose_manager.run_compose(["ps", "--format", "json", cuid])

    status_data = {
        "cuid": cuid,
        "status": info.get("status", "unknown"),
        "created_at": info.get("created_at", ""),
        "uptime": "",
        "restarts": 0,
        "last_error": None
    }

    if result["returncode"] == 0 and result["stdout"]:
        try:
            ps_data = json.loads(result["stdout"])
            # BUG FIX: `ps --format json` may yield a list of entries; the
            # previous code called .get() on the list itself, raising
            # AttributeError. Use the first entry when given a list.
            if isinstance(ps_data, list):
                ps_data = ps_data[0] if ps_data else None
            if ps_data:
                status_data["uptime"] = ps_data.get("RunningFor", "")
            # Restart count would require `docker inspect`; left at 0.
        except json.JSONDecodeError:
            # Unparseable compose output: fall back to the cached info only.
            pass

    return web.json_response(status_data)
async def websocket_handler(self, request):
    """WebSocket handler for interactive container I/O.

    Performs HTTP Basic authentication, upgrades to a WebSocket, starts an
    exec session for the container, then relays JSON messages of type
    "stdin", "resize", "signal" and "close" until the client disconnects.
    """
    cuid = request.match_info["cuid"]

    # Check Basic auth explicitly (the upgrade request carries the header).
    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Basic "):
        return web.Response(status=401)

    try:
        encoded = auth_header[6:]
        decoded = base64.b64decode(encoded).decode("utf-8")
        username, password = decoded.split(":", 1)

        if username != self.config.APP_USER or password != self.config.APP_PASS:
            return web.Response(status=401)
    except Exception:
        # Malformed base64 / missing ":" separator — treat as unauthorized.
        return web.Response(status=401)

    # Setup WebSocket
    ws = web.WebSocketResponse()
    await ws.prepare(request)

    # Get initial TTY size from query params.
    # BUG FIX: aiohttp's MultiDict.get() has no `type=` keyword (that is the
    # werkzeug/Flask API) and raised TypeError here; parse the integers
    # explicitly and fall back to None when absent or invalid.
    def _parse_int(value):
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    cols = _parse_int(request.query.get("cols"))
    rows = _parse_int(request.query.get("rows"))

    # Create session and register it for lifecycle tracking.
    session = WebSocketSession(cuid, ws, self.compose_manager)
    self.ws_sessions.add(session)

    # Start exec session; bail out if the container cannot be attached.
    if not await session.start(cols, rows):
        await ws.close()
        return ws

    try:
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                try:
                    data = json.loads(msg.data)
                    msg_type = data.get("type")

                    if msg_type == "stdin":
                        await session.write_stdin(
                            data.get("data", ""),
                            data.get("encoding", "utf8")
                        )
                    elif msg_type == "resize":
                        await session.resize(
                            data.get("cols", 80),
                            data.get("rows", 24)
                        )
                    elif msg_type == "signal":
                        await session.signal(data.get("name", ""))
                    elif msg_type == "close":
                        break

                except json.JSONDecodeError:
                    await ws.send_str(json.dumps({
                        "type": "error",
                        "error": {"message": "Invalid JSON"}
                    }))

            elif msg.type == WSMsgType.ERROR:
                break

    finally:
        # Always tear down the exec session, even on abrupt disconnects.
        await session.close()

    return ws
async def run(self):
    """Bootstrap the server and serve HTTP on 0.0.0.0:8080 until stopped.

    Raises:
        RuntimeError: when the `docker compose` CLI is not available.
    """
    # Fail fast if the compose CLI is unusable.
    version_check = await self.compose_manager.run_compose(["version"], timeout=5)
    if version_check["returncode"] != 0:
        raise RuntimeError("Docker Compose not available")

    # Ensure the working directories exist before serving requests.
    for directory in (MOUNTS_DIR, LOGS_DIR):
        os.makedirs(directory, exist_ok=True)

    # Stand up the aiohttp runner and TCP listener.
    runner = web.AppRunner(self.app)
    await runner.setup()
    site = web.TCPSite(runner, "0.0.0.0", 8080)

    print("Container Manager API starting on http://0.0.0.0:8080")
    await site.start()

    try:
        # Park forever; cancellation or Ctrl-C ends the server.
        await asyncio.Event().wait()
    except KeyboardInterrupt:
        pass
    finally:
        await runner.cleanup()
async def main():
    """Async entry point: construct the application and run it."""
    await Application().run()
if __name__ == "__main__":
    # NOTE(review): this `import io` executes at module scope, so it is the
    # only thing that binds the global name `io` used elsewhere (io.BytesIO
    # in the ZIP-upload handler) — and only when run as a script, not when
    # this file is imported. Consider moving it to the top-of-file imports.
    import io
    asyncio.run(main())