# ccdi/deploy/remote-deploy.py
# (repository-viewer residue removed: "Files", "178 lines", "6.3 KiB",
# "Python", "Raw Normal View History" were page chrome, not script content)
import argparse
import os
import posixpath
import shlex
import sys
from pathlib import Path
import paramiko
# Directory names that are pruned from the upload walk (never sent remotely).
SKIP_DIRS = {"__pycache__", ".pytest_cache", ".git"}
# Individual file names that are never uploaded.
SKIP_FILES = {".DS_Store"}
def parse_args():
    """Parse the command-line options required to reach and deploy the host."""
    parser = argparse.ArgumentParser(
        description="Upload CCDI deployment package and run docker compose remotely."
    )
    # Every option is mandatory; only --port needs an int conversion.
    for flag, value_type in (
        ("--host", str),
        ("--port", int),
        ("--username", str),
        ("--password", str),
        ("--local-root", str),
        ("--remote-root", str),
    ):
        parser.add_argument(flag, type=value_type, required=True)
    return parser.parse_args()
def ensure_remote_dir(ssh, remote_path):
    """Create *remote_path* (and any parents) on the remote host via ``mkdir -p``.

    Raises RuntimeError with the remote command's output when creation fails.
    """
    exit_code, out, err = run_command(ssh, "mkdir -p " + shlex.quote(remote_path))
    if exit_code == 0:
        return
    raise RuntimeError(f"Failed to create remote directory {remote_path}:\n{out}\n{err}")
def resolve_sftp_root(sftp, shell_root):
    """Map a shell-visible absolute path to one reachable over SFTP.

    SFTP sessions may be chrooted, so the shell path and the SFTP path can
    differ by a leading prefix. Progressively drop leading path segments and
    return the first candidate that ``listdir`` accepts.

    Raises RuntimeError when no suffix of *shell_root* is listable.
    """
    segments = [seg for seg in shell_root.split("/") if seg]
    for start in range(len(segments)):
        candidate = "/" + "/".join(segments[start:])
        try:
            sftp.listdir(candidate)
        except OSError:
            continue
        return candidate
    raise RuntimeError(f"Unable to resolve SFTP path for remote root: {shell_root}")
def upload_tree(ssh, sftp, local_root, shell_remote_root, sftp_remote_root):
    """Mirror *local_root* onto the remote host, skipping SKIP_DIRS/SKIP_FILES.

    Directories are created through the shell-visible path (sudo-capable
    ``mkdir -p``), while the file bytes travel through the possibly chrooted
    SFTP path — the two roots may differ (see resolve_sftp_root).
    """
    for base, subdirs, names in os.walk(local_root):
        # Prune in place so os.walk never descends into skipped directories.
        subdirs[:] = [d for d in subdirs if d not in SKIP_DIRS]
        rel = os.path.relpath(base, local_root)
        rel_posix = "" if rel == "." else rel.replace("\\", "/")
        if rel_posix:
            shell_dir = posixpath.join(shell_remote_root, rel_posix)
            sftp_dir = posixpath.join(sftp_remote_root, rel_posix)
        else:
            shell_dir = shell_remote_root
            sftp_dir = sftp_remote_root
        ensure_remote_dir(ssh, shell_dir)
        for name in names:
            if name in SKIP_FILES:
                continue
            sftp.put(os.path.join(base, name), posixpath.join(sftp_dir, name))
def run_command(ssh, command):
    """Run *command* on the remote host; return (exit_code, stdout, stderr).

    Both streams are fully drained *before* asking for the exit status:
    calling ``recv_exit_status()`` first can deadlock when the command emits
    more output than the SSH channel window buffers, because the remote side
    blocks on a full pipe while we block waiting for it to exit.
    Output is decoded as UTF-8 with undecodable bytes dropped.
    """
    _, stdout, stderr = ssh.exec_command(command)
    output = stdout.read().decode("utf-8", errors="ignore")
    error = stderr.read().decode("utf-8", errors="ignore")
    exit_code = stdout.channel.recv_exit_status()
    return exit_code, output, error
def sudo_prefix(password):
    """Build a shell snippet that feeds *password* to ``sudo -S`` via stdin.

    The password is shell-quoted; ``-p ''`` suppresses the sudo prompt so it
    does not pollute captured output.
    """
    quoted = shlex.quote(password)
    return "printf '%s\\n' " + quoted + " | sudo -S -p '' "
def detect_compose_command(ssh, password):
    """Return a working compose invocation for the remote host.

    First determines whether the docker daemon is reachable directly or only
    via sudo, then probes the v2 plugin (``docker compose``) before the
    legacy binary (``docker-compose``). The returned string carries the sudo
    prefix when one was required.

    Raises RuntimeError when the daemon or compose cannot be reached.
    """
    for prefix in ("", sudo_prefix(password)):
        code, _, _ = run_command(ssh, f"{prefix}docker ps >/dev/null 2>&1")
        if code == 0:
            daemon_prefix = prefix
            break
    else:
        raise RuntimeError("Docker daemon is not accessible on remote host.")
    candidates = (
        (f"{daemon_prefix}docker compose", f"{daemon_prefix}docker compose version"),
        (f"{daemon_prefix}docker-compose", f"{daemon_prefix}docker-compose --version"),
    )
    for compose_cmd, probe in candidates:
        code, _, _ = run_command(ssh, probe)
        if code == 0:
            return compose_cmd
    raise RuntimeError("Docker Compose command not found on remote host.")
def main():
    """Entry point: upload the deployment tree and start the stack remotely.

    Flow: connect over SSH -> remove the previously uploaded package and
    recreate runtime dirs -> upload the local tree -> detect a usable docker
    compose command -> build and start the stack -> print deploy output and
    the backend container's recent logs.
    """
    args = parse_args()
    local_root = Path(args.local_root).resolve()
    # A trailing slash would produce double slashes in the posixpath.join
    # calls below.
    remote_root = args.remote_root.rstrip("/")
    if not local_root.exists():
        raise FileNotFoundError(f"Local root does not exist: {local_root}")
    ssh = paramiko.SSHClient()
    # NOTE(review): AutoAddPolicy trusts any host key on first contact —
    # acceptable for a lab box; consider known_hosts verification for
    # production use.
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(
        hostname=args.host,
        port=args.port,
        username=args.username,
        password=args.password,
        timeout=20,
    )
    sftp = ssh.open_sftp()
    try:
        # Remove the previously uploaded package dirs so files deleted
        # locally do not linger between deploys; runtime dirs are preserved
        # (only re-created if missing).
        pre_clean = (
            "set -e;"
            f"mkdir -p {shlex.quote(remote_root)};"
            f"mkdir -p {shlex.quote(posixpath.join(remote_root, 'runtime/ruoyi'))};"
            f"mkdir -p {shlex.quote(posixpath.join(remote_root, 'runtime/logs/backend'))};"
            f"rm -rf {shlex.quote(posixpath.join(remote_root, 'backend'))} "
            f"{shlex.quote(posixpath.join(remote_root, 'frontend'))} "
            f"{shlex.quote(posixpath.join(remote_root, 'docker'))} "
            f"{shlex.quote(posixpath.join(remote_root, 'lsfx-mock-server'))};"
            f"rm -f {shlex.quote(posixpath.join(remote_root, 'docker-compose.yml'))} "
            f"{shlex.quote(posixpath.join(remote_root, '.env.example'))};"
        )
        exit_code, output, error = run_command(ssh, pre_clean)
        if exit_code != 0:
            raise RuntimeError(f"Remote cleanup failed:\n{output}\n{error}")
        # The SFTP session may be chrooted, so the shell path and the SFTP
        # path for the same directory can differ.
        sftp_remote_root = resolve_sftp_root(sftp, remote_root)
        upload_tree(ssh, sftp, str(local_root), remote_root, sftp_remote_root)
        compose_cmd = detect_compose_command(ssh, args.password)
        deploy_command = (
            "set -e;"
            f"cd {shlex.quote(remote_root)};"
            f"{compose_cmd} up -d --build;"
            f"{compose_cmd} ps;"
        )
        exit_code, output, error = run_command(ssh, deploy_command)
        if exit_code != 0:
            raise RuntimeError(f"Remote deploy failed:\n{output}\n{error}")
        # Best effort: tail recent backend logs for quick diagnostics; the
        # exit code is deliberately ignored.
        logs_command = (
            "set -e;"
            f"cd {shlex.quote(remote_root)};"
            f"{compose_cmd} logs backend --tail 120;"
        )
        _, logs_output, logs_error = run_command(ssh, logs_command)
        print("=== DEPLOY OUTPUT ===")
        print(output.strip())
        if error.strip():
            print("=== DEPLOY STDERR ===")
            print(error.strip())
        if logs_output.strip():
            print("=== BACKEND LOGS ===")
            print(logs_output.strip())
        if logs_error.strip():
            print("=== BACKEND LOG STDERR ===")
            print(logs_error.strip())
    finally:
        sftp.close()
        ssh.close()
if __name__ == "__main__":
    # Top-level boundary: surface any failure as a one-line message on
    # stderr and a non-zero exit code instead of a full traceback.
    try:
        main()
    except Exception as exc:
        print(str(exc), file=sys.stderr)
        sys.exit(1)