feat(sandbox): add Python 3 code execution sandbox support
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -35,3 +35,6 @@ nltk_data/
|
||||
tika-server*.jar*
|
||||
cl100k_base.tiktoken
|
||||
libssl*.deb
|
||||
|
||||
sandbox/lib/seccomp_python/target
|
||||
sandbox/lib/seccomp_nodejs/target
|
||||
|
||||
42
sandbox/Dockerfile
Normal file
42
sandbox/Dockerfile
Normal file
@@ -0,0 +1,42 @@
|
||||
FROM python:3.12-slim

USER root

WORKDIR /code

# Fixed typo "Eterntiy" -> "Eternity" (matches the author tag used in
# app/core/runners/python/__init__.py).
LABEL authors="Eternity"

ARG NEED_MIRROR=0

# NOTE(review): the base image is Debian (python:3.12-slim); the sed commands
# below rewrite Ubuntu mirror URLs in /etc/apt/sources.list, which bookworm
# images replace with /etc/apt/sources.list.d/debian.sources -- confirm the
# mirror switch actually takes effect.
# BUG FIX: `[ x == y ]` is a bashism; RUN uses /bin/sh (dash on Debian), where
# `==` fails with "unexpected operator". POSIX test uses a single `=`.
RUN --mount=type=cache,id=mem_apt,target=/var/cache/apt,sharing=locked \
    if [ "$NEED_MIRROR" = "1" ]; then \
        sed -i 's|https://ports.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
        sed -i 's|https://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
    fi; \
    rm -f /etc/apt/apt.conf.d/docker-clean && \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache && \
    chmod 1777 /tmp && \
    apt update && \
    apt --no-install-recommends install -y ca-certificates && \
    apt update && \
    apt install -y python3-pip pipx nginx unzip curl wget git vim less && \
    apt-get install -y --no-install-recommends tzdata libseccomp2 libseccomp-dev && \
    ln -snf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \
    echo "Asia/Shanghai" > /etc/timezone && \
    apt install -y cargo

COPY ./app /code/app
COPY ./dependencies /code/dependencies
COPY ./lib /code/lib
COPY ./script /code/script
COPY ./config.yaml /code/config.yaml
COPY ./main.py /code/main.py
COPY ./requirements.txt /code/requirements.txt

RUN python -m venv .venv
RUN .venv/bin/python3 -m pip install -r requirements.txt

# Build the seccomp helper shared library used by the Python runner.
RUN cargo build --release --manifest-path lib/seccomp_python/Cargo.toml

# Probe the /health endpoint exposed by app/controllers/health_controller.py
# on the default SANDBOX_PORT (8194).
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD curl 127.0.0.1:8194/health

CMD [".venv/bin/python3", "main.py"]
|
||||
134
sandbox/app/config.py
Normal file
134
sandbox/app/config.py
Normal file
@@ -0,0 +1,134 @@
|
||||
"""Configuration management"""
|
||||
import os
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
import yaml
|
||||
|
||||
SANDBOX_USER_ID = 1000
|
||||
SANDBOX_GROUP_ID = 1000
|
||||
|
||||
DEFAULT_PYTHON_LIB_REQUIREMENTS_AMD = [
|
||||
"/usr/local/lib/python3.12",
|
||||
"/usr/lib/python3",
|
||||
"/usr/lib/x86_64-linux-gnu",
|
||||
"/etc/ssl/certs/ca-certificates.crt",
|
||||
"/etc/nsswitch.conf",
|
||||
"/etc/hosts",
|
||||
"/etc/resolv.conf",
|
||||
"/run/systemd/resolve/stub-resolv.conf",
|
||||
"/run/resolvconf/resolv.conf",
|
||||
"/etc/localtime",
|
||||
"/usr/share/zoneinfo",
|
||||
"/etc/timezone",
|
||||
]
|
||||
|
||||
|
||||
class AppConfig(BaseModel):
    """Application configuration"""
    # HTTP port the sandbox API listens on (also probed by the Docker HEALTHCHECK).
    port: int = 8194
    # Debug flag; drives log level selection in app/logger.py.
    debug: bool = True
    # Expected value of the X-Api-Key header (see app/middleware/auth.py).
    key: str = "redbear-sandbox"
|
||||
|
||||
|
||||
class ProxyConfig(BaseModel):
    """Proxy configuration"""
    # SOCKS5 proxy URL; when set it takes precedence over http/https
    # (PythonRunner.run maps it to both HTTP_PROXY and HTTPS_PROXY).
    socks5: str = ""
    # Plain HTTP proxy URL.
    http: str = ""
    # HTTPS proxy URL.
    https: str = ""
|
||||
|
||||
|
||||
class Config(BaseModel):
    """Global configuration"""
    app: AppConfig = Field(default_factory=AppConfig)
    # Number of concurrently executing sandbox workers (semaphore size).
    max_workers: int = 4
    # Maximum in-flight HTTP requests before a 503 is returned.
    max_requests: int = 50
    # Per-execution timeout in seconds (default for PythonRunner.run).
    worker_timeout: int = 30
    nodejs_path: str = "node"
    enable_network: bool = True
    enable_preload: bool = False

    # Interpreter used to run sandboxed code; empty means "must be configured".
    python_path: str = ""
    # Host paths copied into the sandbox environment by script/env.sh.
    # NOTE(review): Field(default=...) shares one list object across Config
    # instances -- consider default_factory if instances ever mutate it.
    python_lib_paths: List[str] = Field(default=DEFAULT_PYTHON_LIB_REQUIREMENTS_AMD)
    # Interval string like "30s" / "30m" / "2h" (parsed in app/dependencies.py).
    python_deps_update_interval: str = "30m"
    # Extra syscall numbers exported to the child via ALLOWED_SYSCALLS.
    allowed_syscalls: List[int] = Field(default_factory=list)
    proxy: ProxyConfig = Field(default_factory=ProxyConfig)
|
||||
|
||||
|
||||
# Global configuration instance
|
||||
_config: Optional[Config] = None
|
||||
|
||||
|
||||
def load_config(config_path: str) -> Config:
    """Load configuration from a YAML file, then apply environment overrides.

    Args:
        config_path: Path to the YAML configuration file; built-in defaults
            are used when the file does not exist.

    Returns:
        The loaded (and globally cached) Config instance.
    """
    global _config

    # Load from file
    if os.path.exists(config_path):
        with open(config_path, 'r') as f:
            # BUG FIX: an empty YAML file parses to None, and Config(**None)
            # raises TypeError -- fall back to {} so defaults apply instead.
            data = yaml.safe_load(f) or {}
        _config = Config(**data)
    else:
        _config = Config()

    def _as_bool(raw: str) -> bool:
        # Same truthy tokens the original accepted.
        return raw.lower() in ("true", "1", "yes")

    # Environment overrides, applied only when the variable is set to a
    # non-empty value (matching the original `if os.getenv(...)` guards).
    # Table-driven to avoid the double-getenv-per-variable repetition.
    overrides = [
        ("DEBUG", lambda v: setattr(_config.app, "debug", _as_bool(v))),
        ("MAX_WORKERS", lambda v: setattr(_config, "max_workers", int(v))),
        ("MAX_REQUESTS", lambda v: setattr(_config, "max_requests", int(v))),
        ("SANDBOX_PORT", lambda v: setattr(_config.app, "port", int(v))),
        ("WORKER_TIMEOUT", lambda v: setattr(_config, "worker_timeout", int(v))),
        ("API_KEY", lambda v: setattr(_config.app, "key", v)),
        ("NODEJS_PATH", lambda v: setattr(_config, "nodejs_path", v)),
        ("ENABLE_NETWORK", lambda v: setattr(_config, "enable_network", _as_bool(v))),
        ("ENABLE_PRELOAD", lambda v: setattr(_config, "enable_preload", _as_bool(v))),
        ("ALLOWED_SYSCALLS", lambda v: setattr(_config, "allowed_syscalls", [int(x) for x in v.split(",")])),
        ("SOCKS5_PROXY", lambda v: setattr(_config.proxy, "socks5", v)),
        ("HTTP_PROXY", lambda v: setattr(_config.proxy, "http", v)),
        ("HTTPS_PROXY", lambda v: setattr(_config.proxy, "https", v)),
        # python
        ("PYTHON_PATH", lambda v: setattr(_config, "python_path", v)),
        ("PYTHON_LIB_PATH", lambda v: setattr(_config, "python_lib_paths", v.split(','))),
        ("PYTHON_DEPS_UPDATE_INTERVAL", lambda v: setattr(_config, "python_deps_update_interval", v)),
    ]
    for env_name, apply in overrides:
        value = os.getenv(env_name)
        if value:
            apply(value)

    return _config
|
||||
|
||||
|
||||
config_path = os.getenv("CONFIG_PATH", "config.yaml")
|
||||
load_config(config_path)
|
||||
|
||||
|
||||
def get_config() -> Config:
    """Return the process-wide Config, failing loudly when it was never loaded."""
    cfg = _config
    if cfg is None:
        raise RuntimeError("Configuration not loaded. Call load_config() first.")
    return cfg
|
||||
8
sandbox/app/controllers/__init__.py
Normal file
8
sandbox/app/controllers/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from fastapi import APIRouter
|
||||
|
||||
from . import health_controller, sandbox_controller
|
||||
|
||||
manager_router = APIRouter()
|
||||
|
||||
manager_router.include_router(health_controller.router)
|
||||
manager_router.include_router(sandbox_controller.router)
|
||||
12
sandbox/app/controllers/health_controller.py
Normal file
12
sandbox/app/controllers/health_controller.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""Health check endpoint"""
|
||||
from fastapi import APIRouter
|
||||
|
||||
from app.models import HealthResponse
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/health", response_model=HealthResponse)
async def health_check():
    """Health check endpoint"""
    # Static liveness probe; also hit by the Dockerfile HEALTHCHECK via curl.
    return HealthResponse(status="healthy", version="2.0.0")
|
||||
59
sandbox/app/controllers/sandbox_controller.py
Normal file
59
sandbox/app/controllers/sandbox_controller.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Sandbox API endpoints"""
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.middleware.auth import verify_api_key
|
||||
from app.middleware.concurrency import check_max_requests, acquire_worker
|
||||
from app.models import (
|
||||
RunCodeRequest,
|
||||
ApiResponse,
|
||||
UpdateDependencyRequest,
|
||||
error_response
|
||||
)
|
||||
from app.services.python_service import (
|
||||
run_python_code,
|
||||
list_python_dependencies,
|
||||
update_python_dependencies
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/v1/sandbox",
|
||||
tags=["sandbox"],
|
||||
dependencies=[Depends(verify_api_key)]
|
||||
)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/run",
|
||||
response_model=ApiResponse,
|
||||
dependencies=[Depends(check_max_requests),
|
||||
Depends(acquire_worker)]
|
||||
)
|
||||
async def run_code(request: RunCodeRequest):
|
||||
"""Execute code in sandbox"""
|
||||
if request.language == "python3":
|
||||
return await run_python_code(request.code, request.preload, request.options)
|
||||
elif request.language == "nodejs":
|
||||
# TODO
|
||||
return error_response(-400, "TODO")
|
||||
else:
|
||||
return error_response(-400, "unsupported language")
|
||||
|
||||
|
||||
@router.get("/dependencies", response_model=ApiResponse)
async def get_dependencies(language: str):
    """Return the installed dependency list for the given language."""
    # Guard clause: only python3 is supported today.
    if language != "python3":
        return error_response(-400, "unsupported language")
    return await list_python_dependencies()
|
||||
|
||||
|
||||
@router.post("/dependencies/update", response_model=ApiResponse)
async def update_dependencies(request: UpdateDependencyRequest):
    """Trigger a dependency update for the requested language."""
    # Guard clause: only python3 is supported today.
    if request.language != "python3":
        return error_response(-400, "unsupported language")
    return await update_python_dependencies()
|
||||
|
||||
|
||||
1
sandbox/app/core/__init__.py
Normal file
1
sandbox/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Core functionality package"""
|
||||
32
sandbox/app/core/encryption.py
Normal file
32
sandbox/app/core/encryption.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Code encryption utilities"""
|
||||
import base64
|
||||
|
||||
|
||||
def encrypt_code(code: bytes, key: bytes) -> str:
    """Encrypt code using XOR cipher with base64 encoding

    Args:
        code: Plain code bytes
        key: Encryption key bytes (any non-empty length)

    Returns:
        Base64 encoded encrypted code
    """
    # BUG FIX: the key index previously hard-coded 64 (`key[i % 64]`), which
    # raises IndexError for keys shorter than 64 bytes and silently ignores
    # the tail of longer keys. Cycle over the real key length, matching the
    # decrypt() in the prescript which uses `i % len(key)`.
    key_len = len(key)
    encrypted_code = bytearray(len(code))
    for i in range(len(code)):
        encrypted_code[i] = code[i] ^ key[i % key_len]
    encoded_code = base64.b64encode(encrypted_code).decode("utf-8")
    return encoded_code
|
||||
|
||||
|
||||
def generate_key(length: int = 64) -> bytes:
    """Produce a fresh random key for the XOR cipher.

    Args:
        length: Key length in bytes (default 64, i.e. 512 bits).

    Returns:
        Cryptographically secure random bytes.
    """
    # Local import mirrors the original: secrets is only needed here.
    import secrets as _secrets
    return _secrets.token_bytes(length)
|
||||
48
sandbox/app/core/executor.py
Normal file
48
sandbox/app/core/executor.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""Code execution engine"""
|
||||
import os
|
||||
from typing import Optional
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from app.config import get_config
|
||||
from app.logger import get_logger
|
||||
from app.models import RunnerOptions
|
||||
|
||||
|
||||
class ExecutionResult:
    """Result of code execution"""

    def __init__(self, stdout: str = "", stderr: str = "", exit_code: int = 0, error: Optional[str] = None):
        # Captured stdout/stderr of the sandboxed process (decoded text).
        self.stdout = stdout
        self.stderr = stderr
        # Child exit status; runners substitute -1 on timeout.
        self.exit_code = exit_code
        # Failure description (e.g. "Execution timeout"); None on success.
        self.error = error
|
||||
|
||||
|
||||
class CodeExecutor(ABC):
    """Base code executor"""

    def __init__(self):
        # Shared application logger and the already-loaded global config.
        self.logger = get_logger()
        self.config = get_config()

    @abstractmethod
    async def run(
        self,
        code: str,
        options: RunnerOptions,
        preload: str = "",
        timeout: Optional[int] = None
    ) -> ExecutionResult:
        """Execute *code* in the sandbox; implemented by per-language runners."""
        pass

    def cleanup_temp_file(self, file_path: str) -> None:
        """Remove temporary file

        Args:
            file_path: Path to file to remove
        """
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
        except Exception as e:
            # Best-effort cleanup: a leftover temp file is preferable to
            # failing the request after the code already ran.
            self.logger.warning(f"Failed to cleanup temp file {file_path}: {e}")
|
||||
1
sandbox/app/core/runners/__init__.py
Normal file
1
sandbox/app/core/runners/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Code runners package"""
|
||||
4
sandbox/app/core/runners/python/__init__.py
Normal file
4
sandbox/app/core/runners/python/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# -*- coding: UTF-8 -*-
|
||||
# Author: Eternity
|
||||
# @Email: 1533512157@qq.com
|
||||
# @Time : 2026/1/23 11:27
|
||||
50
sandbox/app/core/runners/python/env.py
Normal file
50
sandbox/app/core/runners/python/env.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import asyncio
|
||||
import tempfile
|
||||
import stat
|
||||
from pathlib import Path
|
||||
|
||||
from app.config import get_config
|
||||
from app.core.runners.python.settings import LIB_PATH
|
||||
from app.logger import get_logger
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def prepare_python_dependencies_env():
    """Copy each configured host library path into the sandbox lib root.

    Runs script/env.sh once per entry of config.python_lib_paths; failures
    are logged and remaining paths are still processed.
    """
    config = get_config()

    # NOTE(review): the temp dir is created directly under "/" and only holds
    # a copy of env.sh for the duration of the loop -- confirm that location
    # is intentional.
    with tempfile.TemporaryDirectory(dir="/") as root_path:
        root = Path(root_path)

        env_sh = root / "env.sh"
        with open("script/env.sh") as f:
            env_sh.write_text(f.read())
        # Make the copied script executable for the owner.
        env_sh.chmod(env_sh.stat().st_mode | stat.S_IXUSR)

        for lib_path in config.python_lib_paths:
            lib_path = Path(lib_path)

            if not lib_path.exists():
                logger.warning("python lib path %s is not available", lib_path)
                continue

            # Invocation contract: env.sh <source path> <destination root>.
            cmd = [
                "bash",
                str(env_sh),
                str(lib_path),
                str(LIB_PATH),
            ]

            process = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE
            )

            stdout, stderr = await process.communicate()
            retcode = process.returncode

            # Log and continue; one bad path should not abort the rest.
            if retcode != 0:
                logger.error(
                    f"create env error for file {lib_path}: retcode={retcode}, stderr={stderr.decode()}"
                )
|
||||
56
sandbox/app/core/runners/python/prescript.py
Normal file
56
sandbox/app/core/runners/python/prescript.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import ctypes
import os
import sys
import traceback
from base64 import b64decode


# Setup exception hook
# Print the full traceback to stderr and exit non-zero so the parent runner
# can surface the failure via the captured stderr / exit code.
def excepthook(etype, value, tb):
    sys.stderr.write("".join(traceback.format_exception(etype, value, tb)))
    sys.stderr.flush()
    sys.exit(-1)


sys.excepthook = excepthook

# Load security library if available
# libpython.so is released next to the sandbox root by release_lib_binary();
# the runner sets cwd=LIB_PATH before launching this process.
lib = ctypes.CDLL("./libpython.so")
lib.init_seccomp.argtypes = [ctypes.c_uint32, ctypes.c_uint32, ctypes.c_bool]
lib.init_seccomp.restype = None

# Get running path (argv[1]): directory to chdir into before executing.
running_path = sys.argv[1]
if not running_path:
    exit(-1)

# Get decrypt key (argv[2]): base64-encoded per-request XOR key.
key = sys.argv[2]
if not key:
    exit(-1)

key = b64decode(key)

os.chdir(running_path)

# Preload code
# NOTE: this file is a TEMPLATE -- {{preload}}, {{uid}}, {{gid}},
# {{enable_network}} and {{code}} are substituted by PythonRunner before the
# script is written to disk, so it is not runnable as-is.
{{preload}}

# Apply security if library is available
# Preload runs BEFORE seccomp is applied, so it may still use privileged
# syscalls; user code below runs with the filter active.
lib.init_seccomp({{uid}}, {{gid}}, {{enable_network}})

# Decrypt and execute code
code = b64decode("{{code}}")


# XOR-decrypt with the per-request key; mirror of app.core.encryption.encrypt_code.
def decrypt(code, key):
    key_len = len(key)
    code_len = len(code)
    code = bytearray(code)
    for i in range(code_len):
        code[i] = code[i] ^ key[i % key_len]
    return bytes(code)


code = decrypt(code, key)
exec(code)
|
||||
151
sandbox/app/core/runners/python/python_runner.py
Normal file
151
sandbox/app/core/runners/python/python_runner.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""Python code runner"""
|
||||
import asyncio
|
||||
import base64
|
||||
import os
|
||||
import uuid
|
||||
from typing import Optional
|
||||
|
||||
from app.config import SANDBOX_USER_ID, SANDBOX_GROUP_ID, get_config
|
||||
from app.core.encryption import generate_key, encrypt_code
|
||||
from app.core.executor import CodeExecutor, ExecutionResult
|
||||
from app.core.runners.python.settings import check_lib_avaiable, release_lib_binary, LIB_PATH
|
||||
from app.models import RunnerOptions
|
||||
|
||||
# Python sandbox prescript template
|
||||
with open("app/core/runners/python/prescript.py") as f:
|
||||
PYTHON_PRESCRIPT = f.read()
|
||||
|
||||
|
||||
class PythonRunner(CodeExecutor):
    """Python code runner with security isolation"""

    def __init__(self):
        super().__init__()

    @staticmethod
    def init_enviroment(code: bytes, preload: str, options: RunnerOptions) -> tuple[str, str]:
        """Materialize the sandbox script for one execution.

        Fills the prescript template (uid/gid/network flag/preload),
        XOR-encrypts *code* with a fresh 64-byte key, writes the result under
        LIB_PATH/tmp, and returns ``(script_path, base64_encoded_key)``.
        """
        # Ensure the seccomp shared library has been released to LIB_PATH.
        if not check_lib_avaiable():
            release_lib_binary(False)
        config = get_config()
        # uuid4().hex contains no dashes, so the replace() is a harmless no-op.
        code_file_name = uuid.uuid4().hex.replace("-", "_")

        script = PYTHON_PRESCRIPT.replace("{{uid}}", str(SANDBOX_USER_ID), 1)
        script = script.replace("{{gid}}", str(SANDBOX_GROUP_ID), 1)
        # Network is enabled only when both the request AND the server allow it.
        script = script.replace(
            "{{enable_network}}",
            str(int(options.enable_network and config.enable_network)
            ),
            1
        )
        script = script.replace("{{preload}}", f"{preload}\n", 1)

        key = generate_key(64)

        encoded_code = encrypt_code(code, key)
        encoded_key = base64.b64encode(key).decode("utf-8")

        script = script.replace("{{code}}", encoded_code, 1)

        code_path = f"{LIB_PATH}/tmp/{code_file_name}.py"
        try:
            os.makedirs(os.path.dirname(code_path), mode=0o755, exist_ok=True)
            with open(code_path, "w", encoding="utf-8") as f:
                f.write(script)
            os.chmod(code_path, 0o755)

        except OSError as e:
            raise RuntimeError(f"Failed to write {code_path}") from e

        return code_path, encoded_key

    async def run(
        self,
        code: str,
        options: RunnerOptions,
        preload: str = "",
        timeout: Optional[int] = None
    ) -> ExecutionResult:
        """Run Python code in sandbox

        Args:
            options: Per-request runner options (e.g. network flag)
            code: Base64-encoded plain code (decoded here, then re-encrypted
                into the generated script by init_enviroment)
            preload: Preload code to execute before main code
            timeout: Execution timeout in seconds

        Returns:
            ExecutionResult with stdout, stderr, and exit code
        """
        config = self.config

        if timeout is None:
            timeout = config.worker_timeout

        # Check if preload is allowed
        if not config.enable_preload:
            preload = ""
        code = base64.b64decode(code)
        script_path, encoded_key = self.init_enviroment(code, preload, options=options)

        try:
            # Setup environment
            # NOTE(review): the child environment starts EMPTY (no PATH/HOME),
            # so config.python_path presumably must be an absolute path --
            # confirm against deployment config.
            env = {}

            # Add proxy settings if configured (socks5 wins over http/https).
            if config.proxy.socks5:
                env["HTTPS_PROXY"] = config.proxy.socks5
                env["HTTP_PROXY"] = config.proxy.socks5
            elif config.proxy.https or config.proxy.http:
                if config.proxy.https:
                    env["HTTPS_PROXY"] = config.proxy.https
                if config.proxy.http:
                    env["HTTP_PROXY"] = config.proxy.http

            # Add allowed syscalls if configured
            if config.allowed_syscalls:
                env["ALLOWED_SYSCALLS"] = ",".join(map(str, config.allowed_syscalls))

            # Execute with Python interpreter
            # argv: <script> <running_path> <key>; cwd must be LIB_PATH so the
            # prescript can load ./libpython.so.

            process = await asyncio.create_subprocess_exec(
                config.python_path,
                script_path,
                LIB_PATH,
                encoded_key,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                env=env,
                cwd=LIB_PATH
            )

            # Wait for completion with timeout
            try:
                stdout, stderr = await asyncio.wait_for(
                    process.communicate(),
                    timeout=timeout
                )

                return ExecutionResult(
                    stdout=stdout.decode('utf-8', errors='replace'),
                    stderr=stderr.decode('utf-8', errors='replace'),
                    exit_code=process.returncode
                )

            except asyncio.TimeoutError:
                # Kill process on timeout (best effort; it may already be gone).
                try:
                    process.kill()
                    await process.wait()
                except:
                    pass

                return ExecutionResult(
                    stdout="",
                    stderr="Execution timeout",
                    exit_code=-1,
                    error="Execution timeout"
                )

        finally:
            # Cleanup temporary file
            self.cleanup_temp_file(script_path)
|
||||
62
sandbox/app/core/runners/python/settings.py
Normal file
62
sandbox/app/core/runners/python/settings.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import os
|
||||
|
||||
from app.logger import get_logger
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
RELEASE_LIB_PATH = "./lib/seccomp_python/target/release/libpython.so"
|
||||
LIB_PATH = "/var/sandbox/sandbox-python"
|
||||
LIB_NAME = "libpython.so"
|
||||
|
||||
try:
|
||||
with open(RELEASE_LIB_PATH, "rb") as f:
|
||||
_PYTHON_LIB = f.read()
|
||||
except:
|
||||
logger.critical("failed to load python lib")
|
||||
raise
|
||||
|
||||
|
||||
def check_lib_avaiable():
    """Return True when libpython.so has already been released to LIB_PATH."""
    lib_file = os.path.join(LIB_PATH, LIB_NAME)
    return os.path.exists(lib_file)
|
||||
|
||||
|
||||
def release_lib_binary(force_remove: bool):
    """Write the bundled libpython.so into LIB_PATH.

    Args:
        force_remove: When True, delete any existing library file first; in
            every case the file is (re)written from the in-memory copy.

    Raises:
        OSError: When removing, creating the directory, or writing fails
            (logged at critical level before re-raising).
    """
    logger.info("init runtime enviroment")
    lib_file = os.path.join(LIB_PATH, LIB_NAME)

    # The original duplicated the makedirs+write sequence verbatim in both
    # branches; factoring the optional removal out keeps behavior identical
    # while removing the copy-paste.
    if force_remove and os.path.exists(lib_file):
        try:
            os.remove(lib_file)
        except OSError:
            logger.critical(f"failed to remove {os.path.join(LIB_PATH, LIB_NAME)}")
            raise

    try:
        os.makedirs(LIB_PATH, mode=0o755, exist_ok=True)
    except OSError:
        logger.critical(f"failed to create {LIB_PATH}")
        raise

    try:
        with open(lib_file, "wb") as f:
            f.write(_PYTHON_LIB)
        os.chmod(lib_file, 0o755)
    except OSError:
        logger.critical(f"failed to write {lib_file}")
        raise

    logger.info("python runner environment initialized")
|
||||
161
sandbox/app/dependencies.py
Normal file
161
sandbox/app/dependencies.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""Dependency management"""
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import List, Dict
|
||||
|
||||
from app.config import get_config
|
||||
from app.core.runners.python.env import prepare_python_dependencies_env
|
||||
from app.logger import get_logger
|
||||
|
||||
|
||||
async def setup_dependencies():
    """Install Python requirements and prepare the sandbox environment."""
    logger = get_logger()

    # (start message, coroutine, done message) in execution order.
    steps = (
        ("Installing Python dependencies...",
         install_python_dependencies,
         "Python dependencies installed"),
        ("Preparing Python dependencies environment...",
         prepare_python_dependencies_env,
         "Python dependencies environment ready"),
    )
    try:
        for begin_msg, step, done_msg in steps:
            logger.info(begin_msg)
            await step()
            logger.info(done_msg)
    except Exception as e:
        logger.error(f"Failed to setup dependencies: {e}")
|
||||
|
||||
|
||||
async def update_dependencies():
    """Refresh installed dependencies (not yet implemented)."""
    # TODO: implement the actual update logic.
    return None
|
||||
|
||||
|
||||
async def install_python_dependencies():
    """Install Python dependencies from requirements file"""
    logger = get_logger()
    config = get_config()

    # Check if requirements file exists
    req_file = Path("dependencies/python-requirements.txt")
    if not req_file.exists():
        logger.warning("Python requirements file not found, skipping installation")
        return

    # Read requirements
    requirements = req_file.read_text().strip()
    if not requirements:
        logger.info("No Python requirements to install")
        return

    # Install using pip: all packages in a single invocation of the
    # configured sandbox interpreter.
    cmd = [
        config.python_path,
        "-m",
        "pip",
        "install",
        "--upgrade"
    ]

    # Add packages from requirements (skip blank lines and '#' comments).
    for line in requirements.split("\n"):
        line = line.strip()
        if line and not line.startswith("#"):
            cmd.append(line)

    try:
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )

        stdout, stderr = await process.communicate()

        # pip failures are logged, not raised: startup continues best-effort.
        if process.returncode != 0:
            logger.error(f"Failed to install Python dependencies: {stderr.decode()}")
        else:
            logger.info("Python dependencies installed successfully")

    except Exception as e:
        logger.error(f"Error installing Python dependencies: {e}")
|
||||
|
||||
|
||||
async def list_dependencies(language: str) -> List[Dict[str, str]]:
    """List installed dependencies

    Args:
        language: Language identifier. Accepts both "python" and "python3":
            the HTTP layer (app/controllers) dispatches on "python3" while
            this module historically used "python".

    Returns:
        List of dependencies with name and version; empty list for
        unsupported languages.
    """
    # Consistency fix: also accept "python3" so callers using the controller's
    # language token do not silently get an empty list.
    if language in ("python", "python3"):
        return await list_python_packages()
    else:
        return []
|
||||
|
||||
|
||||
async def list_python_packages() -> List[Dict[str, str]]:
    """List installed Python packages

    Runs ``pip list --format=freeze`` with the configured sandbox interpreter
    and parses its "name==version" lines. Returns [] on any failure.
    """
    config = get_config()

    try:
        process = await asyncio.create_subprocess_exec(
            config.python_path,
            "-m",
            "pip",
            "list",
            "--format=freeze",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )

        stdout, stderr = await process.communicate()

        if process.returncode != 0:
            return []

        # Parse output: one "name==version" entry per line.
        packages = []
        for line in stdout.decode().split("\n"):
            line = line.strip()
            if line and "==" in line:
                name, version = line.split("==", 1)
                packages.append({"name": name, "version": version})

        return packages

    except Exception as e:
        get_logger().error(f"Failed to list Python packages: {e}")
        return []
|
||||
|
||||
|
||||
def _parse_update_interval(interval_str: str) -> int:
    """Convert "30s" / "30m" / "2h" into seconds; default 1800 on any malformed value."""
    try:
        if interval_str.endswith("m"):
            return int(interval_str[:-1]) * 60
        if interval_str.endswith("h"):
            return int(interval_str[:-1]) * 3600
        if interval_str.endswith("s"):
            return int(interval_str[:-1])
    except ValueError:
        # Non-numeric prefix (e.g. "abcm"): fall through to the default.
        pass
    return 1800  # Default 30 minutes


async def update_dependencies_periodically():
    """Periodically update dependencies

    Sleeps for the configured interval between update attempts; errors inside
    an attempt are logged and the loop keeps running.
    """
    logger = get_logger()
    config = get_config()

    # Parse interval. Robustness fix: a malformed configured value used to
    # raise ValueError from int() and kill this background task; it now falls
    # back to the 30-minute default instead.
    interval = _parse_update_interval(config.python_deps_update_interval)

    logger.info(f"Starting periodic dependency updates every {interval} seconds")

    while True:
        await asyncio.sleep(interval)

        try:
            logger.info("Updating Python dependencies...")
            # TODO: await update_dependencies("python")
            logger.info("Python dependencies updated successfully")
        except Exception as e:
            logger.error(f"Failed to update Python dependencies: {e}")
|
||||
42
sandbox/app/logger.py
Normal file
42
sandbox/app/logger.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Logging configuration"""
|
||||
import logging
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from app.config import get_config
|
||||
|
||||
_logger: Optional[logging.Logger] = None
|
||||
|
||||
|
||||
def setup_logger() -> logging.Logger:
    """Setup application logger

    Creates (or reconfigures) the "sandbox" logger with a stdout handler.

    Returns:
        The configured logging.Logger instance (also cached in _logger).
    """
    global _logger

    config = get_config()
    level = logging.DEBUG if config.app.debug else logging.INFO

    # Create logger
    _logger = logging.getLogger("sandbox")
    _logger.setLevel(level)

    # BUG FIX: only attach a handler on the first call. logging.getLogger
    # returns the same object every time, so the original stacked a new
    # StreamHandler per invocation, duplicating every log line.
    if not _logger.handlers:
        # Create console handler
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(level)

        # Create formatter
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        handler.setFormatter(formatter)

        # Add handler to logger
        _logger.addHandler(handler)

    return _logger
|
||||
|
||||
|
||||
def get_logger() -> logging.Logger:
    """Return the shared application logger, initializing it on first use."""
    return _logger if _logger is not None else setup_logger()
|
||||
1
sandbox/app/middleware/__init__.py
Normal file
1
sandbox/app/middleware/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Middleware package"""
|
||||
15
sandbox/app/middleware/auth.py
Normal file
15
sandbox/app/middleware/auth.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Authentication middleware"""
|
||||
from fastapi import Header, HTTPException, status
|
||||
|
||||
from app.config import get_config
|
||||
|
||||
|
||||
async def verify_api_key(x_api_key: str = Header(..., alias="X-Api-Key")):
    """Verify API key from request header

    Compares the X-Api-Key header against the configured key.

    Returns:
        The validated API key string.

    Raises:
        HTTPException: 401 when the key does not match.
    """
    config = get_config()
    # NOTE(review): plain != comparison; hmac.compare_digest would avoid a
    # (minor) timing side channel -- confirm whether that matters here.
    if x_api_key != config.app.key:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API key"
        )
    return x_api_key
|
||||
48
sandbox/app/middleware/concurrency.py
Normal file
48
sandbox/app/middleware/concurrency.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""Concurrency control middleware"""
|
||||
import asyncio
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from app.config import get_config
|
||||
from app.models import error_response
|
||||
|
||||
|
||||
# Global semaphores
|
||||
_worker_semaphore: None | asyncio.Semaphore = None
|
||||
_request_counter = 0
|
||||
_request_lock = asyncio.Lock()
|
||||
|
||||
|
||||
def init_concurrency_control():
    """Create the worker semaphore sized from the configured max_workers."""
    global _worker_semaphore
    _worker_semaphore = asyncio.Semaphore(get_config().max_workers)
|
||||
|
||||
|
||||
async def check_max_requests():
    """FastAPI async-generator dependency limiting in-flight requests.

    Increments a global counter on entry, decrements it on teardown, and
    rejects with 503 once config.max_requests concurrent requests exist.
    """
    global _request_counter
    config = get_config()

    async with _request_lock:
        if _request_counter >= config.max_requests:
            # NOTE(review): detail is an ApiResponse (pydantic model) instance
            # -- confirm FastAPI serializes it as intended in the 503 body.
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail=error_response(-503, "Too many requests")
            )
        _request_counter += 1

    try:
        yield
    finally:
        # Always release the slot, even when the endpoint raises.
        async with _request_lock:
            _request_counter -= 1
|
||||
|
||||
|
||||
async def acquire_worker():
    """FastAPI dependency holding one worker slot for the request's duration."""
    # Lazy init: the semaphore needs the config to be loaded first.
    if _worker_semaphore is None:
        init_concurrency_control()

    async with _worker_semaphore:
        yield
|
||||
80
sandbox/app/models.py
Normal file
80
sandbox/app/models.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""Data models"""
|
||||
from typing import Optional, Any
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class RunnerOptions(BaseModel):
    """Per-request execution options (honored only if the server also allows them)."""
    enable_network: bool = Field(default=False, description="Sandbox network flag")
|
||||
|
||||
|
||||
class RunCodeRequest(BaseModel):
    """Request model for code execution"""
    language: str = Field(..., description="Programming language (python3 or nodejs)")
    # NOTE(review): PythonRunner.run only base64-decodes this value (the XOR
    # encryption happens server-side), so plain base64 appears sufficient --
    # confirm against the client contract.
    code: str = Field(..., description="Base64 encoded encrypted code")
    preload: Optional[str] = Field(default="", description="Preload code")
    options: RunnerOptions = Field(default_factory=RunnerOptions, description="Enable network access")
|
||||
|
||||
|
||||
class RunCodeResponse(BaseModel):
    """Response model for code execution"""
    # Decoded output captured from the sandboxed process.
    stdout: str = Field(default="", description="Standard output")
    stderr: str = Field(default="", description="Standard error")
|
||||
|
||||
|
||||
class DependencyRequest(BaseModel):
    """Request model for dependency operations"""
    # Language identifier (controllers dispatch on "python3" / "nodejs").
    language: str = Field(..., description="Programming language")
|
||||
|
||||
|
||||
class UpdateDependencyRequest(BaseModel):
    """Request model for updating dependencies"""
    language: str = Field(..., description="Programming language")
    # Packages to install; empty list means nothing extra.
    # NOTE(review): entry format (pip/npm requirement syntax?) is not
    # visible here — confirm against the dependency manager.
    packages: list[str] = Field(default_factory=list, description="Packages to install")
|
||||
|
||||
|
||||
class Dependency(BaseModel):
    """Dependency information"""
    # Package name as reported by the package manager.
    name: str
    # Installed version string.
    version: str
|
||||
|
||||
|
||||
class ListDependenciesResponse(BaseModel):
    """Response model for listing dependencies"""
    # Installed packages; empty when none are installed (or none detected).
    dependencies: list[Dependency] = Field(default_factory=list)
|
||||
|
||||
|
||||
class RefreshDependenciesResponse(BaseModel):
    """Response model for refreshing dependencies"""
    # Dependency list after the refresh completed.
    dependencies: list[Dependency] = Field(default_factory=list)
|
||||
|
||||
|
||||
class UpdateDependenciesResponse(BaseModel):
    """Response model for updating dependencies"""
    # Overall success flag for the update operation.
    success: bool = True
    # Names of the packages that were installed during the update.
    installed: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class HealthResponse(BaseModel):
    """Health check response"""
    # Constant values reported as-is by the health endpoint.
    status: str = "healthy"
    version: str = "2.0.0"
|
||||
|
||||
|
||||
class ApiResponse(BaseModel):
    """Standard API response wrapper"""
    # 0 signals success; errors carry a negative code.
    code: int = Field(default=0, description="Response code (0 for success, negative for error)")
    message: str = Field(default="success", description="Response message")
    # Payload; None on errors.
    data: Optional[Any] = Field(default=None, description="Response data")
|
||||
|
||||
|
||||
def success_response(data: Any) -> ApiResponse:
    """Wrap *data* in the standard success envelope (code 0, "success")."""
    return ApiResponse(code=0, message="success", data=data)
|
||||
|
||||
|
||||
def error_response(code: int, message: str) -> ApiResponse:
    """Create a standard error envelope.

    Error codes are negative by convention (see ApiResponse). Previously
    any non-negative code was collapsed to -1, destroying the caller's
    code number (e.g. 31 and 500 both became -1); a positive code is now
    negated so the number is preserved, and 0 (the success code) still
    maps to -1.

    Args:
        code: Error code; positive values are negated, 0 becomes -1.
        message: Human-readable error message.

    Returns:
        ApiResponse with a negative code and no data.
    """
    if code == 0:
        code = -1
    elif code > 0:
        code = -code
    return ApiResponse(code=code, message=message, data=None)
|
||||
1
sandbox/app/services/__init__.py
Normal file
1
sandbox/app/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Services package"""
|
||||
80
sandbox/app/services/python_service.py
Normal file
80
sandbox/app/services/python_service.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""Python execution service"""
|
||||
import signal
|
||||
|
||||
from app.core.runners.python.python_runner import PythonRunner
|
||||
from app.dependencies import (
|
||||
list_dependencies as list_deps,
|
||||
update_dependencies as update_deps
|
||||
)
|
||||
from app.logger import get_logger
|
||||
from app.models import (
|
||||
success_response,
|
||||
error_response,
|
||||
RunCodeResponse,
|
||||
ListDependenciesResponse,
|
||||
UpdateDependenciesResponse,
|
||||
Dependency,
|
||||
RunnerOptions
|
||||
)
|
||||
|
||||
|
||||
async def run_python_code(code: str, preload: str, options: RunnerOptions):
    """Execute Python code in the sandbox.

    Args:
        code: Base64 encoded encrypted code.
        preload: Preload code executed before the user code.
        options: Runner options (network access, etc.).

    Returns:
        API response with the execution result, or an error response
        when execution failed or was killed by the sandbox.
    """
    logger = get_logger()

    try:
        runner = PythonRunner()
        result = await runner.run(code, options, preload)

        # A -SIGSYS exit code means the seccomp filter killed the process.
        # Fixed: previously passed positive 31, which error_response
        # collapsed to -1; -31 keeps the negative-code convention and
        # preserves the specific code.
        if result.exit_code == -signal.SIGSYS:
            return error_response(-31, "sandbox security policy violation")

        if result.error:
            return error_response(-500, result.error)

        return success_response(RunCodeResponse(
            stdout=result.stdout,
            stderr=result.stderr
        ))

    except Exception as e:
        logger.error(f"Python execution failed: {e}", exc_info=True)
        return error_response(-500, str(e))
|
||||
|
||||
|
||||
async def list_python_dependencies():
    """List installed Python dependencies.

    Returns:
        API response with the dependency list, or an error response
        if the dependency manager failed.
    """
    try:
        deps = await list_deps("python")
        dependencies = [
            Dependency(name=dep["name"], version=dep["version"])
            for dep in deps
        ]
        return success_response(ListDependenciesResponse(dependencies=dependencies))
    except Exception as e:
        # Fixed: was error_response(500, ...); positive codes were
        # collapsed to -1 — use -500 like run_python_code does.
        return error_response(-500, str(e))
|
||||
|
||||
|
||||
async def update_python_dependencies():
    """Update Python dependencies.

    Returns:
        API response with the update result, or an error response
        if the update failed.
    """
    try:
        await update_deps()
        return success_response(UpdateDependenciesResponse(success=True))
    except Exception as e:
        # Fixed: was error_response(500, ...); positive codes were
        # collapsed to -1 — use -500 for consistency with the other
        # service functions.
        return error_response(-500, str(e))
|
||||
20
sandbox/config.yaml
Normal file
20
sandbox/config.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
app:
  port: 8194              # HTTP listen port (read as config.app.port)
  debug: true             # NOTE(review): debug on by default — confirm for production
  key: redbear-sandbox    # presumably an API/auth key — confirm where it is checked

max_workers: 4            # worker semaphore size (concurrent executions)
max_requests: 50          # in-flight request ceiling before HTTP 503
worker_timeout: 30        # presumably seconds per execution — confirm in runner
python_path: /usr/local/bin/python   # NOTE(review): Dockerfile creates a venv; confirm which interpreter runs user code
nodejs_path: /usr/local/bin/node
enable_network: true      # default network access for sandboxed code
enable_preload: false     # whether RunCodeRequest.preload is honored
python_deps_update_interval: 30m     # periodic dependency refresh; falsy disables the task

allowed_syscalls: []      # empty -> the Rust library's built-in allowlist is used

proxy:
  socks5: ''
  http: ''
  https: ''
|
||||
4
sandbox/dependencies/python-requirements.txt
Normal file
4
sandbox/dependencies/python-requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
requests==2.31.0
|
||||
# numpy==1.26.0
|
||||
# pandas==2.0.0
|
||||
jinja2==3.1.2
|
||||
7
sandbox/lib/seccomp_nodejs/Cargo.lock
generated
Normal file
7
sandbox/lib/seccomp_nodejs/Cargo.lock
generated
Normal file
@@ -0,0 +1,7 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "seccomp_nodejs"
|
||||
version = "0.1.0"
|
||||
6
sandbox/lib/seccomp_nodejs/Cargo.toml
Normal file
6
sandbox/lib/seccomp_nodejs/Cargo.toml
Normal file
@@ -0,0 +1,6 @@
|
||||
[package]
|
||||
name = "seccomp_nodejs"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
0
sandbox/lib/seccomp_nodejs/src/lib.rs
Normal file
0
sandbox/lib/seccomp_nodejs/src/lib.rs
Normal file
23
sandbox/lib/seccomp_python/Cargo.lock
generated
Normal file
23
sandbox/lib/seccomp_python/Cargo.lock
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.180"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
|
||||
|
||||
[[package]]
|
||||
name = "libseccomp-sys"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60276e2d41bbb68b323e566047a1bfbf952050b157d8b5cdc74c07c1bf4ca3b6"
|
||||
|
||||
[[package]]
|
||||
name = "seccomp_python"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"libseccomp-sys",
|
||||
]
|
||||
12
sandbox/lib/seccomp_python/Cargo.toml
Normal file
12
sandbox/lib/seccomp_python/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
||||
[package]
name = "seccomp_python"
version = "0.1.0"
edition = "2024"

[lib]
# NOTE(review): a cdylib named "python" builds libpython.so, which shadows
# CPython's own shared-library name on the loader search path — confirm the
# loader dlopens this library by explicit path, or consider renaming.
name = "python"
crate-type = ["cdylib"]

[dependencies]
libc = "0.2.180"
libseccomp-sys = "0.3.0"
|
||||
195
sandbox/lib/seccomp_python/src/lib.rs
Normal file
195
sandbox/lib/seccomp_python/src/lib.rs
Normal file
@@ -0,0 +1,195 @@
|
||||
mod syscalls;
|
||||
|
||||
use crate::syscalls::*;
|
||||
use libc::{chdir, chroot, gid_t, uid_t, c_int};
|
||||
use libseccomp_sys::*;
|
||||
use std::env;
|
||||
use std::ffi::CString;
|
||||
use std::str::FromStr;
|
||||
|
||||
|
||||
/*
|
||||
* get_allowed_syscalls - retrieve allowed syscalls for the sandbox
|
||||
* @enable_network: enable network-related syscalls if non-zero
|
||||
*
|
||||
* Syscall selection order:
|
||||
* 1. ALLOWED_SYSCALLS environment variable
|
||||
* 2. Built-in default allowlist
|
||||
* 3. Optional network syscall extension
|
||||
*
|
||||
* Returns:
|
||||
* (allowed_syscalls, allowed_not_kill_syscalls)
|
||||
* allowed_syscalls: syscalls fully allowed
|
||||
* allowed_not_kill_syscalls: syscalls returning EPERM
|
||||
*/
|
||||
pub fn get_allowed_syscalls(enable_network: bool) -> (Vec<i32>, Vec<i32>) {
|
||||
let mut allowed_syscalls = Vec::new();
|
||||
let mut allowed_not_kill_syscalls = Vec::new();
|
||||
|
||||
/* Syscalls that return error instead of killing */
|
||||
allowed_not_kill_syscalls.extend(ALLOW_ERROR_SYSCALLS);
|
||||
|
||||
/* Load from environment variable ALLOWED_SYSCALLS */
|
||||
if let Ok(env_val) = env::var("ALLOWED_SYSCALLS") {
|
||||
if !env_val.is_empty() {
|
||||
for s in env_val.split(',') {
|
||||
if let Ok(sc) = i32::from_str(s) {
|
||||
allowed_syscalls.push(sc);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Fallback to default syscalls if env not set */
|
||||
if allowed_syscalls.is_empty() {
|
||||
allowed_syscalls.extend(ALLOW_SYSCALLS);
|
||||
if enable_network {
|
||||
allowed_syscalls.extend(ALLOW_NETWORK_SYSCALLS);
|
||||
}
|
||||
}
|
||||
|
||||
(allowed_syscalls, allowed_not_kill_syscalls)
|
||||
}
|
||||
|
||||
/*
|
||||
* setup_root - setup restricted filesystem root
|
||||
*
|
||||
* Perform chroot(".") and change working directory to "/".
|
||||
*
|
||||
* Return:
|
||||
* 0 on success
|
||||
* negative error code on failure
|
||||
*/
|
||||
fn setup_root() -> Result<(), c_int> {
|
||||
let root = CString::new(".").unwrap();
|
||||
if unsafe { chroot(root.as_ptr()) } != 0 {
|
||||
return Err(-1);
|
||||
}
|
||||
|
||||
let root_dir = CString::new("/").unwrap();
|
||||
if unsafe { chdir(root_dir.as_ptr()) } != 0 {
|
||||
return Err(-2);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/*
|
||||
* set_no_new_privs - enable PR_SET_NO_NEW_PRIVS
|
||||
*
|
||||
* Prevent privilege escalation via execve.
|
||||
*
|
||||
* Return:
|
||||
* 0 on success
|
||||
* negative error code on failure
|
||||
*/
|
||||
fn set_no_new_privs() -> Result<(), c_int> {
|
||||
if unsafe { libc::prctl(libc::PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) } != 0 {
|
||||
return Err(-3);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/*
|
||||
* drop_privileges - drop process privileges
|
||||
* @uid: target user ID
|
||||
* @gid: target group ID
|
||||
*
|
||||
* Permanently reduce process privileges.
|
||||
*
|
||||
* Return:
|
||||
* 0 on success
|
||||
* negative error code on failure
|
||||
*/
|
||||
fn drop_privileges(uid: uid_t, gid: gid_t) -> Result<(), c_int> {
|
||||
if unsafe { libc::setgid(gid) } != 0 {
|
||||
return Err(-4);
|
||||
}
|
||||
if unsafe { libc::setuid(uid) } != 0 {
|
||||
return Err(-5);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/*
 * install_seccomp - install seccomp filter
 * @enable_network: enable network-related syscalls if non-zero
 *
 * Default action is SCMP_ACT_KILL_PROCESS.
 * Allowed syscalls are explicitly whitelisted.
 *
 * The filter context is released on every exit path: after a
 * successful seccomp_load the kernel holds its own copy, and on
 * failure the userspace context must not leak.
 *
 * Return:
 * 0 on success
 * negative error code on failure
 */
fn install_seccomp(enable_network: bool) -> Result<(), c_int> {
    unsafe {
        let ctx = seccomp_init(SCMP_ACT_KILL_PROCESS);
        if ctx.is_null() {
            return Err(-6); /* failed to init seccomp context */
        }

        let (allowed_syscalls, allowed_not_kill_syscalls) = get_allowed_syscalls(enable_network);

        /* add fully allowed syscalls */
        for &sc in &allowed_syscalls {
            if seccomp_rule_add(ctx, SCMP_ACT_ALLOW, sc, 0) != 0 {
                seccomp_release(ctx); /* don't leak the context on failure */
                return Err(-7);
            }
        }

        /* add syscalls returning EPERM (fail softly instead of killing) */
        for &sc in &allowed_not_kill_syscalls {
            if seccomp_rule_add(ctx, SCMP_ACT_ERRNO(libc::EPERM as u16), sc, 0) != 0 {
                seccomp_release(ctx);
                return Err(-8);
            }
        }

        /* hand the filter to the kernel; from here on it is active */
        if seccomp_load(ctx) != 0 {
            seccomp_release(ctx);
            return Err(-9);
        }

        seccomp_release(ctx);
        Ok(())
    }
}
|
||||
|
||||
/*
|
||||
* init_seccomp - initialize seccomp sandbox
|
||||
* @uid: target user ID
|
||||
* @gid: target group ID
|
||||
* @enable_network: enable network syscalls if non-zero
|
||||
*
|
||||
* Initialize the sandbox and apply privilege restrictions
|
||||
* in the following order:
|
||||
* 1. setup_root()
|
||||
* 2. set_no_new_privs()
|
||||
* 3. drop_privileges()
|
||||
* 4. install_seccomp()
|
||||
*
|
||||
* This function must be called before executing any untrusted code.
|
||||
* It is not thread-safe and must be invoked once per process.
|
||||
*
|
||||
* Return:
|
||||
* 0 on success
|
||||
* negative error code on failure
|
||||
*/
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "C" fn init_seccomp(uid: uid_t, gid: gid_t, enable_network: i32) -> c_int {
|
||||
if let Err(code) = setup_root() {
|
||||
return code;
|
||||
}
|
||||
if let Err(code) = set_no_new_privs() {
|
||||
return code;
|
||||
}
|
||||
if let Err(code) = drop_privileges(uid, gid) {
|
||||
return code;
|
||||
}
|
||||
match install_seccomp(enable_network != 0) {
|
||||
Ok(_) => 0,
|
||||
Err(code) => code,
|
||||
}
|
||||
}
|
||||
85
sandbox/lib/seccomp_python/src/syscalls.rs
Normal file
85
sandbox/lib/seccomp_python/src/syscalls.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
// src/syscalls.rs
|
||||
|
||||
/* Baseline syscall allowlist for sandboxed Python: enough for the
 * interpreter to run, do file I/O inside the chroot, and manage its
 * own memory/threads — nothing that creates processes or sockets. */
pub static ALLOW_SYSCALLS: &[i32] = &[
    // file io
    libc::SYS_read as i32,
    libc::SYS_write as i32,
    libc::SYS_openat as i32,
    libc::SYS_close as i32,
    libc::SYS_newfstatat as i32,
    libc::SYS_ioctl as i32,
    libc::SYS_lseek as i32,
    libc::SYS_getdents64 as i32,

    // thread synchronization
    libc::SYS_futex as i32,

    // memory management
    libc::SYS_mmap as i32,
    libc::SYS_brk as i32,
    libc::SYS_mprotect as i32,
    libc::SYS_munmap as i32,
    libc::SYS_rt_sigreturn as i32,
    libc::SYS_mremap as i32,

    // user / group (needed so the runner can drop privileges)
    libc::SYS_setuid as i32,
    libc::SYS_setgid as i32,
    libc::SYS_getuid as i32,

    // process introspection / exit / scheduling
    libc::SYS_getpid as i32,
    libc::SYS_getppid as i32,
    libc::SYS_gettid as i32,
    libc::SYS_exit as i32,
    libc::SYS_exit_group as i32,
    libc::SYS_tgkill as i32,
    libc::SYS_rt_sigaction as i32,
    libc::SYS_sched_yield as i32,
    libc::SYS_set_robust_list as i32,
    libc::SYS_get_robust_list as i32,
    libc::SYS_rseq as i32,

    // time
    libc::SYS_clock_gettime as i32,
    libc::SYS_gettimeofday as i32,
    libc::SYS_nanosleep as i32,
    // event polling / sleeping / signals / randomness
    libc::SYS_epoll_create1 as i32,
    libc::SYS_epoll_ctl as i32,
    libc::SYS_clock_nanosleep as i32,
    libc::SYS_pselect6 as i32,
    libc::SYS_rt_sigprocmask as i32,
    libc::SYS_sigaltstack as i32,
    libc::SYS_getrandom as i32,

];
|
||||
|
||||
/* Syscalls that fail with EPERM instead of killing the process, so
 * libraries that probe them (e.g. thread spawning, mkdir) get a clean
 * error rather than a SIGSYS kill. */
pub static ALLOW_ERROR_SYSCALLS: &[i32] = &[
    libc::SYS_clone as i32,
    libc::SYS_mkdirat as i32,
    libc::SYS_mkdir as i32,
];
|
||||
|
||||
/* Extra syscalls added only when network access is enabled: sockets
 * plus the polling/stat calls networking stacks use alongside them. */
pub static ALLOW_NETWORK_SYSCALLS: &[i32] = &[
    libc::SYS_socket as i32,
    libc::SYS_connect as i32,
    libc::SYS_bind as i32,
    libc::SYS_listen as i32,
    libc::SYS_accept as i32,
    libc::SYS_sendto as i32,
    libc::SYS_recvfrom as i32,
    libc::SYS_getsockname as i32,
    libc::SYS_recvmsg as i32,
    libc::SYS_getpeername as i32,
    libc::SYS_setsockopt as i32,
    libc::SYS_ppoll as i32,
    libc::SYS_uname as i32,
    libc::SYS_sendmsg as i32,
    libc::SYS_sendmmsg as i32,
    libc::SYS_getsockopt as i32,
    // not strictly socket calls, but used by networking code paths
    libc::SYS_fstat as i32,
    libc::SYS_fcntl as i32,
    libc::SYS_fstatfs as i32,
    libc::SYS_poll as i32,
    libc::SYS_epoll_pwait as i32,
];
|
||||
97
sandbox/main.py
Normal file
97
sandbox/main.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""
|
||||
Redbear Sandbox - Main Entry Point
|
||||
"""
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI
|
||||
|
||||
from app.config import get_config
|
||||
from app.controllers import manager_router
|
||||
from app.dependencies import setup_dependencies, update_dependencies_periodically
|
||||
from app.logger import setup_logger, get_logger
|
||||
|
||||
# Module-level logger, created at import time — i.e. before main() calls
# setup_logger(); records emitted this early use whatever default
# configuration get_logger() provides (TODO confirm pre-setup behavior).
logger = get_logger()
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Starts background dependency maintenance at startup and cancels it
    at shutdown.
    """
    logger = get_logger()

    # Startup
    logger.info("Starting RedBear Sandbox...")

    # Fixed: create_task results were discarded. The event loop keeps
    # only weak references to tasks, so an unreferenced background task
    # can be garbage-collected mid-flight — hold references until shutdown.
    background_tasks = [asyncio.create_task(setup_dependencies())]

    # Start periodic dependency updates when an interval is configured.
    config = get_config()
    if config.python_deps_update_interval:
        background_tasks.append(asyncio.create_task(update_dependencies_periodically()))

    try:
        yield
    finally:
        # Shutdown
        logger.info("Shutting down Redbear Sandbox...")
        for task in background_tasks:
            task.cancel()
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build and configure the FastAPI application."""
    config = get_config()

    application = FastAPI(
        title="Sandbox",
        description="Secure code execution sandbox",
        version="2.0.0",
        lifespan=lifespan,
        debug=config.app.debug,
    )
    application.include_router(manager_router)

    return application
|
||||
|
||||
|
||||
def check_root_privileges():
    """Exit with status 1 unless the process runs as root.

    Root is required for the sandbox hardening steps (chroot, setuid).
    """
    if os.geteuid() != 0:
        # Fixed: a fatal condition was logged at info level.
        logger.error("Error: Sandbox must be run as root for security features (chroot, setuid)")
        sys.exit(1)
|
||||
|
||||
|
||||
def main():
    """Process entry point: verify privileges, set up logging, run the server."""
    check_root_privileges()
    setup_logger()

    config = get_config()
    log = get_logger()

    # Log the effective runtime configuration at startup.
    for line in (
        f"Starting server on port {config.app.port}",
        f"Debug mode: {config.app.debug}",
        f"Max workers: {config.max_workers}",
        f"Max requests: {config.max_requests}",
        f"Network enabled: {config.enable_network}",
    ):
        log.info(line)

    uvicorn.run(
        create_app(),
        host="0.0.0.0",
        port=config.app.port,
        log_level="debug" if config.app.debug else "info",
        access_log=config.app.debug,
    )


if __name__ == "__main__":
    main()
|
||||
20
sandbox/requirements.txt
Normal file
20
sandbox/requirements.txt
Normal file
@@ -0,0 +1,20 @@
|
||||
# Web Framework
|
||||
fastapi==0.115.0
|
||||
uvicorn[standard]==0.32.0
|
||||
pydantic==2.9.0
|
||||
pydantic-settings==2.5.0
|
||||
|
||||
# Configuration
|
||||
PyYAML==6.0.2
|
||||
|
||||
# Security
|
||||
pyseccomp==0.1.2
|
||||
|
||||
|
||||
# Async & Concurrency
|
||||
aiofiles==24.1.0
|
||||
|
||||
# Testing
|
||||
pytest==8.3.0
|
||||
pytest-asyncio==0.24.0
|
||||
httpx==0.27.0
|
||||
53
sandbox/script/env.sh
Normal file
53
sandbox/script/env.sh
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
# Mirror <src> into <dest> as a read-only tree (used to populate the
# sandbox chroot): regular files become hard links (falling back to a
# chmod-444 copy, e.g. across filesystems), symlinks are copied as-is.

# Check if the correct number of arguments are provided
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <src> <dest>"
    exit 1
fi

src="$1"
dest="$2"

# Function to copy and link files
copy_and_link() {
    local src_file="$1"
    local dest_file="$2"

    if [ -L "$src_file" ]; then
        # If src_file is a symbolic link, copy it without changing permissions
        cp -P "$src_file" "$dest_file"
    elif [ -b "$src_file" ] || [ -c "$src_file" ]; then
        # If src_file is a device file, copy it and change permissions.
        # NOTE(review): both call sites only reach regular files or symlinks
        # ([ -f ] test / find -type f,l), so this branch looks unreachable —
        # confirm whether device-node support is intended.
        cp "$src_file" "$dest_file"
        chmod 444 "$dest_file"
    else
        # Otherwise, create a hard link and change the permissions to read-only.
        # Hard links fail across filesystems; fall back to copy + chmod 444.
        ln -f "$src_file" "$dest_file" 2>/dev/null || { cp "$src_file" "$dest_file" && chmod 444 "$dest_file"; }
    fi
}

# Check if src is a file or directory
if [ -f "$src" ]; then
    # src is a file, create hard link directly in dest
    mkdir -p "$(dirname "$dest/$src")"
    copy_and_link "$src" "$dest/$src"
elif [ -d "$src" ]; then
    # src is a directory, process as before
    mkdir -p "$dest/$src"

    # Find all files in the source directory.
    # NOTE(review): "-type f,l" is GNU findutils syntax, and filenames
    # containing newlines would break this read loop — acceptable only
    # for controlled inputs.
    find "$src" -type f,l | while read -r file; do
        # Get the relative path of the file
        rel_path="${file#$src/}"
        # Get the directory of the relative path
        rel_dir=$(dirname "$rel_path")
        # Create the same directory structure in the destination
        mkdir -p "$dest/$src/$rel_dir"
        # Copy and link the file
        copy_and_link "$file" "$dest/$src/$rel_path"
    done
else
    echo "Error: $src is neither a file nor a directory"
    exit 1
fi
|
||||
Reference in New Issue
Block a user