Merge branch 'refs/heads/develop' into fix/memory_bug_fix

# Conflicts:
#	api/app/services/memory_storage_service.py
This commit is contained in:
lixinyue
2026-01-26 17:08:55 +08:00
55 changed files with 2068 additions and 698 deletions

3
.gitignore vendored
View File

@@ -35,3 +35,6 @@ nltk_data/
tika-server*.jar*
cl100k_base.tiktoken
libssl*.deb
sandbox/lib/seccomp_python/target
sandbox/lib/seccomp_nodejs/target

View File

@@ -133,7 +133,7 @@ class WorkflowExecutor:
for node in self.workflow_config.get("nodes")
if node.get("type") in [NodeType.LOOP, NodeType.ITERATION]
], # loop, iteration node id
"looping": False, # loop runing flag, only use in loop node,not use in main loop
"looping": 0, # loop running flag, only used in loop nodes, not in the main loop
"activate": {
self.start_node_id: True
}
@@ -261,7 +261,7 @@ class WorkflowExecutor:
"data": {
"execution_id": self.execution_id,
"workspace_id": self.workspace_id,
"timestamp": start_time.isoformat()
"timestamp": int(start_time.timestamp() * 1000)
}
}
@@ -293,20 +293,33 @@ class WorkflowExecutor:
# Handle custom streaming events (chunks from nodes via stream writer)
chunk_count += 1
event_type = data.get("type", "node_chunk") # "message" or "node_chunk"
logger.info(f"[CUSTOM] ✅ 收到 {event_type} #{chunk_count} from {data.get('node_id')}"
f"- execution_id: {self.execution_id}")
yield {
"event": event_type, # "message" or "node_chunk"
"data": {
"node_id": data.get("node_id"),
"chunk": data.get("chunk"),
"full_content": data.get("full_content"),
"chunk_index": data.get("chunk_index"),
"is_prefix": data.get("is_prefix"),
"is_suffix": data.get("is_suffix"),
"conversation_id": input_data.get("conversation_id"),
if event_type in ("message", "node_chunk"):
logger.info(f"[CUSTOM] ✅ 收到 {event_type} #{chunk_count} from {data.get('node_id')}"
f"- execution_id: {self.execution_id}")
yield {
"event": event_type, # "message" or "node_chunk"
"data": {
"node_id": data.get("node_id"),
"chunk": data.get("chunk"),
"full_content": data.get("full_content"),
"chunk_index": data.get("chunk_index"),
"is_prefix": data.get("is_prefix"),
"is_suffix": data.get("is_suffix"),
"conversation_id": input_data.get("conversation_id"),
}
}
elif event_type == "node_error":
yield {
"event": event_type, # "message" or "node_chunk"
"data": {
"node_id": data.get("node_id"),
"status": "failed",
"input": data.get("input_data"),
"elapsed_time": data.get("elapsed_time"),
"output": None,
"error": data.get("error")
}
}
}
elif mode == "debug":
# Handle debug information (node execution status)
@@ -325,14 +338,15 @@ class WorkflowExecutor:
conversation_id = input_data.get("conversation_id")
logger.info(f"[NODE-START] Node starts execution: {node_name} "
f"- execution_id: {self.execution_id}")
yield {
"event": "node_start",
"data": {
"node_id": node_name,
"conversation_id": conversation_id,
"execution_id": self.execution_id,
"timestamp": data.get("timestamp"),
"timestamp": int(datetime.datetime.fromisoformat(
data.get("timestamp")
).timestamp() * 1000),
}
}
elif event_type == "task_result":
@@ -351,13 +365,18 @@ class WorkflowExecutor:
"node_id": node_name,
"conversation_id": conversation_id,
"execution_id": self.execution_id,
"timestamp": data.get("timestamp"),
"state": result.get("node_outputs", {}).get(node_name),
"timestamp": int(datetime.datetime.fromisoformat(
data.get("timestamp")
).timestamp() * 1000),
"input": result.get("node_outputs", {}).get(node_name, {}).get("input"),
"output": result.get("node_outputs", {}).get(node_name, {}).get("output"),
"elapsed_time": result.get("node_outputs", {}).get(node_name, {}).get("elapsed_time"),
}
}
elif mode == "updates":
# Handle state updates - store final state
# TODO:流式输出点
logger.debug(f"[UPDATES] 收到 state 更新 from {list(data.keys())} "
f"- execution_id: {self.execution_id}")

View File

@@ -19,13 +19,17 @@ from app.core.workflow.variable_pool import VariablePool
logger = logging.getLogger(__name__)
def merget_activate_state(x, y):
def merge_activate_state(x, y):
    """Merge two activation maps: a node counts as active if either side marks it active."""
    merged = {}
    for node_id in x.keys() | y.keys():
        merged[node_id] = x.get(node_id, False) or y.get(node_id, False)
    return merged
def merge_looping_state(x, y):
    """Combine two looping flags by keeping the larger value (sticky escalation)."""
    return max(x, y)
class WorkflowState(TypedDict):
"""Workflow state
@@ -36,7 +40,7 @@ class WorkflowState(TypedDict):
# Set of loop node IDs, used for assigning values in loop nodes
cycle_nodes: list
looping: Annotated[bool, lambda x, y: x and y]
looping: Annotated[int, merge_looping_state]
# Input variables (passed from configured variables)
# Uses a deep merge function, supporting nested dict updates (e.g., conv.xxx)
@@ -68,7 +72,7 @@ class WorkflowState(TypedDict):
streaming_buffer: Annotated[dict[str, Any], lambda x, y: {**x, **y}]
# node activate status
activate: Annotated[dict[str, bool], merget_activate_state]
activate: Annotated[dict[str, bool], merge_activate_state]
class BaseNode(ABC):
@@ -540,6 +544,11 @@ class BaseNode(ABC):
"error_node": self.node_id
}
else:
writer = get_stream_writer()
writer({
"type": "node_error",
**node_output
})
# 无错误边:抛出异常停止工作流
logger.error(f"节点 {self.node_id} 执行失败,停止工作流: {error_message}")
raise Exception(f"节点 {self.node_id} 执行失败: {error_message}")

View File

@@ -28,6 +28,6 @@ class BreakNode(BaseNode):
Returns:
Optional dictionary indicating the loop has been stopped.
"""
state["looping"] = False
state["looping"] = 2
logger.info(f"Setting cycle node exit flag, cycle={self.cycle}, looping={state['looping']}")

View File

@@ -58,10 +58,10 @@ class IterationRuntime:
idx: Index of the element in the input array.
Returns:
A deep copy of the workflow state with iteration-specific variables set.
A copy of the workflow state with iteration-specific variables set.
"""
loopstate = WorkflowState(
**copy.deepcopy(self.state)
**self.state
)
loopstate["runtime_vars"][self.node_id] = {
"item": item,
@@ -71,7 +71,7 @@ class IterationRuntime:
"item": item,
"index": idx,
}
loopstate["looping"] = True
loopstate["looping"] = 1
loopstate["activate"][self.start_id] = True
return loopstate
@@ -89,7 +89,7 @@ class IterationRuntime:
self.result.extend(output)
else:
self.result.append(output)
if not result["looping"]:
if result["looping"] == 2:
self.looping = False
return result
@@ -150,10 +150,9 @@ class IterationRuntime:
self.result.extend(output)
else:
self.result.append(output)
if not result["looping"]:
if result["looping"] == 2:
self.looping = False
idx += 1
logger.info(f"Iteration node {self.node_id}: execution completed")
return {
"output": self.result,

View File

@@ -46,6 +46,7 @@ class LoopRuntime:
self.state = state
self.node_id = node_id
self.typed_config = LoopNodeConfig(**config)
self.looping = True
def _init_loop_state(self):
"""
@@ -88,7 +89,7 @@ class LoopRuntime:
loopstate = WorkflowState(
**self.state
)
loopstate["looping"] = True
loopstate["looping"] = 1
loopstate["activate"][self.start_id] = True
return loopstate
@@ -179,9 +180,12 @@ class LoopRuntime:
loopstate = self._init_loop_state()
loop_time = self.typed_config.max_loop
child_state = []
while self.evaluate_conditional(loopstate) and loopstate["looping"] and loop_time > 0:
while self.evaluate_conditional(loopstate) and self.looping and loop_time > 0:
logger.info(f"loop node {self.node_id}: running")
child_state.append(await self.graph.ainvoke(loopstate))
result = await self.graph.ainvoke(loopstate)
child_state.append(result)
if result["looping"] == 2:
self.looping = False
loop_time -= 1
logger.info(f"loop node {self.node_id}: execution completed")

View File

@@ -0,0 +1,52 @@
"""2026011240
Revision ID: 325b759cd66b
Revises: 9a936a9ebb20
Create Date: 2026-01-26 12:37:35.946749
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = '325b759cd66b'
down_revision: Union[str, None] = '9a936a9ebb20'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Rename data_config to memory_config and switch its primary key to UUID."""
    # 1. Rename table data_config -> memory_config
    op.rename_table('data_config', 'memory_config')
    # 2. Rename column group_id -> end_user_id
    op.alter_column('memory_config', 'group_id', new_column_name='end_user_id')
    # 3. config_id: INTEGER -> UUID; the old integer values are preserved in
    #    config_id_old so downgrade() can restore them.
    op.drop_constraint('data_config_pkey', 'memory_config', type_='primary')
    op.alter_column('memory_config', 'config_id', new_column_name='config_id_old', nullable=True)
    op.add_column('memory_config', sa.Column('config_id', sa.UUID(), nullable=True))
    # NOTE(review): assumes every apply_id holds a valid UUID string -- confirm
    # against production data before running; the cast fails otherwise.
    op.execute("UPDATE memory_config SET config_id = apply_id::uuid")
    op.alter_column('memory_config', 'config_id', nullable=False)
    op.create_primary_key('memory_config_pkey', 'memory_config', ['config_id'])
    # The old integer PK sequence is no longer needed.
    op.execute("DROP SEQUENCE IF EXISTS data_config_config_id_seq")
def downgrade() -> None:
    """Revert the migration: UUID primary key -> integer, memory_config -> data_config."""
    # 1. config_id: UUID -> INTEGER; restore preserved values, and generate
    #    fresh sequence IDs for rows created after the upgrade (NULL old ids).
    op.execute("CREATE SEQUENCE IF NOT EXISTS data_config_config_id_seq")
    op.execute("UPDATE memory_config SET config_id_old = nextval('data_config_config_id_seq') WHERE config_id_old IS NULL")
    op.drop_constraint('memory_config_pkey', 'memory_config', type_='primary')
    op.drop_column('memory_config', 'config_id')
    op.alter_column('memory_config', 'config_id_old', new_column_name='config_id', nullable=False)
    op.create_primary_key('data_config_pkey', 'memory_config', ['config_id'])
    # Re-attach the sequence and fast-forward it past the largest existing id.
    op.execute("ALTER SEQUENCE data_config_config_id_seq OWNED BY memory_config.config_id")
    op.execute("SELECT setval('data_config_config_id_seq', COALESCE((SELECT MAX(config_id) FROM memory_config), 1))")
    # 2. Rename column end_user_id -> group_id
    op.alter_column('memory_config', 'end_user_id', new_column_name='group_id')
    # 3. Rename table memory_config -> data_config
    op.rename_table('memory_config', 'data_config')

42
sandbox/Dockerfile Normal file
View File

@@ -0,0 +1,42 @@
FROM python:3.12-slim
USER root
WORKDIR /code
LABEL authors="Eterntiy"
ARG NEED_MIRROR=0
RUN --mount=type=cache,id=mem_apt,target=/var/cache/apt,sharing=locked \
if [ "$NEED_MIRROR" == "1" ]; then \
sed -i 's|https://ports.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
sed -i 's|https://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list; \
fi; \
rm -f /etc/apt/apt.conf.d/docker-clean && \
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache && \
chmod 1777 /tmp && \
apt update && \
apt --no-install-recommends install -y ca-certificates && \
apt update && \
apt install -y python3-pip pipx nginx unzip curl wget git vim less && \
apt-get install -y --no-install-recommends tzdata libseccomp2 libseccomp-dev && \
ln -snf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \
echo "Asia/Shanghai" > /etc/timezone && \
apt install -y cargo
COPY ./app /code/app
COPY ./dependencies /code/dependencies
COPY ./lib /code/lib
COPY ./script /code/script
COPY ./config.yaml /code/config.yaml
COPY ./main.py /code/main.py
COPY ./requirements.txt /code/requirements.txt
RUN python -m venv .venv
RUN .venv/bin/python3 -m pip install -r requirements.txt
RUN cargo build --release --manifest-path lib/seccomp_python/Cargo.toml
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD curl 127.0.0.1:8194/health
CMD [".venv/bin/python3", "main.py"]

134
sandbox/app/config.py Normal file
View File

@@ -0,0 +1,134 @@
"""Configuration management"""
import os
from typing import List, Optional
from pydantic import BaseModel, Field
import yaml
SANDBOX_USER_ID = 1000
SANDBOX_GROUP_ID = 1000
DEFAULT_PYTHON_LIB_REQUIREMENTS_AMD = [
"/usr/local/lib/python3.12",
"/usr/lib/python3",
"/usr/lib/x86_64-linux-gnu",
"/etc/ssl/certs/ca-certificates.crt",
"/etc/nsswitch.conf",
"/etc/hosts",
"/etc/resolv.conf",
"/run/systemd/resolve/stub-resolv.conf",
"/run/resolvconf/resolv.conf",
"/etc/localtime",
"/usr/share/zoneinfo",
"/etc/timezone",
]
class AppConfig(BaseModel):
    """Application (HTTP service) configuration."""
    port: int = 8194  # listen port; overridable via SANDBOX_PORT
    debug: bool = True  # enables DEBUG-level logging; overridable via DEBUG
    key: str = "redbear-sandbox"  # API key compared against the X-Api-Key header
class ProxyConfig(BaseModel):
    """Outbound proxy configuration; empty string means "not configured"."""
    socks5: str = ""  # takes precedence over http/https when set
    http: str = ""
    https: str = ""
class Config(BaseModel):
    """Global sandbox service configuration."""
    app: AppConfig = Field(default_factory=AppConfig)
    # Concurrency limits: worker slots and total in-flight requests.
    max_workers: int = 4
    max_requests: int = 50
    # Per-execution timeout, in seconds.
    worker_timeout: int = 30
    nodejs_path: str = "node"
    enable_network: bool = True
    enable_preload: bool = False
    # Interpreter used to run sandboxed code; empty means "not configured".
    python_path: str = ""
    # Host paths mirrored into the sandbox environment (amd64 defaults).
    python_lib_paths: list = Field(default=DEFAULT_PYTHON_LIB_REQUIREMENTS_AMD)
    # Interval spec such as "30m" / "1h" / "90s" for periodic dependency refresh.
    python_deps_update_interval: str = "30m"
    # Extra syscall numbers to allow through the seccomp filter.
    allowed_syscalls: List[int] = Field(default_factory=list)
    proxy: ProxyConfig = Field(default_factory=ProxyConfig)
# Global configuration instance
_config: Optional[Config] = None
def load_config(config_path: str) -> Config:
    """Load configuration from a YAML file and apply environment overrides.

    Args:
        config_path: Path to the YAML configuration file.

    Returns:
        The populated global ``Config`` instance.
    """
    global _config

    # Load from file, falling back to all-defaults when the file is absent.
    if os.path.exists(config_path):
        with open(config_path, 'r') as f:
            # An empty YAML file parses to None; treat it as "no values" so
            # Config(**data) does not crash.
            data = yaml.safe_load(f) or {}
        _config = Config(**data)
    else:
        _config = Config()

    def _truthy(raw: str) -> bool:
        # The truthy spellings accepted for boolean environment variables.
        return raw.lower() in ("true", "1", "yes")

    # Environment variables take precedence over file values.  The walrus
    # binding calls os.getenv once per variable and preserves the original
    # "ignore unset or empty" semantics (empty strings are falsy).
    if raw := os.getenv("DEBUG"):
        _config.app.debug = _truthy(raw)
    if raw := os.getenv("MAX_WORKERS"):
        _config.max_workers = int(raw)
    if raw := os.getenv("MAX_REQUESTS"):
        _config.max_requests = int(raw)
    if raw := os.getenv("SANDBOX_PORT"):
        _config.app.port = int(raw)
    if raw := os.getenv("WORKER_TIMEOUT"):
        _config.worker_timeout = int(raw)
    if raw := os.getenv("API_KEY"):
        _config.app.key = raw
    if raw := os.getenv("NODEJS_PATH"):
        _config.nodejs_path = raw
    if raw := os.getenv("ENABLE_NETWORK"):
        _config.enable_network = _truthy(raw)
    if raw := os.getenv("ENABLE_PRELOAD"):
        _config.enable_preload = _truthy(raw)
    if raw := os.getenv("ALLOWED_SYSCALLS"):
        _config.allowed_syscalls = [int(x) for x in raw.split(",")]
    if raw := os.getenv("SOCKS5_PROXY"):
        _config.proxy.socks5 = raw
    if raw := os.getenv("HTTP_PROXY"):
        _config.proxy.http = raw
    if raw := os.getenv("HTTPS_PROXY"):
        _config.proxy.https = raw
    # Python runtime overrides.
    if raw := os.getenv("PYTHON_PATH"):
        _config.python_path = raw
    if raw := os.getenv("PYTHON_LIB_PATH"):
        _config.python_lib_paths = raw.split(',')
    if raw := os.getenv("PYTHON_DEPS_UPDATE_INTERVAL"):
        _config.python_deps_update_interval = raw
    return _config
config_path = os.getenv("CONFIG_PATH", "config.yaml")
load_config(config_path)
def get_config() -> Config:
    """Return the global configuration.

    Raises:
        RuntimeError: If load_config() has not populated the module-level
            instance yet (normally done at import time above).
    """
    if _config is None:
        raise RuntimeError("Configuration not loaded. Call load_config() first.")
    return _config

View File

@@ -0,0 +1,8 @@
from fastapi import APIRouter
from . import health_controller, sandbox_controller
# Aggregate router that mounts every manager-facing endpoint group.
manager_router = APIRouter()
manager_router.include_router(health_controller.router)
manager_router.include_router(sandbox_controller.router)

View File

@@ -0,0 +1,12 @@
"""Health check endpoint"""
from fastapi import APIRouter
from app.models import HealthResponse
router = APIRouter()
@router.get("/health", response_model=HealthResponse)
async def health_check():
    """Health check endpoint.

    Returns:
        HealthResponse: Static status/version payload, suitable for
        liveness probes (no auth, no dependencies).
    """
    return HealthResponse(status="healthy", version="2.0.0")

View File

@@ -0,0 +1,59 @@
"""Sandbox API endpoints"""
from fastapi import APIRouter, Depends
from app.middleware.auth import verify_api_key
from app.middleware.concurrency import check_max_requests, acquire_worker
from app.models import (
RunCodeRequest,
ApiResponse,
UpdateDependencyRequest,
error_response
)
from app.services.python_service import (
run_python_code,
list_python_dependencies,
update_python_dependencies
)
router = APIRouter(
prefix="/v1/sandbox",
tags=["sandbox"],
dependencies=[Depends(verify_api_key)]
)
@router.post(
"/run",
response_model=ApiResponse,
dependencies=[Depends(check_max_requests),
Depends(acquire_worker)]
)
async def run_code(request: RunCodeRequest):
"""Execute code in sandbox"""
if request.language == "python3":
return await run_python_code(request.code, request.preload, request.options)
elif request.language == "nodejs":
# TODO
return error_response(-400, "TODO")
else:
return error_response(-400, "unsupported language")
@router.get("/dependencies", response_model=ApiResponse)
async def get_dependencies(language: str):
    """Return the installed dependency list for the given language."""
    # Only the Python runtime is supported for now.
    if language != "python3":
        return error_response(-400, "unsupported language")
    return await list_python_dependencies()
@router.post("/dependencies/update", response_model=ApiResponse)
async def update_dependencies(request: UpdateDependencyRequest):
    """Trigger a dependency update for the requested language."""
    # Only the Python runtime is supported for now.
    if request.language != "python3":
        return error_response(-400, "unsupported language")
    return await update_python_dependencies()

View File

@@ -0,0 +1 @@
"""Core functionality package"""

View File

@@ -0,0 +1,33 @@
"""Code encryption utilities"""
import base64
def encrypt_code(code: bytes, key: bytes) -> str:
    """XOR-encrypt *code* with a repeating *key* and base64-encode the result.

    Args:
        code: Plain code bytes (the previous docstring incorrectly said "string").
        key: Encryption key bytes; cycled when shorter than the code.

    Returns:
        Base64-encoded encrypted payload as ASCII text.
    """
    key_length = len(key)
    # bytes() over a generator runs the XOR loop at C speed and avoids the
    # manual index bookkeeping of the previous bytearray version.
    encrypted = bytes(byte ^ key[i % key_length] for i, byte in enumerate(code))
    return base64.b64encode(encrypted).decode("utf-8")
def generate_key(length: int = 64) -> bytes:
    """Return *length* cryptographically secure random bytes for XOR keying.

    Args:
        length: Key size in bytes; defaults to 64 (512 bits).

    Returns:
        Random key bytes.
    """
    # Local import mirrors the original module layout.
    import secrets
    return secrets.token_bytes(length)

View File

@@ -0,0 +1,48 @@
"""Code execution engine"""
import os
from typing import Optional
from abc import ABC, abstractmethod
from app.config import get_config
from app.logger import get_logger
from app.models import RunnerOptions
class ExecutionResult:
    """Outcome of a sandboxed code execution.

    Carries the captured stdout/stderr text, the child process exit code,
    and an optional runner-level error description (e.g. "Execution timeout").
    """
    def __init__(self, stdout: str = "", stderr: str = "", exit_code: int = 0, error: Optional[str] = None):
        # Store the captured streams and status exactly as provided.
        self.stdout, self.stderr = stdout, stderr
        self.exit_code, self.error = exit_code, error
class CodeExecutor(ABC):
    """Abstract base class for language-specific sandbox executors."""

    def __init__(self):
        # Shared logger and configuration handles for all concrete runners.
        self.logger = get_logger()
        self.config = get_config()

    @abstractmethod
    async def run(
        self,
        code: str,
        options: RunnerOptions,
        preload: str = "",
        timeout: Optional[int] = None
    ) -> ExecutionResult:
        """Execute *code* in the sandbox and return its ExecutionResult."""

    def cleanup_temp_file(self, file_path: str) -> None:
        """Best-effort removal of a temporary file.

        Args:
            file_path: Path of the file to delete; a missing file is a no-op
                and any failure is only logged, never raised.
        """
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
        except Exception as e:
            self.logger.warning(f"Failed to cleanup temp file {file_path}: {e}")

View File

@@ -0,0 +1 @@
"""Code runners package"""

View File

@@ -0,0 +1,4 @@
# -*- coding: UTF-8 -*-
# Author: Eternity
# @Email: 1533512157@qq.com
# @Time : 2026/1/23 11:27

View File

@@ -0,0 +1,50 @@
import asyncio
import tempfile
import stat
from pathlib import Path
from app.config import get_config
from app.core.runners.python.settings import LIB_PATH
from app.logger import get_logger
logger = get_logger()
async def prepare_python_dependencies_env():
config = get_config()
with tempfile.TemporaryDirectory(dir="/") as root_path:
root = Path(root_path)
env_sh = root / "env.sh"
with open("script/env.sh") as f:
env_sh.write_text(f.read())
env_sh.chmod(env_sh.stat().st_mode | stat.S_IXUSR)
for lib_path in config.python_lib_paths:
lib_path = Path(lib_path)
if not lib_path.exists():
logger.warning("python lib path %s is not available", lib_path)
continue
cmd = [
"bash",
str(env_sh),
str(lib_path),
str(LIB_PATH),
]
process = await asyncio.create_subprocess_exec(
*cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
retcode = process.returncode
if retcode != 0:
logger.error(
f"create env error for file {lib_path}: retcode={retcode}, stderr={stderr.decode()}"
)

View File

@@ -0,0 +1,56 @@
import ctypes
import os
import sys
import traceback
from base64 import b64decode
# Setup exception hook
def excepthook(etype, value, tb):
    """Write the formatted traceback to stderr, then exit with a failure code."""
    formatted = traceback.format_exception(etype, value, tb)
    sys.stderr.write("".join(formatted))
    sys.stderr.flush()
    sys.exit(-1)
# Load security library if available
lib = ctypes.CDLL("./libpython.so")
lib.init_seccomp.argtypes = [ctypes.c_uint32, ctypes.c_uint32, ctypes.c_bool]
lib.init_seccomp.restype = None # TODO: raise error info
# Get running path
running_path = sys.argv[1]
if not running_path:
exit(-1)
# Get decrypt key
key = sys.argv[2]
if not key:
exit(-1)
key = b64decode(key)
os.chdir(running_path)
# Preload code
{{preload}}
# Apply security if library is available
lib.init_seccomp({{uid}}, {{gid}}, {{enable_network}})
# Decrypt and execute code
code = b64decode("{{code}}")
def decrypt(code, key):
    """Reverse the XOR cipher: return the plaintext bytes of *code*."""
    key_len = len(key)
    return bytes(b ^ key[i % key_len] for i, b in enumerate(code))
code = decrypt(code, key)
exec(code)

View File

@@ -0,0 +1,151 @@
"""Python code runner"""
import asyncio
import base64
import os
import uuid
from typing import Optional
from app.config import SANDBOX_USER_ID, SANDBOX_GROUP_ID, get_config
from app.core.encryption import generate_key, encrypt_code
from app.core.executor import CodeExecutor, ExecutionResult
from app.core.runners.python.settings import check_lib_avaiable, release_lib_binary, LIB_PATH
from app.models import RunnerOptions
# Python sandbox prescript template
with open("app/core/runners/python/prescript.py") as f:
PYTHON_PRESCRIPT = f.read()
class PythonRunner(CodeExecutor):
    """Python code runner with security isolation.

    The payload is XOR-encrypted, embedded into the prescript template, and
    executed in a subprocess that applies a seccomp sandbox before running it.
    """
    def __init__(self):
        super().__init__()

    @staticmethod
    def init_enviroment(code: bytes, preload, options: RunnerOptions) -> tuple[str, str]:
        """Render the sandbox prescript for a single execution.

        Args:
            code: Raw (already base64-decoded) user code bytes.
            preload: Preload source injected before the payload runs.
            options: Per-request runner options (e.g. network access).

        Returns:
            Tuple of (path to the generated script file, base64-encoded XOR key).

        Raises:
            RuntimeError: If the script file cannot be written.
        """
        # Ensure the seccomp shared library has been released to LIB_PATH.
        if not check_lib_avaiable():
            release_lib_binary(False)
        config = get_config()
        # Random, import-safe file name for the temporary script.
        code_file_name = uuid.uuid4().hex.replace("-", "_")
        # Fill each template placeholder exactly once.
        script = PYTHON_PRESCRIPT.replace("{{uid}}", str(SANDBOX_USER_ID), 1)
        script = script.replace("{{gid}}", str(SANDBOX_GROUP_ID), 1)
        # Network is enabled only when both the request AND global config allow it.
        script = script.replace(
            "{{enable_network}}",
            str(int(options.enable_network and config.enable_network)
                ),
            1
        )
        script = script.replace("{{preload}}", f"{preload}\n", 1)
        # Encrypt the payload with a fresh random key; the key travels to the
        # child process base64-encoded on its command line.
        key = generate_key(64)
        encoded_code = encrypt_code(code, key)
        encoded_key = base64.b64encode(key).decode("utf-8")
        script = script.replace("{{code}}", encoded_code, 1)
        code_path = f"{LIB_PATH}/tmp/{code_file_name}.py"
        try:
            os.makedirs(os.path.dirname(code_path), mode=0o755, exist_ok=True)
            with open(code_path, "w", encoding="utf-8") as f:
                f.write(script)
            os.chmod(code_path, 0o755)
        except OSError as e:
            raise RuntimeError(f"Failed to write {code_path}") from e
        return code_path, encoded_key

    async def run(
        self,
        code: str,
        options: RunnerOptions,
        preload: str = "",
        timeout: Optional[int] = None
    ) -> ExecutionResult:
        """Run Python code in the sandbox.

        Args:
            code: Base64-encoded plain code (decoded here, then re-encrypted
                into the generated prescript).
            options: Per-request runner options.
            preload: Preload code executed before the main code; dropped when
                the global ``enable_preload`` flag is off.
            timeout: Execution timeout in seconds; defaults to the configured
                worker timeout.

        Returns:
            ExecutionResult with stdout, stderr and exit code.  On timeout the
            child is killed and a synthetic timeout result is returned.
        """
        config = self.config
        if timeout is None:
            timeout = config.worker_timeout
        # Drop preload unless globally allowed.
        if not config.enable_preload:
            preload = ""
        code = base64.b64decode(code)
        script_path, encoded_key = self.init_enviroment(code, preload, options=options)
        try:
            # Child environment: only the variables set below are passed.
            env = {}
            # Proxy settings if configured; SOCKS5 takes precedence.
            if config.proxy.socks5:
                env["HTTPS_PROXY"] = config.proxy.socks5
                env["HTTP_PROXY"] = config.proxy.socks5
            elif config.proxy.https or config.proxy.http:
                if config.proxy.https:
                    env["HTTPS_PROXY"] = config.proxy.https
                if config.proxy.http:
                    env["HTTP_PROXY"] = config.proxy.http
            # Extra allowed syscalls, consumed by the seccomp layer.
            if config.allowed_syscalls:
                env["ALLOWED_SYSCALLS"] = ",".join(map(str, config.allowed_syscalls))
            # Execute the generated prescript with the sandbox interpreter.
            process = await asyncio.create_subprocess_exec(
                config.python_path,
                script_path,
                LIB_PATH,
                encoded_key,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                env=env,
                cwd=LIB_PATH
            )
            # Wait for completion with timeout.
            try:
                stdout, stderr = await asyncio.wait_for(
                    process.communicate(),
                    timeout=timeout
                )
                return ExecutionResult(
                    stdout=stdout.decode('utf-8', errors='replace'),
                    stderr=stderr.decode('utf-8', errors='replace'),
                    exit_code=process.returncode
                )
            except asyncio.TimeoutError:
                # Kill process on timeout (best effort).
                try:
                    process.kill()
                    await process.wait()
                except:  # NOTE(review): bare except also swallows CancelledError -- consider narrowing
                    pass
                return ExecutionResult(
                    stdout="",
                    stderr="Execution timeout",
                    exit_code=-1,
                    error="Execution timeout"
                )
        finally:
            # Always remove the generated script, even on errors.
            self.cleanup_temp_file(script_path)

View File

@@ -0,0 +1,62 @@
import os
from app.logger import get_logger
logger = get_logger()
RELEASE_LIB_PATH = "./lib/seccomp_python/target/release/libpython.so"
LIB_PATH = "/var/sandbox/sandbox-python"
LIB_NAME = "libpython.so"
# Load the compiled seccomp shared library into memory at import time so it
# can be (re)released into LIB_PATH on demand by release_lib_binary().
try:
    with open(RELEASE_LIB_PATH, "rb") as f:
        _PYTHON_LIB = f.read()
except:  # NOTE(review): bare except also catches KeyboardInterrupt -- consider OSError
    logger.critical("failed to load python lib")
    raise
def check_lib_avaiable():
    """Return True when the seccomp shared library is already released to LIB_PATH."""
    lib_file = os.path.join(LIB_PATH, LIB_NAME)
    return os.path.exists(lib_file)
def release_lib_binary(force_remove: bool):
    """Write the in-memory seccomp shared library to LIB_PATH.

    Args:
        force_remove: When True and the library file already exists, delete it
            before rewriting.

    Raises:
        OSError: When removing, creating the directory, or writing fails
            (each failure is logged at critical level first).

    The previous version duplicated the makedirs/write sequence verbatim in
    both branches of the exists-check; it is now written once.
    """
    logger.info("init runtime enviroment")
    lib_file = os.path.join(LIB_PATH, LIB_NAME)
    # Optionally remove a stale copy before rewriting.
    if os.path.exists(lib_file) and force_remove:
        try:
            os.remove(lib_file)
        except OSError:
            logger.critical(f"failed to remove {lib_file}")
            raise
    try:
        os.makedirs(LIB_PATH, mode=0o755, exist_ok=True)
    except OSError:
        logger.critical(f"failed to create {LIB_PATH}")
        raise
    try:
        with open(lib_file, "wb") as f:
            f.write(_PYTHON_LIB)
        os.chmod(lib_file, 0o755)
    except OSError:
        logger.critical(f"failed to write {lib_file}")
        raise
    logger.info("python runner environment initialized")

161
sandbox/app/dependencies.py Normal file
View File

@@ -0,0 +1,161 @@
"""Dependency management"""
import asyncio
from pathlib import Path
from typing import List, Dict
from app.config import get_config
from app.core.runners.python.env import prepare_python_dependencies_env
from app.logger import get_logger
async def setup_dependencies():
    """Setup initial dependencies.

    Installs the Python requirements and prepares the sandboxed dependency
    environment.  Failures are logged and deliberately swallowed so service
    startup is not blocked by a broken dependency install.
    """
    logger = get_logger()
    try:
        logger.info("Installing Python dependencies...")
        await install_python_dependencies()
        logger.info("Python dependencies installed")
        logger.info("Preparing Python dependencies environment...")
        await prepare_python_dependencies_env()
        logger.info("Python dependencies environment ready")
    except Exception as e:
        logger.error(f"Failed to setup dependencies: {e}")
async def update_dependencies():
    """Placeholder: dependency refresh is not implemented yet."""
    # TODO: implement the actual update logic
    return None
async def install_python_dependencies():
    """Install Python dependencies from requirements file.

    Reads ``dependencies/python-requirements.txt``, skips blank/comment lines,
    and installs the remaining specs via ``pip install --upgrade`` using the
    configured sandbox interpreter.  Errors are logged, never raised.
    """
    logger = get_logger()
    config = get_config()
    # Check if requirements file exists
    req_file = Path("dependencies/python-requirements.txt")
    if not req_file.exists():
        logger.warning("Python requirements file not found, skipping installation")
        return
    # Read requirements
    requirements = req_file.read_text().strip()
    if not requirements:
        logger.info("No Python requirements to install")
        return
    # Build the pip command using the sandbox interpreter.
    cmd = [
        config.python_path,
        "-m",
        "pip",
        "install",
        "--upgrade"
    ]
    # Add packages from requirements, skipping blanks and comments.
    for line in requirements.split("\n"):
        line = line.strip()
        if line and not line.startswith("#"):
            cmd.append(line)
    try:
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            logger.error(f"Failed to install Python dependencies: {stderr.decode()}")
        else:
            logger.info("Python dependencies installed successfully")
    except Exception as e:
        logger.error(f"Error installing Python dependencies: {e}")
async def list_dependencies(language: str) -> List[Dict[str, str]]:
    """List installed dependencies for *language*.

    Args:
        language: Runtime language name; only "python" is supported.

    Returns:
        List of {"name", "version"} dicts, or an empty list for any other language.
    """
    if language != "python":
        return []
    return await list_python_packages()
async def list_python_packages() -> List[Dict[str, str]]:
    """List installed Python packages.

    Shells out to ``pip list --format=freeze`` with the configured interpreter
    and parses ``name==version`` lines.

    Returns:
        List of {"name": ..., "version": ...} dicts; empty list on any error.
    """
    config = get_config()
    try:
        process = await asyncio.create_subprocess_exec(
            config.python_path,
            "-m",
            "pip",
            "list",
            "--format=freeze",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            return []
        # Parse the "name==version" freeze output.
        packages = []
        for line in stdout.decode().split("\n"):
            line = line.strip()
            if line and "==" in line:
                name, version = line.split("==", 1)
                packages.append({"name": name, "version": version})
        return packages
    except Exception as e:
        get_logger().error(f"Failed to list Python packages: {e}")
        return []
async def update_dependencies_periodically():
    """Periodically update dependencies.

    Runs forever, sleeping for the configured interval between attempts.
    The actual update call is still a TODO, so currently this only logs.
    """
    logger = get_logger()
    config = get_config()
    # Parse an interval spec such as "30m" / "2h" / "90s".
    interval_str = config.python_deps_update_interval
    # Convert to seconds
    if interval_str.endswith("m"):
        interval = int(interval_str[:-1]) * 60
    elif interval_str.endswith("h"):
        interval = int(interval_str[:-1]) * 3600
    elif interval_str.endswith("s"):
        interval = int(interval_str[:-1])
    else:
        interval = 1800  # Default 30 minutes
    logger.info(f"Starting periodic dependency updates every {interval} seconds")
    while True:
        await asyncio.sleep(interval)
        try:
            logger.info("Updating Python dependencies...")
            # TODO: await update_dependencies("python")
            logger.info("Python dependencies updated successfully")
        except Exception as e:
            logger.error(f"Failed to update Python dependencies: {e}")

42
sandbox/app/logger.py Normal file
View File

@@ -0,0 +1,42 @@
"""Logging configuration"""
import logging
import sys
from typing import Optional
from app.config import get_config
_logger: Optional[logging.Logger] = None
def setup_logger() -> logging.Logger:
    """Configure and return the application logger.

    Idempotent: calling it more than once no longer attaches duplicate
    handlers (the previous version appended a new StreamHandler on every
    call, multiplying each log line).

    Returns:
        The configured "sandbox" logger.
    """
    global _logger

    config = get_config()
    level = logging.DEBUG if config.app.debug else logging.INFO

    _logger = logging.getLogger("sandbox")
    _logger.setLevel(level)

    # Attach the console handler only once.
    if not _logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        ))
        _logger.addHandler(handler)
    return _logger
def get_logger() -> logging.Logger:
    """Return the shared application logger, initializing it on first use."""
    return _logger if _logger is not None else setup_logger()

View File

@@ -0,0 +1 @@
"""Middleware package"""

View File

@@ -0,0 +1,15 @@
"""Authentication middleware"""
from fastapi import Header, HTTPException, status
from app.config import get_config
async def verify_api_key(x_api_key: str = Header(..., alias="X-Api-Key")):
    """FastAPI dependency validating the ``X-Api-Key`` request header.

    Raises:
        HTTPException: 401 when the header does not match the configured key.

    Returns:
        The validated API key string.
    """
    config = get_config()
    if x_api_key != config.app.key:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid API key"
        )
    return x_api_key

View File

@@ -0,0 +1,48 @@
"""Concurrency control middleware"""
import asyncio
from fastapi import HTTPException, status
from app.config import get_config
from app.models import error_response
# Module-level concurrency state shared across requests.
_worker_semaphore: None | asyncio.Semaphore = None  # created lazily from config.max_workers
_request_counter = 0  # number of requests currently in flight
_request_lock = asyncio.Lock()  # guards _request_counter
def init_concurrency_control():
    """Create the worker semaphore sized by the configured max_workers."""
    global _worker_semaphore
    config = get_config()
    _worker_semaphore = asyncio.Semaphore(config.max_workers)
async def check_max_requests():
    """FastAPI dependency capping the number of in-flight requests.

    Implemented as an async generator: the counter is incremented before the
    request is handled (at the yield) and decremented afterwards, both under
    the module-level asyncio lock.

    Raises:
        HTTPException: 503 once ``max_requests`` requests are already in flight.
    """
    global _request_counter
    config = get_config()
    async with _request_lock:
        if _request_counter >= config.max_requests:
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail=error_response(-503, "Too many requests")
            )
        _request_counter += 1
    try:
        yield
    finally:
        async with _request_lock:
            _request_counter -= 1
async def acquire_worker():
    """FastAPI dependency holding one worker slot for the request's duration.

    Lazily initializes the semaphore on first use.
    NOTE(review): the None-check plus init is not atomic; two simultaneous
    first requests could both initialize -- appears harmless, but confirm.
    """
    if _worker_semaphore is None:
        init_concurrency_control()
    async with _worker_semaphore:
        yield

80
sandbox/app/models.py Normal file
View File

@@ -0,0 +1,80 @@
"""Data models"""
from typing import Optional, Any
from pydantic import BaseModel, Field
class RunnerOptions(BaseModel):
enable_network: bool = Field(default=False, description="Sandbox network flag")
class RunCodeRequest(BaseModel):
    """Request model for code execution"""
    language: str = Field(..., description="Programming language (python3 or nodejs)")
    # NOTE(review): description says the payload is base64 + encrypted —
    # this model only transports it; confirm decryption happens in the runner.
    code: str = Field(..., description="Base64 encoded encrypted code")
    preload: Optional[str] = Field(default="", description="Preload code")
    options: RunnerOptions = Field(default_factory=RunnerOptions, description="Enable network access")
class RunCodeResponse(BaseModel):
"""Response model for code execution"""
stdout: str = Field(default="", description="Standard output")
stderr: str = Field(default="", description="Standard error")
class DependencyRequest(BaseModel):
"""Request model for dependency operations"""
language: str = Field(..., description="Programming language")
class UpdateDependencyRequest(BaseModel):
"""Request model for updating dependencies"""
language: str = Field(..., description="Programming language")
packages: list[str] = Field(default_factory=list, description="Packages to install")
class Dependency(BaseModel):
"""Dependency information"""
name: str
version: str
class ListDependenciesResponse(BaseModel):
"""Response model for listing dependencies"""
dependencies: list[Dependency] = Field(default_factory=list)
class RefreshDependenciesResponse(BaseModel):
"""Response model for refreshing dependencies"""
dependencies: list[Dependency] = Field(default_factory=list)
class UpdateDependenciesResponse(BaseModel):
"""Response model for updating dependencies"""
success: bool = True
installed: list[str] = Field(default_factory=list)
class HealthResponse(BaseModel):
"""Health check response"""
status: str = "healthy"
version: str = "2.0.0"
class ApiResponse(BaseModel):
    """Standard API response wrapper"""
    # 0 means success; errors carry a negative code (see error_response
    # in this module, which normalizes codes).
    code: int = Field(default=0, description="Response code (0 for success, negative for error)")
    message: str = Field(default="success", description="Response message")
    data: Optional[Any] = Field(default=None, description="Response data")
def success_response(data: Any) -> ApiResponse:
    """Wrap *data* in a success envelope (code 0, message "success")."""
    response = ApiResponse(code=0, message="success", data=data)
    return response
def error_response(code: int, message: str) -> ApiResponse:
    """Create an error response with a normalized negative code.

    Positive codes are negated rather than collapsed: callers in this
    codebase pass meaningful positive codes (e.g. 31 for a sandbox
    security violation, 500 for internal errors); the previous
    `code = -1` for any non-negative input discarded that information.
    A code of 0 (the success code) still maps to the generic -1.
    """
    if code >= 0:
        code = -code if code > 0 else -1
    return ApiResponse(code=code, message=message, data=None)

View File

@@ -0,0 +1 @@
"""Services package"""

View File

@@ -0,0 +1,80 @@
"""Python execution service"""
import signal
from app.core.runners.python.python_runner import PythonRunner
from app.dependencies import (
list_dependencies as list_deps,
update_dependencies as update_deps
)
from app.logger import get_logger
from app.models import (
success_response,
error_response,
RunCodeResponse,
ListDependenciesResponse,
UpdateDependenciesResponse,
Dependency,
RunnerOptions
)
async def run_python_code(code: str, preload: str, options: RunnerOptions):
    """Execute Python code in the sandbox.

    Args:
        code: Base64 encoded encrypted code.
        preload: Preload code executed before the user code.
        options: Runner options (e.g. network access flag).

    Returns:
        API response with the execution result, or an error response.
    """
    logger = get_logger()
    try:
        runner = PythonRunner()
        result = await runner.run(code, options, preload)
        # An exit status of -SIGSYS means the seccomp filter killed the
        # process for an禁-listed syscall — report it distinctly.
        # Negative code for consistency with the -500 paths below (the
        # previous positive 31 was normalized away by error_response).
        if result.exit_code == -signal.SIGSYS:
            return error_response(-31, "sandbox security policy violation")
        if result.error:
            return error_response(-500, result.error)
        return success_response(RunCodeResponse(
            stdout=result.stdout,
            stderr=result.stderr
        ))
    except Exception as e:
        logger.error(f"Python execution failed: {e}", exc_info=True)
        return error_response(-500, str(e))
async def list_python_dependencies():
    """List installed Python dependencies.

    Returns:
        API response with the dependency list, or an error response.
    """
    try:
        deps = await list_deps("python")
        dependencies = [
            Dependency(name=dep["name"], version=dep["version"])
            for dep in deps
        ]
        return success_response(ListDependenciesResponse(dependencies=dependencies))
    except Exception as e:
        # Negative code for consistency with run_python_code's error
        # paths (a positive 500 was collapsed to -1 by error_response).
        return error_response(-500, str(e))
async def update_python_dependencies():
    """Update Python dependencies.

    Returns:
        API response with the update result, or an error response.
    """
    try:
        # NOTE(review): called with no arguments while list_deps takes a
        # language — confirm the imported update_dependencies signature.
        await update_deps()
        return success_response(UpdateDependenciesResponse(success=True))
    except Exception as e:
        # Negative code for consistency with the other error paths
        # (a positive 500 was collapsed to -1 by error_response).
        return error_response(-500, str(e))

20
sandbox/config.yaml Normal file
View File

@@ -0,0 +1,20 @@
app:
port: 8194
debug: true
key: redbear-sandbox
max_workers: 4
max_requests: 50
worker_timeout: 30
python_path: /usr/local/bin/python
nodejs_path: /usr/local/bin/node
enable_network: true
enable_preload: false
python_deps_update_interval: 30m
allowed_syscalls: []
proxy:
socks5: ''
http: ''
https: ''

View File

@@ -0,0 +1,4 @@
requests==2.31.0
# numpy==1.26.0
# pandas==2.0.0
jinja2==3.1.2

7
sandbox/lib/seccomp_nodejs/Cargo.lock generated Normal file
View File

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "seccomp_nodejs"
version = "0.1.0"

View File

@@ -0,0 +1,6 @@
[package]
name = "seccomp_nodejs"
version = "0.1.0"
edition = "2024"
[dependencies]

View File

23
sandbox/lib/seccomp_python/Cargo.lock generated Normal file
View File

@@ -0,0 +1,23 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "libc"
version = "0.2.180"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
[[package]]
name = "libseccomp-sys"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60276e2d41bbb68b323e566047a1bfbf952050b157d8b5cdc74c07c1bf4ca3b6"
[[package]]
name = "seccomp_python"
version = "0.1.0"
dependencies = [
"libc",
"libseccomp-sys",
]

View File

@@ -0,0 +1,12 @@
[package]
name = "seccomp_python"
version = "0.1.0"
edition = "2024"
[lib]
name = "python"
crate-type = ["cdylib"]
[dependencies]
libc = "0.2.180"
libseccomp-sys = "0.3.0"

View File

@@ -0,0 +1,195 @@
mod syscalls;
use crate::syscalls::*;
use libc::{chdir, chroot, gid_t, uid_t, c_int};
use libseccomp_sys::*;
use std::env;
use std::ffi::CString;
use std::str::FromStr;
/*
* get_allowed_syscalls - retrieve allowed syscalls for the sandbox
* @enable_network: enable network-related syscalls if non-zero
*
* Syscall selection order:
* 1. ALLOWED_SYSCALLS environment variable
* 2. Built-in default allowlist
* 3. Optional network syscall extension
*
* Returns:
* (allowed_syscalls, allowed_not_kill_syscalls)
* allowed_syscalls: syscalls fully allowed
* allowed_not_kill_syscalls: syscalls returning EPERM
*/
/*
 * get_allowed_syscalls - retrieve allowed syscalls for the sandbox
 * @enable_network: enable network-related syscalls if non-zero
 *
 * Syscall selection order:
 * 1. ALLOWED_SYSCALLS environment variable (comma-separated ids)
 * 2. Built-in default allowlist
 * 3. Optional network syscall extension
 *
 * Returns:
 * (allowed_syscalls, allowed_not_kill_syscalls)
 * allowed_syscalls: syscalls fully allowed
 * allowed_not_kill_syscalls: syscalls returning EPERM
 */
pub fn get_allowed_syscalls(enable_network: bool) -> (Vec<i32>, Vec<i32>) {
    let mut allowed_syscalls = Vec::new();
    let mut allowed_not_kill_syscalls = Vec::new();
    /* Syscalls that return an error instead of killing the process */
    allowed_not_kill_syscalls.extend(ALLOW_ERROR_SYSCALLS);
    /* Load from environment variable ALLOWED_SYSCALLS. Tokens are
     * trimmed so "1, 2,3 " parses; the previous parse silently dropped
     * entries containing whitespace because i32::from_str rejects them. */
    if let Ok(env_val) = env::var("ALLOWED_SYSCALLS") {
        for tok in env_val.split(',') {
            let tok = tok.trim();
            if tok.is_empty() {
                continue;
            }
            if let Ok(sc) = i32::from_str(tok) {
                allowed_syscalls.push(sc);
            }
        }
    }
    /* Fall back to built-in defaults if the env var supplied nothing */
    if allowed_syscalls.is_empty() {
        allowed_syscalls.extend(ALLOW_SYSCALLS);
        if enable_network {
            allowed_syscalls.extend(ALLOW_NETWORK_SYSCALLS);
        }
    }
    (allowed_syscalls, allowed_not_kill_syscalls)
}
/*
* setup_root - setup restricted filesystem root
*
* Perform chroot(".") and change working directory to "/".
*
* Return:
* 0 on success
* negative error code on failure
*/
/*
 * setup_root - setup restricted filesystem root
 *
 * Perform chroot(".") and change working directory to "/".
 *
 * Return:
 * Ok(()) on success
 * Err(negative error code) on failure
 */
fn setup_root() -> Result<(), c_int> {
    let new_root = CString::new(".").unwrap();
    let slash = CString::new("/").unwrap();
    /* Confine the filesystem view to the current directory... */
    match unsafe { chroot(new_root.as_ptr()) } {
        0 => {}
        _ => return Err(-1),
    }
    /* ...then make the new root the working directory. */
    match unsafe { chdir(slash.as_ptr()) } {
        0 => Ok(()),
        _ => Err(-2),
    }
}
/*
* set_no_new_privs - enable PR_SET_NO_NEW_PRIVS
*
* Prevent privilege escalation via execve.
*
* Return:
* 0 on success
* negative error code on failure
*/
/*
 * set_no_new_privs - enable PR_SET_NO_NEW_PRIVS
 *
 * Prevent privilege escalation via execve (setuid binaries, file caps).
 *
 * Return:
 * Ok(()) on success
 * Err(-3) on failure
 */
fn set_no_new_privs() -> Result<(), c_int> {
    if unsafe { libc::prctl(libc::PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) } != 0 {
        return Err(-3);
    }
    Ok(())
}
/*
* drop_privileges - drop process privileges
* @uid: target user ID
* @gid: target group ID
*
* Permanently reduce process privileges.
*
* Return:
* 0 on success
* negative error code on failure
*/
/*
 * drop_privileges - drop process privileges
 * @uid: target user ID
 * @gid: target group ID
 *
 * Permanently reduce process privileges. setgid() is called before
 * setuid(); once the uid is dropped the process would presumably no
 * longer be permitted to change its gid.
 *
 * Return:
 * Ok(()) on success
 * Err(-4) if setgid fails, Err(-5) if setuid fails
 */
fn drop_privileges(uid: uid_t, gid: gid_t) -> Result<(), c_int> {
    if unsafe { libc::setgid(gid) } != 0 {
        return Err(-4);
    }
    if unsafe { libc::setuid(uid) } != 0 {
        return Err(-5);
    }
    Ok(())
}
/*
* install_seccomp - install seccomp filter
* @enable_network: enable network-related syscalls if non-zero
*
* Default action is SCMP_ACT_KILL_PROCESS.
* Allowed syscalls are explicitly whitelisted.
*
* Return:
* 0 on success
* negative error code on failure
*/
/*
 * install_seccomp - install seccomp filter
 * @enable_network: enable network-related syscalls if non-zero
 *
 * Default action is SCMP_ACT_KILL_PROCESS.
 * Allowed syscalls are explicitly whitelisted; a second tier of
 * syscalls fails with EPERM instead of killing the process.
 *
 * Return:
 * Ok(()) on success
 * Err(negative error code) on failure (context is released on every
 * error path before returning)
 */
fn install_seccomp(enable_network: bool) -> Result<(), c_int> {
    unsafe {
        let ctx = seccomp_init(SCMP_ACT_KILL_PROCESS);
        if ctx.is_null() {
            return Err(-6); /* failed to init seccomp context */
        }
        let (allowed_syscalls, allowed_not_kill_syscalls) = get_allowed_syscalls(enable_network);
        /* add fully allowed syscalls */
        for &sc in &allowed_syscalls {
            if seccomp_rule_add(ctx, SCMP_ACT_ALLOW, sc, 0) != 0 {
                seccomp_release(ctx);
                return Err(-7);
            }
        }
        /* add syscalls returning EPERM instead of killing */
        for &sc in &allowed_not_kill_syscalls {
            if seccomp_rule_add(ctx, SCMP_ACT_ERRNO(libc::EPERM as u16), sc, 0) != 0 {
                seccomp_release(ctx);
                return Err(-8);
            }
        }
        /* load the filter into the kernel; ctx is no longer needed after */
        if seccomp_load(ctx) != 0 {
            seccomp_release(ctx);
            return Err(-9);
        }
        seccomp_release(ctx);
        Ok(())
    }
}
/*
* init_seccomp - initialize seccomp sandbox
* @uid: target user ID
* @gid: target group ID
* @enable_network: enable network syscalls if non-zero
*
* Initialize the sandbox and apply privilege restrictions
* in the following order:
* 1. setup_root()
* 2. set_no_new_privs()
* 3. drop_privileges()
* 4. install_seccomp()
*
* This function must be called before executing any untrusted code.
* It is not thread-safe and must be invoked once per process.
*
* Return:
* 0 on success
* negative error code on failure
*/
#[unsafe(no_mangle)]
pub unsafe extern "C" fn init_seccomp(uid: uid_t, gid: gid_t, enable_network: i32) -> c_int {
    /* Each helper returns a distinct negative code so the C caller can
     * tell which stage failed; 0 means the sandbox is fully armed. */
    if let Err(code) = setup_root() {
        return code;
    }
    if let Err(code) = set_no_new_privs() {
        return code;
    }
    /* Privileges are dropped before the filter is installed (see the
     * ordering contract in the function's doc comment above). */
    if let Err(code) = drop_privileges(uid, gid) {
        return code;
    }
    match install_seccomp(enable_network != 0) {
        Ok(_) => 0,
        Err(code) => code,
    }
}

View File

@@ -0,0 +1,85 @@
// src/syscalls.rs
pub static ALLOW_SYSCALLS: &[i32] = &[
// file io
libc::SYS_read as i32,
libc::SYS_write as i32,
libc::SYS_openat as i32,
libc::SYS_close as i32,
libc::SYS_newfstatat as i32,
libc::SYS_ioctl as i32,
libc::SYS_lseek as i32,
libc::SYS_getdents64 as i32,
// thread
libc::SYS_futex as i32,
// memory
libc::SYS_mmap as i32,
libc::SYS_brk as i32,
libc::SYS_mprotect as i32,
libc::SYS_munmap as i32,
libc::SYS_rt_sigreturn as i32,
libc::SYS_mremap as i32,
// user / group
libc::SYS_setuid as i32,
libc::SYS_setgid as i32,
libc::SYS_getuid as i32,
// process
libc::SYS_getpid as i32,
libc::SYS_getppid as i32,
libc::SYS_gettid as i32,
libc::SYS_exit as i32,
libc::SYS_exit_group as i32,
libc::SYS_tgkill as i32,
libc::SYS_rt_sigaction as i32,
libc::SYS_sched_yield as i32,
libc::SYS_set_robust_list as i32,
libc::SYS_get_robust_list as i32,
libc::SYS_rseq as i32,
// time
libc::SYS_clock_gettime as i32,
libc::SYS_gettimeofday as i32,
libc::SYS_nanosleep as i32,
libc::SYS_epoll_create1 as i32,
libc::SYS_epoll_ctl as i32,
libc::SYS_clock_nanosleep as i32,
libc::SYS_pselect6 as i32,
libc::SYS_rt_sigprocmask as i32,
libc::SYS_sigaltstack as i32,
libc::SYS_getrandom as i32,
];
pub static ALLOW_ERROR_SYSCALLS: &[i32] = &[
libc::SYS_clone as i32,
libc::SYS_mkdirat as i32,
libc::SYS_mkdir as i32,
];
pub static ALLOW_NETWORK_SYSCALLS: &[i32] = &[
libc::SYS_socket as i32,
libc::SYS_connect as i32,
libc::SYS_bind as i32,
libc::SYS_listen as i32,
libc::SYS_accept as i32,
libc::SYS_sendto as i32,
libc::SYS_recvfrom as i32,
libc::SYS_getsockname as i32,
libc::SYS_recvmsg as i32,
libc::SYS_getpeername as i32,
libc::SYS_setsockopt as i32,
libc::SYS_ppoll as i32,
libc::SYS_uname as i32,
libc::SYS_sendmsg as i32,
libc::SYS_sendmmsg as i32,
libc::SYS_getsockopt as i32,
libc::SYS_fstat as i32,
libc::SYS_fcntl as i32,
libc::SYS_fstatfs as i32,
libc::SYS_poll as i32,
libc::SYS_epoll_pwait as i32,
];

97
sandbox/main.py Normal file
View File

@@ -0,0 +1,97 @@
"""
Redbear Sandbox - Main Entry Point
"""
import asyncio
import os
import sys
from contextlib import asynccontextmanager
import uvicorn
from fastapi import FastAPI
from app.config import get_config
from app.controllers import manager_router
from app.dependencies import setup_dependencies, update_dependencies_periodically
from app.logger import setup_logger, get_logger
logger = get_logger()
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Starts dependency setup and the periodic updater as background tasks
    on startup and logs the shutdown.
    """
    logger = get_logger()
    # Startup
    logger.info("Starting RedBear Sandbox...")
    # Keep strong references to the background tasks: the event loop only
    # holds weak references to tasks, so an un-referenced task created by
    # asyncio.create_task can be garbage collected before it finishes.
    background_tasks = [asyncio.create_task(setup_dependencies())]
    config = get_config()
    if config.python_deps_update_interval:
        background_tasks.append(asyncio.create_task(update_dependencies_periodically()))
    app.state.background_tasks = background_tasks
    yield
    # Shutdown
    logger.info("Shutting down Redbear Sandbox...")
def create_app() -> FastAPI:
    """Build and configure the FastAPI application."""
    cfg = get_config()
    application = FastAPI(
        title="Sandbox",
        description="Secure code execution sandbox",
        version="2.0.0",
        lifespan=lifespan,
        debug=cfg.app.debug,
    )
    application.include_router(manager_router)
    return application
def check_root_privileges():
    """Exit with status 1 unless running as root.

    chroot/setuid in the sandbox require root, so refusing early gives a
    clear failure instead of a later EPERM.
    """
    if os.geteuid() != 0:
        # This is a fatal condition: log at error level, not info.
        logger.error("Error: Sandbox must be run as root for security features (chroot, setuid)")
        sys.exit(1)
def main():
    """Main entry point"""
    # Refuse to start without root (chroot/setuid depend on it).
    check_root_privileges()
    # Setup logging
    setup_logger()
    config = get_config()
    log = get_logger()
    for line in (
        f"Starting server on port {config.app.port}",
        f"Debug mode: {config.app.debug}",
        f"Max workers: {config.max_workers}",
        f"Max requests: {config.max_requests}",
        f"Network enabled: {config.enable_network}",
    ):
        log.info(line)
    # Build the app and run the server.
    uvicorn.run(
        create_app(),
        host="0.0.0.0",
        port=config.app.port,
        log_level="debug" if config.app.debug else "info",
        access_log=config.app.debug,
    )

20
sandbox/requirements.txt Normal file
View File

@@ -0,0 +1,20 @@
# Web Framework
fastapi==0.115.0
uvicorn[standard]==0.32.0
pydantic==2.9.0
pydantic-settings==2.5.0
# Configuration
PyYAML==6.0.2
# Security
pyseccomp==0.1.2
# Async & Concurrency
aiofiles==24.1.0
# Testing
pytest==8.3.0
pytest-asyncio==0.24.0
httpx==0.27.0

53
sandbox/script/env.sh Normal file
View File

@@ -0,0 +1,53 @@
#!/bin/bash
# Mirror <src> (a file or a directory tree) into <dest> using hard links
# where possible, falling back to read-only copies.

# Check if the correct number of arguments are provided
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <src> <dest>"
    exit 1
fi

src="$1"
dest="$2"

# copy_and_link <src_file> <dest_file>
# Symlinks are copied as-is, device nodes are copied read-only, and
# regular files are hard-linked — with a read-only copy as fallback
# (e.g. when src and dest are on different filesystems).
copy_and_link() {
    local src_file="$1"
    local dest_file="$2"
    if [ -L "$src_file" ]; then
        # If src_file is a symbolic link, copy it without changing permissions
        cp -P "$src_file" "$dest_file"
    elif [ -b "$src_file" ] || [ -c "$src_file" ]; then
        # If src_file is a device file, copy it and change permissions
        cp "$src_file" "$dest_file"
        chmod 444 "$dest_file"
    else
        # Otherwise, create a hard link and change the permissions to read-only
        ln -f "$src_file" "$dest_file" 2>/dev/null || { cp "$src_file" "$dest_file" && chmod 444 "$dest_file"; }
    fi
}

# Check if src is a file or directory
if [ -f "$src" ]; then
    # src is a file, create hard link directly in dest
    mkdir -p "$(dirname "$dest/$src")"
    copy_and_link "$src" "$dest/$src"
elif [ -d "$src" ]; then
    # src is a directory, process as before
    mkdir -p "$dest/$src"
    # NOTE: "-type f,l" is a GNU find extension; use two -type clauses
    # with -o if POSIX find must be supported.
    # IFS= keeps leading/trailing whitespace in filenames intact; plain
    # `read -r` would strip it and break the paths below.
    find "$src" -type f,l | while IFS= read -r file; do
        # Get the relative path of the file
        rel_path="${file#$src/}"
        # Get the directory of the relative path
        rel_dir=$(dirname "$rel_path")
        # Create the same directory structure in the destination
        mkdir -p "$dest/$src/$rel_dir"
        # Copy and link the file
        copy_and_link "$file" "$dest/$src/$rel_path"
    done
else
    echo "Error: $src is neither a file nor a directory"
    exit 1
fi

View File

@@ -116,20 +116,20 @@ export const getRagContent = (end_user_id: string) => {
return request.get(`/dashboard/rag_content`, { end_user_id, limit: 20 })
}
// Emotion distribution analysis
export const getWordCloud = (group_id: string) => {
return request.post(`/memory/emotion-memory/wordcloud`, { group_id, limit: 20 })
export const getWordCloud = (end_user_id: string) => {
return request.post(`/memory/emotion-memory/wordcloud`, { end_user_id, limit: 20 })
}
// High-frequency emotion keywords
export const getEmotionTags = (group_id: string) => {
return request.post(`/memory/emotion-memory/tags`, { group_id, limit: 20 })
export const getEmotionTags = (end_user_id: string) => {
return request.post(`/memory/emotion-memory/tags`, { end_user_id, limit: 20 })
}
// Emotion health index
export const getEmotionHealth = (group_id: string) => {
return request.post(`/memory/emotion-memory/health`, { group_id, limit: 20 })
export const getEmotionHealth = (end_user_id: string) => {
return request.post(`/memory/emotion-memory/health`, { end_user_id })
}
// Personalized suggestions
export const getEmotionSuggestions = (group_id: string) => {
return request.post(`/memory/emotion-memory/suggestions`, { group_id, limit: 20 })
export const getEmotionSuggestions = (end_user_id: string) => {
return request.post(`/memory/emotion-memory/suggestions`, { end_user_id })
}
export const generateSuggestions = (end_user_id: string) => {
return request.post(`/memory/emotion-memory/generate_suggestions`, { end_user_id })
@@ -138,8 +138,8 @@ export const analyticsRefresh = (end_user_id: string) => {
return request.post('/memory-storage/analytics/generate_cache', { end_user_id })
}
// Forgetting stats
export const getForgetStats = (group_id: string) => {
return request.get(`/memory/forget-memory/stats`, { group_id })
export const getForgetStats = (end_user_id: string) => {
return request.get(`/memory/forget-memory/stats`, { end_user_id })
}
// Implicit Memory - Preferences
export const getImplicitPreferences = (end_user_id: string) => {
@@ -165,20 +165,20 @@ export const getShortTerm = (end_user_id: string) => {
return request.get(`/memory/short/short_term`, { end_user_id })
}
// Perceptual Memory - Visual memory
export const getPerceptualLastVisual = (end_user: string) => {
return request.get(`/memory/perceptual/${end_user}/last_visual`)
export const getPerceptualLastVisual = (end_user_id: string) => {
return request.get(`/memory/perceptual/${end_user_id}/last_visual`)
}
// Perceptual Memory - Audio memory
export const getPerceptualLastListen = (end_user: string) => {
return request.get(`/memory/perceptual/${end_user}/last_listen`)
export const getPerceptualLastListen = (end_user_id: string) => {
return request.get(`/memory/perceptual/${end_user_id}/last_listen`)
}
// Perceptual Memory - Text memory
export const getPerceptualLastText = (end_user: string) => {
return request.get(`/memory/perceptual/${end_user}/last_text`)
export const getPerceptualLastText = (end_user_id: string) => {
return request.get(`/memory/perceptual/${end_user_id}/last_text`)
}
// Perceptual Memory - Perceptual memory timeline
export const getPerceptualTimeline = (end_user: string) => {
return request.get(`/memory/perceptual/${end_user}/timeline`)
export const getPerceptualTimeline = (end_user_id: string) => {
return request.get(`/memory/perceptual/${end_user_id}/timeline`)
}
// Episodic Memory - Overview
export const getEpisodicOverview = (data: { end_user_id: string; time_range: string; episodic_type: string; } ) => {
@@ -201,14 +201,14 @@ export const getExplicitMemory = (end_user_id: string) => {
export const getExplicitMemoryDetails = (data: { end_user_id: string, memory_id: string; }) => {
return request.post(`/memory/explicit-memory/details`, data)
}
export const getConversations = (end_user: string) => {
return request.get(`/memory/work/${end_user}/conversations`)
export const getConversations = (end_user_id: string) => {
return request.get(`/memory/work/${end_user_id}/conversations`)
}
export const getConversationMessages = (end_user: string, conversation_id: string) => {
return request.get(`/memory/work/${end_user}/messages`, { conversation_id })
export const getConversationMessages = (end_user_id: string, conversation_id: string) => {
return request.get(`/memory/work/${end_user_id}/messages`, { conversation_id })
}
export const getConversationDetail = (end_user: string, conversation_id: string) => {
return request.get(`/memory/work/${end_user}/detail`, { conversation_id })
export const getConversationDetail = (end_user_id: string, conversation_id: string) => {
return request.get(`/memory/work/${end_user_id}/detail`, { conversation_id })
}
export const forgetTrigger = (data: { max_merge_batch_size: number; min_days_since_access: number; end_user_id: string;}) => {
return request.post(`/memory/forget-memory/trigger`, data)

View File

@@ -8,6 +8,7 @@ import { type FC, useRef, useEffect } from 'react'
import clsx from 'clsx'
import Markdown from '@/components/Markdown'
import type { ChatContentProps } from './types'
import { Spin } from 'antd'
/**
* 聊天内容显示组件
@@ -21,7 +22,8 @@ const ChatContent: FC<ChatContentProps> = ({
empty,
labelPosition = 'bottom',
labelFormat,
errorDesc
errorDesc,
renderRuntime
}) => {
// 滚动容器引用,用于控制自动滚动到底部
const scrollContainerRef = useRef<(HTMLDivElement | null)>(null)
@@ -45,8 +47,8 @@ const ChatContent: FC<ChatContentProps> = ({
'rb:left-0 rb:text-left': item.role === 'assistant', // 助手消息左对齐
})}>
{/* 流式加载时且内容为空则不显示 */}
{streamLoading && item.content === ''
? null
{streamLoading && item.content === '' && !renderRuntime
? <Spin />
: <>
{/* 顶部标签(如时间戳、用户名等) */}
{labelPosition === 'top' &&
@@ -55,16 +57,17 @@ const ChatContent: FC<ChatContentProps> = ({
</div>
}
{/* 消息气泡框 */}
<div className={clsx('rb:border rb:text-left rb:rounded-lg rb:mt-1.5 rb:leading-4.5 rb:p-[10px_12px_2px_12px] rb:inline-block rb:max-w-[520px] rb:wrap-break-word', contentClassNames, {
<div className={clsx('rb:border rb:text-left rb:rounded-lg rb:mt-1.5 rb:leading-4.5 rb:p-[10px_12px_2px_12px] rb:inline-block rb:max-w-130 rb:wrap-break-word', contentClassNames, {
// 错误消息样式内容为null且非助手消息
'rb:border-[rgba(255,93,52,0.30)] rb:bg-[rgba(255,93,52,0.08)] rb:text-[#FF5D34]': errorDesc && item.role === 'assistant' && item.content === null,
'rb:border-[rgba(255,93,52,0.30)] rb:bg-[rgba(255,93,52,0.08)] rb:text-[#FF5D34]': errorDesc && item.role === 'assistant' && item.content === null && !renderRuntime,
// 助手消息样式
'rb:bg-[rgba(21,94,239,0.08)] rb:border-[rgba(21,94,239,0.30)]': item.role === 'user',
// 用户消息样式
'rb:bg-[#FFFFFF] rb:border-[#EBEBEB]': item.role === 'assistant' && (item.content || item.content === ''),
'rb:bg-[#FFFFFF] rb:border-[#EBEBEB]': item.role === 'assistant' && (item.content || item.content === '' || typeof renderRuntime === 'function'),
})}>
{item.subContent && renderRuntime && renderRuntime(item, index)}
{/* 使用Markdown组件渲染消息内容 */}
<Markdown content={item.content ?? errorDesc ?? ''} />
<Markdown content={renderRuntime ? item.content ?? '' : item.content ?? errorDesc ?? ''} />
</div>
{/* 底部标签(如时间戳、用户名等) */}
{labelPosition === 'bottom' &&

View File

@@ -19,7 +19,9 @@ export interface ChatItem {
/** 消息内容 */
content?: string | null;
/** 创建时间 */
created_at?: number | string
created_at?: number | string;
status?: string;
subContent?: Record<string, any>[]
}
/**
@@ -81,4 +83,5 @@ export interface ChatContentProps {
/** 标签格式化函数 */
labelFormat: (item: ChatItem) => any;
errorDesc?: string;
renderRuntime?: (item: ChatItem, index: number) => ReactNode;
}

View File

@@ -6,6 +6,9 @@ import CopyBtn from './CopyBtn';
type ICodeBlockProps = {
value: string;
needCopy?: boolean;
size?: 'small' | 'default';
showLineNumbers?: boolean;
}
// enum languageType {
@@ -16,6 +19,9 @@ type ICodeBlockProps = {
const CodeBlock: FC<ICodeBlockProps> = ({
value,
needCopy = true,
size = 'default',
showLineNumbers = false
}) => {
return (
@@ -23,24 +29,26 @@ const CodeBlock: FC<ICodeBlockProps> = ({
<SyntaxHighlighter
style={atelierHeathLight}
customStyle={{
padding: '16px 20px 16px 24px',
padding: '8px 12px 8px 12px',
backgroundColor: '#F0F3F8',
borderRadius: 8,
fontSize: size === 'small' ? 12 : 14,
wordBreak: 'break-all'
}}
language="json"
showLineNumbers={false}
showLineNumbers={showLineNumbers}
PreTag="div"
>
{value}
</SyntaxHighlighter>
<CopyBtn
{needCopy && <CopyBtn
value={value}
style={{
position: 'absolute',
top: 20,
right: 20,
}}
/>
/>}
</div>
)
}

View File

@@ -1982,6 +1982,10 @@ Memory Bear: After the rebellion, regional warlordism intensified for several re
arrange: 'Arrange',
redo: 'Redo',
undo: 'Undo',
input: 'Input',
output: 'Output',
error: 'Error Message',
},
emotionEngine: {
emotionEngineConfig: 'Emotion Engine Configuration',

View File

@@ -2076,6 +2076,10 @@ export const zh = {
arrange: '整理',
redo: '重做',
undo: '撤销',
input: '输入',
output: '输出',
error: '错误信息',
},
emotionEngine: {
emotionEngineConfig: '情感引擎配置',

View File

@@ -123,6 +123,20 @@ export const handleSSE = async (url: string, data: any, onMessage?: (data: SSEMe
let response = await makeSSERequest(url, data, token || '', config);
switch (response.status) {
case 500:
case 502:
const errorData = await response.json();
errorData.error || i18n.t('common.serviceUpgrading');
message.warning(errorData.error || i18n.t('common.serviceUpgrading'));
break
case 400:
const error = await response.json();
message.warning(error.error);
throw error || 'Bad Request';
case 504:
const errorJson = await response.json();
message.warning(errorJson.error || i18n.t('common.serverError'));
break
case 401:
if (url?.includes('/public')) {
return message.warning(i18n.t('common.publicApiCannotRefreshToken'));

View File

@@ -4,7 +4,7 @@
* @Author: yujiangping
* @Date: 2026-01-12 16:34:59
* @LastEditors: yujiangping
* @LastEditTime: 2026-01-16 15:38:35
* @LastEditTime: 2026-01-23 19:07:36
*/
import React, { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
@@ -23,6 +23,13 @@ const GuideCard: React.FC = () => {
return currentLang === 'zh' ? versionInfo.introduction : (versionInfo.introduction_en || versionInfo.introduction);
};
// 解析换行符和HTML的方法
const parseContent = (text: string) => {
if (!text) return '';
// 将 \n 转换为 <br/> 标签
return text.replace(/\\n/g, '<br/>');
};
useEffect(() => {
const fetchVersion = async () => {
try {
@@ -58,13 +65,16 @@ const GuideCard: React.FC = () => {
{t('version.name')}: {introduction.codeName}
</span>
</div>
<p className='rb:text-sm rb:text-[#5B6167] rb:leading-5 rb:mt-2 '>
{introduction.upgradePosition}
</p>
<p
className='rb:text-sm rb:text-[#5B6167] rb:leading-5 rb:mt-2'
dangerouslySetInnerHTML={{ __html: introduction.upgradePosition }}
/>
{introduction.coreUpgrades?.map((item: string, index: number) => (
<p key={index} className='rb:text-sm rb:text-[#5B6167] rb:leading-5'>
{index + 1}. {item}
</p>
<p
key={index}
className='rb:text-sm rb:text-[#5B6167] rb:leading-5'
dangerouslySetInnerHTML={{ __html: item }}
/>
))}
</>) : null;
})()}

View File

@@ -45,7 +45,7 @@ const searchSwitchList = [
]
export interface TestParams {
group_id: string;
end_user_id: string;
message: string;
search_switch: string;
history: { role: string; content: string }[];
@@ -107,7 +107,7 @@ const MemoryConversation: FC = () => {
setLoading(true)
readService({
message: msg,
group_id: userId,
end_user_id: userId,
search_switch: search_switch,
history: [],
})
@@ -204,7 +204,7 @@ const MemoryConversation: FC = () => {
}
)}
>
<div className="rb:text-[16px] rb:font-medium rb:leading-[22px] rb:mb-6">{log.title}</div>
<div className="rb:text-[16px] rb:font-medium rb:leading-5.5 rb:mb-6">{log.title}</div>
{log.type === 'problem_split' && Array.isArray(log.data) && log.data.length > 0
? <Space size={12} direction="vertical" style={{width: '100%'}}>
{log.data.map(vo => (

View File

@@ -1093,606 +1093,4 @@ export const groupDataByType = (data: any[], groupKey: string) => {
})
return grouped
}
export const mockTestResult = {
"generated_at": "2025-12-12T09:48:43.389893",
"entities": {
"extracted_count": 148
},
"dedup": {
"total_merged_count": 39,
"breakdown": {
"exact": 30,
"fuzzy": 0,
"llm": 9
},
"impact": [
{
"name": "记忆熊",
"type": "Person",
"appear_count": 9,
"merge_count": 8
},
{
"name": "宋朝",
"type": "Organization",
"appear_count": 5,
"merge_count": 2
},
{
"name": "军费",
"type": "EconomicMetric",
"appear_count": 2,
"merge_count": 1
},
{
"name": "学生",
"type": "Person",
"appear_count": 6,
"merge_count": 5
},
{
"name": "废除丞相制度",
"type": "Event",
"appear_count": 6,
"merge_count": 3
},
{
"name": "六部",
"type": "Organization",
"appear_count": 4,
"merge_count": 3
},
{
"name": "六部缺乏协调机制",
"type": "Concept",
"appear_count": 2,
"merge_count": 1
},
{
"name": "丞相",
"type": "Position",
"appear_count": 4,
"merge_count": 1
},
{
"name": "总理",
"type": "Position",
"appear_count": 2,
"merge_count": 1
},
{
"name": "各部委",
"type": "Organization",
"appear_count": 2,
"merge_count": 1
},
{
"name": "六部直接对皇帝负责",
"type": "AdministrativeStructure",
"appear_count": 2,
"merge_count": 1
},
{
"name": "秦国",
"type": "Organization",
"appear_count": 5,
"merge_count": 2
},
{
"name": "文官集团",
"type": "Organization",
"appear_count": 2,
"merge_count": 1
}
]
},
"disambiguation": {
"block_count": 1,
"effects": [
{
"left": {
"name": "节度使",
"type": "Role"
},
"right": {
"name": "节度使",
"type": "Person"
},
"result": "成功区分"
}
]
},
"memory": {
"chunks": 2
},
"triplets": {
"count": 88
},
"core_entities": [
{
"type": "Organization",
"type_cn": "组织",
"count": 16,
"entities": [
"厂卫机构",
"西厂",
"东厂",
"工部",
"地方军阀"
]
},
{
"type": "Event",
"type_cn": "事件",
"count": 12,
"entities": [
"均田制瓦解",
"无法批阅完所有政务",
"废除丞相制度",
"持续战争",
"政令执行困难"
]
},
{
"type": "Condition",
"type_cn": "Condition",
"count": 9,
"entities": [
"缺乏协作机制",
"作战效率低下",
"厢军装备不足",
"军权分散",
"军事专业化难以提升"
]
},
{
"type": "Person",
"type_cn": "人物",
"count": 8,
"entities": [
"官员",
"宦官",
"节度使",
"皇帝",
"文士"
]
},
{
"type": "Concept",
"type_cn": "Concept",
"count": 8,
"entities": [
"行政紧张",
"军力不足",
"秦国统一六国的原因",
"六部缺乏协调机制",
"专业分工"
]
},
{
"type": "Action",
"type_cn": "Action",
"count": 6,
"entities": [
"再花钱募兵",
"建立军功爵制度",
"裁撤兵员",
"削减装备",
"建立法律制度"
]
},
{
"type": "Outcome",
"type_cn": "Outcome",
"count": 5,
"entities": [
"打仗更吃亏",
"提升国家组织能力",
"降低行政效率",
"士兵效忠个人而非国家",
"政令推行困难"
]
},
{
"type": "EconomicMetric",
"type_cn": "EconomicMetric",
"count": 4,
"entities": [
"财政",
"财政支出",
"支出",
"军费"
]
},
{
"type": "Statement",
"type_cn": "Statement",
"count": 3,
"entities": [
"没有银子",
"禁军由文官控制导致作战效率低下",
"武器没材料"
]
},
{
"type": "State",
"type_cn": "State",
"count": 3,
"entities": [
"军队更弱",
"理解不足",
"不足"
]
},
{
"type": "HistoricalPeriod",
"type_cn": "HistoricalPeriod",
"count": 3,
"entities": [
"春秋战国史",
"唐朝史",
"宋朝"
]
},
{
"type": "Attribute",
"type_cn": "Attribute",
"count": 3,
"entities": [
"资源丰富",
"易守难攻",
"政策连续性强"
]
},
{
"type": "Right",
"type_cn": "Right",
"count": 3,
"entities": [
"军事指挥权",
"财政调度权",
"募兵权"
]
},
{
"type": "Policy",
"type_cn": "Policy",
"count": 2,
"entities": [
"商鞅变法",
"禁军由文官控制"
]
},
{
"type": "MilitaryCondition",
"type_cn": "MilitaryCondition",
"count": 2,
"entities": [
"军力不足",
"缺乏战略纵深"
]
},
{
"type": "Role",
"type_cn": "Role",
"count": 2,
"entities": [
"节度使",
"协调中枢"
]
},
{
"type": "Position",
"type_cn": "Position",
"count": 2,
"entities": [
"总理",
"丞相"
]
},
{
"type": "PoliticalCharacteristic",
"type_cn": "PoliticalCharacteristic",
"count": 2,
"entities": [
"旧贵族势力弱",
"中央集权程度高"
]
},
{
"type": "Phenomenon",
"type_cn": "Phenomenon",
"count": 1,
"entities": [
"宋朝军事弱势"
]
},
{
"type": "Factor",
"type_cn": "Factor",
"count": 1,
"entities": [
"制度性因素"
]
},
{
"type": "EconomicFactor",
"type_cn": "EconomicFactor",
"count": 1,
"entities": [
"财政压力"
]
},
{
"type": "EconomicIndicator",
"type_cn": "EconomicIndicator",
"count": 1,
"entities": [
"财政支出"
]
},
{
"type": "MilitaryStrategy",
"type_cn": "MilitaryStrategy",
"count": 1,
"entities": [
"对外战略被动"
]
},
{
"type": "MilitaryCapability",
"type_cn": "MilitaryCapability",
"count": 1,
"entities": [
"机动能力弱"
]
},
{
"type": "PersonGroup",
"type_cn": "PersonGroup",
"count": 1,
"entities": [
"武将"
]
},
{
"type": "EconomicCondition",
"type_cn": "EconomicCondition",
"count": 1,
"entities": [
"财政压力"
]
},
{
"type": "InstitutionalPolicy",
"type_cn": "InstitutionalPolicy",
"count": 1,
"entities": [
"废除丞相制度"
]
},
{
"type": "StateOfAffairs",
"type_cn": "StateOfAffairs",
"count": 1,
"entities": [
"中央决策高度集中于皇帝"
]
},
{
"type": "Institution",
"type_cn": "Institution",
"count": 1,
"entities": [
"科举"
]
},
{
"type": "Function",
"type_cn": "Function",
"count": 1,
"entities": [
"统筹大事小情"
]
},
{
"type": "AdministrativeStructure",
"type_cn": "AdministrativeStructure",
"count": 1,
"entities": [
"六部直接对皇帝负责"
]
},
{
"type": "AdministrativeProblem",
"type_cn": "AdministrativeProblem",
"count": 1,
"entities": [
"皇帝一人批不完政务"
]
},
{
"type": "Behavior",
"type_cn": "Behavior",
"count": 1,
"entities": [
"互相推诿责任"
]
},
{
"type": "Resource",
"type_cn": "Resource",
"count": 1,
"entities": [
"银子"
]
},
{
"type": "Situation",
"type_cn": "Situation",
"count": 1,
"entities": [
"没人拍板"
]
},
{
"type": "HistoricalState",
"type_cn": "HistoricalState",
"count": 1,
"entities": [
"秦国"
]
},
{
"type": "Location",
"type_cn": "地点",
"count": 1,
"entities": [
"关中"
]
},
{
"type": "HistoricalEvent",
"type_cn": "HistoricalEvent",
"count": 1,
"entities": [
"安史之乱"
]
},
{
"type": "PoliticalAction",
"type_cn": "PoliticalAction",
"count": 1,
"entities": [
"中央整顿"
]
},
{
"type": "PoliticalPhenomenon",
"type_cn": "PoliticalPhenomenon",
"count": 1,
"entities": [
"藩镇割据加剧"
]
},
{
"type": "EconomicEntity",
"type_cn": "EconomicEntity",
"count": 1,
"entities": [
"中央财政"
]
},
{
"type": "System",
"type_cn": "System",
"count": 1,
"entities": [
"募兵制"
]
},
{
"type": "WorkRole",
"type_cn": "WorkRole",
"count": 1,
"entities": [
"掌控禁军"
]
}
],
"triplet_samples": [
{
"subject": "记忆熊",
"predicate": "MENTIONS",
"predicate_cn": "提到",
"object": "宋朝军事弱势"
},
{
"subject": "宋朝军事弱势",
"predicate": "RESULTED_IN",
"predicate_cn": "resulted in",
"object": "制度性因素"
},
{
"subject": "记忆熊",
"predicate": "MENTIONS",
"predicate_cn": "提到",
"object": "禁军由文官控制导致作战效率低下"
},
{
"subject": "禁军由文官控制",
"predicate": "RESULTED_IN",
"predicate_cn": "resulted in",
"object": "作战效率低下"
},
{
"subject": "记忆熊",
"predicate": "MENTIONS",
"predicate_cn": "提到",
"object": "厢军装备不足"
},
{
"subject": "记忆熊",
"predicate": "MENTIONS",
"predicate_cn": "提到",
"object": "宋朝"
},
{
"subject": "记忆熊",
"predicate": "MENTIONS",
"predicate_cn": "提到",
"object": "军费"
}
],
"self_reflexion": [
{
"conflict": {
"data": [
{
"id": "76be6d82d8804beda6baa3d3447d6cbc",
"statement": "学生对\"六部缺乏协调机制\"的具体影响表示理解不足。",
"group_id": "group_123",
"chunk_id": "4a0804127d35456f86d4f06e1fa458f7",
"created_at": "2025-12-12 09:48:00.166068",
"expired_at": null,
"valid_at": null,
"invalid_at": null,
"entity_ids": []
}
],
"conflict": true,
"conflict_memory": {
"id": "e268a6fff35543fab471986c188e023e",
"statement": "学生对\"六部缺乏协调机制\"的具体影响表示理解不足。",
"group_id": "group_123",
"chunk_id": "e6cb5f56020e4a8d925d148e1d2fbda0",
"created_at": "2025-12-12 09:48:00.166068",
"expired_at": null,
"valid_at": null,
"invalid_at": null,
"entity_ids": []
}
},
"reflexion": {
"reason": "同一学生在不同时间点重复提出对'六部缺乏协调机制'具体影响的理解困难,表明原有解释未能有效解决其认知障碍,存在记忆冗余与教学反馈失效的冲突。",
"solution": "保留后出现的记忆记录chunk_id为4a0804127d35456f86d4f06e1fa458f7作为最新学习状态将其设为有效将前次相同内容的记忆id为e268a6fff35543fab471986c188e023e标记为失效避免重复干预并基于后续完整解释优化知识呈现逻辑。"
},
"resolved": {
"original_memory_id": "e268a6fff35543fab471986c188e023e",
"resolved_memory": {
"id": "e268a6fff35543fab471986c188e023e",
"statement": "学生对\"六部缺乏协调机制\"的具体影响表示理解不足。",
"group_id": "group_123",
"chunk_id": "e6cb5f56020e4a8d925d148e1d2fbda0",
"created_at": "2025-12-12 09:48:00.166068",
"expired_at": null,
"valid_at": null,
"invalid_at": "2025-12-12 09:48:00.166068",
"entity_ids": []
}
}
}
]
}
}

View File

@@ -23,7 +23,6 @@ export interface Memory {
include_dialogue_context: boolean;
max_context: string;
lambda_mem: string;
lambda_mem: string;
offset: string;
state: boolean;
created_at: string;

View File

@@ -59,6 +59,11 @@ const PerceptualLastInfo: FC<{ type: 'last_visual' | 'last_listen' | 'last_text'
})
}
const handleDownload = () => {
if (!data.file_path) return
window.open(data.file_path, '_blank')
}
return (
<RbCard
title={t(`perceptualDetail.${type}`)}
@@ -78,17 +83,17 @@ const PerceptualLastInfo: FC<{ type: 'last_visual' | 'last_listen' | 'last_text'
<Image src={data.file_path} alt={data.file_name} />
// <img src={data.file_path} alt={data.file_name} className="rb:max-w-full rb:max-h-full rb:object-contain" />
) : (
<div className="rb:text-gray-500">{data.file_name}</div>
<div className="rb:text-[#5B6167]">{data.file_name}</div>
)
) : type === 'last_listen' && /\.(mp3|wav|ogg|m4a|aac)$/i.test(data.file_name) ? (
<audio controls className="rb:w-full">
<source src={data.file_path} />
</audio>
) : (
<div className="rb:text-gray-500">{data.file_name}</div>
<div className="rb:text-[#5B6167] rb:cursor-pointer" onClick={handleDownload}>{data.file_name}</div>
)
) : (
<div className="rb:text-gray-400">No file</div>
<div className="rb:text-[#5B6167]">{t('empty.tableEmpty')}</div>
)}
</div>
<Space size={4} direction="vertical" className="rb:w-full rb:mt-3">

View File

@@ -1,8 +1,9 @@
import { forwardRef, useImperativeHandle, useState, useRef } from 'react'
import { useTranslation } from 'react-i18next'
import clsx from 'clsx'
import { Input, Form, App } from 'antd'
import { Space, Button } from 'antd'
import { Input, Form, App, Space, Button, Collapse } from 'antd'
import { CheckCircleFilled, CloseCircleFilled, LoadingOutlined } from '@ant-design/icons'
import CodeBlock from '@/components/Markdown/CodeBlock'
import ChatIcon from '@/assets/images/application/chat.png'
import RbDrawer from '@/components/RbDrawer';
@@ -13,8 +14,11 @@ import ChatContent from '@/components/Chat/ChatContent'
import type { ChatItem } from '@/components/Chat/types'
import ChatSendIcon from '@/assets/images/application/chatSend.svg'
import dayjs from 'dayjs'
import type { ChatRef, VariableConfigModalRef, StartVariableItem, GraphRef } from '../../types'
import type { ChatRef, VariableConfigModalRef, GraphRef } from '../../types'
import { type SSEMessage } from '@/utils/stream'
import type { Variable } from '../Properties/VariableList/types'
import styles from './chat.module.css'
import Markdown from '@/components/Markdown'
const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId, graphRef }, ref) => {
const { t } = useTranslation()
@@ -24,7 +28,7 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
const [open, setOpen] = useState(false)
const [loading, setLoading] = useState(false)
const [chatList, setChatList] = useState<ChatItem[]>([])
const [variables, setVariables] = useState<StartVariableItem[]>([])
const [variables, setVariables] = useState<Variable[]>([])
const [streamLoading, setStreamLoading] = useState(false)
const [conversationId, setConversationId] = useState<string | null>(null)
@@ -39,7 +43,7 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
if (startNodes.length) {
const curVariables = startNodes[0].config.variables?.defaultValue
curVariables.forEach((vo: StartVariableItem) => {
curVariables.forEach((vo: Variable) => {
if (typeof vo.default !== 'undefined') {
vo.value = vo.default
}
@@ -60,7 +64,7 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
const handleEditVariables = () => {
variableConfigModalRef.current?.handleOpen(variables)
}
const handleSave = (values: StartVariableItem[]) => {
const handleSave = (values: Variable[]) => {
setVariables([...values])
}
const handleSend = () => {
@@ -97,13 +101,28 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
role: 'assistant',
content: '',
created_at: Date.now(),
subContent: [],
}])
const handleStreamMessage = (data: SSEMessage[]) => {
setStreamLoading(false)
data.forEach(item => {
const { chunk, conversation_id } = item.data as { chunk: string; conversation_id: string | null; };
const { chunk, conversation_id, node_id, input, output, error, elapsed_time, status } = item.data as {
chunk: string;
conversation_id: string | null;
node_id: string;
node_name?: string;
input?: any;
output?: any;
elapsed_time?: string;
error?: any;
state: Record<string, any>;
status?: 'completed' | 'failed'
};
const node = graphRef.current?.getNodes().find(n => n.id === node_id);
const { name, icon } = node?.getData() || {}
console.log('node', node?.getData())
switch(item.event) {
case 'message':
@@ -119,6 +138,66 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
return newList
})
break
case 'node_start':
setChatList(prev => {
const newList = [...prev]
const lastIndex = newList.length - 1
if (lastIndex >= 0) {
const newSubContent = newList[lastIndex].subContent || []
const filterIndex = newSubContent.findIndex(vo => vo.id === node_id)
if (filterIndex > -1) {
newSubContent[filterIndex] = {
...newSubContent[filterIndex],
node_id: node_id,
node_name: name,
icon,
content: {},
}
} else {
newSubContent.push({
id: node_id,
node_id: node_id,
node_name: name,
icon,
content: {},
})
}
newList[lastIndex] = {
...newList[lastIndex],
subContent: newSubContent
}
}
return newList
})
break
case 'node_end':
case 'node_error':
setChatList(prev => {
const newList = [...prev]
const lastIndex = newList.length - 1
if (lastIndex >= 0) {
const newSubContent = newList[lastIndex].subContent || []
const filterIndex = newSubContent.findIndex(vo => vo.node_id === node_id)
if (filterIndex > -1 && newSubContent[filterIndex].content) {
newSubContent[filterIndex] = {
...newSubContent[filterIndex],
content: {
input,
output,
error,
},
status: status || 'completed',
elapsed_time
}
}
newList[lastIndex] = {
...newList[lastIndex],
subContent: newSubContent
}
}
return newList
})
break
case 'workflow_end':
setChatList(prev => {
const newList = [...prev]
@@ -126,6 +205,7 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
if (lastIndex >= 0) {
newList[lastIndex] = {
...newList[lastIndex],
status,
content: newList[lastIndex].content === '' ? null : newList[lastIndex].content
}
}
@@ -142,14 +222,31 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
}
form.setFieldValue('message', undefined)
setStreamLoading(true)
draftRun(appId, {
message: message,
variables: params,
stream: true,
conversation_id: conversationId
}, handleStreamMessage)
.catch((error) => {
setChatList(prev => {
const newList = [...prev]
const lastIndex = newList.length - 1
if (lastIndex >= 0) {
newList[lastIndex] = {
...newList[lastIndex],
status: 'failed',
content: null,
subContent: error.error
}
}
return newList
})
})
.finally(() => {
setLoading(false)
setStreamLoading(false)
})
}
// 暴露给父组件的方法
@@ -158,6 +255,11 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
handleClose
}));
const getStatus = (status?: string) => {
return status === 'completed' ? 'rb:text-[#369F21]' : status === 'failed' ? 'rb:text-[#FF5D34]' : 'rb:text-[#5B6167]'
}
console.log('chatList', chatList)
return (
<RbDrawer
title={<div className="rb:flex rb:items-center rb:gap-2.5">
@@ -173,10 +275,7 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
onClose={handleClose}
>
<ChatContent
classNames={{
'rb:mx-[16px] rb:pt-[24px] rb:h-[calc(100%-76px)]': true,
}}
classNames="rb:mx-[16px] rb:pt-[24px] rb:h-[calc(100%-76px)]"
contentClassNames="rb:max-w-[400px]!'"
empty={<Empty url={ChatIcon} title={t('application.chatEmpty')} isNeedSubTitle={false} size={[240, 200]} className="rb:h-full" />}
data={chatList}
@@ -184,6 +283,87 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef }>(({ appId
labelPosition="bottom"
labelFormat={(item) => dayjs(item.created_at).locale('en').format('MMMM D, YYYY [at] h:mm A')}
errorDesc={t('application.ReplyException')}
renderRuntime={(item, index) => {
return (
<div key={index} className="rb:w-100 rb:mb-2">
<Collapse
className={styles[item.status || 'default']}
items={[{
key: 0,
label: <div className={getStatus(item.status)}>
{item.status === 'completed' ? <CheckCircleFilled className="rb:mr-1" /> : item.status === 'failed' ? <CloseCircleFilled className="rb:mr-1" /> : <LoadingOutlined className="rb:mr-1" />}
{t('application.workflow')}
</div>,
className: styles.collapseItem,
children: (
Array.isArray(item.subContent)
? <Space size={8} direction="vertical" className="rb:w-full!">
{item.subContent?.map(vo => (
<Collapse
key={vo.node_id}
items={[{
key: vo.node_id,
label: <div className={clsx("rb:flex rb:justify-between rb:items-center", getStatus(vo.status))}>
<div className="rb:flex rb:items-center rb:gap-1 rb:flex-1">
{vo.icon && <img src={vo.icon} className="rb:size-4" />}
<div className="rb:wrap-break-word rb:line-clamp-1">{vo.node_name || vo.node_id}</div>
</div>
<span>
{typeof vo.elapsed_time == 'number' && <>{vo.elapsed_time?.toFixed(3)}ms</>}
{vo.status === 'completed' ? <CheckCircleFilled className="rb:ml-1" /> : vo.status === 'failed' ? <CloseCircleFilled className="rb:ml-1" /> : <LoadingOutlined className="rb:ml-1" />}
</span>
</div>,
className: styles.collapseItem,
children: (
<Space size={8} direction="vertical" className="rb:w-full!">
{vo.status === 'failed' &&
<div className={clsx("rb:bg-[#F0F3F8] rb:rounded-md", getStatus(vo.status))}>
<div className="rb:py-2 rb:px-3 rb:flex rb:justify-between rb:items-center rb:text-[12px]">
{t(`workflow.error`)}
<Button
className="rb:py-0! rb:px-1! rb:text-[12px]!"
size="small"
>{t('common.copy')}</Button>
</div>
<div className="rb:pb-2 rb:px-3 rb:max-h-40 rb:overflow-auto">
<Markdown content={vo.content?.error || ''} />
</div>
</div>
}
{['input', 'output'].map(key => (
<div key={key} className="rb:bg-[#F0F3F8] rb:rounded-md">
<div className="rb:py-2 rb:px-3 rb:flex rb:justify-between rb:items-center rb:text-[12px]">
{t(`workflow.${key}`)}
<Button
className="rb:py-0! rb:px-1! rb:text-[12px]!"
size="small"
>{t('common.copy')}</Button>
</div>
<div className="rb:max-h-40 rb:overflow-auto">
<CodeBlock
size="small"
value={typeof vo.content === 'object' && vo.content?.[key] ? JSON.stringify(vo.content[key], null, 2) : '{}'}
needCopy={false}
showLineNumbers={true}
/>
</div>
</div>
))}
</Space>
)
}]}
/>
))}
</Space>
: <div className={clsx("rb:bg-[#FBFDFF] rb:rounded-md rb:py-2 rb:px-3 ", getStatus('failed'))}>
<Markdown content={item.subContent || ''} />
</div>
)
}]}
/>
</div>
)
}}
/>
<div className="rb:flex rb:items-center rb:gap-2.5 rb:p-4">
<Form form={form} style={{width: 'calc(100% - 54px)'}}>

View File

@@ -0,0 +1,45 @@
.completed {
background-color: rgba(54, 159, 33, 0.06);
border-color: rgba(54, 159, 33, 0.25);
border-radius: 8px;
}
.failed {
background-color: rgba(255, 138, 76, 0.08);
border-color: rgba(255, 138, 76, 0.20);
border-radius: 8px;
}
.default {
background-color: rgba(91, 97, 103, 0.08);
border-color: rgba(91, 97, 103, 0.30);
border-radius: 8px;
}
.collapse-item {
font-size: 12px;
line-height: 16px;
}
.collapse-item:global(.ant-collapse-item>.ant-collapse-header) {
padding: 8px 12px;
}
.collapse-item:global(.ant-collapse-item>.ant-collapse-header .ant-collapse-expand-icon) {
height: 16px;
}
.completed:global(.ant-collapse .ant-collapse-content),
.failed:global(.ant-collapse .ant-collapse-content) {
background-color: transparent;
border-top: none;
}
:global(.ant-collapse .ant-collapse-content>.ant-collapse-content-box) {
padding-top: 0;
}
.collapse-item :global(.ant-collapse) {
/* background-color: #F0F3F8; */
background-color: #FBFDFF;
border-radius: 6px;
}
.collapse-item :global(.ant-collapse>.ant-collapse-item:last-child),
.collapse-item :global(.ant-collapse>.ant-collapse-item:last-child>.ant-collapse-header) {
border-radius: 0 0 6px 6px;
}
.collapse-item :global(.ant-collapse .ant-collapse-content>.ant-collapse-content-box) {
padding: 0 4px 4px 4px;
}