Compare commits


39 Commits

Author SHA1 Message Date
Ke Sun
feae2f2e1e Merge pull request #1033 from SuanmoSuanyangTechnology/release/v0.3.2
Release/v0.3.2
2026-04-30 11:12:12 +08:00
Mark
415234d4c8 Merge pull request #1032 from SuanmoSuanyangTechnology/fix/sandbox
feat(core): add configurable SANDBOX_URL for code node sandbox requests
2026-04-29 20:26:55 +08:00
Eternity
e38a60e107 feat(core): add configurable SANDBOX_URL for code node sandbox requests 2026-04-29 20:24:10 +08:00
yingzhao
86eb08c73f Merge pull request #1027 from SuanmoSuanyangTechnology/fix/release0.3.2_zy
fix(web): node executionStatus update remove silent
2026-04-29 12:26:26 +08:00
zhaoying
53f1b0e586 fix(web): node executionStatus update remove silent 2026-04-29 12:24:34 +08:00
yingzhao
49cc47a79a Merge pull request #1026 from SuanmoSuanyangTechnology/fix/release0.3.2_zy
fix(web): ontology tag
2026-04-29 12:17:40 +08:00
zhaoying
1817f52edf fix(web): ontology tag 2026-04-29 11:55:43 +08:00
山程漫悟
40633d72c3 Merge pull request #1024 from SuanmoSuanyangTechnology/fix/Timebomb_032
fix(workspace)
2026-04-28 18:37:50 +08:00
Timebomb2018
6f10296969 fix(workspace): deactivate user when removed from last active workspace 2026-04-28 18:34:06 +08:00
yingzhao
89228825cf Merge pull request #1023 from SuanmoSuanyangTechnology/fix/v0.3.2_zy
fix(web): workflow redo/undo
2026-04-28 17:41:45 +08:00
zhaoying
cab4deb2ff fix(web): workflow redo/undo 2026-04-28 17:37:59 +08:00
Ke Sun
4048a10858 ci: add GitHub Actions workflow to sync all branches and tags to Gitee 2026-04-28 16:44:50 +08:00
yingzhao
d6ef0f4923 Merge pull request #1022 from SuanmoSuanyangTechnology/fix/v0.3.2_zy
fix(web): thinking_budget_tokens add min & default value
2026-04-28 16:18:11 +08:00
zhaoying
75fbe44839 fix(web): add min validator 2026-04-28 16:17:31 +08:00
山程漫悟
06597c567b Merge pull request #1019 from SuanmoSuanyangTechnology/fix/Timebomb_032
fix(workspace)
2026-04-28 16:11:44 +08:00
Timebomb2018
28694fefb0 fix(app): adjust thinking budget tokens default and validation range
The default thinking budget tokens value was changed from 10000 to 1024 in base.py, and the minimum validation constraint was updated from 1024 to 1 in app_schema.py to allow smaller budgets while maintaining backward compatibility.
2026-04-28 16:10:44 +08:00
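A minimal sketch of the two changes this commit describes, in the Pydantic v2 style used elsewhere in this diff; the surrounding model is illustrative, not the real app_schema.py definition:

```python
from typing import Optional
from pydantic import BaseModel, Field, ValidationError

class ModelParameters(BaseModel):
    # Minimum lowered from ge=1024 to ge=1, so very small budgets now validate
    thinking_budget_tokens: Optional[int] = Field(default=None, ge=1, le=131072)

ModelParameters(thinking_budget_tokens=8)         # accepted after the change
try:
    ModelParameters(thinking_budget_tokens=0)     # still rejected by ge=1
except ValidationError as e:
    print(e.error_count(), "validation error")

params = ModelParameters()
budget = params.thinking_budget_tokens or 1024    # new fallback default (was 10000)
```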
zhaoying
7a0f08148e fix(web): thinking_budget_tokens add min & default value 2026-04-28 16:10:18 +08:00
Timebomb2018
d3058ce379 fix(workspace): make delete workspace member async and invalidate user tokens 2026-04-28 15:04:13 +08:00
Ke Sun
8d88df391d Merge pull request #1017 from SuanmoSuanyangTechnology/revert-1016-feat/episodic-memory-detail-and-pagination
Revert "refactor(memory): replace raw dict responses with Pydantic schema mod…"
2026-04-27 18:50:43 +08:00
Ke Sun
7621321d1b Revert "refactor(memory): replace raw dict responses with Pydantic schema mod…" 2026-04-27 18:50:26 +08:00
Ke Sun
0e29b0b2a5 Merge pull request #1016 from SuanmoSuanyangTechnology/feat/episodic-memory-detail-and-pagination
refactor(memory): replace raw dict responses with Pydantic schema mod…
2026-04-27 18:43:53 +08:00
lanceyq
2fa4d29548 fix(memory): use explicit None checks and remove unnecessary Optional type
- Replace truthiness checks with 'is not None' for data.message in graph_data and community_graph endpoints to handle empty string correctly
- Remove Optional wrapper from GraphStatistics.edge_types since it already has a default_factory
2026-04-27 18:39:33 +08:00
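A quick illustration of why this commit swaps truthiness for an explicit `None` check (a self-contained sketch; the real checks live in the graph_data and community_graph endpoints):

```python
def resolve_message(message):
    # `if message:` would treat the empty string "" as missing and fall back;
    # `is not None` keeps an explicitly supplied "" intact.
    if message is not None:
        return message
    return "default message"

assert resolve_message("") == ""                    # preserved, not replaced
assert resolve_message(None) == "default message"   # only None falls back
```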
yingzhao
7bb181c1c7 Merge pull request #1014 from SuanmoSuanyangTechnology/fix/v0.3.2_zy
Fix/v0.3.2 zy
2026-04-27 18:07:10 +08:00
zhaoying
a9c87b03ff Merge branch 'fix/v0.3.2_zy' of github.com:SuanmoSuanyangTechnology/MemoryBear into fix/v0.3.2_zy 2026-04-27 18:05:59 +08:00
zhaoying
720af8d261 fix(web): file icon 2026-04-27 18:04:55 +08:00
山程漫悟
09d32ed446 Merge pull request #1015 from SuanmoSuanyangTechnology/fix/Timebomb_032
fix(multimodal)
2026-04-27 18:01:12 +08:00
lanceyq
9a5ce7f7c6 refactor(memory): replace raw dict responses with Pydantic schema models in user memory controllers
- Add user_memory_schema.py with typed Pydantic models for all user memory
  API responses: MemoryInsightReportData, UserSummaryData, GraphData,
  MemoryTypeStatItem, cache result models, and RelationshipEvolutionData
- Refactor user_memory_controllers.py to construct schema instances and
  return model_dump() instead of raw dicts
- Remove unused imports (datetime, timestamp_to_datetime, EndUserInfoResponse,
  EndUserInfoCreate, EndUser)
2026-04-27 17:57:06 +08:00
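A rough sketch of the pattern this commit describes; MemoryTypeStatItem is named in the message, but the fields below are assumptions, with the real definitions in user_memory_schema.py:

```python
from pydantic import BaseModel, Field

class MemoryTypeStatItem(BaseModel):
    # Illustrative fields only; the real schema carries more detail
    memory_type: str
    count: int = Field(default=0, ge=0)

def memory_type_stats() -> list[dict]:
    # Build typed models internally, then serialize to plain dicts at the API boundary
    items = [MemoryTypeStatItem(memory_type="episodic", count=3)]
    return [item.model_dump() for item in items]

print(memory_type_stats())   # [{'memory_type': 'episodic', 'count': 3}]
```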
Timebomb2018
531d785629 fix(multimodal): support HTML image tags in document extraction and chat responses
- Replace plain image URLs with `<img src="..." data-url="...">` HTML tags in multimodal and document extractor services
- Propagate citations from workflow end events to client responses
- Update system prompts to instruct LLMs to render images using Markdown `![alt](url)` with strict UUID-preserving URL copying
2026-04-27 17:56:58 +08:00
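A one-function sketch of the tag rewriting described above; the actual insertion points are in the multimodal and document extractor services shown later in this diff:

```python
def to_img_tag(url: str) -> str:
    # Carry the URL in both src and data-url so downstream renderers can read either attribute
    return f'<img src="{url}" data-url="{url}">'

print(to_img_tag("http://host/storage/permanent/1234.png"))
# <img src="http://host/storage/permanent/1234.png" data-url="http://host/storage/permanent/1234.png">
```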
zhaoying
6d80d74f4a Merge branch 'fix/v0.3.2_zy' of github.com:SuanmoSuanyangTechnology/MemoryBear into fix/v0.3.2_zy 2026-04-27 17:55:51 +08:00
Ke Sun
3d9882643e ci: add GitHub Actions workflow to sync all branches and tags to Gitee 2026-04-27 17:48:35 +08:00
zhaoying
b4e4be1133 fix(web): chat file icon 2026-04-27 17:42:56 +08:00
zhaoying
16926d9db5 fix(web): tool node config reset 2026-04-27 17:10:02 +08:00
zhaoying
f369a63c8d fix(web): loop & iteration child node history 2026-04-27 16:31:10 +08:00
zhaoying
1861b0fbc9 Merge branch 'fix/v0.3.2_zy' of github.com:SuanmoSuanyangTechnology/MemoryBear into fix/v0.3.2_zy 2026-04-27 16:07:20 +08:00
zhaoying
750d4ca841 fix(web): custom tool schema api add case
Co-authored-by: Copilot <copilot@github.com>
2026-04-27 16:04:02 +08:00
zhaoying
8baa466b31 fix(web): loop & iteration history 2026-04-27 15:00:49 +08:00
zhaoying
dd7f9f6cee fix(web): output type node only has left port 2026-04-27 14:08:02 +08:00
zhaoying
d5d81f0c4f fix(web): node execution status reset 2026-04-27 13:47:49 +08:00
zhaoying
610ae27cf9 fix(web): switch space 2026-04-27 10:48:03 +08:00
61 changed files with 1166 additions and 1211 deletions

View File

@@ -3,12 +3,9 @@ name: Sync to Gitee
on:
push:
branches:
- main # Production
- develop # Integration
- 'release/*' # Release preparation
- 'hotfix/*' # Urgent fixes
- '**' # All branches
tags:
- '*' # All version tags (v1.0.0, etc.)
- '**' # All version tags (v1.0.0, etc.)
jobs:
sync:

View File

@@ -82,32 +82,19 @@ async def get_preview_chunks(
detail="The file does not exist or you do not have permission to access it"
)
# 5. Get file content from storage backend
if not db_file.file_key:
# 5. Construct file path: /files/{kb_id}/{parent_id}/{file.id}{file.file_ext}
file_path = os.path.join(
settings.FILE_PATH,
str(db_file.kb_id),
str(db_file.parent_id),
f"{db_file.id}{db_file.file_ext}"
)
# 6. Check if the file exists
if not os.path.exists(file_path):
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File has no storage key (legacy data not migrated)"
)
from app.services.file_storage_service import FileStorageService
import asyncio
storage_service = FileStorageService()
async def _download():
return await storage_service.download_file(db_file.file_key)
try:
file_binary = asyncio.run(_download())
except RuntimeError:
loop = asyncio.new_event_loop()
try:
file_binary = loop.run_until_complete(_download())
finally:
loop.close()
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"File not found in storage: {e}"
detail="File not found (possibly deleted)"
)
# 7. Document parsing & segmentation
@@ -117,12 +104,11 @@ async def get_preview_chunks(
vision_model = QWenCV(
key=db_knowledge.image2text.api_keys[0].api_key,
model_name=db_knowledge.image2text.api_keys[0].model_name,
lang="Chinese",
lang="Chinese", # Default to Chinese
base_url=db_knowledge.image2text.api_keys[0].api_base
)
from app.core.rag.app.naive import chunk
res = chunk(filename=db_file.file_name,
binary=file_binary,
res = chunk(filename=file_path,
from_page=0,
to_page=5,
callback=progress_callback,
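The removed branch above drives an async storage download from synchronous code; the same pattern as a self-contained sketch, with a stub standing in for FileStorageService.download_file:

```python
import asyncio

async def _download(file_key: str) -> bytes:
    await asyncio.sleep(0)                  # stand-in for the real storage call
    return b"bytes for " + file_key.encode()

def download_sync(file_key: str) -> bytes:
    try:
        return asyncio.run(_download(file_key))
    except RuntimeError:
        # asyncio.run refuses to start if this thread already has a loop;
        # fall back to a freshly created loop and close it afterwards
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(_download(file_key))
        finally:
            loop.close()

print(download_sync("kb/demo/doc.txt"))
```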

View File

@@ -20,7 +20,6 @@ from app.models.user_model import User
from app.schemas import document_schema
from app.schemas.response_schema import ApiResponse
from app.services import document_service, file_service, knowledge_service
from app.services.file_storage_service import FileStorageService, get_file_storage_service
# Obtain a dedicated API logger
@@ -232,8 +231,7 @@ async def update_document(
async def delete_document(
document_id: uuid.UUID,
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
storage_service: FileStorageService = Depends(get_file_storage_service),
current_user: User = Depends(get_current_user)
):
"""
Delete document
@@ -259,7 +257,7 @@ async def delete_document(
db.commit()
# 3. Delete file
await file_controller._delete_file(db=db, file_id=file_id, current_user=current_user, storage_service=storage_service)
await file_controller._delete_file(db=db, file_id=file_id, current_user=current_user)
# 4. Delete vector index
db_knowledge = knowledge_service.get_knowledge_by_id(db, knowledge_id=db_document.kb_id, current_user=current_user)
@@ -307,25 +305,38 @@ async def parse_documents(
detail="The file does not exist or you do not have permission to access it"
)
# 3. Get file_key for storage backend
if not db_file.file_key:
api_logger.error(f"File has no storage key (legacy data not migrated): file_id={db_file.id}")
# 3. Construct file path: /files/{kb_id}/{parent_id}/{file.id}{file.file_ext}
file_path = os.path.join(
settings.FILE_PATH,
str(db_file.kb_id),
str(db_file.parent_id),
f"{db_file.id}{db_file.file_ext}"
)
# 4. Check if the file exists
api_logger.debug(f"Constructed file path: {file_path}")
api_logger.debug(f"File metadata - kb_id: {db_file.kb_id}, parent_id: {db_file.parent_id}, file_id: {db_file.id}, extension: {db_file.file_ext}")
if not os.path.exists(file_path):
api_logger.error(f"File not found (possibly deleted): file_path={file_path}, file_id={db_file.id}, document_id={document_id}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File has no storage key (legacy data not migrated)"
detail="File not found (possibly deleted)"
)
# 4. Obtain knowledge base information
api_logger.info(f"Obtain details of the knowledge base: knowledge_id={db_document.kb_id}")
# 5. Obtain knowledge base information
api_logger.info( f"Obtain details of the knowledge base: knowledge_id={db_document.kb_id}")
db_knowledge = knowledge_service.get_knowledge_by_id(db, knowledge_id=db_document.kb_id, current_user=current_user)
if not db_knowledge:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Knowledge base not found")
api_logger.warning(f"The knowledge base does not exist or access is denied: knowledge_id={db_document.kb_id}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The knowledge base does not exist or access is denied"
)
# 5. Dispatch parse task with file_key (not file_path)
task = celery_app.send_task(
"app.core.rag.tasks.parse_document",
args=[db_file.file_key, document_id, db_file.file_name]
)
# 6. Task: Document parsing, vectorization, and storage
# from app.tasks import parse_document
# parse_document(file_path, document_id)
task = celery_app.send_task("app.core.rag.tasks.parse_document", args=[file_path, document_id])
result = {
"task_id": task.id
}
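A minimal sketch of the dispatch-by-name call above; the broker URL is an assumption, and the real app is configured in app.celery_app:

```python
from celery import Celery

celery_app = Celery("memorybear", broker="redis://localhost:6379/0")  # assumed broker

# Arguments must be JSON-serializable, which is why the worker receives the
# document UUID as a string and converts it back (see the tasks diff below).
task = celery_app.send_task(
    "app.core.rag.tasks.parse_document",
    args=["/files/kb-id/parent-id/file-id.pdf", "document-uuid-as-string"],
)
print(task.id)
```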

View File

@@ -1,10 +1,12 @@
import os
from pathlib import Path
import shutil
from typing import Any, Optional
import uuid
from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile, Query
from fastapi.encoders import jsonable_encoder
from fastapi.responses import Response
from fastapi.responses import FileResponse
from sqlalchemy.orm import Session
from app.core.config import settings
@@ -17,14 +19,10 @@ from app.models.user_model import User
from app.schemas import file_schema, document_schema
from app.schemas.response_schema import ApiResponse
from app.services import file_service, document_service
from app.services.knowledge_service import get_knowledge_by_id as get_kb_by_id
from app.services.file_storage_service import (
FileStorageService,
generate_kb_file_key,
get_file_storage_service,
)
from app.core.quota_stub import check_knowledge_capacity_quota
# Obtain a dedicated API logger
api_logger = get_api_logger()
router = APIRouter(
@@ -37,37 +35,67 @@ router = APIRouter(
async def get_files(
kb_id: uuid.UUID,
parent_id: uuid.UUID,
page: int = Query(1, gt=0),
pagesize: int = Query(20, gt=0, le=100),
page: int = Query(1, gt=0), # Default: 1, which must be greater than 0
pagesize: int = Query(20, gt=0, le=100), # Default: 20 items per page, maximum: 100 items
orderby: Optional[str] = Query(None, description="Sort fields, such as: created_at"),
desc: Optional[bool] = Query(False, description="Is it descending order"),
keywords: Optional[str] = Query(None, description="Search keywords (file name)"),
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user)
):
"""Paged query file list"""
api_logger.info(f"Query file list: kb_id={kb_id}, parent_id={parent_id}, page={page}, pagesize={pagesize}")
"""
Paged query file list
- Support filtering by kb_id and parent_id
- Support keyword search for file names
- Support dynamic sorting
- Return paging metadata + file list
"""
api_logger.info(f"Query file list: kb_id={kb_id}, parent_id={parent_id}, page={page}, pagesize={pagesize}, keywords={keywords}, username: {current_user.username}")
# 1. parameter validation
if page < 1 or pagesize < 1:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="The paging parameter must be greater than 0")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="The paging parameter must be greater than 0"
)
filters = [file_model.File.kb_id == kb_id]
# 2. Construct query conditions
filters = [
file_model.File.kb_id == kb_id
]
if parent_id:
filters.append(file_model.File.parent_id == parent_id)
# Keyword search (fuzzy matching of file name)
if keywords:
filters.append(file_model.File.file_name.ilike(f"%{keywords}%"))
# 3. Execute paged query
try:
api_logger.debug("Start executing file paging query")
total, items = file_service.get_files_paginated(
db=db, filters=filters, page=page, pagesize=pagesize,
orderby=orderby, desc=desc, current_user=current_user
db=db,
filters=filters,
page=page,
pagesize=pagesize,
orderby=orderby,
desc=desc,
current_user=current_user
)
api_logger.info(f"File query successful: total={total}, returned={len(items)} records")
except Exception as e:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Query failed: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Query failed: {str(e)}"
)
# 4. Return structured response
result = {
"items": items,
"page": {"page": page, "pagesize": pagesize, "total": total, "has_next": page * pagesize < total}
"page": {
"page": page,
"pagesize": pagesize,
"total": total,
"has_next": True if page * pagesize < total else False
}
}
return success(data=jsonable_encoder(result), msg="Query of file list succeeded")
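The has_next rule used in the page metadata above, checked in isolation:

```python
def page_meta(page: int, pagesize: int, total: int) -> dict:
    # has_next is true while rows remain beyond the current page
    return {"page": page, "pagesize": pagesize, "total": total,
            "has_next": page * pagesize < total}

assert page_meta(1, 20, 45)["has_next"] is True    # rows 21-45 remain
assert page_meta(3, 20, 45)["has_next"] is False   # 60 >= 45, nothing left
```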
@@ -80,14 +108,23 @@ async def create_folder(
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
):
"""Create a new folder"""
api_logger.info(f"Create folder request: kb_id={kb_id}, parent_id={parent_id}, folder_name={folder_name}")
"""
Create a new folder
"""
api_logger.info(f"Create folder request: kb_id={kb_id}, parent_id={parent_id}, folder_name={folder_name}, username: {current_user.username}")
try:
create_folder_data = file_schema.FileCreate(
kb_id=kb_id, created_by=current_user.id, parent_id=parent_id,
file_name=folder_name, file_ext='folder', file_size=0,
api_logger.debug(f"Start creating a folder: {folder_name}")
create_folder = file_schema.FileCreate(
kb_id=kb_id,
created_by=current_user.id,
parent_id=parent_id,
file_name=folder_name,
file_ext='folder',
file_size=0,
)
db_file = file_service.create_file(db=db, file=create_folder_data, current_user=current_user)
db_file = file_service.create_file(db=db, file=create_folder, current_user=current_user)
api_logger.info(f"Folder created successfully: {db_file.file_name} (ID: {db_file.id})")
return success(data=jsonable_encoder(file_schema.File.model_validate(db_file)), msg="Folder creation successful")
except Exception as e:
api_logger.error(f"Folder creation failed: {folder_name} - {str(e)}")
@@ -101,58 +138,76 @@ async def upload_file(
parent_id: uuid.UUID,
file: UploadFile = File(...),
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
storage_service: FileStorageService = Depends(get_file_storage_service),
current_user: User = Depends(get_current_user)
):
"""Upload file to storage backend"""
api_logger.info(f"upload file request: kb_id={kb_id}, parent_id={parent_id}, filename={file.filename}")
"""
upload file
"""
api_logger.info(f"upload file request: kb_id={kb_id}, parent_id={parent_id}, filename={file.filename}, username: {current_user.username}")
# Read the contents of the file
contents = await file.read()
# Check file size
file_size = len(contents)
print(f"file size: {file_size} byte")
if file_size == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="The file is empty.")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="The file is empty."
)
# If the file size exceeds 50MB (50 * 1024 * 1024 bytes)
if file_size > settings.MAX_FILE_SIZE:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"File size exceeds {settings.MAX_FILE_SIZE} byte limit")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"The file size exceeds the {settings.MAX_FILE_SIZE}byte limit"
)
# Extract the extension using `os.path.splitext`
_, file_extension = os.path.splitext(file.filename)
file_ext = file_extension.lower()
# Create File record
upload_file_data = file_schema.FileCreate(
kb_id=kb_id, created_by=current_user.id, parent_id=parent_id,
file_name=file.filename, file_ext=file_ext, file_size=file_size,
upload_file = file_schema.FileCreate(
kb_id=kb_id,
created_by=current_user.id,
parent_id=parent_id,
file_name=file.filename,
file_ext=file_extension.lower(),
file_size=file_size,
)
db_file = file_service.create_file(db=db, file=upload_file_data, current_user=current_user)
db_file = file_service.create_file(db=db, file=upload_file, current_user=current_user)
# Upload to storage backend
file_key = generate_kb_file_key(kb_id=kb_id, file_id=db_file.id, file_ext=file_ext)
try:
await storage_service.storage.upload(file_key=file_key, content=contents, content_type=file.content_type)
except Exception as e:
api_logger.error(f"Storage upload failed: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"File storage failed: {str(e)}")
# Construct a save path: /files/{kb_id}/{parent_id}/{file.id}{file_extension}
save_dir = os.path.join(settings.FILE_PATH, str(kb_id), str(parent_id))
Path(save_dir).mkdir(parents=True, exist_ok=True) # Ensure that the directory exists
save_path = os.path.join(save_dir, f"{db_file.id}{db_file.file_ext}")
# Save file_key
db_file.file_key = file_key
db.commit()
db.refresh(db_file)
# Save file
with open(save_path, "wb") as f:
f.write(contents)
# Create document (inherit parser_config from knowledge base)
default_parser_config = {
"layout_recognize": "DeepDOC", "chunk_token_num": 128, "delimiter": "\n",
"auto_keywords": 0, "auto_questions": 0, "html4excel": "false"
}
try:
db_knowledge = get_kb_by_id(db, knowledge_id=kb_id, current_user=current_user)
if db_knowledge and db_knowledge.parser_config:
default_parser_config.update(dict(db_knowledge.parser_config))
except Exception:
pass
# Verify whether the file has been saved successfully
if not os.path.exists(save_path):
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="File save failed"
)
# Create a document
create_data = document_schema.DocumentCreate(
kb_id=kb_id, created_by=current_user.id, file_id=db_file.id,
file_name=db_file.file_name, file_ext=db_file.file_ext, file_size=db_file.file_size,
file_meta={}, parser_id="naive", parser_config=default_parser_config
kb_id=kb_id,
created_by=current_user.id,
file_id=db_file.id,
file_name=db_file.file_name,
file_ext=db_file.file_ext,
file_size=db_file.file_size,
file_meta={},
parser_id="naive",
parser_config={
"layout_recognize": "DeepDOC",
"chunk_token_num": 128,
"delimiter": "\n",
"auto_keywords": 0,
"auto_questions": 0,
"html4excel": "false"
}
)
db_document = document_service.create_document(db=db, document=create_data, current_user=current_user)
@@ -166,73 +221,123 @@ async def custom_text(
parent_id: uuid.UUID,
create_data: file_schema.CustomTextFileCreate,
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
storage_service: FileStorageService = Depends(get_file_storage_service),
current_user: User = Depends(get_current_user)
):
"""Custom text upload"""
"""
custom text
"""
api_logger.info(f"custom text upload request: kb_id={kb_id}, parent_id={parent_id}, title={create_data.title}, content={create_data.content}, username: {current_user.username}")
# Check file content size
# Encode the content as bytes (UTF-8)
content_bytes = create_data.content.encode('utf-8')
file_size = len(content_bytes)
print(f"file size: {file_size} byte")
if file_size == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="The content is empty.")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="The content is empty."
)
# If the file size exceeds 50MB (50 * 1024 * 1024 bytes)
if file_size > settings.MAX_FILE_SIZE:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Content size exceeds {settings.MAX_FILE_SIZE} byte limit")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"The content size exceeds the {settings.MAX_FILE_SIZE}byte limit"
)
upload_file_data = file_schema.FileCreate(
kb_id=kb_id, created_by=current_user.id, parent_id=parent_id,
file_name=f"{create_data.title}.txt", file_ext=".txt", file_size=file_size,
upload_file = file_schema.FileCreate(
kb_id=kb_id,
created_by=current_user.id,
parent_id=parent_id,
file_name=f"{create_data.title}.txt",
file_ext=".txt",
file_size=file_size,
)
db_file = file_service.create_file(db=db, file=upload_file_data, current_user=current_user)
db_file = file_service.create_file(db=db, file=upload_file, current_user=current_user)
# Upload to storage backend
file_key = generate_kb_file_key(kb_id=kb_id, file_id=db_file.id, file_ext=".txt")
try:
await storage_service.storage.upload(file_key=file_key, content=content_bytes, content_type="text/plain")
except Exception as e:
api_logger.error(f"Storage upload failed: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"File storage failed: {str(e)}")
# Construct a save path: /files/{kb_id}/{parent_id}/{file.id}{file_extension}
save_dir = os.path.join(settings.FILE_PATH, str(kb_id), str(parent_id))
Path(save_dir).mkdir(parents=True, exist_ok=True) # Ensure that the directory exists
save_path = os.path.join(save_dir, f"{db_file.id}.txt")
db_file.file_key = file_key
db.commit()
db.refresh(db_file)
# Save file
with open(save_path, "wb") as f:
f.write(content_bytes)
# Verify whether the file has been saved successfully
if not os.path.exists(save_path):
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="File save failed"
)
# Create a document
create_document_data = document_schema.DocumentCreate(
kb_id=kb_id, created_by=current_user.id, file_id=db_file.id,
file_name=db_file.file_name, file_ext=db_file.file_ext, file_size=db_file.file_size,
file_meta={}, parser_id="naive",
parser_config={"layout_recognize": "DeepDOC", "chunk_token_num": 128, "delimiter": "\n",
"auto_keywords": 0, "auto_questions": 0, "html4excel": "false"}
kb_id=kb_id,
created_by=current_user.id,
file_id=db_file.id,
file_name=db_file.file_name,
file_ext=db_file.file_ext,
file_size=db_file.file_size,
file_meta={},
parser_id="naive",
parser_config={
"layout_recognize": "DeepDOC",
"chunk_token_num": 128,
"delimiter": "\n",
"auto_keywords": 0,
"auto_questions": 0,
"html4excel": "false"
}
)
db_document = document_service.create_document(db=db, document=create_document_data, current_user=current_user)
api_logger.info(f"custom text upload successfully: {create_data.title} (file_id: {db_file.id}, document_id: {db_document.id})")
return success(data=jsonable_encoder(document_schema.Document.model_validate(db_document)), msg="custom text upload successful")
@router.get("/{file_id}", response_model=Any)
async def get_file(
file_id: uuid.UUID,
db: Session = Depends(get_db),
storage_service: FileStorageService = Depends(get_file_storage_service),
db: Session = Depends(get_db)
) -> Any:
"""Download file by file_id"""
"""
Download the file based on the file_id
- Query file information from the database
- Construct the file path and check if it exists
- Return a FileResponse to download the file
"""
api_logger.info(f"Download the file based on the file_id: file_id={file_id}")
# 1. Query file information from the database
db_file = file_service.get_file_by_id(db, file_id=file_id)
if not db_file:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
api_logger.warning(f"The file does not exist or you do not have permission to access it: file_id={file_id}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The file does not exist or you do not have permission to access it"
)
if not db_file.file_key:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File has no storage key (legacy data not migrated)")
# 2. Construct file path: /files/{kb_id}/{parent_id}/{file.id}{file.file_ext}
file_path = os.path.join(
settings.FILE_PATH,
str(db_file.kb_id),
str(db_file.parent_id),
f"{db_file.id}{db_file.file_ext}"
)
try:
content = await storage_service.download_file(db_file.file_key)
except Exception as e:
api_logger.error(f"Storage download failed: {e}")
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found in storage")
# 3. Check if the file exists
if not os.path.exists(file_path):
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File not found (possibly deleted)"
)
import mimetypes
media_type = mimetypes.guess_type(db_file.file_name)[0] or "application/octet-stream"
return Response(
content=content,
media_type=media_type,
headers={"Content-Disposition": f'attachment; filename="{db_file.file_name}"'}
# 4. Return FileResponse (automatically handles the download)
return FileResponse(
path=file_path,
filename=db_file.file_name, # Use original file name
media_type="application/octet-stream" # Universal binary stream type
)
@@ -243,22 +348,50 @@ async def update_file(
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user)
):
"""Update file information (such as file name)"""
db_file = file_service.get_file_by_id(db, file_id=file_id)
if not db_file:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
"""
Update file information (such as file name)
- Only specified fields such as file_name are allowed to be modified
"""
api_logger.debug(f"Query the file to be updated: {file_id}")
# 1. Check if the file exists
db_file = file_service.get_file_by_id(db, file_id=file_id)
if not db_file:
api_logger.warning(f"The file does not exist or you do not have permission to access it: file_id={file_id}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The file does not exist or you do not have permission to access it"
)
# 2. Update fields (only update non-null fields)
api_logger.debug(f"Start updating the file fields: {file_id}")
updated_fields = []
for field, value in update_data.dict(exclude_unset=True).items():
if hasattr(db_file, field):
setattr(db_file, field, value)
old_value = getattr(db_file, field)
if old_value != value:
# update value
setattr(db_file, field, value)
updated_fields.append(f"{field}: {old_value} -> {value}")
if updated_fields:
api_logger.debug(f"updated fields: {', '.join(updated_fields)}")
# 3. Save to database
try:
db.commit()
db.refresh(db_file)
api_logger.info(f"The file has been successfully updated: {db_file.file_name} (ID: {db_file.id})")
except Exception as e:
db.rollback()
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"File update failed: {str(e)}")
api_logger.error(f"File update failed: file_id={file_id} - {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"File update failed: {str(e)}"
)
# 4. Return the updated file
return success(data=jsonable_encoder(file_schema.File.model_validate(db_file)), msg="File information updated successfully")
@@ -266,43 +399,60 @@ async def update_file(
async def delete_file(
file_id: uuid.UUID,
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
storage_service: FileStorageService = Depends(get_file_storage_service),
current_user: User = Depends(get_current_user)
):
"""Delete a file or folder"""
api_logger.info(f"Request to delete file: file_id={file_id}")
await _delete_file(db=db, file_id=file_id, current_user=current_user, storage_service=storage_service)
"""
Delete a file or folder
"""
api_logger.info(f"Request to delete file: file_id={file_id}, username: {current_user.username}")
await _delete_file(db=db, file_id=file_id, current_user=current_user)
return success(msg="File deleted successfully")
async def _delete_file(
file_id: uuid.UUID,
db: Session,
current_user: User,
storage_service: FileStorageService,
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user)
) -> None:
"""Delete a file or folder from storage and database"""
"""
Delete a file or folder
"""
# 1. Check if the file exists
db_file = file_service.get_file_by_id(db, file_id=file_id)
if not db_file:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File not found")
api_logger.warning(f"The file does not exist or you do not have permission to access it: file_id={file_id}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="The file does not exist or you do not have permission to access it"
)
# Delete from storage backend
# 2. Construct physical path
file_path = Path(
settings.FILE_PATH,
str(db_file.kb_id),
str(db_file.id)
) if db_file.file_ext == 'folder' else Path(
settings.FILE_PATH,
str(db_file.kb_id),
str(db_file.parent_id),
f"{db_file.id}{db_file.file_ext}"
)
# 3. Delete physical files/folders
try:
if file_path.exists():
if db_file.file_ext == 'folder':
shutil.rmtree(file_path) # Recursively delete folders
else:
file_path.unlink() # Delete a single file
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to delete physical file/folder: {str(e)}"
)
# 4.Delete db_file
if db_file.file_ext == 'folder':
# For folders, delete all child files from storage first
child_files = db.query(file_model.File).filter(file_model.File.parent_id == db_file.id).all()
for child in child_files:
if child.file_key:
try:
await storage_service.delete_file(child.file_key)
except Exception as e:
api_logger.warning(f"Failed to delete child file from storage: {child.file_key} - {e}")
db.query(file_model.File).filter(file_model.File.parent_id == db_file.id).delete()
else:
if db_file.file_key:
try:
await storage_service.delete_file(db_file.file_key)
except Exception as e:
api_logger.warning(f"Failed to delete file from storage: {db_file.file_key} - {e}")
db.delete(db_file)
db.commit()
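The physical-path cleanup above, as a compact sketch:

```python
import shutil
from pathlib import Path

def remove_entry(path: Path, is_folder: bool) -> None:
    if not path.exists():
        return                      # nothing on disk; DB cleanup still proceeds
    if is_folder:
        shutil.rmtree(path)         # recursively delete the folder tree
    else:
        path.unlink()               # delete the single file
```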

View File

@@ -1,4 +1,4 @@
import asyncio
import uuid
from fastapi import APIRouter, Depends, HTTPException, status, Query
from pydantic import BaseModel, Field
@@ -10,7 +10,7 @@ from app.dependencies import get_current_user
from app.models.user_model import User
from app.schemas.response_schema import ApiResponse
from app.services import memory_dashboard_service, workspace_service
from app.services import memory_dashboard_service, memory_storage_service, workspace_service
from app.services.memory_agent_service import get_end_users_connected_configs_batch
from app.services.app_statistics_service import AppStatisticsService
from app.core.logging_config import get_api_logger
@@ -48,7 +48,7 @@ def get_workspace_total_end_users(
@router.get("/end_users", response_model=ApiResponse)
def get_workspace_end_users(
async def get_workspace_end_users(
workspace_id: Optional[uuid.UUID] = Query(None, description="Workspace ID (optional; defaults to the current user's workspace)"),
keyword: Optional[str] = Query(None, description="Search keyword (fuzzy-matches both other_name and id)"),
page: int = Query(1, ge=1, description="Page number, starting from 1"),
@@ -58,15 +58,6 @@ def get_workspace_end_users(
):
"""
Get the workspace's host list (paginated query, with fuzzy search)
New: memory-count filtering:
Neo4j mode:
- filter to hosts with memory_count > 0 using end_users.memory_count
- memory_num.total comes directly from end_user.memory_count
RAG mode:
- aggregate documents.chunk_num and keep hosts whose total chunk count > 0
- memory_num.total is the aggregated chunk total
Returns the list of hosts under the workspace, supporting pagination and fuzzy search.
The keyword parameter fuzzy-matches both the other_name and id fields.
@@ -89,29 +80,17 @@ def get_workspace_end_users(
current_workspace_type = memory_dashboard_service.get_current_workspace_type(db, workspace_id, current_user)
api_logger.info(f"用户 {current_user.username} 请求获取工作空间 {workspace_id} 的宿主列表, 类型: {current_workspace_type}")
if current_workspace_type == "rag":
end_users_result = memory_dashboard_service.get_workspace_end_users_paginated_rag(
db=db,
workspace_id=workspace_id,
current_user=current_user,
page=page,
pagesize=pagesize,
keyword=keyword,
)
raw_items = end_users_result.get("items", [])
end_users = [item["end_user"] for item in raw_items]
else:
end_users_result = memory_dashboard_service.get_workspace_end_users_paginated(
db=db,
workspace_id=workspace_id,
current_user=current_user,
page=page,
pagesize=pagesize,
keyword=keyword,
)
raw_items = end_users_result.get("items", [])
end_users = raw_items
# Fetch the paginated end_users
end_users_result = memory_dashboard_service.get_workspace_end_users_paginated(
db=db,
workspace_id=workspace_id,
current_user=current_user,
page=page,
pagesize=pagesize,
keyword=keyword
)
end_users = end_users_result.get("items", [])
total = end_users_result.get("total", 0)
if not end_users:
@@ -122,19 +101,50 @@ def get_workspace_end_users(
"page": page,
"pagesize": pagesize,
"total": total,
"hasnext": (page * pagesize) < total,
},
"hasnext": (page * pagesize) < total
}
}, msg="宿主列表获取成功")
end_user_ids = [str(user.id) for user in end_users]
try:
memory_configs_map = get_end_users_connected_configs_batch(end_user_ids, db)
except Exception as e:
api_logger.error(f"批量获取记忆配置失败: {str(e)}")
memory_configs_map = {}
# Run two independent query tasks concurrently
async def get_memory_configs():
"""Fetch memory configs (runs the sync query in a thread pool)"""
try:
return await asyncio.to_thread(
get_end_users_connected_configs_batch,
end_user_ids, db
)
except Exception as e:
api_logger.error(f"批量获取记忆配置失败: {str(e)}")
return {}
# Trigger on-demand initialization: asynchronously generate data for users with no records in implicit_emotions_storage / interest_distribution
async def get_memory_nums():
"""Fetch memory counts"""
if current_workspace_type == "rag":
# RAG mode: batch query
try:
chunk_map = await asyncio.to_thread(
memory_dashboard_service.get_users_total_chunk_batch,
end_user_ids, db, current_user
)
return {uid: {"total": count} for uid, count in chunk_map.items()}
except Exception as e:
api_logger.error(f"批量获取 RAG chunk 数量失败: {str(e)}")
return {uid: {"total": 0} for uid in end_user_ids}
elif current_workspace_type == "neo4j":
# Neo4j mode: batch query (simplified version, only returns total)
try:
batch_result = await memory_storage_service.search_all_batch(end_user_ids)
return {uid: {"total": count} for uid, count in batch_result.items()}
except Exception as e:
api_logger.error(f"批量获取 Neo4j 记忆数量失败: {str(e)}")
return {uid: {"total": 0} for uid in end_user_ids}
return {uid: {"total": 0} for uid in end_user_ids}
# Trigger on-demand initialization: asynchronously generate data for users with no records in implicit_emotions_storage
try:
from app.celery_app import celery_app as _celery_app
_celery_app.send_task(
@@ -149,26 +159,27 @@ def get_workspace_end_users(
except Exception as e:
api_logger.warning(f"触发按需初始化任务失败(不影响主流程): {e}")
# 并发执行配置查询和记忆数量查询
memory_configs_map, memory_nums_map = await asyncio.gather(
get_memory_configs(),
get_memory_nums()
)
# Build the result list
items = []
for index, end_user in enumerate(end_users):
for end_user in end_users:
user_id = str(end_user.id)
config_info = memory_configs_map.get(user_id, {})
if current_workspace_type == "rag":
memory_total = int(raw_items[index].get("memory_count", 0) or 0)
else:
memory_total = int(getattr(end_user, "memory_count", 0) or 0)
items.append({
"end_user": {
"id": user_id,
"other_name": end_user.other_name,
'end_user': {
'id': user_id,
'other_name': end_user.other_name
},
"memory_num": {"total": memory_total},
"memory_config": {
'memory_num': memory_nums_map.get(user_id, {"total": 0}),
'memory_config': {
"memory_config_id": config_info.get("memory_config_id"),
"memory_config_name": config_info.get("memory_config_name"),
},
"memory_config_name": config_info.get("memory_config_name")
}
})
# Trigger the community clustering backfill task (async, does not block the response)
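The concurrent-query pattern introduced in this hunk, reduced to a runnable sketch; stub functions stand in for the real service calls:

```python
import asyncio

def blocking_query(label: str) -> dict:
    return {label: "done"}          # stand-in for a synchronous DB call

async def load_dashboard():
    # Two independent queries run concurrently: asyncio.to_thread moves each
    # blocking call off the event loop, asyncio.gather awaits both results.
    configs, counts = await asyncio.gather(
        asyncio.to_thread(blocking_query, "memory_configs"),
        asyncio.to_thread(blocking_query, "memory_nums"),
    )
    return configs, counts

print(asyncio.run(load_dashboard()))
```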
@@ -396,7 +407,6 @@ def get_current_user_rag_total_num(
total_chunk = memory_dashboard_service.get_current_user_total_chunk(end_user_id, db, current_user)
return success(data=total_chunk, msg="Host RAG knowledge data retrieved successfully")
@router.get("/rag_content", response_model=ApiResponse)
def get_rag_content(
end_user_id: str = Query(..., description="Host ID"),

View File

@@ -296,7 +296,7 @@ async def chat(
}
)
# Multi-agent non-streaming response
# Workflow non-streaming response
result = await app_chat_service.workflow_chat(
message=payload.message,

View File

@@ -221,7 +221,7 @@ def update_workspace_members(
@router.delete("/members/{member_id}", response_model=ApiResponse)
@cur_workspace_access_guard()
def delete_workspace_member(
async def delete_workspace_member(
member_id: uuid.UUID,
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
@@ -230,7 +230,7 @@ def delete_workspace_member(
workspace_id = current_user.current_workspace_id
api_logger.info(f"用户 {current_user.username} 请求删除工作空间 {workspace_id} 的成员 {member_id}")
workspace_service.delete_workspace_member(
await workspace_service.delete_workspace_member(
db=db,
workspace_id=workspace_id,
member_id=member_id,

View File

@@ -241,6 +241,8 @@ class Settings:
SMTP_PORT: int = int(os.getenv("SMTP_PORT", "587"))
SMTP_USER: str = os.getenv("SMTP_USER", "")
SMTP_PASSWORD: str = os.getenv("SMTP_PASSWORD", "")
SANDBOX_URL: str = os.getenv("SANDBOX_URL", "")
REFLECTION_INTERVAL_SECONDS: float = float(os.getenv("REFLECTION_INTERVAL_SECONDS", "300"))
HEALTH_CHECK_SECONDS: float = float(os.getenv("HEALTH_CHECK_SECONDS", "600"))

View File

@@ -20,7 +20,6 @@ from app.core.memory.storage_services.extraction_engine.knowledge_extraction.mem
memory_summary_generation
from app.core.memory.utils.llm.llm_utils import MemoryClientFactory
from app.core.memory.utils.log.logging_utils import log_time
from app.core.memory.utils.memory_count_utils import sync_end_user_memory_count_from_neo4j
from app.db import get_db_context
from app.repositories.neo4j.add_edges import add_memory_summary_statement_edges
from app.repositories.neo4j.add_nodes import add_memory_summary_nodes
@@ -314,28 +313,6 @@ async def write(
except Exception as cache_err:
logger.warning(f"[WRITE] 写入活动统计缓存失败(不影响主流程): {cache_err}", exc_info=True)
# 同步 Neo4j 记忆节点总数到 PostgreSQL end_users.memory_count
if end_user_id:
try:
memory_count_connector = Neo4jConnector()
try:
node_count = await sync_end_user_memory_count_from_neo4j(
end_user_id,
memory_count_connector,
)
finally:
await memory_count_connector.close()
logger.info(
f"[MemoryCount] 写入后同步 memory_count: "
f"end_user_id={end_user_id}, count={node_count}"
)
except Exception as e:
logger.warning(
f"[MemoryCount] 写入后同步 memory_count 失败(不影响主流程): {e}",
exc_info=True,
)
# Close LLM/Embedder underlying httpx clients to prevent
# 'RuntimeError: Event loop is closed' during garbage collection
for client_obj in (llm_client, embedder_client):
@@ -354,4 +331,3 @@ async def write(
logger.info("=== Pipeline Complete ===")
logger.info(f"Total execution time: {total_time:.2f} seconds")

View File

@@ -20,7 +20,6 @@ from uuid import UUID
from datetime import datetime
from app.core.memory.storage_services.forgetting_engine.forgetting_strategy import ForgettingStrategy
from app.core.memory.utils.memory_count_utils import sync_end_user_memory_count_from_neo4j
from app.repositories.neo4j.neo4j_connector import Neo4jConnector
@@ -146,22 +145,7 @@ class ForgettingScheduler:
}
logger.info("没有可遗忘的节点对,遗忘周期结束")
# 同步 Neo4j 记忆节点总数到 PostgreSQL 的 end_users.memory_count
if end_user_id:
try:
node_count = await sync_end_user_memory_count_from_neo4j(
end_user_id,
self.connector,
)
logger.info(
f"[MemoryCount] 遗忘后同步 memory_count: "
f"end_user_id={end_user_id}, count={node_count}"
)
except Exception as e:
logger.warning(
f"[MemoryCount] 遗忘后同步 memory_count 失败(不影响主流程): {e}",
exc_info=True,
)
return report
# Step 3: sort by activation value (lowest activation first)
@@ -318,22 +302,7 @@ class ForgettingScheduler:
f"({reduction_rate:.2%}), "
f"耗时 {duration:.2f}"
)
# 同步 Neo4j 记忆节点总数到 PostgreSQL 的 end_users.memory_count
if end_user_id:
try:
node_count = await sync_end_user_memory_count_from_neo4j(
end_user_id,
self.connector,
)
logger.info(
f"[MemoryCount] 遗忘后同步 memory_count: "
f"end_user_id={end_user_id}, count={node_count}"
)
except Exception as e:
logger.warning(
f"[MemoryCount] 遗忘后同步 memory_count 失败(不影响主流程): {e}",
exc_info=True,
)
return report
except Exception as e:

View File

@@ -1,36 +0,0 @@
from uuid import UUID
from app.db import get_db_context
from app.models.end_user_model import EndUser
from app.repositories.memory_config_repository import MemoryConfigRepository
from app.repositories.neo4j.neo4j_connector import Neo4jConnector
async def sync_end_user_memory_count_from_neo4j(
end_user_id: str,
connector: Neo4jConnector,
) -> int:
"""
Sync one end user's Neo4j memory node count to PostgreSQL.
The caller owns the Neo4j connector lifecycle.
"""
if not end_user_id:
return 0
result = await connector.execute_query(
MemoryConfigRepository.SEARCH_FOR_ALL_BATCH,
end_user_ids=[end_user_id],
)
node_count = int(result[0]["total"]) if result else 0
with get_db_context() as db:
db.query(EndUser).filter(
EndUser.id == UUID(end_user_id)
).update(
{"memory_count": node_count},
synchronize_session=False,
)
db.commit()
return node_count

View File

@@ -216,7 +216,7 @@ class RedBearModelFactory:
# Deep thinking mode (Claude 3.7 Sonnet and other thinking-capable models)
# Pass the thinking block via additional_model_request_fields (omitted when off; Bedrock has no disabled option)
if config.deep_thinking:
budget = config.thinking_budget_tokens or 10000
budget = config.thinking_budget_tokens or 1024
params["additional_model_request_fields"] = {
"thinking": {"type": "enabled", "budget_tokens": budget}
}
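The same logic as a standalone helper, a sketch rather than the factory's actual structure:

```python
def thinking_request_fields(deep_thinking: bool, budget_tokens=None) -> dict:
    # Bedrock has no "disabled" value, so the block is omitted entirely when off
    if not deep_thinking:
        return {}
    return {
        "additional_model_request_fields": {
            "thinking": {"type": "enabled", "budget_tokens": budget_tokens or 1024}
        }
    }

print(thinking_request_fields(True))    # budget falls back to 1024
print(thinking_request_fields(False))   # {}
```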

View File

@@ -14,7 +14,6 @@ Transcribe the content from the provided PDF page image into clean Markdown form
6. Do NOT wrap the output in ```markdown or ``` blocks.
7. Only apply Markdown structure to headings, paragraphs, lists, and tables, strictly based on the layout of the image. Do NOT create tables unless an actual table exists in the image.
8. Preserve the original language, information, and order exactly as shown in the image.
9. Your output language MUST match the language of the content in the image. If the image contains Chinese text, output in Chinese. If English, output in English. Never translate.
{% if page %}
At the end of the transcription, add the page divider: `--- Page {{ page }} ---`.

View File

@@ -14,6 +14,7 @@ from app.core.workflow.engine.variable_pool import VariablePool
from app.core.workflow.nodes import BaseNode
from app.core.workflow.nodes.code.config import CodeNodeConfig
from app.core.workflow.variable.base_variable import VariableType, DEFAULT_VALUE
from app.core.config import settings
logger = logging.getLogger(__name__)
@@ -131,7 +132,7 @@ class CodeNode(BaseNode):
async with httpx.AsyncClient(timeout=60) as client:
response = await client.post(
"http://sandbox:8194/v1/sandbox/run",
f"{settings.SANDBOX_URL}:8194/v1/sandbox/run",
headers={
"x-api-key": 'redbear-sandbox'
},
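How the new setting composes with the fixed port and path, as a sketch; the fallback host here is an assumption for illustration, since config.py above defaults SANDBOX_URL to an empty string and deployments must set it:

```python
import os

# Mirrors the Settings pattern from config.py earlier in this diff
SANDBOX_URL = os.getenv("SANDBOX_URL", "http://sandbox")   # assumed fallback for illustration

# The code node appends the fixed port and path to the configured host
endpoint = f"{SANDBOX_URL}:8194/v1/sandbox/run"
print(endpoint)   # e.g. http://sandbox:8194/v1/sandbox/run, matching the old hardcoded URL
```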

View File

@@ -182,7 +182,7 @@ class DocExtractorNode(BaseNode):
mime_type=f"image/{ext}",
is_file=True,
).model_dump())
text = text + f"\n{placeholder}: {url}"
text = text + f"\n{placeholder}: <img src=\"{url}\" data-url=\"{url}\">"
except Exception as e:
logger.error(f"Node {self.node_id}: failed to save image {placeholder}: {e}")

View File

@@ -1,7 +1,7 @@
import datetime
import uuid
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text
from sqlalchemy import Column, DateTime, ForeignKey, String, Text
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
@@ -38,15 +38,6 @@ class EndUser(Base):
comment="关联的记忆配置ID"
)
memory_count = Column(
Integer,
nullable=False,
default=0,
server_default="0",
index=True,
comment="记忆节点总数",
)
# User Summary Four Dimensions
user_summary = Column(Text, nullable=True, comment="Cached user summary (basic introduction)")
personality_traits = Column(Text, nullable=True, comment="Personality traits")

View File

@@ -15,5 +15,4 @@ class File(Base):
file_ext = Column(String, index=True, nullable=False, comment="file extension:folder|pdf")
file_size = Column(Integer, default=0, comment="file size(byte)")
file_url = Column(String, index=True, nullable=True, comment="file comes from a website url")
file_key = Column(String(512), nullable=True, index=True, comment="storage file key for FileStorageService")
created_at = Column(DateTime, default=datetime.datetime.now)

View File

@@ -250,7 +250,7 @@ class ModelParameters(BaseModel):
n: int = Field(default=1, ge=1, le=10, description="Number of replies to generate")
stop: Optional[List[str]] = Field(default=None, description="Stop sequences")
deep_thinking: bool = Field(default=False, description="Whether to enable deep thinking mode (requires model support, e.g. DeepSeek-R1, QwQ)")
thinking_budget_tokens: Optional[int] = Field(default=None, ge=1024, le=131072, description="Deep thinking token budget (supported by some models only)")
thinking_budget_tokens: Optional[int] = Field(default=None, ge=1, le=131072, description="Deep thinking token budget (supported by some models only)")
json_output: bool = Field(default=False, description="Whether to force JSON-formatted output (requires the model's json_output capability)")

View File

@@ -19,6 +19,4 @@ class EndUser(BaseModel):
# Update times for user summary and insights
user_summary_updated_at: Optional[datetime.datetime] = Field(description="Last update time of the user summary", default=None)
memory_insight_updated_at: Optional[datetime.datetime] = Field(description="Last update time of the insight report", default=None)
# Total number of user memory nodes (Neo4j mode)
memory_count: int = Field(description="Total number of memory nodes", default=0)
memory_insight_updated_at: Optional[datetime.datetime] = Field(description="Last update time of the insight report", default=None)

View File

@@ -11,7 +11,6 @@ class FileBase(BaseModel):
file_ext: str
file_size: int
file_url: str | None = None
file_key: str | None = None
created_at: datetime.datetime | None = None

View File

@@ -161,7 +161,10 @@ class AppChatService:
f.type == FileType.DOCUMENT for f in files
):
system_prompt += (
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: http://...,请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: <img src=\"url\"...>"
"请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"重要:图片 URL 中包含 UUID如 /storage/permanent/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
"必须将 src 属性的值原封不动复制到 Markdown 的括号中,不得增删任何字符。"
)
# 创建 LangChain Agent
@@ -448,7 +451,10 @@ class AppChatService:
):
from langchain.agents import create_agent
system_prompt += (
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: http://...,请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: <img src=\"url\"...>"
"请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"重要:图片 URL 中包含 UUID如 /storage/permanent/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
"必须将 src 属性的值原封不动复制到 Markdown 的括号中,不得增删任何字符。"
)
# 创建 LangChain Agent

View File

@@ -650,7 +650,10 @@ class AgentRunService:
)
if has_doc_with_images:
system_prompt += (
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: http://...,请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: <img src=\"url\"...>"
"请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"重要:图片 URL 中包含 UUID如 /storage/permanent/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
"必须将 src 属性的值原封不动复制到 Markdown 的括号中,不得增删任何字符。"
)
agent = LangChainAgent(
@@ -924,7 +927,10 @@ class AgentRunService:
)
if has_doc_with_images:
system_prompt += (
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: http://...,请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"\n\n文档文字中包含图片位置标记如 [图片 第2页 第1张]: <img src=\"url\"...>"
"请在回答中用 Markdown 格式 ![图片描述](url) 展示对应图片。"
"重要:图片 URL 中包含 UUID如 /storage/permanent/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
"必须将 src 属性的值原封不动复制到 Markdown 的括号中,不得增删任何字符。"
)
# 创建 LangChain Agent

View File

@@ -34,7 +34,26 @@ def generate_file_key(
Generate a unique file key for storage.
The file key follows the format: {tenant_id}/{workspace_id}/{file_id}{file_ext}
Args:
tenant_id: The tenant UUID.
workspace_id: The workspace UUID.
file_id: The file UUID.
file_ext: The file extension (e.g., '.pdf', '.txt').
Returns:
A unique file key string.
Example:
>>> generate_file_key(
... uuid.UUID('550e8400-e29b-41d4-a716-446655440000'),
... uuid.UUID('660e8400-e29b-41d4-a716-446655440001'),
... uuid.UUID('770e8400-e29b-41d4-a716-446655440002'),
... '.pdf'
... )
'550e8400-e29b-41d4-a716-446655440000/660e8400-e29b-41d4-a716-446655440001/770e8400-e29b-41d4-a716-446655440002.pdf'
"""
# Ensure file_ext starts with a dot
if file_ext and not file_ext.startswith('.'):
file_ext = f'.{file_ext}'
if workspace_id:
@@ -42,21 +61,6 @@ def generate_file_key(
return f"{tenant_id}/{file_id}{file_ext}"
def generate_kb_file_key(
kb_id: uuid.UUID,
file_id: uuid.UUID,
file_ext: str,
) -> str:
"""
Generate a file key for knowledge base files.
Format: kb/{kb_id}/{file_id}{file_ext}
"""
if file_ext and not file_ext.startswith('.'):
file_ext = f'.{file_ext}'
return f"kb/{kb_id}/{file_id}{file_ext}"
class FileStorageService:
"""
High-level service for file storage operations.

View File

@@ -1,5 +1,5 @@
from sqlalchemy.orm import Session
from sqlalchemy import desc, nullslast, or_, cast, String, func
from sqlalchemy import desc, nullslast, or_, and_, cast, String
from typing import List, Optional, Dict, Any
import uuid
from fastapi import HTTPException
@@ -102,7 +102,6 @@ def get_workspace_end_users_paginated(
"""获取工作空间的宿主列表(分页版本,支持模糊搜索)
返回结果按 created_at 从新到旧排序NULL 值排在最后)
固定过滤 memory_count > 0 的宿主,保证分页基于“有记忆宿主”集合计算。
支持通过 keyword 参数同时模糊搜索 other_name 和 id 字段
Args:
@@ -121,8 +120,7 @@ def get_workspace_end_users_paginated(
try:
# Build the base query
base_query = db.query(EndUserModel).filter(
EndUserModel.workspace_id == workspace_id,
EndUserModel.memory_count > 0,  # Only query hosts that have memories
EndUserModel.workspace_id == workspace_id
)
# Build search conditions (filter out empty strings and None)
@@ -130,13 +128,20 @@ def get_workspace_end_users_paginated(
if keyword:
keyword_pattern = f"%{keyword}%"
# other_name matching always applies; id matching applies only to records whose other_name is empty
base_query = base_query.filter(
or_(
EndUserModel.other_name.ilike(keyword_pattern),
cast(EndUserModel.id, String).ilike(keyword_pattern),
and_(
or_(
EndUserModel.other_name.is_(None),
EndUserModel.other_name == "",
),
cast(EndUserModel.id, String).ilike(keyword_pattern),
),
)
)
business_logger.info(f"应用模糊搜索: keyword={keyword}(匹配 other_name id")
business_logger.info(f"应用模糊搜索: keyword={keyword}(匹配 other_nameother_name 为空时匹配 id")
# Get the total record count
total = base_query.count()
@@ -164,98 +169,6 @@ def get_workspace_end_users_paginated(
business_logger.error(f"获取工作空间宿主列表(分页)失败: workspace_id={workspace_id} - {str(e)}")
raise
def get_workspace_end_users_paginated_rag(
db: Session,
workspace_id: uuid.UUID,
current_user: User,
page: int,
pagesize: int,
keyword: Optional[str] = None,
) -> Dict[str, Any]:
"""RAG 模式宿主列表分页。
RAG 记忆数量以 documents.chunk_num 为准:
- file_name = end_user_id + ".txt"
- 只统计当前 workspace 下 permission_id="Memory" 的用户记忆知识库
- 在 SQL 层过滤 chunk 总数为 0 的宿主,保证分页准确
"""
business_logger.info(
f"获取 RAG 宿主列表(分页): workspace_id={workspace_id}, "
f"keyword={keyword}, page={page}, pagesize={pagesize}, 操作者: {current_user.username}"
)
try:
from app.models.document_model import Document
from app.models.knowledge_model import Knowledge
chunk_subquery = (
db.query(
Document.file_name.label("file_name"),
func.coalesce(func.sum(Document.chunk_num), 0).label("memory_count"),
)
.join(Knowledge, Document.kb_id == Knowledge.id)
.filter(
Knowledge.workspace_id == workspace_id,
Knowledge.status == 1,
Knowledge.permission_id == "Memory",
Document.status == 1,
)
.group_by(Document.file_name)
.subquery()
)
base_query = (
db.query(
EndUserModel,
chunk_subquery.c.memory_count.label("memory_count"),
)
.join(
chunk_subquery,
chunk_subquery.c.file_name == func.concat(cast(EndUserModel.id, String), ".txt"),
)
.filter(
EndUserModel.workspace_id == workspace_id,
chunk_subquery.c.memory_count > 0,
)
)
keyword = keyword.strip() if keyword else None
if keyword:
keyword_pattern = f"%{keyword}%"
base_query = base_query.filter(
or_(
EndUserModel.other_name.ilike(keyword_pattern),
cast(EndUserModel.id, String).ilike(keyword_pattern),
)
)
total = base_query.count()
if total == 0:
business_logger.info("RAG 模式下没有符合条件的宿主")
return {"items": [], "total": 0}
rows = base_query.order_by(
nullslast(desc(EndUserModel.created_at)),
desc(EndUserModel.id),
).offset((page - 1) * pagesize).limit(pagesize).all()
items = []
for end_user_orm, memory_count in rows:
items.append({
"end_user": EndUserSchema.model_validate(end_user_orm),
"memory_count": int(memory_count or 0),
})
business_logger.info(f"成功获取 RAG 宿主记录 {len(items)} 条,总计 {total}")
return {"items": items, "total": total}
except HTTPException:
raise
except Exception as e:
business_logger.error(
f"获取 RAG 宿主列表(分页)失败: workspace_id={workspace_id} - {str(e)}"
)
raise
def get_workspace_memory_increment(
db: Session,

View File

@@ -400,7 +400,7 @@ class MultimodalService:
# Append the image position marker to the text content
if result and result[-1].get("type") in ("text", "document"):
key = "text" if "text" in result[-1] else list(result[-1].keys())[-1]
result[-1][key] = result[-1].get(key, "") + f"\n[图片 {placeholder}]: {img_url}"
result[-1][key] = result[-1].get(key, "") + f"\n[图片 {placeholder}]: <img src=\"{img_url}\" data-url=\"{img_url}\">"
# Append the image to the message content in visual format
img_file = FileInput(
type=FileType.IMAGE,

View File

@@ -554,13 +554,16 @@ class WorkflowService:
}
}
case "workflow_end":
data = {
"elapsed_time": payload.get("elapsed_time"),
"message_length": len(payload.get("output", "")),
"error": payload.get("error", "")
}
if "citations" in payload and payload["citations"]:
data["citations"] = payload["citations"]
return {
"event": "end",
"data": {
"elapsed_time": payload.get("elapsed_time"),
"message_length": len(payload.get("output", "")),
"error": payload.get("error", "")
}
"data": data
}
case "node_start" | "node_end" | "node_error" | "cycle_item":
return None
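The workflow_end branch above, reduced to a pure function for illustration (payload keys as in the diff):

```python
def end_event(payload: dict) -> dict:
    data = {
        "elapsed_time": payload.get("elapsed_time"),
        "message_length": len(payload.get("output", "")),
        "error": payload.get("error", ""),
    }
    # citations are attached only when present and non-empty
    if payload.get("citations"):
        data["citations"] = payload["citations"]
    return {"event": "end", "data": data}

print(end_event({"output": "hi", "citations": [{"source": "doc-1"}]}))
```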

View File

@@ -20,6 +20,7 @@ from app.models.workspace_model import (
)
from app.repositories import workspace_repository
from app.repositories.workspace_invite_repository import WorkspaceInviteRepository
from app.services.session_service import SessionService
from app.schemas.workspace_schema import (
InviteAcceptRequest,
InviteValidateResponse,
@@ -58,7 +59,7 @@ def switch_workspace(
raise BusinessException(f"切换工作空间失败: {str(e)}", BizCode.INTERNAL_ERROR)
def delete_workspace_member(
async def delete_workspace_member(
db: Session,
workspace_id: uuid.UUID,
member_id: uuid.UUID,
@@ -76,10 +77,29 @@ def delete_workspace_member(
BizCode.WORKSPACE_NOT_FOUND)
try:
deleted_user = workspace_member.user
workspace_member.is_active = False
workspace_member.user.current_workspace_id = None
deleted_user.current_workspace_id = None
# If the removed member is not a superuser and has no other available workspace, deactivate the user
if not deleted_user.is_superuser:
remaining = (
db.query(WorkspaceMember)
.filter(
WorkspaceMember.user_id == deleted_user.id,
WorkspaceMember.workspace_id != workspace_id,
WorkspaceMember.is_active.is_(True),
)
.count()
)
if remaining == 0:
deleted_user.is_active = False
db.commit()
business_logger.info(f"用户 {user.username} 成功删除工作空间 {workspace_id} 的成员 {member_id}")
# 使被删除成员的所有 token 立即失效
await SessionService.invalidate_all_user_tokens(str(workspace_member.user_id))
except Exception as e:
db.rollback()
business_logger.error(f"删除工作空间成员失败 - 工作空间: {workspace_id}, 成员: {member_id}, 错误: {str(e)}")

View File

@@ -210,14 +210,9 @@ def _build_vision_model(file_path: str, db_knowledge):
@celery_app.task(name="app.core.rag.tasks.parse_document")
def parse_document(file_key: str, document_id: uuid.UUID, file_name: str = ""):
def parse_document(file_path: str, document_id: uuid.UUID):
"""
Document parsing, vectorization, and storage.
Args:
file_key: Storage key for FileStorageService (e.g. "kb/{kb_id}/{file_id}.docx")
document_id: Document UUID
file_name: Original file name (used for extension detection in chunk())
Document parsing, vectorization, and storage
"""
db_document = None
@@ -228,6 +223,7 @@ def parse_document(file_key: str, document_id: uuid.UUID, file_name: str = ""):
with get_db_context() as db:
try:
# Celery JSON serialization turns UUIDs into strings, so make sure the type is correct
if not isinstance(document_id, uuid.UUID):
document_id = uuid.UUID(str(document_id))
@@ -238,11 +234,7 @@ def parse_document(file_key: str, document_id: uuid.UUID, file_name: str = ""):
if db_knowledge is None:
raise ValueError(f"Knowledge {db_document.kb_id} not found")
# Use file_name from argument or fall back to document record
if not file_name:
file_name = db_document.file_name
# 1. Download file from storage backend
# 1. Document parsing & segmentation
progress_lines.append(f"{datetime.now().strftime('%H:%M:%S')} Start to parse.")
start_time = time.time()
db_document.progress = 0.0
@@ -253,36 +245,45 @@ def parse_document(file_key: str, document_id: uuid.UUID, file_name: str = ""):
db.commit()
db.refresh(db_document)
# Read file content from storage backend (no NFS dependency)
from app.services.file_storage_service import FileStorageService
import asyncio
storage_service = FileStorageService()
async def _download():
return await storage_service.download_file(file_key)
try:
file_binary = asyncio.run(_download())
except RuntimeError:
# If there's already a running loop (e.g. in some worker configurations)
loop = asyncio.new_event_loop()
try:
file_binary = loop.run_until_complete(_download())
finally:
loop.close()
if not file_binary:
raise IOError(f"Downloaded empty file from storage: {file_key}")
logger.info(f"[ParseDoc] Downloaded {len(file_binary)} bytes from storage key: {file_key}")
def progress_callback(prog=None, msg=None):
progress_lines.append(f"{datetime.now().strftime('%H:%M:%S')} parse progress: {prog} msg: {msg}.")
# Prepare vision_model for parsing
vision_model = _build_vision_model(file_name, db_knowledge)
vision_model = _build_vision_model(file_path, db_knowledge)
# Read the file into memory first so parsing does not depend on the NFS file staying accessible
# Libraries such as python-docx open the file directly by path when binary=None,
# which on NFS/shared storage can fail with "Package not found" after cache invalidation
max_wait_seconds = 30
wait_interval = 2
waited = 0
file_binary = None
while waited <= max_wait_seconds:
# os.listdir forces the NFS client to refresh its directory cache
parent_dir = os.path.dirname(file_path)
try:
os.listdir(parent_dir)
except OSError:
pass
try:
with open(file_path, "rb") as f:
file_binary = f.read()
if not file_binary:
# The file exists on NFS but is empty (it may still be syncing)
raise IOError(f"File is empty (0 bytes), NFS may still be syncing: {file_path}")
break
except (FileNotFoundError, IOError) as e:
if waited >= max_wait_seconds:
raise type(e)(
f"File not accessible at '{file_path}' after waiting {max_wait_seconds}s: {e}"
)
logger.warning(f"File not ready on this node, retrying in {wait_interval}s: {file_path} ({e})")
time.sleep(wait_interval)
waited += wait_interval
from app.core.rag.app.naive import chunk
logger.info(f"[ParseDoc] file_binary size={len(file_binary)} bytes, type={type(file_binary).__name__}, bool={bool(file_binary)}")
res = chunk(filename=file_name,
res = chunk(filename=file_path,
binary=file_binary,
from_page=0,
to_page=DEFAULT_PARSE_TO_PAGE,
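
The download above runs an async storage call from a synchronous Celery task: try asyncio.run(), then fall back to a dedicated event loop if one is already running. A generic sketch of that pattern, with FileStorageService.download_file taken from the diff:

import asyncio

def run_async(make_coro):
    """Run an async callable from sync code (e.g. a Celery task)."""
    try:
        return asyncio.run(make_coro())
    except RuntimeError:
        # asyncio.run() refuses to start inside an already-running loop;
        # fall back to a throwaway loop, as the task above does
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(make_coro())
        finally:
            loop.close()

# Usage mirroring the diff (storage_service and file_key as defined there):
# file_binary = run_async(lambda: storage_service.download_file(file_key))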

View File

@@ -1,47 +0,0 @@
"""202604271530
Revision ID: 1f85dce125e5
Revises: 4e89970f9e7c
Create Date: 2026-04-27 15:30:35.614679
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '1f85dce125e5'
down_revision: Union[str, None] = '4e89970f9e7c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('file_key', sa.String(length=512), nullable=True, comment='storage file key for FileStorageService'))
op.create_index(op.f('ix_files_file_key'), 'files', ['file_key'], unique=False)
op.alter_column('model_configs', 'capability',
existing_type=postgresql.ARRAY(sa.VARCHAR()),
comment="模型能力列表(如['vision', 'audio', 'video', 'thinking']",
existing_comment="模型能力列表(如['vision', 'audio', 'video']",
existing_nullable=False)
# ### end Alembic commands ###
op.execute("""
UPDATE files
SET file_key = 'kb/' || kb_id::text || '/' || parent_id::text || '/' || id::text || file_ext
WHERE file_ext != 'folder' AND file_key IS NULL
""")
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('model_configs', 'capability',
existing_type=postgresql.ARRAY(sa.VARCHAR()),
comment="模型能力列表(如['vision', 'audio', 'video']",
existing_comment="模型能力列表(如['vision', 'audio', 'video', 'thinking']",
existing_nullable=False)
op.drop_index(op.f('ix_files_file_key'), table_name='files')
op.drop_column('files', 'file_key')
# ### end Alembic commands ###

View File

@@ -1,34 +0,0 @@
"""202604281230
Revision ID: e2d60c6d1a1a
Revises: 1f85dce125e5
Create Date: 2026-04-28 12:32:01.643954
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'e2d60c6d1a1a'
down_revision: Union[str, None] = '1f85dce125e5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('tenants', 'api_ops_rate_limit')
op.drop_column('tenants', 'plan')
op.drop_column('tenants', 'plan_expired_at')
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('tenants', sa.Column('plan_expired_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.add_column('tenants', sa.Column('plan', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
op.add_column('tenants', sa.Column('api_ops_rate_limit', sa.VARCHAR(length=100), autoincrement=False, nullable=True))
# ### end Alembic commands ###

Binary image changed (preview not shown; before: 108 KiB, after: 336 KiB)

Binary file changed (preview not shown)

Binary image added (387 B)

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="16px" height="16px" viewBox="0 0 16 16" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<title>勾选</title>
<g id="空间外层页面优化" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="登录页面" transform="translate(-64, -611)" fill="#FFFFFF" fill-rule="nonzero">
<g id="编组-8" transform="translate(64, 608)">
<g id="勾选" transform="translate(0, 3)">
<path d="M12,0 C14.209139,0 16,1.790861 16,4 L16,12 C16,14.209139 14.209139,16 12,16 L4,16 C1.790861,16 0,14.209139 0,12 L0,4 C0,1.790861 1.790861,4.4408921e-16 4,0 L12,0 Z M11.9182266,4.80024782 C11.7273831,4.80024782 11.5444062,4.87629473 11.4097812,5.0115625 L6.552,9.86932813 L4.4284375,7.74489063 C4.29381317,7.60962766 4.11083967,7.53358379 3.92,7.53358379 C3.72916033,7.53358379 3.54618683,7.60962766 3.4115625,7.74489063 C3.27602096,7.87955071 3.19979999,8.06271883 3.19979999,8.25378125 C3.19979999,8.44484367 3.27602096,8.62801179 3.4115625,8.76267188 L6.0453125,11.3946719 C6.17993745,11.5299396 6.3629143,11.6059866 6.55375781,11.6059866 C6.74460132,11.6059866 6.92757818,11.5299396 7.06220312,11.3946719 L12.4311094,6.02667188 C12.5659036,5.89187668 12.6412595,5.70881589 12.6404302,5.51818919 C12.639587,5.3275625 12.5626279,5.14516989 12.4266562,5.0115625 C12.2920469,4.87629473 12.1090701,4.80024782 11.9182266,4.80024782 Z" id="形状结合"></path>
</g>
</g>
</g>
</g>
</svg>

Image deleted (1.5 KiB)

Binary image deleted (preview not shown; 5.3 KiB)

Binary image deleted (preview not shown; 3.8 KiB)

View File

@@ -8,12 +8,11 @@ import { type FC, useRef, useEffect, useState } from 'react'
import clsx from 'clsx'
import Markdown from '@/components/Markdown'
import type { ChatContentProps } from './types'
import { Spin, Image, Flex, Button } from 'antd'
import { Spin, Flex, Button } from 'antd'
import { SoundOutlined } from '@ant-design/icons'
import { useTranslation } from 'react-i18next'
import AudioPlayer from './AudioPlayer'
import VideoPlayer from './VideoPlayer'
import MessageFiles from './MessageFiles'
const getFileUrl = (file: any) => {
return file.thumbUrl || file.url || (file.originFileObj ? URL.createObjectURL(file.originFileObj) : undefined)
@@ -149,72 +148,7 @@ const ChatContent: FC<ChatContentProps> = ({
{labelFormat(item)}
</div>
}
{item?.meta_data?.files && item.meta_data?.files.length > 0 && <Flex gap={8} vertical align="end" className="rb:mb-2!">
{item.meta_data?.files?.map((file) => {
if (file.type.includes('image')) {
return (
<div key={file.url || file.uid} className={`rb:inline-block rb:group rb:relative rb:rounded-lg ${contentClassNames}`}>
<Image src={getFileUrl(file)} alt={file.name} className="rb:w-full rb:max-w-80 rb:rounded-lg rb:object-cover rb:cursor-pointer" />
</div>
)
}
if (file.type.includes('video')) {
return (
<div key={file.url || file.uid} className="rb:w-50">
{/* <video src={getFileUrl(file)} controls className="rb:max-w-80 rb:rounded-lg rb:object-cover rb:cursor-pointer" /> */}
<VideoPlayer key={file.url || file.uid} src={getFileUrl(file)} />
</div>
)
}
if (file.type.includes('audio')) {
return (
<div key={file.url || file.uid} className="rb:w-50">
<AudioPlayer key={file.url || file.uid} src={getFileUrl(file)} />
</div>
)
}
const documentType = (file.file_type || file.type)?.split('/')
return (
<Flex
key={file.url || file.uid}
align="center"
gap={10}
className="rb:text-left rb:w-45 rb:text-[12px] rb:group rb:relative rb:rounded-lg rb-border rb:py-2! rb:px-2.5! rb:border rb:border-[#F6F6F6]"
onClick={() => handleDownload(file)}
>
<div
className={clsx(
"rb:size-5 rb:cursor-pointer rb:bg-cover rb:bg-[url('@/assets/images/conversation/pdf_disabled.svg')]",
file.type?.includes('pdf')
? "rb:bg-[url('@/assets/images/file/pdf.svg')]"
: (file.type?.includes('excel') || file.type?.includes('spreadsheetml.sheet')) || file.type?.includes('xls') || file.type?.includes('xlsx')
? "rb:bg-[url('@/assets/images/file/excel.svg')]"
: file.type?.includes('csv')
? "rb:bg-[url('@/assets/images/file/csv.svg')]"
: file.type?.includes('html')
? "rb:bg-[url('@/assets/images/file/html.svg')]"
: file.type?.includes('json')
? "rb:bg-[url('@/assets/images/file/json.svg')]"
: file.type?.includes('ppt')
? "rb:bg-[url('@/assets/images/file/ppt.svg')]"
: file.type?.includes('markdown')
? "rb:bg-[url('@/assets/images/file/md.svg')]"
: file.type?.includes('text')
? "rb:bg-[url('@/assets/images/file/txt.svg')]"
: (file.type?.includes('doc') || file.type?.includes('docx') || file.type?.includes('word') || file.type?.includes('wordprocessingml.document'))
? "rb:bg-[url('@/assets/images/file/word.svg')]"
: "rb:bg-[url('@/assets/images/file/txt.svg')]"
)}
></div>
<div className="rb:flex-1 rb:w-32.5">
<div className="rb:leading-4 rb:text-ellipsis rb:overflow-hidden rb:whitespace-nowrap">{file.name}</div>
<div className="rb:leading-3.5 rb:mt-0.5 rb:text-[#5B6167] rb:text-ellipsis rb:overflow-hidden rb:whitespace-nowrap">{documentType?.[documentType.length - 1]} · {file.size}</div>
</div>
</Flex>
)
})}
</Flex>}
<MessageFiles files={item.meta_data?.files ?? []} contentClassNames={contentClassNames} onDownload={handleDownload} />
{/* Message bubble */}
<div className={clsx('rb:text-left rb:leading-5 rb:inline-block rb:wrap-break-word rb:relative', item.role === 'user' ? contentClassNames : '', {
// Error message style (content is null and not assistant message)

View File

@@ -0,0 +1,87 @@
import { Image, Flex } from 'antd'
import clsx from 'clsx'
import AudioPlayer from './AudioPlayer'
import VideoPlayer from './VideoPlayer'
const getFileUrl = (file: any) =>
file.thumbUrl || file.url || (file.originFileObj ? URL.createObjectURL(file.originFileObj) : undefined)
const DOC_ICONS: [string[], string][] = [
[['pdf'], "rb:bg-[url('@/assets/images/file/pdf.svg')]"],
[['excel', 'spreadsheetml.sheet', 'xls', 'xlsx'], "rb:bg-[url('@/assets/images/file/excel.svg')]"],
[['csv'], "rb:bg-[url('@/assets/images/file/csv.svg')]"],
[['html'], "rb:bg-[url('@/assets/images/file/html.svg')]"],
[['json'], "rb:bg-[url('@/assets/images/file/json.svg')]"],
[['ppt'], "rb:bg-[url('@/assets/images/file/ppt.svg')]"],
[['markdown'], "rb:bg-[url('@/assets/images/file/md.svg')]"],
[['text'], "rb:bg-[url('@/assets/images/file/txt.svg')]"],
[['doc', 'docx', 'word', 'wordprocessingml.document'], "rb:bg-[url('@/assets/images/file/word.svg')]"],
]
const getDocIcon = (parts: string[]) => {
const match = DOC_ICONS.find(([keys]) => keys.some(k => parts.includes(k)))
return match ? match[1] : "rb:bg-[url('@/assets/images/file/txt.svg')]"
}
interface MessageFilesProps {
files: any[]
contentClassNames?: string | Record<string, boolean>
onDownload: (file: any) => void
}
const MessageFiles = ({ files, contentClassNames, onDownload }: MessageFilesProps) => {
if (!files?.length) return null
return (
<Flex gap={8} vertical align="end" className="rb:mb-2!">
{files.map((file) => {
const key = file.url || file.uid
if (file.type.includes('image')) {
return (
<div key={key} className={clsx('rb:inline-block rb:group rb:relative rb:rounded-lg', contentClassNames)}>
<Image src={getFileUrl(file)} alt={file.name} className="rb:w-full rb:max-w-80 rb:rounded-lg rb:object-cover rb:cursor-pointer" />
</div>
)
}
if (file.type.includes('video')) {
return (
<div key={key} className="rb:w-50">
<VideoPlayer src={getFileUrl(file)} />
</div>
)
}
if (file.type.includes('audio')) {
return (
<div key={key} className="rb:w-50">
<AudioPlayer src={getFileUrl(file)} />
</div>
)
}
const documentType = (file.file_type || file.type)?.split('/') ?? []
return (
<Flex
key={key}
align="center"
gap={10}
className="rb:text-left rb:w-45 rb:text-[12px] rb:group rb:relative rb:rounded-lg rb-border rb:py-2! rb:px-2.5! rb:border rb:border-[#F6F6F6]"
onClick={() => onDownload(file)}
>
<div
className={clsx(
"rb:size-5 rb:cursor-pointer rb:bg-cover rb:bg-[url('@/assets/images/conversation/pdf_disabled.svg')]",
getDocIcon(documentType)
)}
/>
<div className="rb:flex-1 rb:w-32.5">
<div className="rb:leading-4 rb:text-ellipsis rb:overflow-hidden rb:whitespace-nowrap">{file.name}</div>
<div className="rb:leading-3.5 rb:mt-0.5 rb:text-[#5B6167] rb:text-ellipsis rb:overflow-hidden rb:whitespace-nowrap">
{documentType?.[documentType.length - 1]} · {file.size}
</div>
</div>
</Flex>
)
})}
</Flex>
)
}
export default MessageFiles

View File

@@ -3,14 +3,14 @@ import { Popover, type PopoverProps } from 'antd'
import Tag, { type TagProps } from '@/components/Tag'
interface OverflowTagsProps {
items: ReactNode[];
items?: ReactNode[];
gap?: number;
numTagColor?: TagProps['color'];
numTag?: (num?: number) => ReactNode;
popoverProps?: PopoverProps | false;
}
const OverflowTags = ({ items, gap = 8, numTagColor = 'default', numTag, popoverProps }: OverflowTagsProps) => {
const OverflowTags = ({ items = [], gap = 8, numTagColor = 'default', numTag, popoverProps }: OverflowTagsProps) => {
const containerRef = useRef<HTMLDivElement>(null)
const measureRef = useRef<HTMLDivElement>(null)
const [visibleCount, setVisibleCount] = useState(items.length)
@@ -20,7 +20,7 @@ const OverflowTags = ({ items, gap = 8, numTagColor = 'default', numTag, popover
if (!measure || containerWidth === 0) return
const children = Array.from(measure.children) as HTMLElement[]
if (!children.length) return
if (!children.length) { setVisibleCount(0); return }
// last child is the sample +N tag
const extraTagWidth = (children[children.length - 1] as HTMLElement).offsetWidth

View File

@@ -399,7 +399,7 @@ const Menu: FC<{
className="rb:overflow-y-auto rb:flex-1!"
/>
{/* Return to space button for superusers */}
{user?.is_superuser && source === 'space' &&
{source === 'space' &&
<Flex gap={4} vertical className="rb:my-3! rb:mx-3!">
<Divider className="rb:mb-2.5! rb:mt-0! rb:border-[#DFE4ED]! rb:mx-2! rb:min-w-[calc(100%-20px)]! rb:w-[calc(100%-20px)]!" />
<Flex
@@ -412,16 +412,18 @@ const Menu: FC<{
<div className="rb:cursor-pointer rb:size-4 rb:bg-cover rb:bg-[url('@/assets/images/menuNew/switch.svg')]"></div>
{collapsed ? null : t('common.switchSpace')}
</Flex>
<Flex
gap={8}
align="center"
justify="start"
onClick={goToSpace}
className="rb:p-2.5! rb:text-[13px] rb:hover:bg-[rgba(223,228,237,0.5)] rb:rounded-lg rb:leading-3.5 rb:font-regular rb:text-center rb:cursor-pointer"
>
<div className="rb:cursor-pointer rb:size-4 rb:bg-cover rb:bg-[url('@/assets/images/menuNew/return.svg')]"></div>
{collapsed ? null : t('common.returnToSpace')}
</Flex>
{user?.is_superuser &&
<Flex
gap={8}
align="center"
justify="start"
onClick={goToSpace}
className="rb:p-2.5! rb:text-[13px] rb:hover:bg-[rgba(223,228,237,0.5)] rb:rounded-lg rb:leading-3.5 rb:font-regular rb:text-center rb:cursor-pointer"
>
<div className="rb:cursor-pointer rb:size-4 rb:bg-cover rb:bg-[url('@/assets/images/menuNew/return.svg')]"></div>
{collapsed ? null : t('common.returnToSpace')}
</Flex>
}
</Flex>
}
{source === 'manage' && subscription && !collapsed &&

View File

@@ -1538,6 +1538,7 @@ export const en = {
json_output: 'Support JSON formatted output',
thinking_budget_tokens: 'thinking budget tokens',
thinking_budget_tokens_max_error: "Cannot exceed the max tokens limit ({{max}})",
thinking_budget_tokens_min_error: "Cannot be less than {{min}}",
logSearchPlaceholder: 'Search log content',
},
userMemory: {

View File

@@ -868,6 +868,7 @@ export const zh = {
json_output: '支持JSON格式化输出',
thinking_budget_tokens: '深度思考预算Token数',
thinking_budget_tokens_max_error: "不能超过 最大令牌数 ({{max}})",
thinking_budget_tokens_min_error: "不能小于 {{min}}",
logSearchPlaceholder: '搜索日志内容',
},
table: {

View File

@@ -467,29 +467,4 @@ input:-webkit-autofill:active {
animation-name: onAutoFillStart;
animation-duration: 1ms;
}
@keyframes onAutoFillStart { from {} to {} }
/* Login input placeholder */
.login-input input::placeholder {
color: #A8A9AA !important;
}
.login-input {
border-color: #A8A9AA;
}
/* Login input hover/focus border */
.login-input:hover,
.login-input:focus-within {
border-color: #FFFFFF !important;
box-shadow: none !important;
}
/* Override browser autofill styles */
.login-input input:-webkit-autofill,
.login-input input:-webkit-autofill:hover,
.login-input input:-webkit-autofill:focus,
.login-input input:-webkit-autofill:active {
-webkit-box-shadow: 0 0 0px 1000px #0A0A0A inset !important;
-webkit-text-fill-color: #FFFFFF !important;
transition: background-color 5000s ease-in-out 0s !important;
}
@keyframes onAutoFillStart { from {} to {} }

View File

@@ -49,6 +49,8 @@ const configFields = [
{ key: 'n', max: 10, min: 1, step: 1, defaultValue: 1 },
]
const minThinkingBudgetTokens = 128;
const defaultThinkingBudgetTokens = 1000;
const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(({
refresh,
data,
@@ -108,7 +110,7 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
const newValues: ModelConfig = {
capability: (option as Model).capability,
deep_thinking: false,
thinking_budget_tokens: undefined,
thinking_budget_tokens: defaultThinkingBudgetTokens,
json_output: false,
}
if (source === 'chat') {
@@ -128,6 +130,12 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
form.setFieldsValue({ ...rest })
}, [data?.default_model_config_id])
useEffect(() => {
if (values?.deep_thinking && !values?.thinking_budget_tokens) {
form.setFieldValue('thinking_budget_tokens', defaultThinkingBudgetTokens)
}
}, [values?.deep_thinking])
const handleReset = () => {
if (!id) return
resetAppModelConfig(id).then((res) => {
@@ -178,15 +186,20 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
name="thinking_budget_tokens"
label={t('application.thinking_budget_tokens')}
hidden={!['model', 'chat'].includes(source) || !(values?.deep_thinking || values?.capability?.includes('thinking'))}
extra={<>{t('application.range')}: [{0}, {t(`application.max_tokens`)}: {values?.max_tokens}]</>}
extra={<>{t('application.range')}: [{minThinkingBudgetTokens}, {t(`application.max_tokens`)}: {values?.max_tokens}]</>}
rules={[
{ required: values?.deep_thinking, message: t('common.pleaseEnter') },
{
validator: (_, value) => {
const maxTokens = values?.max_tokens
const deep_thinking = values?.deep_thinking;
if (deep_thinking && value !== undefined && maxTokens !== undefined && value > maxTokens) {
return Promise.reject(t('application.thinking_budget_tokens_max_error', { max: maxTokens }))
if (deep_thinking && value !== undefined) {
if (value < minThinkingBudgetTokens) {
return Promise.reject(t('application.thinking_budget_tokens_min_error', { min: minThinkingBudgetTokens }))
}
if (maxTokens !== undefined && value > maxTokens) {
return Promise.reject(t('application.thinking_budget_tokens_max_error', { max: maxTokens }))
}
}
return Promise.resolve()
}
@@ -195,7 +208,7 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
>
<RbSlider
step={1}
min={0}
min={minThinkingBudgetTokens}
max={32000}
isInput={true}
disabled={!values?.deep_thinking}

View File

@@ -102,7 +102,7 @@ const Index = () => {
<Flex gap={12} wrap="nowrap" className="rb:w-full! rb:h-full! rb:overflow-y-auto">
<div className="rb:flex-1 rb:min-w-0">
<Flex vertical>
<div className='rb:w-full rb:h-26 rb:p-4 rb:bg-cover rb:bg-[url("@/assets/images/index/index_bg.png")] rb:rounded-xl rb:overflow-hidden'>
<div className='rb:w-full rb:h-26 rb:p-4 rb:bg-cover rb:bg-[url("@/assets/images/index/index_bg@2x.png")] rb:rounded-xl rb:overflow-hidden'>
<div className="rb:font-[MiSans-Bold] rb:font-bold rb:text-white rb:text-[18px] rb:leading-7">
{t('index.spaceTitle')}
</div>

View File

@@ -14,33 +14,27 @@ import React, { useState, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Button, Input, Form, App } from 'antd';
import type { FormProps } from 'antd';
import clsx from 'clsx';
import { useUser, type LoginInfo } from '@/store/user';
import { login } from '@/api/user'
import loginBg from '@/assets/images/login/bg.mp4'
import check from '@/assets/images/login/check.svg'
import loginBg from '@/assets/images/login/loginBg.png'
import check from '@/assets/images/login/check.png'
import email from '@/assets/images/login/email.svg'
import lock from '@/assets/images/login/lock.svg'
import type { LoginForm } from './types';
import { useI18n } from '@/store/locale'
/**
* Input field styling
*/
const inputClassName = "login-input rb:rounded-[8px]! rb:p-[12px]! rb:h-[44px]! rb:bg-transparent! rb:text-[#FFFFFF]! [&_input]:rb:text-[#FFFFFF]! [&_input]:rb:caret-[#FFFFFF]!"
const inputClassName = "rb:rounded-[8px]! rb:p-[12px]! rb:h-[44px]!"
/**
* Login page component
 */
const LoginPage: React.FC = () => {
const { t } = useTranslation();
const { clearUserInfo, updateLoginInfo, getUserInfo } = useUser();
const { language } = useI18n()
const [loading, setLoading] = useState(false);
const [form] = Form.useForm<LoginForm>();
const emailVal = Form.useWatch('email', form);
const passwordVal = Form.useWatch('password', form);
const canLogin = !!(emailVal && passwordVal);
const { message } = App.useApp();
useEffect(() => {
@@ -49,7 +43,6 @@ const inputClassName = "login-input rb:rounded-[8px]! rb:p-[12px]! rb:h-[44px]!
/** Handle login form submission */
const handleLogin: FormProps<LoginForm>['onFinish'] = async (values) => {
if (!canLogin) return;
if (!values.email) {
message.warning(t('login.emailPlaceholder'));
return;
@@ -71,45 +64,42 @@ const inputClassName = "login-input rb:rounded-[8px]! rb:p-[12px]! rb:h-[44px]!
return (
<div className="rb:min-h-screen rb:flex rb:h-screen rb:bg-[#0A0A0A] rb:text-[#FFFFFF]">
<div className="rb:min-h-screen rb:flex rb:h-screen">
<div className="rb:relative rb:w-1/2 rb:h-screen rb:overflow-hidden">
<video src={loginBg} loop autoPlay playsInline muted className="rb:w-full rb:h-full rb:object-cover"></video>
<div className="rb:absolute rb:top-10 rb:left-12">
<div className={clsx("rb:h-8.25 rb:bg-cover", {
"rb:w-89 rb:bg-[url('@/assets/images/login/title_en.png')]": language !== 'zh',
"rb:w-42 rb:bg-[url('@/assets/images/login/title_zh.png')]": language === 'zh'
})}></div>
<div className="rb:text-[18px] rb:text-[rgba(255,255,255,0.7)] rb:leading-6.25 rb:font-regular rb:mt-3">{t('login.subTitle')}</div>
<img src={loginBg} alt="loginBg" className="rb:w-full rb:h-full rb:object-cover rb:absolute rb:top-1/2 rb:-translate-y-1/2 rb:left-0" />
<div className="rb:absolute rb:top-14 rb:left-16">
<div className="rb:text-[28px] rb:leading-8.25 rb:font-bold rb:font-[AlimamaShuHeiTi,AlimamaShuHeiTi] rb:mb-4">{t('login.title')}</div>
<div className="rb:text-[18px] rb:leading-6.25 rb:font-regular">{t('login.subTitle')}</div>
</div>
<div className="rb:absolute rb:bottom-14 rb:left-12 rb:right-12 rb:grid rb:grid-cols-2 rb:gap-x-30 rb:gap-y-10.75">
{['intelligentMemory', 'instantRecall', 'knowledgeAssociation'].map((key, index) => (
<div key={key} className={`rb:flex${index === 0 ? ' rb:col-span-2' : ''}`}>
<div className="rb:absolute rb:bottom-20.25 rb:left-16 rb:grid rb:grid-cols-2 rb:gap-x-30 rb:gap-y-10.75">
{['intelligentMemory', 'instantRecall', 'knowledgeAssociation'].map(key => (
<div key={key} className="rb:flex">
<img src={check} className="rb:w-4 rb:h-4 rb:mr-2 rb:mt-0.75" />
<div className="rb:text-[16px] rb:leading-5.5">
<div className="rb:font-medium">{t(`login.${key}`)}</div>
<div className="rb:text-[14px] rb:text-[rgba(255,255,255,0.7)] rb:leading-5 rb:font-regular! rb:mt-2">{t(`login.${key}Desc`)}</div>
<div className="rb:text-[#5B6167] rb:text-[14px] rb:leading-5 rb:font-regular! rb:mt-2">{t(`login.${key}Desc`)}</div>
</div>
</div>
))}
</div>
</div>
<div className="rb:flex rb:items-center rb:justify-center rb:flex-[1_1_auto]">
<div className="rb:w-110 rb:mx-auto">
<div className="rb:text-center rb:text-[24px] rb:font-[MiSans-Bold] rb:font-bold rb:leading-8 rb:mb-12">{t('login.welcome')}</div>
<div className="rb:bg-[#FFFFFF] rb:flex rb:items-center rb:justify-center rb:flex-[1_1_auto]">
<div className="rb:w-100 rb:mx-auto">
<div className="rb:text-center rb:text-[28px] rb:font-semibold rb:leading-8 rb:mb-12">{t('login.welcome')}</div>
<Form
form={form}
onFinish={handleLogin}
>
<Form.Item name="email" className="rb:mb-6!">
<Form.Item name="email" className="rb:mb-5!">
<Input
prefix={<img src={email} className="rb:w-5 rb:h-5 rb:mr-2" />}
placeholder={t('login.emailPlaceholder')}
className={inputClassName}
/>
</Form.Item>
<Form.Item name="password" className="rb:mb-0!">
<Form.Item name="password">
<Input.Password
prefix={<img src={lock} className="rb:w-5 rb:h-5 rb:mr-2" />}
placeholder={t('login.passwordPlaceholder')}
@@ -121,11 +111,7 @@ const inputClassName = "login-input rb:rounded-[8px]! rb:p-[12px]! rb:h-[44px]!
block
loading={loading}
htmlType="submit"
disabled={!canLogin}
className={clsx("rb:h-11.5! rb:rounded-lg! rb:mt-12", {
'rb:hover:bg-[#2d6ef1]! rb:bg-[#155EEF]! rb:border-[#155EEF]!': canLogin,
'rb:bg-[#171719]! rb:border-[#171719]!': !canLogin
})}
className="rb:h-10! rb:rounded-lg! rb:mt-4"
>
{t('login.loginIn')}
</Button>

View File

@@ -166,10 +166,10 @@ const Ontology: FC = () => {
<div className="rb:h-10 rb:wrap-break-word rb:line-clamp-2 rb:leading-5">{item.scene_description}</div>
</Tooltip>
<div className="rb:mt-2">
<div className="rb:mt-2 rb:h-5.5">
<OverflowTags
popoverProps={false}
items={[...item.entity_type?.map((type, i) => <Tag key={i} variant="borderless" color="dark">{type}</Tag>), <Tag variant="borderless" color="dark">{`+${item.type_num - 3}`}</Tag>]}
items={item.entity_type ? [...item.entity_type.map((type, i) => <Tag key={i} variant="borderless" color="dark">{type}</Tag>), <Tag variant="borderless" color="dark">{`+${item.type_num - 3}`}</Tag>] : []}
numTag={(num?: number) => <Tag variant="borderless" color="dark">{`+${item.type_num - 3 + (num ? num - 1 : 0)}`}</Tag>}
/>
</div>

View File

@@ -361,7 +361,7 @@ const Market: React.FC<{ getStatusTag?: (status: string) => ReactNode }> = () =>
)}
</Flex>
<div>
<div className="rb:font-[MiSans-Bold] rb:font-bold rb:text-[16px] rb:leading-5.5">{source.name}</div>
<div className="rb:font-[MiSans Bold] rb:font-bold rb:text-[16px] rb:leading-5.5">{source.name}</div>
<div className="rb:text-[#5B6167] rb:text-[12px] rb:leading-4.5">{t('tool.availableMcp')} ({mcpTotal})</div>
</div>
</Flex>

View File

@@ -101,6 +101,7 @@ const CustomToolModal = forwardRef<CustomToolModalRef, CustomToolModalProps>(({
});
};
const formatSchema = (value: string) => {
if (!value || value.trim() === '') return
setParseSchemaData({} as ParseSchemaData)
parseSchema({ schema_content: value })
.then(res => {

View File

@@ -57,7 +57,6 @@ const CanvasToolbar: FC<CanvasToolbarProps> = ({
}
}}
labelRender={(props) => {
console.log('props', props)
return `${props.value}%`
}}
className="rb:w-20 rb:h-4!"

View File

@@ -66,8 +66,6 @@ const Chat = forwardRef<ChatRef, { appId: string; graphRef: GraphRef; data: Work
const [fileList, setFileList] = useState<any[]>([])
const [message, setMessage] = useState<string | undefined>(undefined)
console.log('abortRef', abortRef, chatList)
/**
* Opens the chat drawer and loads workflow variables from the start node
*/

View File

@@ -18,6 +18,7 @@ const AddNode: ReactShapeConfig['component'] = ({ node, graph }) => {
// Handle node selection from popover and create new node replacing the add-node placeholder
const handleNodeSelect = (selectedNodeType: any) => {
graph.startBatch('add-node');
const parentBBox = node.getBBox();
const cycleId = data.cycle;
const horizontalSpacing = 0;
@@ -43,7 +44,7 @@ const AddNode: ReactShapeConfig['component'] = ({ node, graph }) => {
if (cycleId) {
const parentNode = graph.getNodes().find((n: any) => n.getData()?.id === cycleId);
if (parentNode) {
parentNode.addChild(newNode);
parentNode.addChild(newNode, { silent: true });
}
}
@@ -76,55 +77,40 @@ const AddNode: ReactShapeConfig['component'] = ({ node, graph }) => {
}
});
setTimeout(() => {
addedEdges.forEach(e => {
const src = graph.getCellById(e.getSourceCellId());
const tgt = graph.getCellById(e.getTargetCellId());
if (src?.isNode()) src.toFront();
if (tgt?.isNode()) tgt.toFront();
});
}, 50);
// Automatically adjust loop node size
const loopNode = graph.getNodes().find((n: any) => n.getData()?.id === cycleId);
if (loopNode) {
const adjustLoopSize = () => {
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === cycleId);
if (childNodes.length > 0) {
const bounds = childNodes.reduce((acc, child) => {
const bbox = child.getBBox();
return {
minX: Math.min(acc.minX, bbox.x),
minY: Math.min(acc.minY, bbox.y),
maxX: Math.max(acc.maxX, bbox.x + bbox.width),
maxY: Math.max(acc.maxY, bbox.y + bbox.height)
};
}, { minX: Infinity, minY: Infinity, maxX: -Infinity, maxY: -Infinity });
const padding = 50;
const newWidth = Math.max(nodeWidth, bounds.maxX - bounds.minX + padding * 2);
const newHeight = Math.max(120, bounds.maxY - bounds.minY + padding * 2);
loopNode.prop('size', { width: newWidth, height: newHeight });
// Update right port x position
const ports = loopNode.getPorts();
ports.forEach(port => {
if (port.group === 'right' && port.args) {
loopNode.portProp(port.id!, 'args/x', newWidth);
}
});
}
};
adjustLoopSize();
// Listen to child node movement events
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === cycleId);
childNodes.forEach((childNode: any) => {
childNode.on('change:position', adjustLoopSize);
});
if (childNodes.length > 0) {
const bounds = childNodes.reduce((acc, child) => {
const bbox = child.getBBox();
return {
minX: Math.min(acc.minX, bbox.x),
minY: Math.min(acc.minY, bbox.y),
maxX: Math.max(acc.maxX, bbox.x + bbox.width),
maxY: Math.max(acc.maxY, bbox.y + bbox.height)
};
}, { minX: Infinity, minY: Infinity, maxX: -Infinity, maxY: -Infinity });
const padding = 50;
const newWidth = Math.max(nodeWidth, bounds.maxX - bounds.minX + padding * 2);
const newHeight = Math.max(120, bounds.maxY - bounds.minY + padding * 2);
loopNode.prop('size', { width: newWidth, height: newHeight });
loopNode.getPorts().forEach(port => {
if (port.group === 'right' && port.args) {
loopNode.portProp(port.id!, 'args/x', newWidth);
}
});
}
}
addedEdges.forEach(e => {
const src = graph.getCellById(e.getSourceCellId());
const tgt = graph.getCellById(e.getTargetCellId());
if (src?.isNode()) src.toFront();
if (tgt?.isNode()) tgt.toFront();
});
graph.stopBatch('add-node');
setOpen(false);
};

View File

@@ -99,7 +99,7 @@ const ConditionNode: ReactShapeConfig['component'] = ({ node }) => {
{data.type === 'if-else' &&
<Flex vertical gap={4} className="rb:mt-3!">
{data.config?.cases?.defaultValue.map((item: any, index: number) => (
<div key={index} className={item.expressions.length > 0 ? '' : 'rb:mb-1'}>
<div key={index}>
<Flex justify={item.expressions.length > 0 ? "space-between" : 'end'} className="rb:mb-1! rb:leading-4">
{item.expressions.length > 0 && <span className="rb:text-[#5B6167] rb:text-[10px] rb:pl-1">CASE{index + 1}</span>}
<span className="rb:text-[#212332] rb:font-medium rb:text-[12px]">{index === 0 ? 'IF' : `ELIF`}</span>

View File

@@ -1,134 +1,15 @@
import { useEffect } from 'react';
import { useTranslation } from 'react-i18next'
import clsx from 'clsx';
import type { ReactShapeConfig } from '@antv/x6-react-shape';
import { Flex } from 'antd';
import { CheckCircleFilled, CloseCircleFilled, LoadingOutlined } from '@ant-design/icons';
import { useTranslation } from 'react-i18next'
import { graphNodeLibrary, edgeAttrs } from '../../constant';
import NodeTools from './NodeTools'
const LoopNode: ReactShapeConfig['component'] = ({ node, graph }) => {
const LoopNode: ReactShapeConfig['component'] = ({ node }) => {
const data = node.getData() || {};
const { t } = useTranslation()
useEffect(() => {
// Use setTimeout to ensure all nodes have been added before creating the edges
const timer = setTimeout(() => {
initNodes()
checkAndAddAddNode()
}, 50)
return () => clearTimeout(timer)
}, [graph])
const checkAndAddAddNode = () => {
if (!graph) return;
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === data.id);
const cycleStartNodes = childNodes.filter((n: any) => n.getData()?.type === 'cycle-start');
// If there is only one cycle-start child and no other child nodes, add an add-node placeholder
if (cycleStartNodes.length === 1 && childNodes.length === 1) {
const cycleStartNode = cycleStartNodes[0];
const cycleStartBBox = cycleStartNode.getBBox();
const addNode = graph.addNode({
...graphNodeLibrary.addStart,
x: cycleStartBBox.x + 84,
y: cycleStartBBox.y + 4,
data: {
type: 'add-node',
label: t('workflow.addNode'),
icon: '+',
parentId: node.id,
cycle: data.id,
},
});
node.addChild(addNode);
// Connect cycle-start to add-node
const sourcePorts = cycleStartNode.getPorts();
const targetPorts = addNode.getPorts();
const sourcePort = sourcePorts.find((port: any) => port.group === 'right')?.id || 'right';
const targetPort = targetPorts.find((port: any) => port.group === 'left')?.id || 'left';
// Then create the edge
graph.addEdge({
source: { cell: cycleStartNode.id, port: sourcePort },
target: { cell: addNode.id, port: targetPort },
...edgeAttrs,
});
cycleStartNode.toFront()
addNode.toFront()
}
}
const initNodes = () => {
// If child nodes whose cycle equals the current node ID already exist, skip initNodes to avoid duplicate creation
const existingCycleNodes = graph.getNodes().filter((n: any) =>
n.getData()?.cycle === data.id
);
if (existingCycleNodes.length > 0) return;
// Add the default child nodes
const parentBBox = node.getBBox();
const centerX = parentBBox.x + 24;
const centerY = parentBBox.y + 70;
const cycleStartNodeId = `cycle_start_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
const cycleStartNode = graph.addNode({
...graphNodeLibrary.cycleStart,
x: centerX,
y: centerY,
id: cycleStartNodeId,
data: {
id: cycleStartNodeId,
type: 'cycle-start',
parentId: node.id,
isDefault: true, // Mark as a default node that cannot be deleted
cycle: data.id,
},
});
const addNode = graph.addNode({
...graphNodeLibrary.addStart,
x: centerX + 84,
y: centerY + 4,
data: {
type: 'add-node',
label: t('workflow.addNode'),
icon: '+',
parentId: node.id,
cycle: data.id,
},
});
node.addChild(cycleStartNode)
node.addChild(addNode)
const sourcePorts = cycleStartNode.getPorts()
const targetPorts = addNode.getPorts()
let sourcePort = sourcePorts.find((port: any) => port.group === 'right')?.id || 'right';
const edgeConfig = {
source: {
cell: cycleStartNode.id,
port: sourcePort
},
target: {
cell: addNode.id,
port: targetPorts.find((port: any) => port.group === 'left')?.id || 'left'
},
...edgeAttrs
}
graph.addEdge(edgeConfig)
setTimeout(() => {
cycleStartNode.toFront()
addNode.toFront()
}, 0)
}
return (
<div className={clsx('rb:cursor-pointer rb:group rb:relative rb:h-full rb:w-full rb:p-3 rb:border rb:rounded-2xl rb:bg-[#FCFCFD] rb:shadow-[0px_2px_4px_0px_rgba(23,23,25,0.03)]', {
'rb:border-[#171719]!': data.isSelected && !data.executionStatus,

View File

@@ -43,70 +43,52 @@ const PortClickHandler: React.FC<PortClickHandlerProps> = ({ graph }) => {
};
}, []);
// Handle node selection from popover menu and create new node with edge connection
const handleNodeSelect = (selectedNodeType: any) => {
if (!sourceNode || !graph) return;
const sourceNodeData = sourceNode.getData();
const sourceNodeType = sourceNodeData?.type;
// If it's a cycle-start node, handle the add-node placeholder
const isCycleSubNode = !!sourceNodeData.cycle;
const isCycleContainer = (type: string) => type === 'loop' || type === 'iteration';
const newNodeType = selectedNodeType.type;
// Save add-node placeholder position before disabling history
let addNodePosition = null;
const isCycleSubNode = sourceNodeData.cycle
if (isCycleSubNode && sourceNodeType === 'cycle-start') {
const cycleId = sourceNodeData.cycle;
const addNodes = graph.getNodes().filter((n: any) =>
const addNodes = graph.getNodes().filter((n: any) =>
n.getData()?.type === 'add-node' && n.getData()?.cycle === cycleId
);
if (addNodes.length > 0) {
const addNode = addNodes[0];
addNodePosition = addNode.getBBox();
addNode.remove();
}
if (addNodes.length > 0) addNodePosition = addNodes[0].getBBox();
}
// Calculate new node position to avoid overlapping
// Calculate position
const sourceBBox = sourceNode.getBBox();
const nodeWidth = graphNodeLibrary[selectedNodeType.type]?.width || 120;
const nodeHeight = graphNodeLibrary[selectedNodeType.type]?.height || 88;
const horizontalSpacing = isCycleSubNode ? 48 : 80;
const verticalSpacing = 10;
// Get source port group information
const nw = graphNodeLibrary[newNodeType]?.width || 120;
const nh = graphNodeLibrary[newNodeType]?.height || 88;
const hSpacing = isCycleSubNode ? 48 : 80;
const vSpacing = 10;
const sourcePortInfo = sourceNode.getPorts().find((p: any) => p.id === sourcePort);
const sourcePortGroup = sourcePortInfo?.group || sourcePort;
// Calculate new node position
let newX, newY;
let newX: number, newY: number;
if (edgeInsertion) {
// Edge insertion: place new node on the same row as target, between source and target
const targetBBox = edgeInsertion.targetCell.getBBox();
const gap = targetBBox.x - (sourceBBox.x + sourceBBox.width);
const requiredSpace = nodeWidth + horizontalSpacing * 4;
// New node x: right after source + spacing
newX = sourceBBox.x + sourceBBox.width + horizontalSpacing;
// Same row as target node
newY = targetBBox.y + (targetBBox.height - nodeHeight) / 2;
// If not enough space, shift target and all downstream nodes to the right
const requiredSpace = nw + hSpacing * 4;
newX = sourceBBox.x + sourceBBox.width + hSpacing;
newY = targetBBox.y + (targetBBox.height - nh) / 2;
if (gap < requiredSpace) {
const shiftX = requiredSpace - gap;
const visited = new Set<string>();
const shiftDownstream = (cell: any) => {
const cellId = cell.id;
if (visited.has(cellId)) return;
visited.add(cellId);
if (visited.has(cell.id)) return;
visited.add(cell.id);
const pos = cell.getPosition();
cell.setPosition(pos.x + shiftX, pos.y);
// Recursively shift nodes connected from right ports
graph.getConnectedEdges(cell, { outgoing: true }).forEach((e: any) => {
const tId = e.getTargetCellId();
if (tId && !visited.has(tId)) {
const tCell = graph.getCellById(tId);
if (tCell?.isNode()) shiftDownstream(tCell);
}
const tCell = graph.getCellById(e.getTargetCellId());
if (tCell?.isNode()) shiftDownstream(tCell);
});
};
shiftDownstream(edgeInsertion.targetCell);
@@ -114,208 +96,170 @@ const PortClickHandler: React.FC<PortClickHandlerProps> = ({ graph }) => {
} else if (addNodePosition) {
newX = addNodePosition.x;
newY = addNodePosition.y;
} else if (sourcePortGroup === 'left') {
newX = sourceBBox.x - nw * 2 - hSpacing;
newY = sourceBBox.y;
} else {
// Determine node placement direction based on port position
if (sourcePortGroup === 'left') {
// Left port: add node to the left
newX = sourceBBox.x - nodeWidth*2 - horizontalSpacing;
newY = sourceBBox.y;
} else {
// Right port: add node to the right
newX = sourceBBox.x + sourceBBox.width + horizontalSpacing;
newY = sourceBBox.y;
}
// Check if position overlaps with existing nodes (only consider connected nodes)
const checkOverlap = (x: number, y: number) => {
// Get nodes connected to the source node
const connectedNodes = new Set();
graph.getConnectedEdges(sourceNode).forEach((edge: any) => {
const sourceId = edge.getSourceCellId();
const targetId = edge.getTargetCellId();
if (sourceId !== sourceNode.id) connectedNodes.add(sourceId);
if (targetId !== sourceNode.id) connectedNodes.add(targetId);
newX = sourceBBox.x + sourceBBox.width + hSpacing;
newY = sourceBBox.y;
const connectedNodes = new Set<string>();
graph.getConnectedEdges(sourceNode).forEach((e: any) => {
[e.getSourceCellId(), e.getTargetCellId()].forEach((cid: string) => {
if (cid !== sourceNode.id) connectedNodes.add(cid);
});
return graph.getNodes().some((node: any) => {
if (node.id === sourceNode.id) return false;
if (!connectedNodes.has(node.id)) return false; // Only consider connected nodes
const bbox = node.getBBox();
return !(x + nodeWidth < bbox.x || x > bbox.x + bbox.width ||
y + nodeHeight < bbox.y || y > bbox.y + bbox.height);
});
const checkOverlap = (x: number, y: number) =>
graph.getNodes().some((n: any) => {
if (n.id === sourceNode.id || !connectedNodes.has(n.id)) return false;
const b = n.getBBox();
return !(x + nw < b.x || x > b.x + b.width || y + nh < b.y || y > b.y + b.height);
});
};
// If position is occupied, search downward for empty space
while (checkOverlap(newX, newY)) {
newY += nodeHeight + verticalSpacing;
}
while (checkOverlap(newX, newY)) newY += nh + vSpacing;
}
// Create new node
const id = `${selectedNodeType.type.replace(/-/g, '_')}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
// Disable history for all graph mutations
graph.disableHistory();
// Remove add-node placeholder
if (isCycleSubNode && sourceNodeType === 'cycle-start') {
const cycleId = sourceNodeData.cycle;
graph.getNodes()
.filter((n: any) => n.getData()?.type === 'add-node' && n.getData()?.cycle === cycleId)
.forEach((n: any) => n.remove());
}
const id = `${newNodeType.replace(/-/g, '_')}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
const newNode = graph.addNode({
...(graphNodeLibrary[selectedNodeType.type] || graphNodeLibrary.default),
...(graphNodeLibrary[newNodeType] || graphNodeLibrary.default),
x: newX,
y: newY - (isCycleSubNode && sourceNodeType === 'cycle-start' ? 12 : 0),
id,
data: {
id,
type: selectedNodeType.type,
type: newNodeType,
icon: selectedNodeType.icon,
name: t(`workflow.${selectedNodeType.type}`),
cycle: sourceNodeData.cycle, // Inherit cycle from source node
name: t(`workflow.${newNodeType}`),
cycle: sourceNodeData.cycle,
config: selectedNodeType.config || {}
},
});
// Add new node as child of parent node
if (sourceNodeData.cycle) {
const parentNode = graph.getNodes().find((n: any) => n.getData()?.id === sourceNodeData.cycle);
if (parentNode) {
parentNode.addChild(newNode);
}
if (parentNode) parentNode.addChild(newNode, { silent: true });
}
// Edge insertion: remove old edge immediately before creating new edges
if (edgeInsertion) {
const { edge: oldEdge } = edgeInsertion;
if (oldEdge.id && graph.getCellById(oldEdge.id)) {
graph.removeCell(oldEdge.id);
} else {
graph.removeEdge(oldEdge);
}
if (oldEdge.id && graph.getCellById(oldEdge.id)) graph.removeCell(oldEdge.id);
else graph.removeEdge(oldEdge);
}
// Create edge connection
setTimeout(() => {
const newPorts = newNode.getPorts();
const newPorts = newNode.getPorts();
const addedCells: any[] = [newNode];
const addedEdges: any[] = [];
if (edgeInsertion) {
// Edge insertion: create source→new and new→target edges
const { targetCell, targetPort: origTargetPort } = edgeInsertion;
const newLeftPort = newPorts.find((p: any) => p.group === 'left')?.id || 'left';
const newRightPort = newPorts.find((p: any) => p.group === 'right')?.id || 'right';
addedEdges.push(graph.addEdge({
source: { cell: sourceNode.id, port: sourcePort },
target: { cell: newNode.id, port: newLeftPort },
...edgeAttrs
}));
addedEdges.push(graph.addEdge({
source: { cell: newNode.id, port: newRightPort },
target: { cell: targetCell.id, port: origTargetPort },
...edgeAttrs
}));
setEdgeInsertion(null);
} else if (sourcePortGroup === 'left') {
// Connect from left port to new node's right side
const targetPort = newPorts.find((port: any) => port.group === 'right')?.id || 'right';
addedEdges.push(graph.addEdge({
source: { cell: newNode.id, port: targetPort },
target: { cell: sourceNode.id, port: sourcePort },
...edgeAttrs
}));
} else {
// Connect from right port to new node's left side
const targetPort = newPorts.find((port: any) => port.group === 'left')?.id || 'left';
addedEdges.push(graph.addEdge({
source: { cell: sourceNode.id, port: sourcePort },
target: { cell: newNode.id, port: targetPort },
...edgeAttrs
}));
}
// Adjust loop node size when child node is added via port within loop node
const cycleId = sourceNodeData.cycle;
if (cycleId) {
const parentNode = graph.getNodes().find((n: any) => n.getData()?.id === cycleId);
if (edgeInsertion) {
const { targetCell, targetPort: origTargetPort } = edgeInsertion;
const newLeftPort = newPorts.find((p: any) => p.group === 'left')?.id || 'left';
const newRightPort = newPorts.find((p: any) => p.group === 'right')?.id || 'right';
addedCells.push(graph.addEdge({ source: { cell: sourceNode.id, port: sourcePort }, target: { cell: newNode.id, port: newLeftPort }, ...edgeAttrs }));
addedCells.push(graph.addEdge({ source: { cell: newNode.id, port: newRightPort }, target: { cell: targetCell.id, port: origTargetPort }, ...edgeAttrs }));
setEdgeInsertion(null);
} else if (sourcePortGroup === 'left') {
const tp = newPorts.find((p: any) => p.group === 'right')?.id || 'right';
addedCells.push(graph.addEdge({ source: { cell: newNode.id, port: tp }, target: { cell: sourceNode.id, port: sourcePort }, ...edgeAttrs }));
} else {
const tp = newPorts.find((p: any) => p.group === 'left')?.id || 'left';
addedCells.push(graph.addEdge({ source: { cell: sourceNode.id, port: sourcePort }, target: { cell: newNode.id, port: tp }, ...edgeAttrs }));
}
if (parentNode) {
const adjustLoopSize = () => {
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === cycleId);
if (childNodes.length > 0) {
const bounds = childNodes.reduce((acc: any, child: any) => {
const bbox = child.getBBox();
return {
minX: Math.min(acc.minX, bbox.x),
minY: Math.min(acc.minY, bbox.y),
maxX: Math.max(acc.maxX, bbox.x + bbox.width),
maxY: Math.max(acc.maxY, bbox.y + bbox.height)
};
}, { minX: Infinity, minY: Infinity, maxX: -Infinity, maxY: -Infinity });
// If adding a loop/iteration node, create cycle-start, add-node and inner edge regardless of source type
if (isCycleContainer(newNodeType)) {
const parentBBox = newNode.getBBox();
const cycleStartId = `cycle_start_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
const cycleStartNode = graph.addNode({
...graphNodeLibrary.cycleStart,
x: parentBBox.x + 24,
y: parentBBox.y + 70,
id: cycleStartId,
data: { id: cycleStartId, type: 'cycle-start', parentId: id, isDefault: true, cycle: id },
});
const addNodePlaceholder = graph.addNode({
...graphNodeLibrary.addStart,
x: parentBBox.x + 24 + 84,
y: parentBBox.y + 70 + 4,
data: { type: 'add-node', label: t('workflow.addNode'), icon: '+', parentId: id, cycle: id },
});
newNode.addChild(cycleStartNode, { silent: true });
newNode.addChild(addNodePlaceholder, { silent: true });
const innerEdge = graph.addEdge({
source: { cell: cycleStartNode.id, port: cycleStartNode.getPorts().find((p: any) => p.group === 'right')?.id || 'right' },
target: { cell: addNodePlaceholder.id, port: addNodePlaceholder.getPorts().find((p: any) => p.group === 'left')?.id || 'left' },
...edgeAttrs,
});
addedCells.push(cycleStartNode, addNodePlaceholder, innerEdge);
}
const padding = 50;
const newWidth = Math.max(nodeWidth, bounds.maxX - bounds.minX + padding * 2);
const newHeight = Math.max(120, bounds.maxY - bounds.minY + padding * 2);
parentNode.prop('size', { width: newWidth, height: newHeight });
// Update right port x position
const ports = parentNode.getPorts();
ports.forEach((port: any) => {
if (port.group === 'right' && port.args) {
parentNode.portProp(port.id!, 'args/x', newWidth);
}
});
}
};
adjustLoopSize();
// Listen to child node movement events
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === cycleId);
childNodes.forEach((childNode: any) => {
childNode.on('change:position', adjustLoopSize);
// Adjust parent size if adding inside a cycle container
const cycleId = sourceNodeData.cycle;
if (cycleId) {
const parentNode = graph.getNodes().find((n: any) => n.getData()?.id === cycleId);
if (parentNode) {
const childNodes = graph.getNodes().filter((n: any) => n.getData()?.cycle === cycleId);
if (childNodes.length > 0) {
const bounds = childNodes.reduce((acc: any, child: any) => {
const b = child.getBBox();
return { minX: Math.min(acc.minX, b.x), minY: Math.min(acc.minY, b.y), maxX: Math.max(acc.maxX, b.x + b.width), maxY: Math.max(acc.maxY, b.y + b.height) };
}, { minX: Infinity, minY: Infinity, maxX: -Infinity, maxY: -Infinity });
const padding = 50;
const newWidth = Math.max(nodeWidth, bounds.maxX - bounds.minX + padding * 2);
const newHeight = Math.max(120, bounds.maxY - bounds.minY + padding * 2);
parentNode.prop('size', { width: newWidth, height: newHeight });
parentNode.getPorts().forEach((port: any) => {
if (port.group === 'right' && port.args) parentNode.portProp(port.id!, 'args/x', newWidth);
});
}
}
}
const isCycleContainer = (type: string) => type === 'loop' || type === 'iteration';
const newNodeType = selectedNodeType.type;
// Helper: bring a cycle container's child nodes and their edges to front
const bringCycleChildrenToFront = (cycleContainerId: string) => {
graph.getEdges().forEach((e: any) => {
const src = graph.getCellById(e.getSourceCellId());
const tgt = graph.getCellById(e.getTargetCellId());
if (src?.getData()?.cycle === cycleContainerId || tgt?.getData()?.cycle === cycleContainerId) e.toFront();
});
graph.getNodes().forEach((n: any) => { if (n.getData()?.cycle === cycleContainerId) n.toFront(); });
};
// Helper: bring all child nodes and their edges of a cycle container to front
const bringCycleChildrenToFront = (cycleContainerId: string) => {
graph.getEdges().forEach((e: any) => {
const src = graph.getCellById(e.getSourceCellId());
const tgt = graph.getCellById(e.getTargetCellId());
if (src?.getData()?.cycle === cycleContainerId || tgt?.getData()?.cycle === cycleContainerId) e.toFront();
});
graph.getNodes().forEach((n: any) => {
if (n.getData()?.cycle === cycleContainerId) n.toFront();
});
};
if (isCycleContainer(sourceNodeType)) {
newNode.toFront(); sourceNode.toFront(); bringCycleChildrenToFront(sourceNodeData.id);
if (isCycleContainer(newNodeType)) bringCycleChildrenToFront(id);
} else if (isCycleContainer(newNodeType)) {
newNode.toFront(); sourceNode.toFront(); bringCycleChildrenToFront(id);
} else {
addedCells.forEach(c => { if (c.isNode?.()) c.toFront(); });
}
if (isCycleContainer(sourceNodeType)) {
console.log('isCycleContainer(sourceNodeType)')
// Case 4: source is a loop/iteration node — bring new node to front, then its children
newNode.toFront();
sourceNode.toFront();
bringCycleChildrenToFront(sourceNodeData.id);
} else if (isCycleContainer(newNodeType)) {
console.log('isCycleContainer(newNodeType)')
// Case 3: adding a loop/iteration node from a normal node — bring new node to front, then its children
newNode.toFront();
sourceNode.toFront()
bringCycleChildrenToFront(id);
} else {
// Case 2: normal node → normal node
addedEdges.forEach(e => {
const src = graph.getCellById(e.getSourceCellId());
const tgt = graph.getCellById(e.getTargetCellId());
if (src?.isNode()) src.toFront();
if (tgt?.isNode()) tgt.toFront();
});
}
}, 50);
// Re-enable history and manually push one batch frame for all added cells
graph.enableHistory();
const history = graph.getPlugin('history') as any;
if (history) {
const batchFrame = addedCells.map((cell: any) => ({
batch: true,
event: 'cell:added',
data: { id: cell.id, node: cell.isNode(), edge: cell.isEdge(), props: cell.toJSON() },
options: {},
}));
history.undoStack.push(batchFrame);
history.redoStack = [];
graph.trigger('history:change', { cmds: batchFrame, options: { name: 'add-node' } });
}
// Clean up temporary element
if (tempElement) {
document.body.removeChild(tempElement);
setTempElement(null);
}
setPopoverVisible(false);
};
@@ -391,4 +335,4 @@ const PortClickHandler: React.FC<PortClickHandlerProps> = ({ graph }) => {
);
};
export default PortClickHandler;
export default PortClickHandler;

View File

@@ -242,10 +242,11 @@ const ToolConfig: FC<{ options: Suggestion[]; }> = ({
className={parameter.type === 'boolean' ? 'rb:mb-0!' : ''}
>
{parameter.type === 'string' && parameter.enum && parameter.enum.length > 0
? <Select size="small" options={parameter.enum.map(vo => ({ value: vo, label: vo }))} placeholder={t('common.pleaseSelect')} />
? <Select key={values.tool_id} size="small" options={parameter.enum.map(vo => ({ value: vo, label: vo }))} placeholder={t('common.pleaseSelect')} />
: parameter.type === 'boolean'
? <Switch size="small" />
? <Switch key={values.tool_id} size="small" />
: <Editor
key={values.tool_id}
variant="outlined"
type="input"
size="small"

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 15:06:18
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-04-21 18:23:31
* @Last Modified time: 2026-04-27 14:07:14
*/
import type { ReactShapeConfig } from '@antv/x6-react-shape';
import type { GroupMetadata, PortMetadata } from '@antv/x6/lib/model/port';
@@ -948,6 +948,15 @@ export const graphNodeLibrary: Record<string, NodeConfig> = {
width: nodeWidth,
height: 120,
shape: 'notes-node',
},
output: {
width: nodeWidth,
height: 76,
shape: 'normal-node',
ports: {
groups: { left: defaultPortGroup },
items: [defaultPortItems[0]],
},
}
}

View File

@@ -2,10 +2,9 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 15:17:48
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-04-24 17:21:09
* @Last Modified time: 2026-04-28 13:49:11
*/
import { Clipboard, Graph, Keyboard, MiniMap, Node, Snapline, History, type Edge } from '@antv/x6';
import type { HistoryCommand as Command } from '@antv/x6/lib/plugin/history/type';
import { register } from '@antv/x6-react-shape';
import type { PortMetadata } from '@antv/x6/lib/model/port';
import { App } from 'antd';
@@ -17,7 +16,7 @@ import { getWorkflowConfig, saveWorkflowConfig } from '@/api/application';
import { useUser } from '@/store/user';
import type { FeaturesConfigForm } from '@/views/ApplicationConfig/types';
import { conditionNodeHeight, conditionNodeItemHeight, conditionNodePortItemArgsY, defaultAbsolutePortGroups, defaultPortItems, edgeAttrs, edgeHoverTool, edge_color, edge_selected_color, edge_width, graphNodeLibrary, nodeLibrary, nodeRegisterLibrary, nodeWidth, notesConfig, portAttrs, portItemArgsY, portMarkup, portTextAttrs, unknownNode } from '../constant';
import type { ChatVariable, NodeProperties, WorkflowConfig } from '../types';
import type { ChatVariable, HistoryRecord, NodeProperties, WorkflowConfig } from '../types';
import { calcConditionNodeTotalHeight, getConditionNodeCasePortY } from '../utils';
import { useWorkflowStore } from '@/store/workflow';
@@ -86,6 +85,10 @@ export interface UseWorkflowGraphReturn {
/** Get start node output variable list (user-defined + system variables) */
getStartNodeVariables: () => Array<{ name: string; type: string; readonly?: boolean }>;
nodeClick: ({ node }: { node: Node }) => void;
/** All recorded history operations */
historyRecords: HistoryRecord[];
/** Clear history records */
clearHistoryRecords: () => void;
}
/**
@@ -119,14 +122,19 @@ export const useWorkflowGraph = ({
const featuresRef = useRef<FeaturesConfigForm | undefined>(undefined)
const [canUndo, setCanUndo] = useState(false)
const [canRedo, setCanRedo] = useState(false)
const [historyRecords, setHistoryRecords] = useState<HistoryRecord[]>([])
const lastHistoryRef = useRef<{ cellIds: string[]; timestamp: number; type: string } | null>(null)
const undoRef = useRef<() => void>(() => {})
const redoRef = useRef<() => void>(() => {})
const syncChildRelationshipsRef = useRef<() => void>(() => {})
const isSyncingRef = useRef(false)
useEffect(() => {
if (!graphRef.current) return
graphRef.current.getNodes().forEach(node => {
const data = node.getData()
if (data?.type === 'if-else' || data?.type === 'question-classifier') {
- console.log('chatVariables', chatVariables)
- node.setData({ ...data, chatVariables }, { silent: true })
+ node.setData({ ...data, chatVariables })
}
})
}, [chatVariables])
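// Hedged aside on the option dropped above: per X6's change options (verify
// against the version in use), a write made with { silent: true } emits no
// change events, so it neither re-renders observers nor lands in the History
// plugin's undo stack; dropping it routes the update through the normal event
// pipeline. Illustrative helpers, not part of the original file:
const setDataTracked = (target: Node, patch: Record<string, unknown>) =>
  target.setData({ ...target.getData(), ...patch });                     // undoable
const setDataUntracked = (target: Node, patch: Record<string, unknown>) =>
  target.setData({ ...target.getData(), ...patch }, { silent: true });   // skipped by History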
@@ -343,7 +351,7 @@ export const useWorkflowGraph = ({
if (parentNode) {
const addedChild = graphRef.current?.addNode(childNode)
if (addedChild) {
- parentNode.addChild(addedChild)
+ parentNode.addChild(addedChild, { silent: true })
}
}
}
@@ -374,8 +382,6 @@ export const useWorkflowGraph = ({
const newWidth = Math.max(parentBBox.width, maxX - minX + padding * 2)
const newHeight = Math.max(parentBBox.height, maxY - minY + padding * 2 + headerHeight)
- console.log('newWidth', newHeight, newWidth)
parentNode.prop('size', { width: newWidth, height: newHeight })
// Update x position of right group ports
@@ -488,8 +494,77 @@ export const useWorkflowGraph = ({
graphRef.current.cleanHistory()
}
}, 200)
} else {
graphRef.current.enableHistory()
graphRef.current.cleanHistory()
}
}
const resizeGroupNodes = (graph: Graph) => {
graph.getNodes().forEach(parentNode => {
const parentType = parentNode.getData()?.type
if (parentType !== 'loop' && parentType !== 'iteration') return
const children = graph.getNodes().filter(
n => n.getData()?.cycle === parentNode.getData()?.id && n.getData()?.type !== 'add-node'
)
if (!children.length) return
const padding = 24
const headerHeight = 50
const childBounds = children.map(c => c.getBBox())
const minX = Math.min(...childBounds.map(b => b.x))
const minY = Math.min(...childBounds.map(b => b.y))
const maxX = Math.max(...childBounds.map(b => b.x + b.width))
const maxY = Math.max(...childBounds.map(b => b.y + b.height))
const parentBBox = parentNode.getBBox()
const newWidth = Math.max(parentBBox.width, maxX - minX + padding * 2)
const newHeight = Math.max(parentBBox.height, maxY - minY + padding * 2 + headerHeight)
parentNode.prop('size', { width: newWidth, height: newHeight })
parentNode.getPorts().forEach(port => {
if (port.group === 'right' && port.args) {
parentNode.portProp(port.id!, 'args/x', newWidth)
}
})
})
}
const syncChildRelationships = () => {
if (!graphRef.current) return
const graph = graphRef.current
graph.disableHistory()
graph.getNodes().forEach(node => {
const cycleId = node.getData()?.cycle
if (!cycleId) return
const parentNode = graph.getCellById(cycleId) as Node | null
if (!parentNode) return
if (!parentNode.getChildren()?.some(c => c.id === node.id)) {
parentNode.addChild(node, { silent: true })
}
})
graph.getNodes().forEach(node => {
const children = node.getChildren()
if (!children?.length) return
children.forEach(child => {
if (!child.isNode()) return
const childCycleId = (child as Node).getData?.()?.cycle
if (childCycleId !== node.id && childCycleId !== node.getData?.()?.id) {
node.removeChild(child, { silent: true })
}
})
})
resizeGroupNodes(graph)
graph.getEdges().forEach(edge => {
const src = graph.getCellById(edge.getSourceCellId())
const tgt = graph.getCellById(edge.getTargetCellId())
if (src?.getData()?.cycle || tgt?.getData()?.cycle) {
edge.toFront()
}
})
graph.getNodes().forEach(node => {
if (node.getData()?.cycle) node.toFront()
})
graph.enableHistory()
}
syncChildRelationshipsRef.current = syncChildRelationships
/**
* Setup X6 graph plugins (MiniMap, Snapline, Clipboard, Keyboard)
*/
@@ -525,18 +600,44 @@ export const useWorkflowGraph = ({
new History({
enabled: false,
beforeAddCommand(_event, args: any) {
- const event = args?.key ? `cell:change:${args.key}` : _event;
- if (event.startsWith('cell:change:') &&
- event !== 'cell:change:position' &&
- event !== 'cell:change:source' &&
- event !== 'cell:change:target') return false;
+ const key = args?.key
+ if (key === 'attrs' || key === 'tools') return false
},
}),
);
- graphRef.current.on('history:change', ({ cmds }: { cmds: Command[] }) => {
+ const MERGE_INTERVAL = 1000
+ graphRef.current.on('history:change', ({ cmds, options }: { cmds: any[]; options: any }) => {
setCanUndo(graphRef.current?.canUndo() ?? false)
setCanRedo(graphRef.current?.canRedo() ?? false)
console.log('history:change', cmds, options)
const batchName: string | undefined = options?.name
const actionType = batchName === 'undo' ? 'undo' : batchName === 'redo' ? 'redo' : batchName ? 'batch' : 'change'
const cellIds = [...new Set(cmds?.map((cmd: any) => cmd.data?.id).filter(Boolean))]
const now = Date.now()
const last = lastHistoryRef.current
const canMerge =
actionType === 'change' &&
last?.type === 'change' &&
now - last.timestamp < MERGE_INTERVAL &&
cellIds.length > 0 &&
cellIds.length === last.cellIds.length &&
cellIds.every((id, i) => id === last.cellIds[i])
if (canMerge) {
lastHistoryRef.current!.timestamp = now
setHistoryRecords(prev => {
const next = [...prev]
next[next.length - 1] = { ...next[next.length - 1], timestamp: now }
return next
})
} else {
const record: HistoryRecord = { type: actionType, timestamp: now, batchName, cellIds }
lastHistoryRef.current = { cellIds, timestamp: now, type: actionType }
setHistoryRecords(prev => [...prev, record])
}
})
graphRef.current.on('history:undo', () => { if (!isSyncingRef.current) syncChildRelationshipsRef.current() })
graphRef.current.on('history:redo', () => { if (!isSyncingRef.current) syncChildRelationshipsRef.current() })
};
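// The 1-second merge window above, restated as a pure predicate for clarity
// (a sketch, not part of the original file): two consecutive plain 'change'
// frames that touch exactly the same cell set within the interval collapse
// into a single HistoryRecord, whose timestamp is simply refreshed.
const canMergeWithLast = (
  last: { cellIds: string[]; timestamp: number; type: string } | null,
  cellIds: string[],
  now: number,
  interval = 1000,
): boolean =>
  last !== null &&
  last.type === 'change' &&
  now - last.timestamp < interval &&
  cellIds.length > 0 &&
  cellIds.length === last.cellIds.length &&
  cellIds.every((id, i) => id === last.cellIds[i]);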
// Show / hide connection ports
// const showPorts = (show: boolean) => {
@@ -569,13 +670,13 @@ export const useWorkflowGraph = ({
vo.setData({
...data,
isSelected: false,
- });
+ }, { silent: true });
}
});
node.setData({
...nodeData,
isSelected: true,
- });
+ }, { silent: true });
clearEdgeSelect()
if (nodeData.type !== 'notes') {
setSelectedNode(node);
@@ -589,7 +690,7 @@ export const useWorkflowGraph = ({
const edgeClick = ({ edge }: { edge: Edge }) => {
clearEdgeSelect();
edge.setAttrByPath('line/stroke', edge_selected_color);
- edge.setData({ ...edge.getData(), isSelected: true });
+ edge.setData({ ...edge.getData(), isSelected: true }, { silent: true });
clearNodeSelect();
};
/**
@@ -604,7 +705,7 @@ export const useWorkflowGraph = ({
node.setData({
...data,
isSelected: false,
- });
+ }, { silent: true });
}
});
setSelectedNode(null);
@@ -614,7 +715,7 @@ export const useWorkflowGraph = ({
*/
const clearEdgeSelect = () => {
graphRef.current?.getEdges().forEach(e => {
- e.setData({ ...e.getData(), isSelected: false, isNodeHover: false });
+ e.setData({ ...e.getData(), isSelected: false, isNodeHover: false }, { silent: true });
e.setAttrByPath('line/stroke', edge_color);
e.setAttrByPath('line/strokeWidth', edge_width);
});
@@ -753,8 +854,6 @@ export const useWorkflowGraph = ({
// Find corresponding parent node
const parentNode = nodes?.find(n => n.id === nodeData.cycle);
if (parentNode) {
- // Use removeChild method to delete child node
- parentNode.removeChild(nodeToDelete);
parentNodesToUpdate.push(parentNode);
}
// Add child node to deletion list
@@ -782,42 +881,51 @@ export const useWorkflowGraph = ({
// Delete all collected nodes and edges
if (cells.length > 0) {
// Pre-calculate which parents need an add-node restored (before removal changes the graph)
const parentsNeedingAddNode = parentNodesToUpdate
.filter(parentNode => {
const parentShape = parentNode.shape;
if (parentShape !== 'loop-node' && parentShape !== 'iteration-node') return false;
const parentData = parentNode.getData();
const allChildren = graphRef.current!.getNodes().filter(n => n.getData()?.cycle === parentData.id);
const cycleStartNodes = allChildren.filter(n => n.getData()?.type === 'cycle-start');
// After deletion, only cycle-start will remain
const nonCycleStartToDelete = cells.filter(c =>
c.isNode() &&
(c as Node).getData()?.cycle === parentData.id &&
(c as Node).getData()?.type !== 'cycle-start'
);
return cycleStartNodes.length === 1 && (allChildren.length - nonCycleStartToDelete.length) === 1;
})
.map(parentNode => ({
parentNode,
cycleStartNode: graphRef.current!.getNodes().find(
n => n.getData()?.cycle === parentNode.getData().id && n.getData()?.type === 'cycle-start'
)!
}))
.filter(({ cycleStartNode }) => !!cycleStartNode);
graphRef.current?.startBatch('delete');
graphRef.current?.removeCells(cells);
// If parent is iteration/loop and only cycle-start remains, add add-node connected to it
- parentNodesToUpdate.forEach(parentNode => {
- const parentShape = parentNode.shape;
- if (parentShape !== 'loop-node' && parentShape !== 'iteration-node') return;
+ parentsNeedingAddNode.forEach(({ parentNode, cycleStartNode }) => {
const parentData = parentNode.getData();
- const remainingChildren = graphRef.current!.getNodes().filter(
- n => n.getData()?.cycle === parentData.id
- );
- const cycleStartNodes = remainingChildren.filter(n => n.getData()?.type === 'cycle-start');
- if (cycleStartNodes.length === 1 && remainingChildren.length === 1) {
- const cycleStartNode = cycleStartNodes[0];
- const bbox = cycleStartNode.getBBox();
- const addNode = graphRef.current!.addNode({
- ...graphNodeLibrary.addStart,
- x: bbox.x + 84,
- y: bbox.y + 4,
- data: {
- type: 'add-node',
- parentId: parentNode.id,
- cycle: parentData.id,
- label: t('workflow.addNode'),
- icon: '+',
- },
- });
- parentNode.addChild(addNode);
- const sourcePort = cycleStartNode.getPorts().find(p => p.group === 'right')?.id || 'right';
- const targetPort = addNode.getPorts().find(p => p.group === 'left')?.id || 'left';
- graphRef.current!.addEdge({
- source: { cell: cycleStartNode.id, port: sourcePort },
- target: { cell: addNode.id, port: targetPort },
- ...edgeAttrs,
- });
- }
+ const bbox = cycleStartNode.getBBox();
+ const addNode = graphRef.current!.addNode({
+ ...graphNodeLibrary.addStart,
+ x: bbox.x + 84,
+ y: bbox.y + 4,
+ data: { type: 'add-node', parentId: parentNode.id, cycle: parentData.id, label: t('workflow.addNode'), icon: '+' },
+ });
+ parentNode.addChild(addNode, { silent: true });
+ graphRef.current!.addEdge({
+ source: { cell: cycleStartNode.id, port: cycleStartNode.getPorts().find(p => p.group === 'right')?.id || 'right' },
+ target: { cell: addNode.id, port: addNode.getPorts().find(p => p.group === 'left')?.id || 'left' },
+ ...edgeAttrs,
+ });
});
graphRef.current?.stopBatch('delete');
}
return false;
};
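// Hedged note on the batching used in deleteEvent above: X6 groups the
// commands issued between startBatch/stopBatch calls that share a name into
// one history frame, so the removal plus any restored add-node undoes as a
// single step. Minimal sketch; the helper is illustrative, not from the repo:
const removeAsSingleUndoStep = (graph: Graph, cells: (Node | Edge)[]) => {
  graph.startBatch('delete');
  graph.removeCells(cells);
  graph.stopBatch('delete');
};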
@@ -1036,7 +1144,7 @@ export const useWorkflowGraph = ({
graphRef.current?.getConnectedEdges(node).forEach(edge => {
if (!edge.getData()?.isSelected) {
edge.setAttrByPath('line/stroke', edge_selected_color);
- edge.setData({ ...edge.getData(), isNodeHover: true });
+ edge.setData({ ...edge.getData(), isNodeHover: true }, { silent: true });
}
});
});
@@ -1044,7 +1152,7 @@ export const useWorkflowGraph = ({
graphRef.current?.getConnectedEdges(node).forEach(edge => {
if (!edge.getData()?.isSelected) {
edge.setAttrByPath('line/stroke', edge_color);
- edge.setData({ ...edge.getData(), isNodeHover: false });
+ edge.setData({ ...edge.getData(), isNodeHover: false }, { silent: true });
}
});
});
@@ -1126,8 +1234,8 @@ export const useWorkflowGraph = ({
// Delete selected nodes and edges
graphRef.current.bindKey(['ctrl+d', 'cmd+d', 'delete', 'backspace'], deleteEvent);
// Undo / Redo
- graphRef.current.bindKey(['ctrl+z', 'cmd+z'], () => { graphRef.current?.undo(); return false; });
- graphRef.current.bindKey(['ctrl+y', 'cmd+y', 'ctrl+shift+z', 'cmd+shift+z'], () => { graphRef.current?.redo(); return false; });
+ graphRef.current.bindKey(['ctrl+z', 'cmd+z'], () => { undo(); return false; });
+ graphRef.current.bindKey(['ctrl+y', 'cmd+y', 'ctrl+shift+z', 'cmd+shift+z'], () => { redo(); return false; });
};
@@ -1193,13 +1301,51 @@ export const useWorkflowGraph = ({
};
if (dragData.type === 'loop' || dragData.type === 'iteration') {
- graphRef.current.addNode({
+ graph.disableHistory()
+ const parentNode = graphRef.current.addNode({
...graphNodeLibrary[dragData.type],
x: point.x - 150,
y: point.y - 100,
id: cleanNodeData.id,
data: { ...cleanNodeData, isGroup: true },
- });
+ })
const parentBBox = parentNode.getBBox()
const cycleStartId = `cycle_start_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
const cycleStartNode = graphRef.current.addNode({
...graphNodeLibrary.cycleStart,
x: parentBBox.x + 24,
y: parentBBox.y + 70,
id: cycleStartId,
data: { id: cycleStartId, type: 'cycle-start', parentId: cleanNodeData.id, isDefault: true, cycle: cleanNodeData.id },
})
const addNode = graphRef.current.addNode({
...graphNodeLibrary.addStart,
x: parentBBox.x + 24 + 84,
y: parentBBox.y + 70 + 4,
data: { type: 'add-node', label: t('workflow.addNode'), icon: '+', parentId: cleanNodeData.id, cycle: cleanNodeData.id },
})
parentNode.addChild(cycleStartNode, { silent: true })
parentNode.addChild(addNode, { silent: true })
const newEdge = graphRef.current.addEdge({
source: { cell: cycleStartNode.id, port: cycleStartNode.getPorts().find(p => p.group === 'right')?.id || 'right' },
target: { cell: addNode.id, port: addNode.getPorts().find(p => p.group === 'left')?.id || 'left' },
...edgeAttrs,
})
cycleStartNode.toFront()
addNode.toFront()
graph.enableHistory()
// Manually push a single batch frame covering all 4 cells into undoStack
const history = graph.getPlugin('history') as History
const makeBatchCmd = (cell: any) => ({
batch: true,
event: 'cell:added',
data: { id: cell.id, node: cell.isNode(), edge: cell.isEdge(), props: cell.toJSON() },
options: {},
})
const batchFrame = [parentNode, cycleStartNode, addNode, newEdge].map(makeBatchCmd)
;(history as any).undoStack.push(batchFrame)
;(history as any).redoStack = []
graph.trigger('history:change', { cmds: batchFrame, options: { name: 'add-group' } })
} else if (dragData.type === 'if-else') {
// Create condition node
graphRef.current.addNode({
@@ -1446,8 +1592,80 @@ export const useWorkflowGraph = ({
return userVars
}
- const undo = () => graphRef.current?.undo()
- const redo = () => graphRef.current?.redo()
const clearHistoryRecords = () => {
setHistoryRecords([])
lastHistoryRef.current = null
}
const getStackCellIds = (cmds: any): string[] => {
const arr = Array.isArray(cmds) ? cmds : [cmds]
return [...new Set(arr.map((c: any) => c.data?.id).filter(Boolean))]
}
const isSkippableFrame = (frame: any): boolean => {
const arr = Array.isArray(frame) ? frame : [frame]
return arr.every((c: any) => ['zIndex', 'attrs', 'tools'].includes(c.data?.key))
}
const undo = () => {
const history = graphRef.current?.getPlugin('history') as History | undefined
if (!history || history.getUndoSize() === 0) return
const undoStack = (history as any).undoStack as any[]
isSyncingRef.current = true
while (undoStack.length > 0 && isSkippableFrame(undoStack[undoStack.length - 1])) {
graphRef.current!.undo()
}
if (undoStack.length === 0) {
isSyncingRef.current = false
return
}
const topIds = getStackCellIds(undoStack[undoStack.length - 1])
graphRef.current!.undo()
while (undoStack.length > 0) {
if (isSkippableFrame(undoStack[undoStack.length - 1])) {
graphRef.current!.undo()
continue
}
const nextIds = getStackCellIds(undoStack[undoStack.length - 1])
if (nextIds.length === topIds.length && nextIds.every((id, i) => id === topIds[i])) {
graphRef.current!.undo()
} else {
break
}
}
isSyncingRef.current = false
syncChildRelationships()
}
const redo = () => {
const history = graphRef.current?.getPlugin('history') as History | undefined
if (!history || history.getRedoSize() === 0) return
const redoStack = (history as any).redoStack as any[]
isSyncingRef.current = true
while (redoStack.length > 0 && isSkippableFrame(redoStack[redoStack.length - 1])) {
graphRef.current!.redo()
}
if (redoStack.length === 0) {
isSyncingRef.current = false
return
}
const topIds = getStackCellIds(redoStack[redoStack.length - 1])
graphRef.current!.redo()
while (redoStack.length > 0) {
if (isSkippableFrame(redoStack[redoStack.length - 1])) {
graphRef.current!.redo()
continue
}
const nextIds = getStackCellIds(redoStack[redoStack.length - 1])
if (nextIds.length === topIds.length && nextIds.every((id, i) => id === topIds[i])) {
graphRef.current!.redo()
} else {
break
}
}
isSyncingRef.current = false
syncChildRelationships()
}
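// Worked example of the frame-skipping rule used by undo/redo above; the
// command shape mirrors what this file reads off the X6 undo stack and is an
// assumption, not a documented API. Frames that only touch zIndex/attrs/tools
// are cosmetic and get stepped over, so one user-level undo can consume
// several cosmetic frames plus exactly one structural frame:
//   isSkippableFrame([{ data: { id: 'n1', key: 'zIndex' } }])   -> true  (skipped)
//   isSkippableFrame([{ data: { id: 'n1', key: 'position' } }]) -> false (undo target)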
const handleSaveFeaturesConfig = (value?: FeaturesConfigForm) => {
const { statement = '' } = value?.opening_statement || {}
@@ -1488,20 +1706,16 @@ export const useWorkflowGraph = ({
if (!graphRef.current) return;
const nodes = graphRef.current.getNodes();
- const lastWithSub = [...chatHistory].reverse().find(item => item.subContent?.length);
- // Reset all node execution status first
+ // Reset all node execution status on every chatHistory change
nodes.forEach(node => {
const data = node.getData();
- if (typeof data.executionStatus === 'string') {
- node.setData({ ...data, executionStatus: undefined });
- }
+ node.setData({ ...data, executionStatus: '' });
});
- if (!lastWithSub?.subContent) return;
- // Build a nodeId -> status map first
- const statusMap: Record<string, string> = {};
- lastWithSub.subContent.forEach(sub => {
+ const lastAssistant = [...chatHistory].reverse().find(item => item.role === 'assistant');
+ if (!lastAssistant?.subContent?.length) return;
+ lastAssistant.subContent.forEach(sub => {
if (typeof sub.status === 'string') {
- statusMap[sub.node_id] = sub.status;
+ const node = nodes.find(n => n.getData()?.id === sub.node_id);
+ if (node) {
+ node.setData({ ...node.getData(), executionStatus: sub.status });
@@ -1537,5 +1751,7 @@ export const useWorkflowGraph = ({
canRedo,
undo,
redo,
historyRecords,
clearHistoryRecords,
};
};

View File

@@ -113,4 +113,13 @@ export interface ChatVariable {
}
export interface AddChatVariableRef {
handleOpen: (value?: ChatVariable) => void;
}
export type HistoryActionType = 'add' | 'remove' | 'change' | 'undo' | 'redo' | 'batch'
export interface HistoryRecord {
type: HistoryActionType;
timestamp: number;
batchName?: string;
cellIds?: string[];
}
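A hypothetical consumer of the new type, to show the intended shape in use; the component, props, markup, and import path below are illustrative and not part of the repository:

import React from 'react';
import type { HistoryRecord } from './types'; // assumed relative path

export const HistoryPanel = ({ records, onClear }: { records: HistoryRecord[]; onClear: () => void }) => (
  <ul>
    {records.map((r, i) => (
      // type/batchName/cellIds come straight from the interface above.
      <li key={`${r.timestamp}-${i}`}>
        {r.type}
        {r.batchName ? ` (${r.batchName})` : ''} / {r.cellIds?.length ?? 0} cell(s)
      </li>
    ))}
    <li>
      <button onClick={onClear}>clear</button>
    </li>
  </ul>
);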

View File

@@ -17,6 +17,7 @@ export const isSubExprSet = (sub: any) => {
* Uses the same per-expression height logic as getConditionNodeCasePortY.
*/
export const calcConditionNodeTotalHeight = (cases: any[]) => {
if (!cases?.length) return conditionNodeHeight;
const casesHeight = cases.reduce((acc: number, c: any) => {
const exprs = c?.expressions ?? [];
const n = exprs.length;