Files
MemoryBear/api/app/services/workflow_import_service.py
wwq 404ce9f9ba feat(workflow): enhance HTTP request node with curl debugging support
- Augment HTTP request node capabilities and add generated curl commands for easier debugging.

feat(log): implement workflow execution logs and search functionality

- Add detailed logging for workflow node execution and enable search capabilities within application logs.

feat(auth): introduce middleware to verify application publication status

- Add a check to ensure the application is published before allowing access.

fix(converter): rectify variable handling logic in Dify converter

- Correct issues related to processing variables within the Dify converter module.

refactor(model): remove quota check decorator from model update operations

- Decouple quota validation from the model update process to streamline the logic.
2026-04-23 15:46:12 +08:00

107 lines
3.7 KiB
Python

# -*- coding: UTF-8 -*-
# Author: Eternity
# @Email: 1533512157@qq.com
# @Time : 2026/2/25 14:39
import json
import uuid
from typing import Any
from sqlalchemy.orm import Session
from app.aioRedis import aio_redis_set, aio_redis_get
from app.core.config import settings
from app.core.exceptions import BusinessException
from app.core.workflow.adapters.base_adapter import WorkflowImportResult, WorkflowParserResult
from app.core.workflow.adapters.errors import UnsupportedPlatform, InvalidConfiguration
from app.core.workflow.adapters.registry import PlatformAdapterRegistry
from app.models.app_model import AppType
from app.schemas import AppCreate
from app.schemas.workflow_schema import WorkflowConfigCreate
from app.services.app_service import AppService
from app.services.workflow_service import WorkflowService
class WorkflowImportService:
    """Import workflow configurations exported from third-party platforms.

    Orchestrates a three-step flow:
      1. ``upload_config`` — validate + parse a platform export, cache the
         parsed result in Redis under a temporary id.
      2. ``flush_config`` — overwrite/refresh the cached parse result.
      3. ``save_workflow`` — materialize the cached result as a new app.
    """

    def __init__(self, db: Session):
        """Bind the service to a database session and its collaborators.

        Args:
            db: SQLAlchemy session shared with the app/workflow services.
        """
        self.db = db
        self.registry = PlatformAdapterRegistry
        # TTL (seconds) for parsed configs held in Redis between upload and save.
        self.cache_timeout = settings.WORKFLOW_IMPORT_CACHE_TIMEOUT
        self.app_service = AppService(db)
        self.workflow_service = WorkflowService(db)

    async def flush_config(self, temp_id: str, config: WorkflowParserResult):
        """Replace a cached parse result and reset its expiry.

        Args:
            temp_id: Temporary id returned by ``upload_config``.
            config: New parsed configuration to store.

        Raises:
            BusinessException: If the cache entry has already expired —
                the client must re-upload rather than silently recreate it.
        """
        config_cache = await aio_redis_get(temp_id)
        if not config_cache:
            raise BusinessException("Workflow configuration has expired. Please re-upload it.")
        await aio_redis_set(temp_id, config.model_dump_json(), expire=self.cache_timeout)

    async def upload_config(
        self,
        platform: str,
        config: dict[str, Any],
    ) -> WorkflowImportResult:
        """Validate and parse a platform export, caching the parsed result.

        Args:
            platform: Source platform identifier (e.g. a registry key).
            config: Raw exported configuration payload.

        Returns:
            WorkflowImportResult: On success, carries a ``temp_id`` that
            ``save_workflow`` later consumes; on failure, carries errors.
        """
        if not self.registry.is_supported(platform):
            return WorkflowImportResult(
                success=False,
                temp_id=None,
                workflow_id=None,
                errors=[UnsupportedPlatform(platform=platform)]
            )
        adapter = self.registry.get_adapter(platform, config)
        if not adapter.validate_config():
            return WorkflowImportResult(
                success=False,
                temp_id=None,
                workflow_id=None,
                errors=[InvalidConfiguration()] + adapter.errors
            )
        workflow_config = adapter.parse_workflow()
        temp_id = uuid.uuid4().hex
        # BUGFIX: cache the JSON string, not the raw dict from model_dump().
        # flush_config stores model_dump_json() and save_workflow reads the
        # value back with json.loads(), so a non-string value would break
        # (or fail to round-trip through) the Redis cache.
        await aio_redis_set(temp_id, workflow_config.model_dump_json(), expire=self.cache_timeout)
        return WorkflowImportResult(
            success=True,
            temp_id=temp_id,
            workflow_id=None,
            edges=workflow_config.edges,
            nodes=workflow_config.nodes,
            variables=workflow_config.variables,
            features=workflow_config.features,
            warnings=workflow_config.warnings,
            errors=workflow_config.errors
        )

    async def save_workflow(
        self,
        user_id: uuid.UUID,
        workspace_id: uuid.UUID,
        temp_id: str,
        name: str,
        description: str | None,
    ):
        """Create a workflow app from a previously-uploaded configuration.

        Args:
            user_id: Owner of the new app.
            workspace_id: Workspace the app is created in.
            temp_id: Cache key returned by ``upload_config``.
            name: Desired app name; de-duplicated within the workspace.
            description: Optional app description.

        Returns:
            The newly created app record.

        Raises:
            BusinessException: If the cached configuration has expired.
        """
        config = await aio_redis_get(temp_id)
        if config is None:
            raise BusinessException("Configuration import timed out. Please try again.")
        config = json.loads(config)
        # Avoid name collisions within the workspace for workflow-type apps.
        unique_name = self.app_service._unique_app_name(name, workspace_id, AppType.WORKFLOW)
        app = self.app_service.create_app(
            user_id=user_id,
            workspace_id=workspace_id,
            data=AppCreate(
                name=unique_name,
                description=description,
                type="workflow",
                workflow_config=WorkflowConfigCreate(
                    nodes=config["nodes"],
                    edges=config["edges"],
                    variables=config["variables"],
                    # Older cached payloads may lack "features"; default empty.
                    features=config.get("features", {})
                )
            )
        )
        return app