Compare commits


1 Commit

Author SHA1 Message Date
Stephen Zhou  e535a1def2  test: stable test  2026-02-09 16:40:29 +08:00
196 changed files with 3583 additions and 14574 deletions

View File

@@ -715,7 +715,6 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
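
For reference, a minimal sketch of how env vars like these bind to typed settings with pydantic-settings, matching the Field definitions later in this diff (the wrapper class itself is illustrative):

    from pydantic import Field, PositiveInt
    from pydantic_settings import BaseSettings

    class SandboxCleanSettings(BaseSettings):
        # Subset of the variables above; matching env vars override the defaults.
        SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: PositiveInt = Field(default=21)
        SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: PositiveInt = Field(default=1000)
        SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(default=30)

    settings = SandboxCleanSettings()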

View File

@@ -54,7 +54,7 @@
"--loglevel",
"DEBUG",
"-Q",
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,workflow_based_app_execution,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
]
}
]
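
The -Q list controls which queues this worker consumes; tasks routed to a queue that no worker subscribes to simply accumulate on the broker. A minimal Celery sketch of queue routing (names and broker URL illustrative):

    from celery import Celery

    app = Celery("worker_example", broker="redis://localhost:6379/0")

    @app.task
    def poll_schedules():
        print("polling")

    # Route to a named queue at send time; only workers started with this
    # queue in their -Q list will consume it:
    #   poll_schedules.apply_async(queue="schedule_poller")
    #   celery -A worker_example worker -Q schedule_poller,schedule_executor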

View File

@@ -1344,10 +1344,6 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
description="Maximum number of records to process in each batch",
default=1000,
)
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
description="Maximum interval in milliseconds between batches",
default=200,
)
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
description="Retention days for sandbox expired workflow_run records and message records",
default=30,

View File

@@ -259,20 +259,11 @@ class CeleryConfig(DatabaseConfig):
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)
CELERY_TASK_ANNOTATIONS: dict[str, Any] | None = Field(
description=(
"Annotations for Celery tasks as a JSON mapping of task name -> options "
"(for example, rate limits or other task-specific settings)."
),
default=None,
)
@computed_field
def CELERY_RESULT_BACKEND(self) -> str | None:
if self.CELERY_BACKEND in ("database", "rabbitmq"):
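
The removed CELERY_TASK_ANNOTATIONS setting carries a mapping of task name to option overrides; a minimal sketch of the underlying Celery feature it feeds (task name illustrative):

    from celery import Celery

    app = Celery("annotations_example")

    # task_annotations maps a task name (or "*") to option overrides,
    # for example a rate limit:
    app.conf.task_annotations = {"tasks.send_mail": {"rate_limit": "10/m"}}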

View File

@@ -21,7 +21,6 @@ language_timezone_mapping = {
"th-TH": "Asia/Bangkok",
"id-ID": "Asia/Jakarta",
"ar-TN": "Africa/Tunis",
"nl-NL": "Europe/Amsterdam",
}
languages = list(language_timezone_mapping.keys())

View File

@@ -599,12 +599,7 @@ def _get_conversation(app_model, conversation_id):
db.session.execute(
sa.update(Conversation)
.where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
# Keep updated_at unchanged when only marking a conversation as read.
.values(
read_at=naive_utc_now(),
read_account_id=current_user.id,
updated_at=Conversation.updated_at,
)
.values(read_at=naive_utc_now(), read_account_id=current_user.id)
)
db.session.commit()
db.session.refresh(conversation)
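
The removed comment and the updated_at=Conversation.updated_at value pin a column against its onupdate default: supplying the column itself as the value keeps the stored timestamp. A standalone sketch of the trick (table definition illustrative):

    import sqlalchemy as sa

    metadata = sa.MetaData()
    conversations = sa.Table(
        "conversations",
        metadata,
        sa.Column("id", sa.String, primary_key=True),
        sa.Column("read_at", sa.DateTime),
        sa.Column("updated_at", sa.DateTime, onupdate=sa.func.now()),
    )

    # Explicitly assigning the column to itself suppresses the onupdate
    # default, so only read_at changes:
    stmt = (
        sa.update(conversations)
        .where(conversations.c.id == "some-id")
        .values(read_at=sa.func.now(), updated_at=conversations.c.updated_at)
    )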

View File

@@ -1,7 +1,6 @@
import urllib.parse
import httpx
from flask_restx import Resource
from pydantic import BaseModel, Field
import services
@@ -11,12 +10,12 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.console import console_ns
from controllers.fastopenapi import console_router
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
from libs.login import current_account_with_tenant, login_required
from libs.login import current_account_with_tenant
from services.file_service import FileService
@@ -24,73 +23,69 @@ class RemoteFileUploadPayload(BaseModel):
url: str = Field(..., description="URL to fetch")
@console_ns.route("/remote-files/<path:url>")
class GetRemoteFileInfo(Resource):
@login_required
def get(self, url: str):
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
@console_router.get(
"/remote-files/<path:url>",
response_model=RemoteFileInfo,
tags=["console"],
)
def get_remote_file_info(url: str) -> RemoteFileInfo:
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(decoded_url, timeout=3)
resp.raise_for_status()
return RemoteFileInfo(
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
file_length=int(resp.headers.get("Content-Length", 0)),
)
@console_router.post(
"/remote-files/upload",
response_model=FileWithSignedUrl,
tags=["console"],
status_code=201,
)
def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
url = payload.url
try:
resp = ssrf_proxy.head(url=url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(decoded_url, timeout=3)
resp.raise_for_status()
return RemoteFileInfo(
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
file_length=int(resp.headers.get("Content-Length", 0)),
).model_dump(mode="json")
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
if resp.status_code != httpx.codes.OK:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
except httpx.RequestError as e:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
file_info = helpers.guess_file_info_from_response(resp)
@console_ns.route("/remote-files/upload")
class RemoteFileUpload(Resource):
@login_required
def post(self):
payload = RemoteFileUploadPayload.model_validate(console_ns.payload)
url = payload.url
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
raise FileTooLargeError
# Try to fetch remote file metadata/content first
try:
resp = ssrf_proxy.head(url=url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
if resp.status_code != httpx.codes.OK:
# Normalize into a user-friendly error message expected by tests
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
except httpx.RequestError as e:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
file_info = helpers.guess_file_info_from_response(resp)
# Enforce file size limit with 400 (Bad Request) per tests' expectation
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
raise FileTooLargeError()
# Load content if needed
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
try:
user, _ = current_account_with_tenant()
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,
user=user,
source_url=url,
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
# Success: return created resource with 201 status
return (
FileWithSignedUrl(
id=upload_file.id,
name=upload_file.name,
size=upload_file.size,
extension=upload_file.extension,
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
mime_type=upload_file.mime_type,
created_by=upload_file.created_by,
created_at=int(upload_file.created_at.timestamp()),
).model_dump(mode="json"),
201,
try:
user, _ = current_account_with_tenant()
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,
user=user,
source_url=url,
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
return FileWithSignedUrl(
id=upload_file.id,
name=upload_file.name,
size=upload_file.size,
extension=upload_file.extension,
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
mime_type=upload_file.mime_type,
created_by=upload_file.created_by,
created_at=int(upload_file.created_at.timestamp()),
)
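
Both versions of the handlers above share the same HEAD-then-GET probe: try a cheap HEAD first and fall back to GET for servers that reject it. A standalone sketch with plain httpx, without the project's SSRF proxy (function name illustrative):

    import httpx

    def probe_remote_file(url: str) -> tuple[str, int]:
        # Some servers reject HEAD; fall back to a short GET.
        resp = httpx.head(url, follow_redirects=True)
        if resp.status_code != httpx.codes.OK:
            resp = httpx.get(url, timeout=3, follow_redirects=True)
            resp.raise_for_status()
        return (
            resp.headers.get("Content-Type", "application/octet-stream"),
            int(resp.headers.get("Content-Length", 0)),
        )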

View File

@@ -42,15 +42,7 @@ class SetupResponse(BaseModel):
tags=["console"],
)
def get_setup_status_api() -> SetupStatusResponse:
"""Get system setup status.
NOTE: This endpoint is unauthenticated by design.
During first-time bootstrap there is no admin account yet, so frontend initialization must be
able to query setup progress before any login flow exists.
Only bootstrap-safe status information should be returned by this endpoint.
"""
"""Get system setup status."""
if dify_config.EDITION == "SELF_HOSTED":
setup_status = get_setup_status()
if setup_status and not isinstance(setup_status, bool):
@@ -69,12 +61,7 @@ def get_setup_status_api() -> SetupStatusResponse:
)
@only_edition_self_hosted
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
"""Initialize system setup with admin account.
NOTE: This endpoint is unauthenticated by design for first-time bootstrap.
Access is restricted by deployment mode (`SELF_HOSTED`), one-time setup guards,
and init-password validation rather than user session authentication.
"""
"""Initialize system setup with admin account."""
if get_setup_status():
raise AlreadySetupError()

View File

@@ -34,7 +34,6 @@ from .dataset import (
metadata,
segment,
)
from .dataset.rag_pipeline import rag_pipeline_workflow
from .end_user import end_user
from .workspace import models
@@ -54,7 +53,6 @@ __all__ = [
"message",
"metadata",
"models",
"rag_pipeline_workflow",
"segment",
"site",
"workflow",

View File

@@ -1,3 +1,5 @@
import string
import uuid
from collections.abc import Generator
from typing import Any
@@ -10,7 +12,6 @@ from controllers.common.errors import FilenameNotExistsError, NoFileUploadedErro
from controllers.common.schema import register_schema_model
from controllers.service_api import service_api_ns
from controllers.service_api.dataset.error import PipelineRunError
from controllers.service_api.dataset.rag_pipeline.serializers import serialize_upload_file
from controllers.service_api.wraps import DatasetApiResource
from core.app.apps.pipeline.pipeline_generator import PipelineGenerator
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -40,7 +41,7 @@ register_schema_model(service_api_ns, DatasourceNodeRunPayload)
register_schema_model(service_api_ns, PipelineRunApiEntity)
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource-plugins")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource-plugins")
class DatasourcePluginsApi(DatasetApiResource):
"""Resource for datasource plugins."""
@@ -75,7 +76,7 @@ class DatasourcePluginsApi(DatasetApiResource):
return datasource_plugins, 200
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource/nodes/{string:node_id}/run")
class DatasourceNodeRunApi(DatasetApiResource):
"""Resource for datasource node run."""
@@ -130,7 +131,7 @@ class DatasourceNodeRunApi(DatasetApiResource):
)
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/run")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/run")
class PipelineRunApi(DatasetApiResource):
"""Resource for datasource node run."""
@@ -231,4 +232,12 @@ class KnowledgebasePipelineFileUploadApi(DatasetApiResource):
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
return serialize_upload_file(upload_file), 201
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at,
}, 201
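
The route decorators above rely on Flask's <converter:variable> URL syntax, where the converter parses the path segment before the view runs. A minimal standalone sketch:

    from flask import Flask

    app = Flask(__name__)

    # The built-in uuid converter hands the view a parsed uuid.UUID;
    # a malformed segment 404s before the view is called.
    @app.route("/datasets/<uuid:dataset_id>/pipeline/run", methods=["POST"])
    def run_pipeline(dataset_id):
        return {"dataset_id": str(dataset_id)}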

View File

@@ -1,22 +0,0 @@
"""
Serialization helpers for Service API knowledge pipeline endpoints.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from models.model import UploadFile
def serialize_upload_file(upload_file: UploadFile) -> dict[str, Any]:
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at.isoformat() if upload_file.created_at else None,
}

View File

@@ -217,8 +217,6 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def decorator(view: Callable[Concatenate[T, P], R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
api_token = validate_and_get_api_token("dataset")
# get url path dataset_id from positional args or kwargs
# Flask passes URL path parameters as positional arguments
dataset_id = None
@@ -255,18 +253,12 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
# Validate dataset if dataset_id is provided
if dataset_id:
dataset_id = str(dataset_id)
dataset = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == api_token.tenant_id,
)
.first()
)
dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise NotFound("Dataset not found.")
if not dataset.enable_api:
raise Forbidden("Dataset api access is not enabled.")
api_token = validate_and_get_api_token("dataset")
tenant_account_join = (
db.session.query(Tenant, TenantAccountJoin)
.where(Tenant.id == api_token.tenant_id)

View File

@@ -34,7 +34,7 @@ def stream_topic_events(
on_subscribe()
while True:
try:
msg = sub.receive(timeout=1)
msg = sub.receive(timeout=0.1)
except SubscriptionClosedError:
return
if msg is None:

View File

@@ -45,8 +45,6 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.file import helpers as file_helpers
from core.file.enums import FileTransferMethod
from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
@@ -58,11 +56,10 @@ from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from core.tools.signature import sign_tool_file
from events.message_event import message_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile
from models.model import AppMode, Conversation, Message, MessageAgentThought
logger = logging.getLogger(__name__)
@@ -157,7 +154,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
id=self._message_id,
mode=self._conversation_mode,
message_id=self._message_id,
answer=self._task_state.llm_result.message.get_text_content(),
answer=cast(str, self._task_state.llm_result.message.content),
created_at=self._message_created_at,
**extras,
),
@@ -170,7 +167,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
mode=self._conversation_mode,
conversation_id=self._conversation_id,
message_id=self._message_id,
answer=self._task_state.llm_result.message.get_text_content(),
answer=cast(str, self._task_state.llm_result.message.content),
created_at=self._message_created_at,
**extras,
),
@@ -283,7 +280,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
# handle output moderation
output_moderation_answer = self.handle_output_moderation_when_task_finished(
self._task_state.llm_result.message.get_text_content()
cast(str, self._task_state.llm_result.message.content)
)
if output_moderation_answer:
self._task_state.llm_result.message.content = output_moderation_answer
@@ -397,7 +394,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
message.message_unit_price = usage.prompt_unit_price
message.message_price_unit = usage.prompt_price_unit
message.answer = (
PromptTemplateParser.remove_template_variables(llm_result.message.get_text_content().strip())
PromptTemplateParser.remove_template_variables(cast(str, llm_result.message.content).strip())
if llm_result.message.content
else ""
)
@@ -466,85 +463,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
metadata=metadata_dict,
)
def _record_files(self):
with Session(db.engine, expire_on_commit=False) as session:
message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
if not message_files:
return None
files_list = []
upload_file_ids = [
mf.upload_file_id
for mf in message_files
if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
]
upload_files_map = {}
if upload_file_ids:
upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
upload_files_map = {uf.id: uf for uf in upload_files}
for message_file in message_files:
upload_file = None
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
upload_file = upload_files_map.get(message_file.upload_file_id)
url = None
filename = "file"
mime_type = "application/octet-stream"
size = 0
extension = ""
if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
url = message_file.url
if message_file.url:
filename = message_file.url.split("/")[-1].split("?")[0] # Remove query params
elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
if upload_file:
url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
filename = upload_file.name
mime_type = upload_file.mime_type or "application/octet-stream"
size = upload_file.size or 0
extension = f".{upload_file.extension}" if upload_file.extension else ""
elif message_file.upload_file_id:
# Fallback: generate URL even if upload_file not found
url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
# For tool files, use URL directly if it's HTTP, otherwise sign it
if message_file.url.startswith("http"):
url = message_file.url
filename = message_file.url.split("/")[-1].split("?")[0]
else:
# Extract tool file id and extension from URL
url_parts = message_file.url.split("/")
if url_parts:
file_part = url_parts[-1].split("?")[0] # Remove query params first
# Use rsplit to correctly handle filenames with multiple dots
if "." in file_part:
tool_file_id, ext = file_part.rsplit(".", 1)
extension = f".{ext}"
else:
tool_file_id = file_part
extension = ".bin"
url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
filename = file_part
transfer_method_value = message_file.transfer_method
remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
file_dict = {
"related_id": message_file.id,
"extension": extension,
"filename": filename,
"size": size,
"mime_type": mime_type,
"transfer_method": transfer_method_value,
"type": message_file.type,
"url": url or "",
"upload_file_id": message_file.upload_file_id or message_file.id,
"remote_url": remote_url,
}
files_list.append(file_dict)
return files_list or None
def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
"""
Agent message to stream response.
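
The get_text_content()/cast(str, ...) swap above hinges on how message content is typed: it may be a plain string or a list of content segments, and a blind cast drops the list case. A minimal sketch of a get_text_content-style helper (segment shape assumed):

    def get_text_content(content) -> str:
        # Content may be a plain string or a list of segments; keep only
        # the textual parts.
        if isinstance(content, str):
            return content
        if isinstance(content, list):
            parts = []
            for seg in content:
                text = seg.get("text") if isinstance(seg, dict) else getattr(seg, "text", None)
                if text:
                    parts.append(text)
            return "".join(parts)
        return ""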

View File

@@ -64,13 +64,7 @@ class MessageCycleManager:
# Use SQLAlchemy 2.x style session.scalar(select(...))
with session_factory.create_session() as session:
message_file = session.scalar(
select(MessageFile)
.where(
MessageFile.message_id == message_id,
)
.where(MessageFile.belongs_to == "assistant")
)
message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
if message_file:
self._message_has_file.add(message_id)

View File

@@ -5,7 +5,7 @@ from collections.abc import Generator
from copy import deepcopy
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING: # pragma: no cover
if TYPE_CHECKING:
from models.model import File
from core.tools.__base.tool_runtime import ToolRuntime
@@ -171,7 +171,7 @@ class Tool(ABC):
def create_file_message(self, file: File) -> ToolInvokeMessage:
return ToolInvokeMessage(
type=ToolInvokeMessage.MessageType.FILE,
message=ToolInvokeMessage.FileMessage(file_marker="file_marker"),
message=ToolInvokeMessage.FileMessage(),
meta={"file": file},
)

View File

@@ -80,14 +80,8 @@ def init_app(app: DifyApp) -> Celery:
worker_hijack_root_logger=False,
timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"),
task_ignore_result=True,
task_annotations=dify_config.CELERY_TASK_ANNOTATIONS,
)
if dify_config.CELERY_BACKEND == "redis":
celery_app.conf.update(
result_backend_transport_options=broker_transport_options,
)
# Apply SSL configuration if enabled
ssl_options = _get_celery_ssl_options()
if ssl_options:

View File

@@ -119,7 +119,7 @@ class RedisClientWrapper:
redis_client: RedisClientWrapper = RedisClientWrapper()
_pubsub_redis_client: redis.Redis | RedisCluster | None = None
pubsub_redis_client: RedisClientWrapper = RedisClientWrapper()
def _get_ssl_configuration() -> tuple[type[Union[Connection, SSLConnection]], dict[str, Any]]:
@@ -232,7 +232,7 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
return client
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> Union[redis.Redis, RedisCluster]:
if use_clusters:
return RedisCluster.from_url(pubsub_url)
return redis.Redis.from_url(pubsub_url)
@@ -256,19 +256,23 @@ def init_app(app: DifyApp):
redis_client.initialize(client)
app.extensions["redis"] = redis_client
global _pubsub_redis_client
_pubsub_redis_client = client
pubsub_client = client
if dify_config.normalized_pubsub_redis_url:
_pubsub_redis_client = _create_pubsub_client(
pubsub_client = _create_pubsub_client(
dify_config.normalized_pubsub_redis_url, dify_config.PUBSUB_REDIS_USE_CLUSTERS
)
pubsub_redis_client.initialize(pubsub_client)
def get_pubsub_redis_client() -> RedisClientWrapper:
return pubsub_redis_client
def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
assert _pubsub_redis_client is not None, "PubSub redis Client should be initialized here."
redis_conn = get_pubsub_redis_client()
if dify_config.PUBSUB_REDIS_CHANNEL_TYPE == "sharded":
return ShardedRedisBroadcastChannel(_pubsub_redis_client)
return RedisBroadcastChannel(_pubsub_redis_client)
return ShardedRedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
return RedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
P = ParamSpec("P")

View File

@@ -1,6 +1,6 @@
import functools
from collections.abc import Callable
from typing import ParamSpec, TypeVar, cast
from typing import Any, TypeVar, cast
from opentelemetry.trace import get_tracer
@@ -8,8 +8,7 @@ from configs import dify_config
from extensions.otel.decorators.handler import SpanHandler
from extensions.otel.runtime import is_instrument_flag_enabled
P = ParamSpec("P")
R = TypeVar("R")
T = TypeVar("T", bound=Callable[..., Any])
_HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}
@@ -21,7 +20,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
return _HANDLER_INSTANCES[handler_class]
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], T]:
"""
Decorator that traces a function with an OpenTelemetry span.
@@ -31,9 +30,9 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Call
:param handler_class: Optional handler class to use for this span. If None, uses the default SpanHandler.
"""
def decorator(func: Callable[P, R]) -> Callable[P, R]:
def decorator(func: T) -> T:
@functools.wraps(func)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
def wrapper(*args: Any, **kwargs: Any) -> Any:
if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
return func(*args, **kwargs)
@@ -47,6 +46,6 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Call
kwargs=kwargs,
)
return cast(Callable[P, R], wrapper)
return cast(T, wrapper)
return decorator
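
The typing change above trades ParamSpec-based signatures for a single TypeVar bound to Callable[..., Any]. For reference, a minimal sketch of the ParamSpec pattern, which lets type checkers see the wrapped function's exact parameters:

    import functools
    from collections.abc import Callable
    from typing import ParamSpec, TypeVar

    P = ParamSpec("P")
    R = TypeVar("R")

    def passthrough(func: Callable[P, R]) -> Callable[P, R]:
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            # Callers keep func's signature as far as type checkers are concerned.
            return func(*args, **kwargs)
        return wrapper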

View File

@@ -1,11 +1,9 @@
import inspect
from collections.abc import Callable, Mapping
from typing import Any, TypeVar
from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
R = TypeVar("R")
class SpanHandler:
"""
@@ -33,9 +31,9 @@ class SpanHandler:
def _extract_arguments(
self,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> dict[str, Any] | None:
"""
Extract function arguments using inspect.signature.
@@ -64,10 +62,10 @@ class SpanHandler:
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
"""
Fully control the wrapper behavior.

View File

@@ -1,6 +1,6 @@
import logging
from collections.abc import Callable, Mapping
from typing import Any, TypeVar
from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
from opentelemetry.util.types import AttributeValue
@@ -12,19 +12,16 @@ from models.model import Account
logger = logging.getLogger(__name__)
R = TypeVar("R")
class AppGenerateHandler(SpanHandler):
"""Span handler for ``AppGenerateService.generate``."""
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
try:
arguments = self._extract_arguments(wrapped, args, kwargs)
if not arguments:

View File

@@ -152,7 +152,7 @@ class RedisSubscriptionBase(Subscription):
"""Iterator for consuming messages from the subscription."""
while not self._closed.is_set():
try:
item = self._queue.get(timeout=1)
item = self._queue.get(timeout=0.1)
except queue.Empty:
continue

View File

@@ -1,7 +1,7 @@
from __future__ import annotations
from libs.broadcast_channel.channel import Producer, Subscriber, Subscription
from redis import Redis, RedisCluster
from redis import Redis
from ._subscription import RedisSubscriptionBase
@@ -18,7 +18,7 @@ class BroadcastChannel:
def __init__(
self,
redis_client: Redis | RedisCluster,
redis_client: Redis,
):
self._client = redis_client
@@ -27,7 +27,7 @@ class BroadcastChannel:
class Topic:
def __init__(self, redis_client: Redis | RedisCluster, topic: str):
def __init__(self, redis_client: Redis, topic: str):
self._client = redis_client
self._topic = topic

View File

@@ -70,9 +70,8 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
# Since we have already filtered at the call site, we can safely set
# `ignore_subscribe_messages=False`.
if isinstance(self._client, RedisCluster):
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message` without
# specifying the `target_node` argument would use busy-looping to wait
# for incoming message, consuming excessive CPU quota.
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message`
# would use busy-looping to wait for incoming message, consuming excessive CPU quota.
#
# Here we specify the `target_node` to mitigate this problem.
node = self._client.get_node_from_key(self._topic)
@@ -81,10 +80,8 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
timeout=1,
target_node=node,
)
elif isinstance(self._client, Redis):
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]
else:
raise AssertionError("client should be either Redis or RedisCluster.")
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]
def _get_message_type(self) -> str:
return "smessage"

View File

@@ -1,59 +0,0 @@
"""add unique constraint to tenant_default_models
Revision ID: fix_tenant_default_model_unique
Revises: 9d77545f524e
Create Date: 2026-01-19 15:07:00.000000
"""
from alembic import op
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f55813ffe2c8'
down_revision = 'c3df22613c99'
branch_labels = None
depends_on = None
def upgrade():
# First, remove duplicate records keeping only the most recent one per (tenant_id, model_type)
# This is necessary before adding the unique constraint
conn = op.get_bind()
# Delete duplicates: keep the record with the latest updated_at for each (tenant_id, model_type)
# If updated_at is the same, keep the one with the largest id as tiebreaker
if _is_pg(conn):
# PostgreSQL: Use DISTINCT ON for efficient deduplication
conn.execute(sa.text("""
DELETE FROM tenant_default_models
WHERE id NOT IN (
SELECT DISTINCT ON (tenant_id, model_type) id
FROM tenant_default_models
ORDER BY tenant_id, model_type, updated_at DESC, id DESC
)
"""))
else:
# MySQL: Use self-join to find and delete duplicates
# Keep the record with latest updated_at (or largest id if updated_at is equal)
conn.execute(sa.text("""
DELETE t1 FROM tenant_default_models t1
INNER JOIN tenant_default_models t2
ON t1.tenant_id = t2.tenant_id
AND t1.model_type = t2.model_type
AND (t1.updated_at < t2.updated_at
OR (t1.updated_at = t2.updated_at AND t1.id < t2.id))
"""))
# Now add the unique constraint
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.create_unique_constraint('unique_tenant_default_model_type', ['tenant_id', 'model_type'])
def downgrade():
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.drop_constraint('unique_tenant_default_model_type', type_='unique')

View File

@@ -1,39 +0,0 @@
"""fix index to optimize message clean job performance
Revision ID: fce013ca180e
Revises: f55813ffe2c8
Create Date: 2026-02-11 15:49:17.603638
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fce013ca180e'
down_revision = 'f55813ffe2c8'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('message_created_at_idx'))
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
batch_op.drop_index('saved_message_message_id_idx')
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False)
# ### end Alembic commands ###

View File

@@ -227,7 +227,7 @@ class App(Base):
with Session(db.engine) as session:
if api_provider_ids:
existing_api_providers = [
str(api_provider.id)
api_provider.id
for api_provider in session.execute(
text("SELECT id FROM tool_api_providers WHERE id IN :provider_ids"),
{"provider_ids": tuple(api_provider_ids)},
@@ -1040,6 +1040,7 @@ class Message(Base):
Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
Index("message_account_idx", "app_id", "from_source", "from_account_id"),
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
Index("message_created_at_idx", "created_at"),
Index("message_app_mode_idx", "app_mode"),
Index("message_created_at_id_idx", "created_at", "id"),
)

View File

@@ -181,7 +181,6 @@ class TenantDefaultModel(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
sa.UniqueConstraint("tenant_id", "model_type", name="unique_tenant_default_model_type"),
)
id: Mapped[str] = mapped_column(

View File

@@ -16,7 +16,6 @@ class SavedMessage(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
sa.Index("saved_message_message_id_idx", "message_id"),
)
id: Mapped[str] = mapped_column(

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.13.0"
version = "1.12.1"
requires-python = ">=3.11,<3.13"
dependencies = [
@@ -23,7 +23,7 @@ dependencies = [
"gevent~=25.9.1",
"gmpy2~=2.2.1",
"google-api-core==2.18.0",
"google-api-python-client==2.189.0",
"google-api-python-client==2.90.0",
"google-auth==2.29.0",
"google-auth-httplib2==0.2.0",
"google-cloud-aiplatform==1.49.0",

View File

@@ -1225,12 +1225,7 @@ class TenantService:
@staticmethod
def remove_member_from_tenant(tenant: Tenant, account: Account, operator: Account):
"""Remove member from tenant.
If the removed member has ``AccountStatus.PENDING`` (invited but never
activated) and no remaining workspace memberships, the orphaned account
record is deleted as well.
"""
"""Remove member from tenant"""
if operator.id == account.id:
raise CannotOperateSelfError("Cannot operate self.")
@@ -1240,31 +1235,9 @@ class TenantService:
if not ta:
raise MemberNotInTenantError("Member not in tenant.")
# Capture identifiers before any deletions; attribute access on the ORM
# object may fail after commit() expires the instance.
account_id = account.id
account_email = account.email
db.session.delete(ta)
# Clean up orphaned pending accounts (invited but never activated)
should_delete_account = False
if account.status == AccountStatus.PENDING:
# autoflush flushes ta deletion before this query, so 0 means no remaining joins
remaining_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).count()
if remaining_joins == 0:
db.session.delete(account)
should_delete_account = True
db.session.commit()
if should_delete_account:
logger.info(
"Deleted orphaned pending account: account_id=%s, email=%s",
account_id,
account_email,
)
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
@@ -1272,13 +1245,13 @@ class TenantService:
from services.enterprise.account_deletion_sync import sync_workspace_member_removal
sync_success = sync_workspace_member_removal(
workspace_id=tenant.id, member_id=account_id, source="workspace_member_removed"
workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
)
if not sync_success:
logger.warning(
"Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
tenant.id,
account_id,
account.id,
)
@staticmethod
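
The removed comment about capturing identifiers before deletion points at SQLAlchemy's expire_on_commit behavior: after commit(), instances are expired, and refreshing a deleted row raises. A minimal sketch of the pitfall it guarded against (function shape illustrative):

    from sqlalchemy.orm import Session

    def remove_and_log(session: Session, account) -> None:
        # Capture plain values first; touching account.id after the commit
        # would trigger a refresh of a row that no longer exists.
        account_id, email = account.id, account.email
        session.delete(account)
        session.commit()
        print(f"deleted {account_id} ({email})")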

View File

@@ -131,54 +131,33 @@ class AppGenerateService:
elif app_model.mode == AppMode.ADVANCED_CHAT:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
if streaming:
# Streaming mode: subscribe to SSE and enqueue the execution on first subscriber
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=True,
call_depth=0,
)
payload_json = payload.model_dump_json()
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
generator = AdvancedChatAppGenerator()
return rate_limit.generate(
generator.convert_to_event_stream(
generator.retrieve_events(
AppMode.ADVANCED_CHAT,
payload.workflow_run_id,
on_subscribe=on_subscribe,
),
),
request_id=request_id,
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=streaming,
call_depth=0,
)
else:
# Blocking mode: run synchronously and return JSON instead of SSE
# Keep behaviour consistent with WORKFLOW blocking branch.
advanced_generator = AdvancedChatAppGenerator()
return rate_limit.generate(
advanced_generator.convert_to_event_stream(
advanced_generator.generate(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
workflow_run_id=str(uuid.uuid4()),
streaming=False,
)
payload_json = payload.model_dump_json()
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
generator = AdvancedChatAppGenerator()
return rate_limit.generate(
generator.convert_to_event_stream(
generator.retrieve_events(
AppMode.ADVANCED_CHAT,
payload.workflow_run_id,
on_subscribe=on_subscribe,
),
request_id=request_id,
)
),
request_id=request_id,
)
elif app_model.mode == AppMode.WORKFLOW:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)

View File

@@ -22,7 +22,7 @@ from libs.exception import BaseHTTPException
from models.human_input import RecipientType
from models.model import App, AppMode
from repositories.factory import DifyAPIRepositoryFactory
from tasks.app_generate.workflow_execute_task import resume_app_execution
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE, resume_app_execution
class Form:
@@ -230,6 +230,7 @@ class HumanInputService:
try:
resume_app_execution.apply_async(
kwargs={"payload": payload},
queue=WORKFLOW_BASED_APP_EXECUTION_QUEUE,
)
except Exception: # pragma: no cover
logger.exception("Failed to enqueue resume task for workflow run %s", workflow_run_id)

View File

@@ -1329,24 +1329,10 @@ class RagPipelineService:
"""
Get datasource plugins
"""
dataset: Dataset | None = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == tenant_id,
)
.first()
)
dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise ValueError("Dataset not found")
pipeline: Pipeline | None = (
db.session.query(Pipeline)
.where(
Pipeline.id == dataset.pipeline_id,
Pipeline.tenant_id == tenant_id,
)
.first()
)
pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
if not pipeline:
raise ValueError("Pipeline not found")
@@ -1427,24 +1413,10 @@ class RagPipelineService:
"""
Get pipeline
"""
dataset: Dataset | None = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == tenant_id,
)
.first()
)
dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise ValueError("Dataset not found")
pipeline: Pipeline | None = (
db.session.query(Pipeline)
.where(
Pipeline.id == dataset.pipeline_id,
Pipeline.tenant_id == tenant_id,
)
.first()
)
pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
if not pipeline:
raise ValueError("Pipeline not found")
return pipeline

View File

@@ -1,13 +1,10 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Sequence
from typing import cast
import sqlalchemy as sa
from sqlalchemy import delete, select, tuple_
from sqlalchemy import delete, select
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session
@@ -196,15 +193,11 @@ class MessagesCleanService:
self._end_before,
)
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
while True:
stats["batches"] += 1
batch_start = time.monotonic()
# Step 1: Fetch a batch of messages using cursor
with Session(db.engine, expire_on_commit=False) as session:
fetch_messages_start = time.monotonic()
msg_stmt = (
select(Message.id, Message.app_id, Message.created_at)
.where(Message.created_at < self._end_before)
@@ -216,13 +209,13 @@ class MessagesCleanService:
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)
# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
# This translates to:
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
if _cursor:
# Continuing from previous batch
msg_stmt = msg_stmt.where(
tuple_(Message.created_at, Message.id)
> tuple_(
sa.literal(_cursor[0], type_=sa.DateTime()),
sa.literal(_cursor[1], type_=Message.id.type),
)
(Message.created_at > _cursor[0])
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
)
raw_messages = list(session.execute(msg_stmt).all())
@@ -230,12 +223,6 @@ class MessagesCleanService:
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
for msg_id, app_id, msg_created_at in raw_messages
]
logger.info(
"clean_messages (batch %s): fetched %s messages in %sms",
stats["batches"],
len(messages),
int((time.monotonic() - fetch_messages_start) * 1000),
)
# Track total messages fetched across all batches
stats["total_messages"] += len(messages)
@@ -254,16 +241,8 @@ class MessagesCleanService:
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
continue
fetch_apps_start = time.monotonic()
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
apps = list(session.execute(app_stmt).all())
logger.info(
"clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
stats["batches"],
len(apps),
len(app_ids),
int((time.monotonic() - fetch_apps_start) * 1000),
)
if not apps:
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
@@ -273,15 +252,7 @@ class MessagesCleanService:
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
# Step 3: Delegate to policy to determine which messages to delete
policy_start = time.monotonic()
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
logger.info(
"clean_messages (batch %s): policy selected %s/%s messages in %sms",
stats["batches"],
len(message_ids_to_delete),
len(messages),
int((time.monotonic() - policy_start) * 1000),
)
if not message_ids_to_delete:
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
@@ -292,20 +263,14 @@ class MessagesCleanService:
# Step 4: Batch delete messages and their relations
if not self._dry_run:
with Session(db.engine, expire_on_commit=False) as session:
delete_relations_start = time.monotonic()
# Delete related records first
self._batch_delete_message_relations(session, message_ids_to_delete)
delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)
# Delete messages
delete_messages_start = time.monotonic()
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
delete_result = cast(CursorResult, session.execute(delete_stmt))
messages_deleted = delete_result.rowcount
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
commit_start = time.monotonic()
session.commit()
commit_ms = int((time.monotonic() - commit_start) * 1000)
stats["total_deleted"] += messages_deleted
@@ -315,19 +280,6 @@ class MessagesCleanService:
len(messages),
messages_deleted,
)
logger.info(
"clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
stats["batches"],
delete_relations_ms,
delete_messages_ms,
commit_ms,
int((time.monotonic() - batch_start) * 1000),
)
# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
time.sleep(sleep_ms / 1000)
else:
# Log random sample of message IDs that would be deleted (up to 10)
sample_size = min(10, len(message_ids_to_delete))
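
The cursor condition above is keyset pagination: resume strictly after the last (created_at, id) pair instead of using OFFSET. A standalone sketch of the tuple_ form this commit removes, with the model trimmed to the two cursor columns (row-value comparison works on PostgreSQL and MySQL):

    import datetime

    import sqlalchemy as sa
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Message(Base):
        __tablename__ = "messages"
        id: Mapped[str] = mapped_column(sa.String, primary_key=True)
        created_at: Mapped[datetime.datetime] = mapped_column(sa.DateTime)

    def next_batch(session, cursor, batch_size=1000):
        stmt = (
            sa.select(Message.id, Message.created_at)
            .order_by(Message.created_at, Message.id)
            .limit(batch_size)
        )
        if cursor:  # (last_created_at, last_message_id)
            stmt = stmt.where(
                sa.tuple_(Message.created_at, Message.id)
                > sa.tuple_(sa.literal(cursor[0]), sa.literal(cursor[1]))
            )
        return session.execute(stmt).all()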

View File

@@ -1,8 +1,5 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Iterable, Sequence
import click
@@ -75,12 +72,7 @@ class WorkflowRunCleanup:
batch_index = 0
last_seen: tuple[datetime.datetime, str] | None = None
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
while True:
batch_start = time.monotonic()
fetch_start = time.monotonic()
run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
start_from=self.window_start,
end_before=self.window_end,
@@ -88,30 +80,12 @@ class WorkflowRunCleanup:
batch_size=self.batch_size,
)
if not run_rows:
logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
break
batch_index += 1
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
logger.info(
"workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
batch_index,
len(run_rows),
int((time.monotonic() - fetch_start) * 1000),
)
tenant_ids = {row.tenant_id for row in run_rows}
filter_start = time.monotonic()
free_tenants = self._filter_free_tenants(tenant_ids)
logger.info(
"workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
batch_index,
len(free_tenants),
len(tenant_ids),
int((time.monotonic() - filter_start) * 1000),
)
free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
paid_or_skipped = len(run_rows) - len(free_runs)
@@ -130,17 +104,11 @@ class WorkflowRunCleanup:
total_runs_targeted += len(free_runs)
if self.dry_run:
count_start = time.monotonic()
batch_counts = self.workflow_run_repo.count_runs_with_related(
free_runs,
count_node_executions=self._count_node_executions,
count_trigger_logs=self._count_trigger_logs,
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
batch_index,
int((time.monotonic() - count_start) * 1000),
)
if related_totals is not None:
for key in related_totals:
related_totals[key] += batch_counts.get(key, 0)
@@ -152,21 +120,14 @@ class WorkflowRunCleanup:
fg="yellow",
)
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
batch_index,
int((time.monotonic() - batch_start) * 1000),
)
continue
try:
delete_start = time.monotonic()
counts = self.workflow_run_repo.delete_runs_with_related(
free_runs,
delete_node_executions=self._delete_node_executions,
delete_trigger_logs=self._delete_trigger_logs,
)
delete_ms = int((time.monotonic() - delete_start) * 1000)
except Exception:
logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
raise
@@ -182,17 +143,6 @@ class WorkflowRunCleanup:
fg="green",
)
)
logger.info(
"workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
batch_index,
delete_ms,
int((time.monotonic() - batch_start) * 1000),
)
# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
time.sleep(sleep_ms / 1000)
if self.dry_run:
if self.window_start:

View File

@@ -129,15 +129,15 @@ def build_workflow_event_stream(
return
try:
event = buffer_state.queue.get(timeout=1)
event = buffer_state.queue.get(timeout=0.1)
except queue.Empty:
current_time = time.time()
if current_time - last_msg_time > idle_timeout:
logger.debug(
"Idle timeout of %s seconds reached, closing workflow event stream.",
"No workflow events received for %s seconds, keeping stream open",
idle_timeout,
)
return
last_msg_time = current_time
if current_time - last_ping_time >= ping_interval:
yield StreamEvent.PING.value
last_ping_time = current_time
@@ -405,7 +405,7 @@ def _start_buffering(subscription) -> BufferState:
dropped_count = 0
try:
while not buffer_state.stop_event.is_set():
msg = subscription.receive(timeout=1)
msg = subscription.receive(timeout=0.1)
if msg is None:
continue
event = _parse_event_message(msg)
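
The loop above polls the queue with a short timeout so pings and idle checks fire even when no events arrive; whether an idle timeout closes the stream or keeps it open is the policy this commit flips. A minimal sketch of the pattern:

    import queue
    import time

    def event_stream(q: queue.Queue, idle_timeout: float = 60.0, ping_interval: float = 10.0):
        last_msg = last_ping = time.time()
        while True:
            try:
                event = q.get(timeout=0.1)
            except queue.Empty:
                now = time.time()
                if now - last_msg > idle_timeout:
                    return  # or reset last_msg to keep the stream open
                if now - last_ping >= ping_interval:
                    yield "event: ping\n\n"
                    last_ping = now
                continue
            last_msg = time.time()
            yield f"data: {event}\n\n"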

View File

@@ -1,7 +1,6 @@
from flask_login import current_user
from configs import dify_config
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
from services.account_service import TenantService
@@ -54,12 +53,7 @@ class WorkspaceService:
from services.credit_pool_service import CreditPoolService
paid_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="paid")
# if the tenant is not on the sandbox plan and the paid pool is not full, use the paid pool
if (
feature.billing.subscription.plan != CloudPlan.SANDBOX
and paid_pool is not None
and (paid_pool.quota_limit == -1 or paid_pool.quota_limit > paid_pool.quota_used)
):
if paid_pool:
tenant_info["trial_credits"] = paid_pool.quota_limit
tenant_info["trial_credits_used"] = paid_pool.quota_used
else:

View File

@@ -23,40 +23,40 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
"""
logger.info(click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green"))
start_at = time.perf_counter()
total_index_node_ids = []
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
for document_id in document_ids:
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
total_index_node_ids.extend([segment.index_node_id for segment in segments])
for document_id in document_ids:
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(
dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
)
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
session.execute(segment_delete_stmt)
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when import form notion document deleted failed")

View File

@@ -27,7 +27,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
"""
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
tenant_id = None
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
@@ -36,120 +35,94 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Document not found: {document_id}", fg="red"))
return
if document.indexing_status == "parsing":
logger.info(click.style(f"Document {document_id} is already being processed, skipping", fg="yellow"))
return
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
data_source_info = document.data_source_info_dict
if document.data_source_type != "notion_import":
logger.info(click.style(f"Document {document_id} is not a notion_import, skipping", fg="yellow"))
return
if document.data_source_type == "notion_import":
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=document.tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
tenant_id = document.tenant_id
index_type = document.doc_form
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
index_node_ids = [segment.index_node_id for segment in segments]
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
tenant_id,
credential_id,
)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
document.tenant_id,
credential_id,
)
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
return
return
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=tenant_id,
)
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=document.tenant_id,
)
last_edited_time = loader.get_notion_last_edited_time()
if last_edited_time == page_edited_time:
logger.info(click.style(f"Document {document_id} content unchanged, skipping sync", fg="yellow"))
return
last_edited_time = loader.get_notion_last_edited_time()
logger.info(click.style(f"Document {document_id} content changed, starting sync", fg="green"))
# check the page is updated
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
try:
index_processor = IndexProcessorFactory(index_type).init_index_processor()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
logger.info(click.style(f"Cleaned vector index for document {document_id}", fg="green"))
except Exception:
logger.exception("Failed to clean vector index for document %s", document_id)
# delete all document segment and index
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
index_type = document.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if not document:
logger.warning(click.style(f"Document {document_id} not found during sync", fg="yellow"))
return
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
data_source_info = document.data_source_info_dict
data_source_info["last_edited_time"] = last_edited_time
document.data_source_info = data_source_info
# delete from vector index
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
session.execute(segment_delete_stmt)
end_at = time.perf_counter()
logger.info(
click.style(
"Cleaned document when document update data source or process rule: {} latency: {}".format(
document_id, end_at - start_at
),
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when document update data source or process rule failed")
logger.info(click.style(f"Deleted segments for document {document_id}", fg="green"))
try:
indexing_runner = IndexingRunner()
with session_factory.create_session() as session:
document = session.query(Document).filter_by(id=document_id).first()
if document:
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"Sync completed for document {document_id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception as e:
logger.exception("document_indexing_sync_task failed for document_id: %s", document_id)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
document.indexing_status = "error"
document.error = str(e)
document.stopped_at = naive_utc_now()
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("document_indexing_sync_task failed, document_id: %s", document_id)

View File

@@ -153,7 +153,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task(document_ids, dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(document_ids)).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(document_ids))
@@ -161,9 +162,9 @@ class TestCleanNotionDocumentTask:
== 0
)
# Verify index processor was called
# Verify index processor was called for each document
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_called_once()
assert mock_processor.clean.call_count == len(document_ids)
# This test successfully verifies:
# 1. Document records are properly deleted from the database
@@ -185,12 +186,12 @@ class TestCleanNotionDocumentTask:
non_existent_dataset_id = str(uuid.uuid4())
document_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
# Execute cleanup task with non-existent dataset - expect exception
with pytest.raises(Exception, match="Document has no dataset"):
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Execute cleanup task with non-existent dataset
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Verify that the index processor factory was not used
mock_index_processor_factory.return_value.init_index_processor.assert_not_called()
# Verify that the index processor was not called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_not_called()
def test_clean_notion_document_task_empty_document_list(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -228,13 +229,9 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task with empty document list
clean_notion_document_task([], dataset.id)
# Verify that the index processor was called once with empty node list
# Verify that the index processor was not called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
assert mock_processor.clean.call_count == 1
args, kwargs = mock_processor.clean.call_args
# args: (dataset, total_index_node_ids)
assert isinstance(args[0], Dataset)
assert args[1] == []
mock_processor.clean.assert_not_called()
def test_clean_notion_document_task_with_different_index_types(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -318,7 +315,8 @@ class TestCleanNotionDocumentTask:
# Note: This test successfully verifies cleanup with different document types.
# The task properly handles various index types and document configurations.
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == document.id)
@@ -406,7 +404,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0
@@ -509,7 +508,8 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task(documents_to_clean, dataset.id)
# Verify only specified documents' segments are deleted
# Verify only specified documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(documents_to_clean)).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(documents_to_clean))
@@ -697,12 +697,11 @@ class TestCleanNotionDocumentTask:
db_session_with_containers.commit()
# Mock index processor to raise an exception
mock_index_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_index_processor = mock_index_processor_factory.init_index_processor.return_value
mock_index_processor.clean.side_effect = Exception("Index processor error")
# Execute cleanup task - current implementation propagates the exception
with pytest.raises(Exception, match="Index processor error"):
clean_notion_document_task([document.id], dataset.id)
# Execute cleanup task - it should handle the exception gracefully
clean_notion_document_task([document.id], dataset.id)
# Note: This test demonstrates the task's error handling capability.
# Even with external service errors, the database operations complete successfully.
@@ -804,7 +803,8 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all segments are deleted
# Verify all documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@@ -914,7 +914,8 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task([target_document.id], target_dataset.id)
# Verify only documents' segments from target dataset are deleted
# Verify only documents from target dataset are deleted
assert db_session_with_containers.query(Document).filter(Document.id == target_document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == target_document.id)
@@ -1029,7 +1030,8 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all segments are deleted regardless of status
# Verify all documents and segments are deleted regardless of status
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@@ -1140,7 +1142,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0
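The rewritten assertions above imply the task now no-ops instead of raising when there is nothing to clean (missing dataset, empty document list). A hedged sketch of that guard shape, with a hypothetical clean_documents in place of the real task:

def clean_documents(document_ids: list[str], dataset: object | None) -> None:
    # Early-return guard: with no documents or no dataset there is nothing
    # to clean, so the index processor is never constructed or invoked.
    if not document_ids or dataset is None:
        return
    # ... per-document index cleanup and row deletion would follow here ...

clean_documents([], None)  # returns quietly, matching the rewritten tests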

View File

@@ -51,7 +51,7 @@ def _patch_redis_clients_on_loaded_modules():
continue
if hasattr(module, "redis_client"):
module.redis_client = redis_mock
if hasattr(module, "_pubsub_redis_client"):
if hasattr(module, "pubsub_redis_client"):
module.pubsub_redis_client = redis_mock
@@ -72,7 +72,7 @@ def _patch_redis_clients():
with (
patch.object(ext_redis, "redis_client", redis_mock),
patch.object(ext_redis, "_pubsub_redis_client", redis_mock),
patch.object(ext_redis, "pubsub_redis_client", redis_mock),
):
_patch_redis_clients_on_loaded_modules()
yield
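For context, the fixture above swaps every module-level Redis handle for a mock across all loaded modules. A self-contained sketch of that sweep:

import sys
from unittest.mock import MagicMock

def patch_redis_clients_on_loaded_modules(redis_mock: MagicMock) -> None:
    # Walk every already-imported module and replace any module-level
    # redis handles with the mock, mirroring the conftest helper above.
    for module in list(sys.modules.values()):
        if module is None:
            continue
        if hasattr(module, "redis_client"):
            module.redis_client = redis_mock
        if hasattr(module, "pubsub_redis_client"):
            module.pubsub_redis_client = redis_mock

patch_redis_clients_on_loaded_modules(MagicMock())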

View File

@@ -1,34 +0,0 @@
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from controllers.console.app.conversation import _get_conversation
def test_get_conversation_mark_read_keeps_updated_at_unchanged():
app_model = SimpleNamespace(id="app-id")
account = SimpleNamespace(id="account-id")
conversation = MagicMock()
conversation.id = "conversation-id"
with (
patch("controllers.console.app.conversation.current_account_with_tenant", return_value=(account, None)),
patch("controllers.console.app.conversation.naive_utc_now", return_value=datetime(2026, 2, 9, 0, 0, 0)),
patch("controllers.console.app.conversation.db.session") as mock_session,
):
mock_session.query.return_value.where.return_value.first.return_value = conversation
_get_conversation(app_model, "conversation-id")
statement = mock_session.execute.call_args[0][0]
compiled = statement.compile()
sql_text = str(compiled).lower()
compact_sql_text = sql_text.replace(" ", "")
params = compiled.params
assert "updated_at=current_timestamp" not in compact_sql_text
assert "updated_at=conversations.updated_at" in compact_sql_text
assert "read_at=:read_at" in compact_sql_text
assert "read_account_id=:read_account_id" in compact_sql_text
assert params["read_at"] == datetime(2026, 2, 9, 0, 0, 0)
assert params["read_account_id"] == "account-id"

View File

@@ -1,286 +1,92 @@
"""Tests for remote file upload API endpoints using Flask-RESTX."""
import contextlib
import builtins
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import Mock, patch
from unittest.mock import patch
import httpx
import pytest
from flask import Flask, g
from flask import Flask
from flask.views import MethodView
from extensions import ext_fastopenapi
if not hasattr(builtins, "MethodView"):
builtins.MethodView = MethodView # type: ignore[attr-defined]
@pytest.fixture
def app() -> Flask:
"""Create Flask app for testing."""
app = Flask(__name__)
app.config["TESTING"] = True
app.config["SECRET_KEY"] = "test-secret-key"
return app
@pytest.fixture
def client(app):
"""Create test client with console blueprint registered."""
from controllers.console import bp
def test_console_remote_files_fastopenapi_get_info(app: Flask):
ext_fastopenapi.init_app(app)
app.register_blueprint(bp)
return app.test_client()
@pytest.fixture
def mock_account():
"""Create a mock account for testing."""
from models import Account
account = Mock(spec=Account)
account.id = "test-account-id"
account.current_tenant_id = "test-tenant-id"
return account
@pytest.fixture
def auth_ctx(app, mock_account):
"""Context manager to set auth/tenant context in flask.g for a request."""
@contextlib.contextmanager
def _ctx():
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
yield
return _ctx
class TestGetRemoteFileInfo:
"""Test GET /console/api/remote-files/<path:url> endpoint."""
def test_get_remote_file_info_success(self, app, client, mock_account):
"""Test successful retrieval of remote file info."""
response = httpx.Response(
200,
request=httpx.Request("HEAD", "http://example.com/file.txt"),
headers={"Content-Type": "text/plain", "Content-Length": "1024"},
)
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response),
patch("libs.login.check_csrf_token", return_value=None),
):
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
assert resp.status_code == 200
data = resp.get_json()
assert data["file_type"] == "text/plain"
assert data["file_length"] == 1024
def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account):
"""Test fallback to GET when HEAD returns non-200 status."""
head_response = httpx.Response(
404,
request=httpx.Request("HEAD", "http://example.com/file.pdf"),
)
get_response = httpx.Response(
200,
request=httpx.Request("GET", "http://example.com/file.pdf"),
headers={"Content-Type": "application/pdf", "Content-Length": "2048"},
)
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response),
patch("libs.login.check_csrf_token", return_value=None),
):
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
assert resp.status_code == 200
data = resp.get_json()
assert data["file_type"] == "application/pdf"
assert data["file_length"] == 2048
class TestRemoteFileUpload:
"""Test POST /console/api/remote-files/upload endpoint."""
@pytest.mark.parametrize(
("head_status", "use_get"),
[
(200, False), # HEAD succeeds
(405, True), # HEAD fails -> fallback GET
],
response = httpx.Response(
200,
request=httpx.Request("HEAD", "http://example.com/file.txt"),
headers={"Content-Type": "text/plain", "Content-Length": "10"},
)
def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get):
url = "http://example.com/file.pdf"
head_resp = httpx.Response(
head_status,
request=httpx.Request("HEAD", url),
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
)
get_resp = httpx.Response(
200,
request=httpx.Request("GET", url),
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
content=b"file content",
)
file_info = SimpleNamespace(
extension="pdf",
size=1024,
filename="file.pdf",
mimetype="application/pdf",
)
uploaded_file = SimpleNamespace(
id="uploaded-file-id",
name="file.pdf",
size=1024,
extension="pdf",
mime_type="application/pdf",
created_by="test-account-id",
created_at=datetime(2024, 1, 1, 12, 0, 0),
)
with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
client = app.test_client()
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head,
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get,
patch(
"controllers.console.remote_files.helpers.guess_file_info_from_response",
return_value=file_info,
),
patch(
"controllers.console.remote_files.FileService.is_file_size_within_limit",
return_value=True,
),
patch("controllers.console.remote_files.db", spec=["engine"]),
patch("controllers.console.remote_files.FileService") as mock_file_service,
patch(
"controllers.console.remote_files.file_helpers.get_signed_file_url",
return_value="http://example.com/signed-url",
),
patch("libs.login.check_csrf_token", return_value=None),
):
mock_file_service.return_value.upload_file.return_value = uploaded_file
assert resp.status_code == 200
assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
assert resp.status_code == 201
p_head.assert_called_once()
# GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds
p_get.assert_called_once()
mock_file_service.return_value.upload_file.assert_called_once()
def test_console_remote_files_fastopenapi_upload(app: Flask):
ext_fastopenapi.init_app(app)
data = resp.get_json()
assert data["id"] == "uploaded-file-id"
assert data["name"] == "file.pdf"
assert data["size"] == 1024
assert data["extension"] == "pdf"
assert data["url"] == "http://example.com/signed-url"
assert data["mime_type"] == "application/pdf"
assert data["created_by"] == "test-account-id"
@pytest.mark.parametrize(
("size_ok", "raises", "expected_status", "expected_msg"),
[
# When the size check fails in the controller, the API returns 413 with the message "File size exceeded..."
(False, None, 413, "file size exceeded"),
# When the service raises an unsupported-type error, the controller maps it to 415 with "File type not allowed."
(True, "unsupported", 415, "file type not allowed"),
],
head_response = httpx.Response(
200,
request=httpx.Request("GET", "http://example.com/file.txt"),
content=b"hello",
)
def test_upload_remote_file_errors(
self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg
file_info = SimpleNamespace(
extension="txt",
size=5,
filename="file.txt",
mimetype="text/plain",
)
uploaded = SimpleNamespace(
id="file-id",
name="file.txt",
size=5,
extension="txt",
mime_type="text/plain",
created_by="user-id",
created_at=datetime(2024, 1, 1),
)
with (
patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
patch("controllers.console.remote_files.FileService.__init__", return_value=None),
patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
):
url = "http://example.com/x.pdf"
head_resp = httpx.Response(
200,
request=httpx.Request("HEAD", url),
headers={"Content-Type": "application/pdf", "Content-Length": "9"},
client = app.test_client()
resp = client.post(
"/console/api/remote-files/upload",
json={"url": "http://example.com/file.txt"},
)
file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf")
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp),
patch(
"controllers.console.remote_files.helpers.guess_file_info_from_response",
return_value=file_info,
),
patch(
"controllers.console.remote_files.FileService.is_file_size_within_limit",
return_value=size_ok,
),
patch("controllers.console.remote_files.db", spec=["engine"]),
patch("libs.login.check_csrf_token", return_value=None),
):
if raises == "unsupported":
from services.errors.file import UnsupportedFileTypeError
with patch("controllers.console.remote_files.FileService") as mock_file_service:
mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad")
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
else:
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
assert resp.status_code == expected_status
data = resp.get_json()
msg = (data.get("error") or {}).get("message") or data.get("message", "")
assert expected_msg in msg.lower()
def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx):
"""Test upload when fetching of remote file fails."""
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch(
"controllers.console.remote_files.ssrf_proxy.head",
side_effect=httpx.RequestError("Connection failed"),
),
patch("libs.login.check_csrf_token", return_value=None),
):
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": "http://unreachable.com/file.pdf"},
)
assert resp.status_code == 400
data = resp.get_json()
msg = (data.get("error") or {}).get("message") or data.get("message", "")
assert "failed to fetch" in msg.lower()
assert resp.status_code == 201
assert resp.get_json() == {
"id": "file-id",
"name": "file.txt",
"size": 5,
"extension": "txt",
"url": "signed-url",
"mime_type": "text/plain",
"created_by": "user-id",
"created_at": int(uploaded.created_at.timestamp()),
}
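Both versions of this suite lean on the same stubbing trick: hand-build an httpx.Response tied to a synthetic request, then patch the HTTP call out from under the code under test. A standalone sketch of that pattern against a hypothetical helper:

from unittest.mock import patch

import httpx

def fetch_remote_file_info(url: str) -> tuple[str, int]:
    # Hypothetical helper: probe with HEAD and read type/length headers.
    response = httpx.head(url)
    return (
        response.headers.get("Content-Type", ""),
        int(response.headers.get("Content-Length", "0")),
    )

def test_fetch_remote_file_info() -> None:
    url = "http://example.com/file.txt"
    stub = httpx.Response(
        200,
        request=httpx.Request("HEAD", url),
        headers={"Content-Type": "text/plain", "Content-Length": "10"},
    )
    with patch("httpx.head", return_value=stub):
        assert fetch_remote_file_info(url) == ("text/plain", 10)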

View File

@@ -1,62 +0,0 @@
"""
Unit tests for Service API knowledge pipeline file-upload serialization.
"""
import importlib.util
from datetime import UTC, datetime
from pathlib import Path
class FakeUploadFile:
id: str
name: str
size: int
extension: str
mime_type: str
created_by: str
created_at: datetime | None
def _load_serialize_upload_file():
api_dir = Path(__file__).resolve().parents[5]
serializers_path = api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "serializers.py"
spec = importlib.util.spec_from_file_location("rag_pipeline_serializers", serializers_path)
assert spec
assert spec.loader
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module) # type: ignore[attr-defined]
return module.serialize_upload_file
def test_file_upload_created_at_is_isoformat_string():
serialize_upload_file = _load_serialize_upload_file()
created_at = datetime(2026, 2, 8, 12, 0, 0, tzinfo=UTC)
upload_file = FakeUploadFile()
upload_file.id = "file-1"
upload_file.name = "test.pdf"
upload_file.size = 123
upload_file.extension = "pdf"
upload_file.mime_type = "application/pdf"
upload_file.created_by = "account-1"
upload_file.created_at = created_at
result = serialize_upload_file(upload_file)
assert result["created_at"] == created_at.isoformat()
def test_file_upload_created_at_none_serializes_to_null():
serialize_upload_file = _load_serialize_upload_file()
upload_file = FakeUploadFile()
upload_file.id = "file-1"
upload_file.name = "test.pdf"
upload_file.size = 123
upload_file.extension = "pdf"
upload_file.mime_type = "application/pdf"
upload_file.created_by = "account-1"
upload_file.created_at = None
result = serialize_upload_file(upload_file)
assert result["created_at"] is None

View File

@@ -1,54 +0,0 @@
"""
Unit tests for Service API knowledge pipeline route registration.
"""
import ast
from pathlib import Path
def test_rag_pipeline_routes_registered():
api_dir = Path(__file__).resolve().parents[5]
service_api_init = api_dir / "controllers" / "service_api" / "__init__.py"
rag_pipeline_workflow = (
api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "rag_pipeline_workflow.py"
)
assert service_api_init.exists()
assert rag_pipeline_workflow.exists()
init_tree = ast.parse(service_api_init.read_text(encoding="utf-8"))
import_found = False
for node in ast.walk(init_tree):
if not isinstance(node, ast.ImportFrom):
continue
if node.module != "dataset.rag_pipeline" or node.level != 1:
continue
if any(alias.name == "rag_pipeline_workflow" for alias in node.names):
import_found = True
break
assert import_found, "from .dataset.rag_pipeline import rag_pipeline_workflow not found in service_api/__init__.py"
workflow_tree = ast.parse(rag_pipeline_workflow.read_text(encoding="utf-8"))
route_paths: set[str] = set()
for node in ast.walk(workflow_tree):
if not isinstance(node, ast.ClassDef):
continue
for decorator in node.decorator_list:
if not isinstance(decorator, ast.Call):
continue
if not isinstance(decorator.func, ast.Attribute):
continue
if decorator.func.attr != "route":
continue
if not decorator.args:
continue
first_arg = decorator.args[0]
if isinstance(first_arg, ast.Constant) and isinstance(first_arg.value, str):
route_paths.add(first_arg.value)
assert "/datasets/<uuid:dataset_id>/pipeline/datasource-plugins" in route_paths
assert "/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run" in route_paths
assert "/datasets/<uuid:dataset_id>/pipeline/run" in route_paths
assert "/datasets/pipeline/file-upload" in route_paths

View File

@@ -25,19 +25,15 @@ class TestMessageCycleManagerOptimization:
task_state = Mock()
return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)
def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.
This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
allowing the frontend to properly display generated image files with url field.
"""
def test_get_message_event_type_with_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has files."""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
mock_message_file.belongs_to = "assistant"
# Current implementation uses session.scalar(select(...))
mock_session.scalar.return_value = mock_message_file
# Execute
@@ -48,31 +44,6 @@ class TestMessageCycleManagerOptimization:
assert result == StreamEvent.MESSAGE_FILE
mock_session.scalar.assert_called_once()
def test_get_message_event_type_with_user_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message only has user-uploaded files.
This is a regression test for the issue where user-uploaded images (belongs_to='user')
caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
so when only user files exist, the database query returns None, resulting in MESSAGE event type.
"""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
# When querying for assistant files with only user files present, return None
# (simulates database query with belongs_to='assistant' filter returning no results)
mock_session.scalar.return_value = None
# Execute
with current_app.app_context():
result = message_cycle_manager.get_message_event_type("test-message-id")
# Assert
assert result == StreamEvent.MESSAGE
mock_session.scalar.assert_called_once()
def test_get_message_event_type_without_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message has no files."""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
@@ -98,7 +69,7 @@ class TestMessageCycleManagerOptimization:
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
mock_message_file.belongs_to = "assistant"
# Current implementation uses session.scalar(select(...))
mock_session.scalar.return_value = mock_message_file
# Execute: compute event type once, then pass to message_to_stream_response

View File

@@ -496,9 +496,6 @@ class TestSchemaResolverClass:
avg_time_no_cache = sum(results1) / len(results1)
# Second run (with cache) - run multiple times
# Warm up cache first
resolve_dify_schema_refs(schema)
results2 = []
for _ in range(3):
start = time.perf_counter()

View File

@@ -1,211 +0,0 @@
from __future__ import annotations
from collections.abc import Generator
from dataclasses import dataclass
from typing import Any, cast
from core.app.entities.app_invoke_entities import InvokeFrom
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage, ToolProviderType
class DummyCastType:
def cast_value(self, value: Any) -> str:
return f"cast:{value}"
@dataclass
class DummyParameter:
name: str
type: DummyCastType
form: str = "llm"
required: bool = False
default: Any = None
options: list[Any] | None = None
llm_description: str | None = None
class DummyTool(Tool):
def __init__(self, entity: ToolEntity, runtime: ToolRuntime):
super().__init__(entity=entity, runtime=runtime)
self.result: ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None] = (
self.create_text_message("default")
)
self.runtime_parameter_overrides: list[Any] | None = None
self.last_invocation: dict[str, Any] | None = None
def tool_provider_type(self) -> ToolProviderType:
return ToolProviderType.BUILT_IN
def _invoke(
self,
user_id: str,
tool_parameters: dict[str, Any],
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
) -> ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None]:
self.last_invocation = {
"user_id": user_id,
"tool_parameters": tool_parameters,
"conversation_id": conversation_id,
"app_id": app_id,
"message_id": message_id,
}
return self.result
def get_runtime_parameters(
self,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
):
if self.runtime_parameter_overrides is not None:
return self.runtime_parameter_overrides
return super().get_runtime_parameters(
conversation_id=conversation_id,
app_id=app_id,
message_id=message_id,
)
def _build_tool(runtime: ToolRuntime | None = None) -> DummyTool:
entity = ToolEntity(
identity=ToolIdentity(author="test", name="dummy", label=I18nObject(en_US="dummy"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = runtime or ToolRuntime(tenant_id="tenant-1", invoke_from=InvokeFrom.DEBUGGER, runtime_parameters={})
return DummyTool(entity=entity, runtime=runtime)
def test_invoke_supports_single_message_and_parameter_casting():
runtime = ToolRuntime(
tenant_id="tenant-1",
invoke_from=InvokeFrom.DEBUGGER,
runtime_parameters={"from_runtime": "runtime-value"},
)
tool = _build_tool(runtime)
tool.entity.parameters = cast(
Any,
[
DummyParameter(name="unused", type=DummyCastType()),
DummyParameter(name="age", type=DummyCastType()),
],
)
tool.result = tool.create_text_message("ok")
messages = list(
tool.invoke(
user_id="user-1",
tool_parameters={"age": "18", "raw": "keep"},
conversation_id="conv-1",
app_id="app-1",
message_id="msg-1",
)
)
assert len(messages) == 1
assert messages[0].message.text == "ok"
assert tool.last_invocation == {
"user_id": "user-1",
"tool_parameters": {"age": "cast:18", "raw": "keep", "from_runtime": "runtime-value"},
"conversation_id": "conv-1",
"app_id": "app-1",
"message_id": "msg-1",
}
def test_invoke_supports_list_and_generator_results():
tool = _build_tool()
tool.result = [tool.create_text_message("a"), tool.create_text_message("b")]
list_messages = list(tool.invoke(user_id="user-1", tool_parameters={}))
assert [msg.message.text for msg in list_messages] == ["a", "b"]
def _message_generator() -> Generator[ToolInvokeMessage, None, None]:
yield tool.create_text_message("g1")
yield tool.create_text_message("g2")
tool.result = _message_generator()
generated_messages = list(tool.invoke(user_id="user-2", tool_parameters={}))
assert [msg.message.text for msg in generated_messages] == ["g1", "g2"]
def test_fork_tool_runtime_returns_new_tool_with_copied_entity():
tool = _build_tool()
new_runtime = ToolRuntime(tenant_id="tenant-2", invoke_from=InvokeFrom.EXPLORE, runtime_parameters={})
forked = tool.fork_tool_runtime(new_runtime)
assert isinstance(forked, DummyTool)
assert forked is not tool
assert forked.runtime == new_runtime
assert forked.entity == tool.entity
assert forked.entity is not tool.entity
def test_get_runtime_parameters_and_merge_runtime_parameters():
tool = _build_tool()
original = DummyParameter(name="temperature", type=DummyCastType(), form="schema", required=True, default="0.7")
tool.entity.parameters = cast(Any, [original])
default_runtime_parameters = tool.get_runtime_parameters()
assert default_runtime_parameters == [original]
override = DummyParameter(name="temperature", type=DummyCastType(), form="llm", required=False, default="0.5")
appended = DummyParameter(name="new_param", type=DummyCastType(), form="form", required=False, default="x")
tool.runtime_parameter_overrides = [override, appended]
merged = tool.get_merged_runtime_parameters()
assert len(merged) == 2
assert merged[0].name == "temperature"
assert merged[0].form == "llm"
assert merged[0].required is False
assert merged[0].default == "0.5"
assert merged[1].name == "new_param"
def test_message_factory_helpers():
tool = _build_tool()
image_message = tool.create_image_message("https://example.com/image.png")
assert image_message.type == ToolInvokeMessage.MessageType.IMAGE
assert image_message.message.text == "https://example.com/image.png"
file_obj = object()
file_message = tool.create_file_message(file_obj) # type: ignore[arg-type]
assert file_message.type == ToolInvokeMessage.MessageType.FILE
assert file_message.message.file_marker == "file_marker"
assert file_message.meta == {"file": file_obj}
link_message = tool.create_link_message("https://example.com")
assert link_message.type == ToolInvokeMessage.MessageType.LINK
assert link_message.message.text == "https://example.com"
text_message = tool.create_text_message("hello")
assert text_message.type == ToolInvokeMessage.MessageType.TEXT
assert text_message.message.text == "hello"
blob_message = tool.create_blob_message(b"blob", meta={"source": "unit-test"})
assert blob_message.type == ToolInvokeMessage.MessageType.BLOB
assert blob_message.message.blob == b"blob"
assert blob_message.meta == {"source": "unit-test"}
json_message = tool.create_json_message({"k": "v"}, suppress_output=True)
assert json_message.type == ToolInvokeMessage.MessageType.JSON
assert json_message.message.json_object == {"k": "v"}
assert json_message.message.suppress_output is True
variable_message = tool.create_variable_message("answer", 42, stream=False)
assert variable_message.type == ToolInvokeMessage.MessageType.VARIABLE
assert variable_message.message.variable_name == "answer"
assert variable_message.message.variable_value == 42
assert variable_message.message.stream is False
def test_base_abstract_invoke_placeholder_returns_none():
tool = _build_tool()
assert Tool._invoke(tool, user_id="u", tool_parameters={}) is None

View File

@@ -255,32 +255,6 @@ def test_create_variable_message():
assert message.message.stream is False
def test_create_file_message_should_include_file_marker():
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="test_tool", invoke_from=InvokeFrom.EXPLORE)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
file_obj = object()
message = tool.create_file_message(file_obj) # type: ignore[arg-type]
assert message.type == ToolInvokeMessage.MessageType.FILE
assert message.message.file_marker == "file_marker"
assert message.meta == {"file": file_obj}
def test_resolve_user_from_database_falls_back_to_end_user(monkeypatch: pytest.MonkeyPatch):
"""Ensure worker context can resolve EndUser when Account is missing."""

View File

@@ -4,7 +4,7 @@ from typing import Any
from uuid import uuid4
import pytest
from hypothesis import HealthCheck, given, settings
from hypothesis import given, settings
from hypothesis import strategies as st
from core.file import File, FileTransferMethod, FileType
@@ -493,7 +493,7 @@ def _scalar_value() -> st.SearchStrategy[int | float | str | File | None]:
)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@settings(max_examples=50)
@given(_scalar_value())
def test_build_segment_and_extract_values_for_scalar_types(value):
seg = variable_factory.build_segment(value)
@@ -504,7 +504,7 @@ def test_build_segment_and_extract_values_for_scalar_types(value):
assert seg.value == value
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@settings(max_examples=50)
@given(values=st.lists(_scalar_value(), max_size=20))
def test_build_segment_and_extract_values_for_array_types(values):
seg = variable_factory.build_segment(values)
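The settings swap above trades Hypothesis's leniency knobs for the defaults. For reference, the suppressed-health-check form looks like this (a standalone sketch, not the project's test):

from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st

# Suppressing too_slow/filter_too_much and disabling the deadline trades
# strictness for stability on slow runners; max_examples caps the search.
@settings(
    max_examples=30,
    suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much],
    deadline=None,
)
@given(st.integers())
def test_int_roundtrip(value: int) -> None:
    assert int(str(value)) == value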

View File

@@ -198,15 +198,6 @@ class SubscriptionTestCase:
description: str = ""
class FakeRedisClient:
"""Minimal fake Redis client for unit tests."""
def __init__(self) -> None:
self.publish = MagicMock()
self.spublish = MagicMock()
self.pubsub = MagicMock(return_value=MagicMock())
class TestRedisSubscription:
"""Test cases for the _RedisSubscription class."""
@@ -628,13 +619,10 @@ class TestRedisSubscription:
class TestRedisShardedSubscription:
"""Test cases for the _RedisShardedSubscription class."""
@pytest.fixture(autouse=True)
def patch_sharded_redis_type(self, monkeypatch):
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
@pytest.fixture
def mock_redis_client(self) -> FakeRedisClient:
return FakeRedisClient()
def mock_redis_client(self) -> MagicMock:
client = MagicMock()
return client
@pytest.fixture
def mock_pubsub(self) -> MagicMock:
@@ -648,7 +636,7 @@ class TestRedisShardedSubscription:
@pytest.fixture
def sharded_subscription(
self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
self, mock_pubsub: MagicMock, mock_redis_client: MagicMock
) -> Generator[_RedisShardedSubscription, None, None]:
"""Create a _RedisShardedSubscription instance for testing."""
subscription = _RedisShardedSubscription(
@@ -669,7 +657,7 @@ class TestRedisShardedSubscription:
# ==================== Lifecycle Tests ====================
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
"""Test that sharded subscription is properly initialized."""
subscription = _RedisShardedSubscription(
client=mock_redis_client,
@@ -982,7 +970,7 @@ class TestRedisShardedSubscription:
],
)
def test_sharded_subscription_scenarios(
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: MagicMock
):
"""Test various sharded subscription scenarios using table-driven approach."""
subscription = _RedisShardedSubscription(
@@ -1070,7 +1058,7 @@ class TestRedisShardedSubscription:
# Close should still work
sharded_subscription.close() # Should not raise
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
"""Test various sharded channel name formats."""
channel_names = [
"simple",
@@ -1132,13 +1120,10 @@ class TestRedisSubscriptionCommon:
"""Parameterized fixture providing subscription type and class."""
return request.param
@pytest.fixture(autouse=True)
def patch_sharded_redis_type(self, monkeypatch):
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
@pytest.fixture
def mock_redis_client(self) -> FakeRedisClient:
return FakeRedisClient()
def mock_redis_client(self) -> MagicMock:
client = MagicMock()
return client
@pytest.fixture
def mock_pubsub(self) -> MagicMock:
@@ -1155,7 +1140,7 @@ class TestRedisSubscriptionCommon:
return pubsub
@pytest.fixture
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
"""Create a subscription instance based on parameterized type."""
subscription_type, subscription_class = subscription_params
topic_name = f"test-{subscription_type}-topic"

View File

@@ -698,132 +698,6 @@ class TestTenantService:
self._assert_database_operations_called(mock_db_dependencies["db"])
# ==================== Member Removal Tests ====================
def test_remove_pending_member_deletes_orphaned_account(self):
"""Test that removing a pending member with no other workspaces deletes the account."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="pending-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 0
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
# Assert: enterprise sync still receives the correct member ID
mock_sync.assert_called_once_with(
workspace_id="tenant-456",
member_id="pending-user-789",
source="workspace_member_removed",
)
# Assert: both join record and account should be deleted
mock_db.session.delete.assert_any_call(mock_ta)
mock_db.session.delete.assert_any_call(mock_pending_member)
assert mock_db.session.delete.call_count == 2
def test_remove_pending_member_keeps_account_with_other_workspaces(self):
"""Test that removing a pending member who belongs to other workspaces preserves the account."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="pending-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
# Remaining join count = 1 (still in another workspace)
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 1
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
# Assert: only the join record should be deleted, not the account
mock_db.session.delete.assert_called_once_with(mock_ta)
def test_remove_active_member_preserves_account(self):
"""Test that removing an active member never deletes the account, even with no other workspaces."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_active_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="active-user-789", email="active@example.com", status=AccountStatus.ACTIVE
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="active-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_active_member, mock_operator)
# Assert: only the join record should be deleted
mock_db.session.delete.assert_called_once_with(mock_ta)
# ==================== Tenant Switching Tests ====================
def test_switch_tenant_success(self):
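Before moving on: the rule the three removed tests encoded is compact. Only a pending member with no remaining workspace memberships has their account deleted outright; active members, or pending members still joined elsewhere, keep theirs. A distilled sketch of that decision (hypothetical names, not the service's API):

from dataclasses import dataclass

@dataclass
class Member:
    id: str
    status: str  # "pending" or "active"

def should_delete_account(member: Member, remaining_joins: int) -> bool:
    # Delete the account only for a pending member with no other workspaces.
    return member.status == "pending" and remaining_joins == 0

assert should_delete_account(Member("pending-user-789", "pending"), 0)
assert not should_delete_account(Member("pending-user-789", "pending"), 1)
assert not should_delete_account(Member("active-user-789", "active"), 0)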

View File

@@ -63,56 +63,3 @@ def test_workflow_blocking_injects_pause_state_config(mocker, monkeypatch):
pause_state_config = call_kwargs.get("pause_state_config")
assert pause_state_config is not None
assert pause_state_config.state_owner_user_id == "owner-id"
def test_advanced_chat_blocking_returns_dict_and_does_not_use_event_retrieval(mocker, monkeypatch):
"""
Regression test: ADVANCED_CHAT in blocking mode should return a plain dict
(non-streaming), and must not go through the async retrieve_events path.
Keeps behavior consistent with the WORKFLOW blocking branch.
"""
# Disable billing and stub RateLimit to a no-op that just passes values through
monkeypatch.setattr(app_generate_service_module.dify_config, "BILLING_ENABLED", False)
mocker.patch("services.app_generate_service.RateLimit", _DummyRateLimit)
# Arrange a fake workflow and wire AppGenerateService._get_workflow to return it
workflow = MagicMock()
workflow.id = "workflow-id"
mocker.patch.object(AppGenerateService, "_get_workflow", return_value=workflow)
# Spy on the streaming retrieval path to ensure it's NOT called
retrieve_spy = mocker.patch("services.app_generate_service.AdvancedChatAppGenerator.retrieve_events")
# Make AdvancedChatAppGenerator.generate return a plain dict when streaming=False
generate_spy = mocker.patch(
"services.app_generate_service.AdvancedChatAppGenerator.generate",
return_value={"result": "ok"},
)
# Minimal app model for ADVANCED_CHAT
app_model = MagicMock()
app_model.mode = AppMode.ADVANCED_CHAT
app_model.id = "app-id"
app_model.tenant_id = "tenant-id"
app_model.max_active_requests = 0
app_model.is_agent = False
user = MagicMock()
user.id = "user-id"
# Must include query and inputs for AdvancedChatAppGenerator
args = {"workflow_id": "wf-1", "query": "hello", "inputs": {}}
# Act: call service with streaming=False (blocking mode)
result = AppGenerateService.generate(
app_model=app_model,
user=user,
args=args,
invoke_from=MagicMock(),
streaming=False,
)
# Assert: returns the dict from generate(), and did not call retrieve_events()
assert result == {"result": "ok"}
assert generate_spy.call_args.kwargs.get("streaming") is False
retrieve_spy.assert_not_called()

View File

@@ -17,6 +17,7 @@ from core.workflow.nodes.human_input.entities import (
from core.workflow.nodes.human_input.enums import FormInputType, HumanInputFormKind, HumanInputFormStatus
from models.human_input import RecipientType
from services.human_input_service import Form, FormExpiredError, HumanInputService, InvalidFormDataError
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE
@pytest.fixture
@@ -87,6 +88,7 @@ def test_enqueue_resume_dispatches_task_for_workflow(mocker, mock_session_factor
resume_task.apply_async.assert_called_once()
call_kwargs = resume_task.apply_async.call_args.kwargs
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"
@@ -128,6 +130,7 @@ def test_enqueue_resume_dispatches_task_for_advanced_chat(mocker, mock_session_f
resume_task.apply_async.assert_called_once()
call_kwargs = resume_task.apply_async.call_args.kwargs
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"

View File

@@ -109,87 +109,40 @@ def mock_document_segments(document_id):
@pytest.fixture
def mock_db_session():
"""Mock database session via session_factory.create_session().
After the session split refactor, the code calls create_session() multiple times.
This fixture creates shared query mocks so all sessions use the same
query configuration, simulating database persistence across sessions.
The fixture automatically converts side_effect lists into an itertools.cycle to prevent StopIteration.
Tests configure mocks the same way as before, but behind the scenes the values
are cycled infinitely for all sessions.
"""
from itertools import cycle
"""Mock database session via session_factory.create_session()."""
with patch("tasks.document_indexing_sync_task.session_factory") as mock_sf:
sessions = []
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
# Shared query mocks - all sessions use these
shared_query = MagicMock()
shared_filter_by = MagicMock()
shared_scalars_result = MagicMock()
# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
# Create custom first mock that auto-cycles side_effect
class CyclicMock(MagicMock):
def __setattr__(self, name, value):
if name == "side_effect" and value is not None:
# Convert list/tuple to infinite cycle
if isinstance(value, (list, tuple)):
value = cycle(value)
super().__setattr__(name, value)
def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
shared_query.where.return_value.first = CyclicMock()
shared_filter_by.first = CyclicMock()
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
def _create_session():
"""Create a new mock session for each create_session() call."""
session = MagicMock()
session.close = MagicMock()
session.commit = MagicMock()
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
# Mock session.begin() context manager
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _exit_side_effect(*args, **kwargs):
session.close()
def _begin_exit_side_effect(exc_type, exc, tb):
# commit on success
if exc_type is None:
session.commit()
# return False to propagate exceptions
return False
cm.__exit__.side_effect = _exit_side_effect
mock_sf.create_session.return_value = cm
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
def _exit_side_effect(exc_type, exc, tb):
session.close()
return False
cm.__exit__.side_effect = _exit_side_effect
# All sessions use the same shared query mocks
session.query.return_value = shared_query
shared_query.where.return_value = shared_query
shared_query.filter_by.return_value = shared_filter_by
session.scalars.return_value = shared_scalars_result
sessions.append(session)
# Attach helpers on the first created session for assertions across all sessions
if len(sessions) == 1:
session.get_all_sessions = lambda: sessions
session.any_close_called = lambda: any(s.close.called for s in sessions)
session.any_commit_called = lambda: any(s.commit.called for s in sessions)
return cm
mock_sf.create_session.side_effect = _create_session
# Create first session and return it
_create_session()
yield sessions[0]
query = MagicMock()
session.query.return_value = query
query.where.return_value = query
session.scalars.return_value = MagicMock()
yield session
@pytest.fixture
@@ -248,8 +201,8 @@ class TestDocumentIndexingSyncTask:
# Act
document_indexing_sync_task(dataset_id, document_id)
# Assert - at least one session should have been closed
assert mock_db_session.any_close_called()
# Assert
mock_db_session.close.assert_called_once()
def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id):
"""Test that task raises error when notion_workspace_id is missing."""
@@ -292,7 +245,6 @@ class TestDocumentIndexingSyncTask:
"""Test that task handles missing credentials by updating document status."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_datasource_provider_service.get_datasource_credentials.return_value = None
# Act
@@ -302,8 +254,8 @@ class TestDocumentIndexingSyncTask:
assert mock_document.indexing_status == "error"
assert "Datasource credential not found" in mock_document.error
assert mock_document.stopped_at is not None
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
mock_db_session.commit.assert_called()
mock_db_session.close.assert_called()
def test_page_not_updated(
self,
@@ -317,7 +269,6 @@ class TestDocumentIndexingSyncTask:
"""Test that task does nothing when page has not been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Return same time as stored in document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
@@ -327,8 +278,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document status should remain unchanged
assert mock_document.indexing_status == "completed"
# At least one session should have been closed via context manager teardown
assert mock_db_session.any_close_called()
# Session should still be closed via context manager teardown
assert mock_db_session.close.called
def test_successful_sync_when_page_updated(
self,
@@ -345,20 +296,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test successful sync flow when Notion page has been updated."""
# Arrange
# Set exact sequence of returns across calls to `.first()`:
# 1) document (initial fetch)
# 2) dataset (pre-check)
# 3) dataset (cleaning phase)
# 4) document (pre-indexing update)
# 5) document (indexing runner fetch)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
# NotionExtractor returns updated time
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
@@ -376,40 +314,28 @@ class TestDocumentIndexingSyncTask:
mock_processor.clean.assert_called_once()
# Verify segments were deleted from database in batch (DELETE FROM document_segments)
# Aggregate execute calls across all created sessions
execute_sqls = []
for s in mock_db_session.get_all_sessions():
execute_sqls.extend([" ".join(str(c[0][0]).split()) for c in s.execute.call_args_list])
execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list]
assert any("DELETE FROM document_segments" in sql for sql in execute_sqls)
# Verify indexing runner was called
mock_indexing_runner.run.assert_called_once_with([mock_document])
# Verify session operations (across any created session)
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
# Verify session operations
assert mock_db_session.commit.called
mock_db_session.close.assert_called_once()
def test_dataset_not_found_during_cleaning(
self,
mock_db_session,
mock_datasource_provider_service,
mock_notion_extractor,
mock_indexing_runner,
mock_document,
dataset_id,
document_id,
):
"""Test that task handles dataset not found during cleaning phase."""
# Arrange
# Sequence: document (initial), dataset (pre-check), None (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
None,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None]
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -418,8 +344,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document should still be set to parsing
assert mock_document.indexing_status == "parsing"
# At least one session should be closed after error
assert mock_db_session.any_close_called()
# Session should be closed after error
mock_db_session.close.assert_called_once()
def test_cleaning_error_continues_to_indexing(
self,
@@ -435,14 +361,8 @@ class TestDocumentIndexingSyncTask:
):
"""Test that indexing continues even if cleaning fails."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Make the cleaning step fail but not the segment fetch
processor = mock_index_processor_factory.return_value.init_index_processor.return_value
processor.clean.side_effect = Exception("Cleaning error")
mock_db_session.scalars.return_value.all.return_value = []
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error")
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -451,7 +371,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Indexing should still be attempted despite cleaning error
mock_indexing_runner.run.assert_called_once_with([mock_document])
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_indexing_runner_document_paused_error(
self,
@@ -468,10 +388,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that DocumentIsPausedError is handled gracefully."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")
@@ -481,7 +398,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Session should be closed after handling error
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_indexing_runner_general_error(
self,
@@ -498,10 +415,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that general exceptions during indexing are handled."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = Exception("Indexing error")
@@ -511,7 +425,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Session should be closed after error
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_notion_extractor_initialized_with_correct_params(
self,
@@ -618,14 +532,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that index processor clean is called with correct parameters."""
# Arrange
# Sequence: document (initial), dataset (pre-check), dataset (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"

api/uv.lock (generated, 84 changed lines)
View File

@@ -1237,47 +1237,49 @@ wheels = [
[[package]]
name = "cryptography"
version = "46.0.5"
version = "46.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
{ url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
{ url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
{ url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
{ url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
{ url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
]
[[package]]
@@ -1366,7 +1368,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.13.0"
version = "1.12.1"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },
@@ -1592,7 +1594,7 @@ requires-dist = [
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
{ name = "google-api-core", specifier = "==2.18.0" },
{ name = "google-api-python-client", specifier = "==2.189.0" },
{ name = "google-api-python-client", specifier = "==2.90.0" },
{ name = "google-auth", specifier = "==2.29.0" },
{ name = "google-auth-httplib2", specifier = "==0.2.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.49.0" },
@@ -2304,7 +2306,7 @@ grpc = [
[[package]]
name = "google-api-python-client"
version = "2.189.0"
version = "2.90.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@@ -2313,9 +2315,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
{ url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
]
[[package]]

View File

@@ -106,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then
# Configure queues based on edition
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
else
# Community edition (SELF_HOSTED): dataset and workflow have separate queues
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
fi
echo "No queues specified, using edition-based defaults: ${QUEUES}"

View File

@@ -62,9 +62,6 @@ LANG=C.UTF-8
LC_ALL=C.UTF-8
PYTHONIOENCODING=utf-8
# Set UV cache directory to avoid permission issues with non-existent home directory
UV_CACHE_DIR=/tmp/.uv-cache
# ------------------------------
# Server Configuration
# ------------------------------
@@ -387,8 +384,6 @@ CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_PASSWORD=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
# e.g. {"tasks.add": {"rate_limit": "10/s"}}
CELERY_TASK_ANNOTATIONS=null
# ------------------------------
# CORS Configuration
@@ -1523,7 +1518,6 @@ AMPLITUDE_API_KEY=
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30

View File

@@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.0
image: langgenius/dify-web:1.12.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -16,7 +16,6 @@ x-shared-env: &shared-api-worker-env
LANG: ${LANG:-C.UTF-8}
LC_ALL: ${LC_ALL:-C.UTF-8}
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
@@ -106,7 +105,6 @@ x-shared-env: &shared-api-worker-env
CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
@@ -684,7 +682,6 @@ x-shared-env: &shared-api-worker-env
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-}
PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub}
@@ -715,7 +712,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -757,7 +754,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -796,7 +793,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.0
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -826,7 +823,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.0
image: langgenius/dify-web:1.12.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -10,7 +10,7 @@ importers:
dependencies:
axios:
specifier: ^1.13.2
version: 1.13.5
version: 1.13.2
devDependencies:
'@eslint/js':
specifier: ^9.39.2
@@ -544,8 +544,8 @@ packages:
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
axios@1.13.5:
resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==}
axios@1.13.2:
resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
@@ -1677,7 +1677,7 @@ snapshots:
asynckit@0.4.0: {}
axios@1.13.5:
axios@1.13.2:
dependencies:
follow-redirects: 1.15.11
form-data: 4.0.5

View File

@@ -1,5 +1,5 @@
# base image
FROM node:22-alpine AS base
FROM node:24-alpine AS base
LABEL maintainer="takatost@gmail.com"
# if you located in China, you can use aliyun mirror to speed up
@@ -81,8 +81,6 @@ COPY --chown=dify:dify --chmod=755 docker/entrypoint.sh ./entrypoint.sh
ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
RUN chown -R dify:dify /app/web
USER dify
EXPOSE 3000
ENTRYPOINT ["/bin/sh", "./entrypoint.sh"]

View File

@@ -0,0 +1,261 @@
/**
* MAX_PARALLEL_LIMIT Configuration Bug Test
*
* This test reproduces and verifies the fix for issue #23083:
* MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
*/
import { render, screen } from '@testing-library/react'
import * as React from 'react'
// Mock environment variables before importing constants
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
// Test with different environment values
function setupEnvironment(value?: string) {
if (value)
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
else
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
// Clear module cache to force re-evaluation
vi.resetModules()
}
function restoreEnvironment() {
if (originalEnv)
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
else
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
vi.resetModules()
}
// Mock i18next with proper implementation
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => {
if (key.includes('MaxParallelismTitle'))
return 'Max Parallelism'
if (key.includes('MaxParallelismDesc'))
return 'Maximum number of parallel executions'
if (key.includes('parallelMode'))
return 'Parallel Mode'
if (key.includes('parallelPanelDesc'))
return 'Enable parallel execution'
if (key.includes('errorResponseMethod'))
return 'Error Response Method'
return key
},
}),
initReactI18next: {
type: '3rdParty',
init: vi.fn(),
},
}))
// Mock i18next module completely to prevent initialization issues
vi.mock('i18next', () => ({
use: vi.fn().mockReturnThis(),
init: vi.fn().mockReturnThis(),
t: vi.fn(key => key),
isInitialized: true,
}))
// Mock the useConfig hook
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
default: () => ({
inputs: {
is_parallel: true,
parallel_nums: 5,
error_handle_mode: 'terminated',
},
changeParallel: vi.fn(),
changeParallelNums: vi.fn(),
changeErrorHandleMode: vi.fn(),
}),
}))
// Mock other components
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
default: function MockVarReferencePicker() {
return <div data-testid="var-reference-picker">VarReferencePicker</div>
},
}))
vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
default: function MockSplit() {
return <div data-testid="split">Split</div>
},
}))
vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
return (
<div data-testid="field">
<label>{title}</label>
{children}
</div>
)
},
}))
const getParallelControls = () => ({
numberInput: screen.getByRole('spinbutton'),
slider: screen.getByRole('slider'),
})
describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
const mockNodeData = {
id: 'test-iteration-node',
type: 'iteration' as const,
data: {
title: 'Test Iteration',
desc: 'Test iteration node',
iterator_selector: ['test'],
output_selector: ['output'],
is_parallel: true,
parallel_nums: 5,
error_handle_mode: 'terminated' as const,
},
}
beforeEach(() => {
vi.clearAllMocks()
})
afterEach(() => {
restoreEnvironment()
})
afterAll(() => {
restoreEnvironment()
})
describe('Environment Variable Parsing', () => {
it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
setupEnvironment('25')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(25)
})
it('should fallback to default when environment variable is not set', async () => {
setupEnvironment() // No environment variable
const { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
it('should handle invalid environment variable values', async () => {
setupEnvironment('invalid')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// Should fall back to default when parsing fails
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
it('should handle empty environment variable', async () => {
setupEnvironment('')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// Should fall back to default when empty
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
// Edge cases for boundary values
it('should fall back to the default when env is 0 or negative', async () => {
setupEnvironment('0')
let { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
setupEnvironment('-5')
;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
})
it('should truncate float values via parseInt', async () => {
setupEnvironment('12.7')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// parseInt truncates to integer
expect(MAX_PARALLEL_LIMIT).toBe(12)
})
})
describe('UI Component Integration (Main Fix Verification)', () => {
it('should render iteration panel with environment-configured max value', async () => {
// Set environment variable to a different value
setupEnvironment('30')
// Import Panel after setting environment
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
const { MAX_PARALLEL_LIMIT } = await import('@/config')
render(
<Panel
id="test-node"
// @ts-expect-error key type mismatch
data={mockNodeData.data}
/>,
)
// Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
const { numberInput, slider } = getParallelControls()
expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))
// Verify the actual values
expect(MAX_PARALLEL_LIMIT).toBe(30)
expect(numberInput.getAttribute('max')).toBe('30')
expect(slider.getAttribute('aria-valuemax')).toBe('30')
})
it('should maintain UI consistency with different environment values', async () => {
setupEnvironment('15')
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
const { MAX_PARALLEL_LIMIT } = await import('@/config')
render(
<Panel
id="test-node"
// @ts-expect-error key type mismatch
data={mockNodeData.data}
/>,
)
// Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
const { numberInput, slider } = getParallelControls()
expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
})
})
describe('Legacy Constant Verification (For Transition Period)', () => {
// Marked as transition/deprecation tests
it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
})
it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
setupEnvironment('50')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
// MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
expect(MAX_PARALLEL_LIMIT).toBe(50)
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
})
})
describe('Constants Validation', () => {
it('should validate that required constants exist and have correct types', async () => {
const { MAX_PARALLEL_LIMIT } = await import('@/config')
const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
})
})
})
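The tests above pin down the parsing contract for MAX_PARALLEL_LIMIT: valid integers are used as-is, floats are truncated, and unset, empty, non-numeric, zero, or negative values fall back to 10. A minimal sketch of `@/config` logic consistent with those assertions (the actual implementation may differ):

```ts
// Sketch only: parse NEXT_PUBLIC_MAX_PARALLEL_LIMIT with a safe fallback of 10.
// parseInt('') and parseInt('invalid') yield NaN; NaN, 0, and negatives all fall back.
const parsed = Number.parseInt(process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT ?? '', 10)
export const MAX_PARALLEL_LIMIT = Number.isNaN(parsed) || parsed < 1 ? 10 : parsed
```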

View File

@@ -0,0 +1,3 @@
export const OAUTH_AUTHORIZE_PENDING_KEY = 'oauth_authorize_pending'
export const REDIRECT_URL_KEY = 'oauth_redirect_url'
export const OAUTH_AUTHORIZE_PENDING_TTL = 60 * 3

View File

@@ -7,6 +7,7 @@ import {
RiMailLine,
RiTranslate2,
} from '@remixicon/react'
import dayjs from 'dayjs'
import { useRouter, useSearchParams } from 'next/navigation'
import * as React from 'react'
import { useEffect, useRef } from 'react'
@@ -16,10 +17,22 @@ import Button from '@/app/components/base/button'
import Loading from '@/app/components/base/loading'
import Toast from '@/app/components/base/toast'
import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { setPostLoginRedirect } from '@/app/signin/utils/post-login-redirect'
import { useAppContext } from '@/context/app-context'
import { useIsLogin } from '@/service/use-common'
import { useAuthorizeOAuthApp, useOAuthAppInfo } from '@/service/use-oauth'
import {
OAUTH_AUTHORIZE_PENDING_KEY,
OAUTH_AUTHORIZE_PENDING_TTL,
REDIRECT_URL_KEY,
} from './constants'
function setItemWithExpiry(key: string, value: string, ttl: number) {
const item = {
value,
expiry: dayjs().add(ttl, 'seconds').unix(),
}
localStorage.setItem(key, JSON.stringify(item))
}
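setItemWithExpiry stores the pending authorize URL with a three-minute TTL (OAUTH_AUTHORIZE_PENDING_TTL = 60 * 3). The sign-in side is expected to read it back and discard stale entries; a matching reader might look like the following (getItemWithExpiry and its call site are assumptions, not part of this change):

```ts
import dayjs from 'dayjs'

// Sketch only: counterpart to setItemWithExpiry; returns null once the TTL has passed.
function getItemWithExpiry(key: string): string | null {
  const raw = localStorage.getItem(key)
  if (!raw)
    return null
  try {
    const item = JSON.parse(raw) as { value: string, expiry: number }
    if (dayjs().unix() > item.expiry) {
      localStorage.removeItem(key) // expired: clean up and ignore
      return null
    }
    return item.value
  }
  catch {
    return null // malformed entry
  }
}
```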
function buildReturnUrl(pathname: string, search: string) {
try {
@@ -73,8 +86,8 @@ export default function OAuthAuthorize() {
const onLoginSwitchClick = () => {
try {
const returnUrl = buildReturnUrl('/account/oauth/authorize', `?client_id=${encodeURIComponent(client_id)}&redirect_uri=${encodeURIComponent(redirect_uri)}`)
setPostLoginRedirect(returnUrl)
router.push('/signin')
setItemWithExpiry(OAUTH_AUTHORIZE_PENDING_KEY, returnUrl, OAUTH_AUTHORIZE_PENDING_TTL)
router.push(`/signin?${REDIRECT_URL_KEY}=${encodeURIComponent(returnUrl)}`)
}
catch {
router.push('/signin')
@@ -132,7 +145,7 @@ export default function OAuthAuthorize() {
<div className="text-[var(--color-saas-dify-blue-inverted)]">{authAppInfo?.app_label[language] || authAppInfo?.app_label?.en_US || t('unknownApp', { ns: 'oauth' })}</div>
{!isLoggedIn && <div className="text-text-primary">{t('tips.notLoggedIn', { ns: 'oauth' })}</div>}
</div>
<div className="text-text-secondary body-md-regular">{isLoggedIn ? `${authAppInfo?.app_label[language] || authAppInfo?.app_label?.en_US || t('unknownApp', { ns: 'oauth' })} ${t('tips.loggedIn', { ns: 'oauth' })}` : t('tips.needLogin', { ns: 'oauth' })}</div>
<div className="body-md-regular text-text-secondary">{isLoggedIn ? `${authAppInfo?.app_label[language] || authAppInfo?.app_label?.en_US || t('unknownApp', { ns: 'oauth' })} ${t('tips.loggedIn', { ns: 'oauth' })}` : t('tips.needLogin', { ns: 'oauth' })}</div>
</div>
{isLoggedIn && userProfile && (
@@ -141,7 +154,7 @@ export default function OAuthAuthorize() {
<Avatar avatar={userProfile.avatar_url} name={userProfile.name} size={36} />
<div>
<div className="system-md-semi-bold text-text-secondary">{userProfile.name}</div>
<div className="text-text-tertiary system-xs-regular">{userProfile.email}</div>
<div className="system-xs-regular text-text-tertiary">{userProfile.email}</div>
</div>
</div>
<Button variant="tertiary" size="small" onClick={onLoginSwitchClick}>{t('switchAccount', { ns: 'oauth' })}</Button>
@@ -153,7 +166,7 @@ export default function OAuthAuthorize() {
{authAppInfo!.scope.split(/\s+/).filter(Boolean).map((scope: string) => {
const Icon = SCOPE_INFO_MAP[scope]
return (
<div key={scope} className="flex items-center gap-2 text-text-secondary body-sm-medium">
<div key={scope} className="body-sm-medium flex items-center gap-2 text-text-secondary">
{Icon ? <Icon.icon className="h-4 w-4" /> : <RiAccountCircleLine className="h-4 w-4" />}
{Icon?.label ?? scope}
</div>
@@ -186,7 +199,7 @@ export default function OAuthAuthorize() {
</defs>
</svg>
</div>
<div className="mt-3 text-text-tertiary system-xs-regular">{t('tips.common', { ns: 'oauth' })}</div>
<div className="system-xs-regular mt-3 text-text-tertiary">{t('tips.common', { ns: 'oauth' })}</div>
</div>
)
}

View File

@@ -84,7 +84,7 @@ export const AppInitializer = ({
return
}
const redirectUrl = resolvePostLoginRedirect()
const redirectUrl = resolvePostLoginRedirect(searchParams)
if (redirectUrl) {
location.replace(redirectUrl)
return

View File

@@ -3,8 +3,7 @@ import type { MockedFunction } from 'vitest'
import type { IndexingType } from '@/app/components/datasets/create/step-two'
import type { DataSet } from '@/models/datasets'
import type { RetrievalConfig } from '@/types/app'
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import { ChunkingMode, DatasetPermission, DataSourceType } from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
@@ -137,6 +136,12 @@ const renderItem = (config: DataSet, props?: Partial<React.ComponentProps<typeof
return { onSave, onRemove }
}
const getActionButtons = (card: HTMLElement) => {
const actionButtons = Array.from(card.querySelectorAll<HTMLButtonElement>('button.action-btn'))
expect(actionButtons).toHaveLength(2)
return actionButtons
}
describe('dataset-config/card-item', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -155,7 +160,7 @@ describe('dataset-config/card-item', () => {
renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const actionButtons = within(card).getAllByRole('button', { hidden: true })
const actionButtons = getActionButtons(card)
expect(screen.getByText(dataset.name)).toBeInTheDocument()
expect(screen.getByText('dataset.indexingTechnique.high_quality · dataset.indexingMethod.semantic_search')).toBeInTheDocument()
@@ -164,20 +169,19 @@ describe('dataset-config/card-item', () => {
})
it('should open settings drawer from edit action and close after saving', async () => {
const user = userEvent.setup()
const dataset = createDataset()
const { onSave } = renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const [editButton] = within(card).getAllByRole('button', { hidden: true })
await user.click(editButton)
const [editButton] = getActionButtons(card)
fireEvent.click(editButton)
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
await waitFor(() => {
expect(screen.getByRole('dialog')).toBeVisible()
})
await user.click(screen.getByText('Save changes'))
fireEvent.click(screen.getByRole('button', { name: 'Save changes' }))
await waitFor(() => {
expect(onSave).toHaveBeenCalledWith(expect.objectContaining({ name: 'Updated dataset' }))
@@ -188,13 +192,11 @@ describe('dataset-config/card-item', () => {
})
it('should call onRemove and toggle destructive state on hover', async () => {
const user = userEvent.setup()
const dataset = createDataset()
const { onRemove } = renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const buttons = within(card).getAllByRole('button', { hidden: true })
const deleteButton = buttons[buttons.length - 1]
const [, deleteButton] = getActionButtons(card)
expect(deleteButton.className).not.toContain('action-btn-destructive')
@@ -205,7 +207,7 @@ describe('dataset-config/card-item', () => {
fireEvent.mouseLeave(deleteButton)
expect(deleteButton.className).not.toContain('action-btn-destructive')
await user.click(deleteButton)
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledWith(dataset.id)
})
@@ -223,14 +225,13 @@ describe('dataset-config/card-item', () => {
it('should apply mask overlay on mobile when drawer is open', async () => {
mockedUseBreakpoints.mockReturnValue(MediaType.mobile)
const user = userEvent.setup()
const dataset = createDataset()
renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const [editButton] = within(card).getAllByRole('button', { hidden: true })
await user.click(editButton)
const [editButton] = getActionButtons(card)
fireEvent.click(editButton)
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
const overlay = Array.from(document.querySelectorAll('[class]'))

View File

@@ -3,6 +3,7 @@ import type { CSSProperties, ReactNode } from 'react'
import { cva } from 'class-variance-authority'
import * as React from 'react'
import { cn } from '@/utils/classnames'
import './index.css'
enum BadgeState {
Warning = 'warning',

View File

@@ -8,7 +8,6 @@ import { UserActionButtonType } from '@/app/components/workflow/nodes/human-inpu
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
import 'dayjs/locale/nl'
dayjs.extend(utc)
dayjs.extend(relativeTime)
@@ -46,7 +45,6 @@ const localeMap: Record<string, string> = {
'en-US': 'en',
'zh-Hans': 'zh-cn',
'ja-JP': 'ja',
'nl-NL': 'nl',
}
export const getRelativeTime = (

View File

@@ -98,9 +98,7 @@ const VoiceParamConfig = ({
className="h-full w-full cursor-pointer rounded-lg border-0 bg-components-input-bg-normal py-1.5 pl-3 pr-10 focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6"
>
<span className={cn('block truncate text-left text-text-secondary', !languageItem?.name && 'text-text-tertiary')}>
{languageItem?.name
? t(`voice.language.${replace(languageItem?.value ?? '', '-', '')}`, languageItem?.name, { ns: 'common' as const })
: localLanguagePlaceholder}
{languageItem?.name ? t(`voice.language.${replace(languageItem?.value, '-', '')}`, { ns: 'common' }) : localLanguagePlaceholder}
</span>
<span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
<ChevronDownIcon
@@ -131,7 +129,7 @@ const VoiceParamConfig = ({
<span
className={cn('block', selected && 'font-normal')}
>
{t(`voice.language.${replace((item.value), '-', '')}`, item.name, { ns: 'common' as const })}
{t(`voice.language.${replace((item.value), '-', '')}`, { ns: 'common' })}
</span>
{(selected || item.value === text2speech?.language) && (
<span

View File

@@ -1,5 +1,5 @@
import type { RemixiconComponentType } from '@remixicon/react'
import * as z from 'zod'
import { z } from 'zod'
export const InputTypeEnum = z.enum([
'text-input',

View File

@@ -1,6 +1,6 @@
import type { ZodNumber, ZodSchema, ZodString } from 'zod'
import type { BaseConfiguration } from './types'
import * as z from 'zod'
import { z } from 'zod'
import { BaseFieldType } from './types'
export const generateZodSchema = (fields: BaseConfiguration[]) => {

View File

@@ -1,4 +1,4 @@
import * as z from 'zod'
import { z } from 'zod'
const ContactMethod = z.union([
z.literal('email'),
@@ -22,10 +22,10 @@ export const UserSchema = z.object({
.min(3, 'Surname must be at least 3 characters long')
.regex(/^[A-Z]/, 'Surname must start with a capital letter'),
isAcceptingTerms: z.boolean().refine(val => val, {
error: 'You must accept the terms and conditions',
message: 'You must accept the terms and conditions',
}),
contact: z.object({
email: z.email('Invalid email address'),
email: z.string().email('Invalid email address'),
phone: z.string().optional(),
preferredContactMethod: ContactMethod,
}),
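The changes in this file (together with the `import { z } from 'zod'` switch above) move from zod v4 spellings back to v3 ones: top-level format validators like `z.email()` and the `error` option become `z.string().email()` and `message`. Side by side, assuming those version semantics:

```ts
import { z } from 'zod'

// zod v3 style, as used after this change
const email = z.string().email('Invalid email address')
const accepted = z.boolean().refine(v => v, { message: 'You must accept the terms' })

// zod v4 style, reverted here (shown for contrast):
// const email = z.email('Invalid email address')
// const accepted = z.boolean().refine(v => v, { error: 'You must accept the terms' })
```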

View File

@@ -1,6 +1,6 @@
import type { ZodSchema, ZodString } from 'zod'
import type { InputFieldConfiguration } from './types'
import * as z from 'zod'
import { z } from 'zod'
import { SupportedFileTypes, TransferMethod } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/schema'
import { InputFieldType } from './types'

View File

@@ -204,10 +204,23 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
}
catch {
// Avoid executing arbitrary code; require valid JSON for chart options.
setChartState('error')
processedRef.current = true
return
try {
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)
setChartState('success')
processedRef.current = true
return
}
}
catch {
// If we have a complete JSON structure but it doesn't parse,
// it's likely an error rather than incomplete data
setChartState('error')
processedRef.current = true
return
}
}
}
@@ -236,9 +249,19 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
}
catch {
// Only accept JSON to avoid executing arbitrary code from the message.
setChartState('error')
processedRef.current = true
try {
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)
isValidOption = true
}
}
catch {
// Both parsing methods failed, but content looks complete
setChartState('error')
processedRef.current = true
}
}
if (isValidOption) {
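Note: both hunks in this file appear to reintroduce a `new Function` fallback after `JSON.parse` fails, instead of treating non-JSON chart options as an error. A condensed sketch of the tradeoff — the function name is illustrative, not part of the diff:

const parseChartOption = (raw: string): object | null => {
  try {
    return JSON.parse(raw) // safe: parses data only, never executes code
  }
  catch {
    try {
      // permissive fallback: accepts JS object literals, but executes the
      // provided text — only acceptable when the source is trusted
      // eslint-disable-next-line no-new-func
      const result = new Function(`return ${raw}`)()
      return typeof result === 'object' && result !== null ? result : null
    }
    catch {
      return null // complete-looking but unparsable content is an error
    }
  }
}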

View File

@@ -2,7 +2,6 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { env } from '@/env'
import ParamItem from '.'
type Props = {
@@ -12,7 +11,12 @@ type Props = {
enable: boolean
}
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const maxTopK = (() => {
const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
if (configValue && !isNaN(configValue))
return configValue
return 10
})()
const VALUE_LIMIT = {
default: 2,
step: 1,
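Note: several hunks in this compare swap the typed env accessor (env.NEXT_PUBLIC_*) for reading data-public-* attributes off document.body. A small helper capturing the repeated pattern — the name is illustrative and not part of the diff; like the hunk above, it falls back when the attribute is missing or does not parse to a positive integer:

const readPositiveIntBodyConfig = (attr: string, fallback: number): number => {
  const raw = globalThis.document?.body?.getAttribute(attr) || ''
  const parsed = Number.parseInt(raw, 10)
  return parsed > 0 ? parsed : fallback
}

const maxTopK = readPositiveIntBodyConfig('data-public-top-k-max-value', 10)
// The same pattern appears below for data-public-indexing-max-segmentation-tokens-length.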

View File

@@ -4,6 +4,7 @@ import { cva } from 'class-variance-authority'
import * as React from 'react'
import { Highlight } from '@/app/components/base/icons/src/public/common'
import { cn } from '@/utils/classnames'
import './index.css'
const PremiumBadgeVariants = cva(
'premium-badge',

View File

@@ -1,6 +1,6 @@
import { render, screen } from '@testing-library/react'
import { noop } from 'es-toolkit/function'
import * as z from 'zod'
import { z } from 'zod'
import withValidation from '.'
describe('withValidation HOC', () => {

View File

@@ -1,5 +1,5 @@
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import * as z from 'zod'
import { z } from 'zod'
import withValidation from '.'
// Sample components to wrap with validation
@@ -65,7 +65,7 @@ const ProductCard = ({ name, price, category, inStock }: ProductCardProps) => {
// Create validated versions
const userSchema = z.object({
name: z.string().min(1, 'Name is required'),
email: z.email('Invalid email'),
email: z.string().email('Invalid email'),
age: z.number().min(0).max(150),
})
@@ -371,7 +371,7 @@ export const ConfigurationValidation: Story = {
)
const configSchema = z.object({
apiUrl: z.url('Must be valid URL'),
apiUrl: z.string().url('Must be valid URL'),
timeout: z.number().min(0).max(30000),
retries: z.number().min(0).max(5),
debug: z.boolean(),
@@ -430,7 +430,7 @@ export const UsageDocumentation: Story = {
<div>
<h4 className="mb-2 text-sm font-semibold text-gray-900">Usage Example</h4>
<pre className="overflow-x-auto rounded-lg bg-gray-900 p-4 text-xs text-gray-100">
{`import * as z from 'zod'
{`import { z } from 'zod'
import withValidation from './withValidation'
// Define your component

View File

@@ -5,7 +5,6 @@ import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import { InputNumber } from '@/app/components/base/input-number'
import Tooltip from '@/app/components/base/tooltip'
import { env } from '@/env'
const TextLabel: FC<PropsWithChildren> = (props) => {
return <label className="text-xs font-semibold leading-none text-text-secondary">{props.children}</label>
@@ -47,7 +46,7 @@ export const DelimiterInput: FC<InputProps & { tooltip?: string }> = (props) =>
}
export const MaxLengthInput: FC<InputNumberProps> = (props) => {
const maxValue = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
const maxValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000', 10)
const { t } = useTranslation()
return (

View File

@@ -1,6 +1,5 @@
import type { ParentMode, PreProcessingRule, ProcessRule, Rules, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
import { useCallback, useRef, useState } from 'react'
import { env } from '@/env'
import { ChunkingMode, ProcessMode } from '@/models/datasets'
import escape from './escape'
import unescape from './unescape'
@@ -9,7 +8,10 @@ import unescape from './unescape'
export const DEFAULT_SEGMENT_IDENTIFIER = '\\n\\n'
export const DEFAULT_MAXIMUM_CHUNK_LENGTH = 1024
export const DEFAULT_OVERLAP = 50
export const MAXIMUM_CHUNK_TOKEN_LENGTH = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
export const MAXIMUM_CHUNK_TOKEN_LENGTH = Number.parseInt(
globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000',
10,
)
export type ParentChildConfig = {
chunkForContext: ParentMode

View File

@@ -1,7 +1,7 @@
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import * as z from 'zod'
import { z } from 'zod'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import Toast from '@/app/components/base/toast'
import Actions from './actions'
@@ -53,7 +53,7 @@ const createFailingSchema = () => {
issues: [{ path: ['field1'], message: 'is required' }],
},
}),
} as unknown as z.ZodType
} as unknown as z.ZodSchema
}
// ==========================================

View File

@@ -1,129 +0,0 @@
'use client'
import type { FC } from 'react'
import type { DocType } from '@/models/datasets'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Radio from '@/app/components/base/radio'
import Tooltip from '@/app/components/base/tooltip'
import { useMetadataMap } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
}
const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip popupContent={metadataMap[type].text}>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type DocTypeSelectorProps = {
docType: DocType | ''
documentType?: DocType | ''
tempDocType: DocType | ''
onTempDocTypeChange: (type: DocType | '') => void
onConfirm: () => void
onCancel: () => void
}
const DocTypeSelector: FC<DocTypeSelectorProps> = ({
docType,
documentType,
tempDocType,
onTempDocTypeChange,
onConfirm,
onCancel,
}) => {
const { t } = useTranslation()
const isFirstTime = !docType && !documentType
const currValue = tempDocType ?? documentType
return (
<>
{isFirstTime && (
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
)}
<div className={s.operationWrapper}>
{isFirstTime && (
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map(type => (
<Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton type={type} isChecked={currValue === type} />
</Radio>
))}
</Radio.Group>
{isFirstTime && (
<Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
{t('operation.save', { ns: 'common' })}
</Button>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
</div>
)}
</div>
</>
)
}
type DocumentTypeDisplayProps = {
displayType: DocType | ''
showChangeLink?: boolean
onChangeClick?: () => void
}
export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
displayType,
showChangeLink = false,
onChangeClick,
}) => {
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const effectiveType = displayType || 'book'
return (
<div className={s.documentTypeShow}>
{(displayType || !showChangeLink) && (
<>
<TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
{metadataMap[effectiveType].text}
{showChangeLink && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)
}
export default DocTypeSelector

View File

@@ -1,89 +0,0 @@
'use client'
import type { FC, ReactNode } from 'react'
import type { inputType } from '@/hooks/use-metadata'
import { useTranslation } from 'react-i18next'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Input from '@/app/components/base/input'
import { SimpleSelect } from '@/app/components/base/select'
import { getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
type FieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: string) => void
}
const FieldInfo: FC<FieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
export default FieldInfo

View File

@@ -1,88 +0,0 @@
'use client'
import type { FC } from 'react'
import type { metadataType } from '@/hooks/use-metadata'
import type { FullDocumentDetail } from '@/models/datasets'
import { get } from 'es-toolkit/compat'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import FieldInfo from './field-info'
const map2Options = (map: Record<string, string>) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
function useCategoryMapResolver(mainField: metadataType | '') {
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
return (field: string): Record<string, string> => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {}
}
}
type MetadataFieldListProps = {
mainField: metadataType | ''
canEdit?: boolean
metadata?: Record<string, string>
docDetail?: FullDocumentDetail
onFieldUpdate?: (field: string, value: string) => void
}
const MetadataFieldList: FC<MetadataFieldListProps> = ({
mainField,
canEdit = false,
metadata,
docDetail,
onFieldUpdate,
}) => {
const metadataMap = useMetadataMap()
const getCategoryMap = useCategoryMapResolver(mainField)
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
const sourceData = isFixedField ? docDetail : metadata
const getDisplayValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getCategoryMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map(field => (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getDisplayValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={val => onFieldUpdate?.(field, val)}
selectOptions={map2Options(getCategoryMap(field))}
/>
))}
</div>
)
}
export default MetadataFieldList

View File

@@ -1,137 +0,0 @@
'use client'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe } from '@/utils'
import { useDocumentContext } from '../../context'
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
/**
* Normalize raw doc_type: treat 'others' as empty string.
*/
const normalizeDocType = (rawDocType: string): DocType | '' => {
return rawDocType === 'others' ? '' : rawDocType as DocType | ''
}
type UseMetadataStateOptions = {
docDetail?: FullDocumentDetail
onUpdate?: () => void
}
export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const docType = normalizeDocType(rawDocType)
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
// If no documentType yet, start in editing + showDocTypes mode
const [editStatus, setEditStatus] = useState(!docType)
const [metadataParams, setMetadataParams] = useState<MetadataState>(
docType
? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!docType)
const [tempDocType, setTempDocType] = useState<DocType | ''>('')
const [saveLoading, setSaveLoading] = useState(false)
// Sync local state when the upstream docDetail changes (e.g. after save or navigation).
// These setters are intentionally called together to batch-reset multiple pieces
// of derived editing state that cannot be expressed as pure derived values.
useEffect(() => {
if (docDetail?.doc_type) {
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setEditStatus(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setShowDocTypes(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setTempDocType(docType)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setMetadataParams({
documentType: docType,
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, docType])
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
// Clear metadata when switching to a different doc type
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
})
setEditStatus(true)
setShowDocTypes(false)
}
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
const enableEdit = () => {
setEditStatus(true)
}
const cancelEdit = () => {
setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
setEditStatus(!docType)
if (!docType)
setShowDocTypes(true)
}
const saveMetadata = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || docType || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
const updateMetadataField = (field: string, value: string) => {
setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
}
return {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
}
}

View File

@@ -1,6 +1,7 @@
import type { FullDocumentDetail } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import Metadata, { FieldInfo } from './index'
// Mock document context
@@ -120,6 +121,7 @@ vi.mock('@/hooks/use-metadata', () => ({
}),
}))
// Mock getTextWidthWithCanvas
vi.mock('@/utils', () => ({
asyncRunSafe: async (promise: Promise<unknown>) => {
try {
@@ -133,32 +135,33 @@ vi.mock('@/utils', () => ({
getTextWidthWithCanvas: () => 100,
}))
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
describe('Metadata', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
// Rendering tests
describe('Rendering', () => {
it('should render without crashing', () => {
// Arrange & Act
@@ -188,7 +191,7 @@ describe('Metadata', () => {
// Arrange & Act
render(<Metadata {...defaultProps} loading={true} />)
// Assert - Loading component should be rendered, title should not
// Assert - Loading component should be rendered
expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
})
@@ -201,7 +204,7 @@ describe('Metadata', () => {
})
})
// Edit mode (tests useMetadataState hook integration)
// Edit mode tests
describe('Edit Mode', () => {
it('should enter edit mode when edit button is clicked', () => {
// Arrange
@@ -300,7 +303,7 @@ describe('Metadata', () => {
})
})
// Document type selection (tests DocTypeSelector sub-component integration)
// Document type selection
describe('Document Type Selection', () => {
it('should show doc type selection when no doc_type exists', () => {
// Arrange
@@ -350,13 +353,13 @@ describe('Metadata', () => {
})
})
// Fixed fields (tests MetadataFieldList sub-component integration)
// Origin info and technical parameters
describe('Fixed Fields', () => {
it('should render origin info fields', () => {
// Arrange & Act
render(<Metadata {...defaultProps} />)
// Assert
// Assert - Origin info fields should be displayed
expect(screen.getByText('Data Source Type')).toBeInTheDocument()
})
@@ -379,7 +382,7 @@ describe('Metadata', () => {
// Act
const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)
// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})
@@ -387,7 +390,7 @@ describe('Metadata', () => {
// Arrange & Act
const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)
// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})
@@ -422,6 +425,7 @@ describe('Metadata', () => {
})
})
// FieldInfo component tests
describe('FieldInfo', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -539,149 +543,3 @@ describe('FieldInfo', () => {
})
})
})
// --- useMetadataState hook coverage tests (via component interactions) ---
describe('useMetadataState coverage', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
describe('cancelDocType', () => {
it('should cancel doc type change and return to edit mode', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// Now in doc type selector mode — should show cancel button
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel the doc type change
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should be back to edit mode (cancel + save buttons visible)
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('confirmDocType', () => {
it('should confirm same doc type and return to edit mode keeping metadata', () => {
// Arrange — useEffect syncs tempDocType='book' from docDetail
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// DocTypeSelector shows save/cancel buttons
expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()
// Act — click save to confirm same doc type (tempDocType='book')
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert — should return to edit mode with metadata fields visible
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('cancelEdit when no docType', () => {
it('should show doc type selection when cancel is clicked with doc_type others', () => {
// Arrange — doc with 'others' type normalizes to '' internally.
// The useEffect sees doc_type='others' (truthy) and syncs state,
// so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
const docDetail = createMockDocDetail({ doc_type: 'others' })
render(<Metadata {...defaultProps} docDetail={docDetail} />)
// 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
// The rendered type uses default 'book' fallback for display
expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should show doc type selection since normalized docType was ''
expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
})
})
describe('updateMetadataField', () => {
it('should update metadata field value via input', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — find an input and change its value (Title field)
const inputs = screen.getAllByRole('textbox')
expect(inputs.length).toBeGreaterThan(0)
fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })
// Assert — the input should have the new value
expect(inputs[0]).toHaveValue('Updated Title')
})
})
describe('saveMetadata calls modifyDocMetadata with correct body', () => {
it('should pass doc_type and doc_metadata in save request', async () => {
// Arrange
mockModifyDocMetadata.mockResolvedValueOnce({})
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — save
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert
await waitFor(() => {
expect(mockModifyDocMetadata).toHaveBeenCalledWith(
expect.objectContaining({
datasetId: 'test-dataset-id',
documentId: 'test-document-id',
body: expect.objectContaining({
doc_type: 'book',
}),
}),
)
})
})
})
describe('useEffect sync', () => {
it('should handle doc_metadata being null in effect sync', () => {
// Arrange — first render with null metadata
const { rerender } = render(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_metadata: null })}
/>,
)
// Act — rerender with a different doc_type to trigger useEffect sync
rerender(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
/>,
)
// Assert — should render without crashing, showing Paper type
expect(screen.getByText('Paper')).toBeInTheDocument()
})
})
})

View File

@@ -1,124 +1,422 @@
'use client'
import type { FC } from 'react'
import type { FullDocumentDetail } from '@/models/datasets'
import type { FC, ReactNode } from 'react'
import type { inputType, metadataType } from '@/hooks/use-metadata'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { PencilIcon } from '@heroicons/react/24/outline'
import { get } from 'es-toolkit/compat'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
import Input from '@/app/components/base/input'
import Loading from '@/app/components/base/loading'
import { useMetadataMap } from '@/hooks/use-metadata'
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
import MetadataFieldList from './components/metadata-field-list'
import { useMetadataState } from './hooks/use-metadata-state'
import Radio from '@/app/components/base/radio'
import { SimpleSelect } from '@/app/components/base/select'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import { useDocumentContext } from '../context'
import s from './style.module.css'
export { default as FieldInfo } from './components/field-info'
const map2Options = (map: { [key: string]: string }) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
type MetadataProps = {
type IFieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: any) => void
}
export const FieldInfo: FC<IFieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return (
<div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
)
}
const IconButton: FC<{
type: DocType
isChecked: boolean
}> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip
popupContent={metadataMap[type].text}
>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type IMetadataProps = {
docDetail?: FullDocumentDetail
loading: boolean
onUpdate: () => void
}
const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
const Metadata: FC<IMetadataProps> = ({ docDetail, loading, onUpdate }) => {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const doc_type = rawDocType === 'others' ? '' : rawDocType
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
const [editStatus, setEditStatus] = useState(!doc_type) // default to edit mode when there is no documentType
// initial values depend on the documentType
const [metadataParams, setMetadataParams] = useState<MetadataState>(
doc_type
? {
documentType: doc_type as DocType,
metadata: (doc_metadata || {}) as Record<string, string>,
}
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!doc_type) // whether to show the doc type selector
const [tempDocType, setTempDocType] = useState<DocType | ''>('') // remembers the doc type icon clicked
const [saveLoading, setSaveLoading] = useState(false)
const {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
} = useMetadataState({ docDetail, onUpdate })
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
useEffect(() => {
if (docDetail?.doc_type) {
setEditStatus(false)
setShowDocTypes(false)
setTempDocType(doc_type as DocType | '')
setMetadataParams({
documentType: doc_type as DocType | '',
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, doc_type])
// confirm doc type
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {} as Record<string, string>, // change doc type, clear metadata
})
setEditStatus(true)
setShowDocTypes(false)
}
// cancel doc type
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
// render the doc type selector
const renderSelectDocType = () => {
const { documentType } = metadataParams
if (loading) {
return (
<div className={`${s.main} bg-gray-25`}>
<Loading type="app" />
<>
{!doc_type && !documentType && (
<>
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
</>
)}
<div className={s.operationWrapper}>
{!doc_type && !documentType && (
<>
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
</>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={tempDocType ?? documentType ?? ''} onChange={setTempDocType} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map((type, index) => {
const currValue = tempDocType ?? documentType
return (
<Radio key={index} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton
type={type}
isChecked={currValue === type}
/>
</Radio>
)
})}
</Radio.Group>
{!doc_type && !documentType && (
<Button
variant="primary"
onClick={confirmDocType}
disabled={!tempDocType}
>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={confirmDocType} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">{t('operation.save', { ns: 'common' })}</Button>
<Button onClick={cancelDocType} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
</div>
)}
</div>
</>
)
}
// show metadata info, editable when in edit mode
const renderFieldInfos = ({ mainField = 'book', canEdit }: { mainField?: metadataType | '', canEdit?: boolean }) => {
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const sourceData = ['originInfo', 'technicalParameters'].includes(mainField) ? docDetail : metadataParams.metadata
const getTargetMap = (field: string) => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {} as any
}
const getTargetValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getTargetMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map((field) => {
return (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getTargetValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={(val) => {
setMetadataParams(pre => ({ ...pre, metadata: { ...pre.metadata, [field]: val } }))
}}
selectOptions={map2Options(getTargetMap(field))}
/>
)
})}
</div>
)
}
const enabledEdit = () => {
setEditStatus(true)
}
const onCancel = () => {
setMetadataParams({ documentType: doc_type || '', metadata: { ...docDetail?.doc_metadata } })
setEditStatus(!doc_type)
if (!doc_type)
setShowDocTypes(true)
}
const onSave = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || doc_type || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
return (
<div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
{/* Header: title + action buttons */}
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: !showDocTypes && (
<div className={s.opBtnWrapper}>
<Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
<Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>
{/* Document type display / selector */}
{!editStatus
? <DocumentTypeDisplay displayType={docType} />
: showDocTypes
? null
: (
<DocumentTypeDisplay
displayType={metadataParams.documentType || ''}
showChangeLink={editStatus}
onChangeClick={() => setShowDocTypes(true)}
/>
)}
{/* Divider between type display and fields (skip when in first-time selection) */}
{(!docType && showDocTypes) ? null : <Divider />}
{/* Doc type selector or editable metadata fields */}
{showDocTypes
? (
<DocTypeSelector
docType={docType}
documentType={metadataParams.documentType}
tempDocType={tempDocType}
onTempDocTypeChange={setTempDocType}
onConfirm={confirmDocType}
onCancel={cancelDocType}
/>
)
{loading
? (<Loading type="app" />)
: (
<MetadataFieldList
mainField={metadataParams.documentType || ''}
canEdit={editStatus}
metadata={metadataParams.metadata}
docDetail={docDetail}
onFieldUpdate={updateMetadataField}
/>
<>
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enabledEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: showDocTypes
? null
: (
<div className={s.opBtnWrapper}>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
<Button
onClick={onSave}
className={`${s.opBtn} ${s.opSaveBtn}`}
variant="primary"
loading={saveLoading}
>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>
{/* show selected doc type and the change entry */}
{!editStatus
? (
<div className={s.documentTypeShow}>
<TypeIcon iconName={metadataMap[doc_type || 'book']?.iconName || ''} className={s.iconShow} />
{metadataMap[doc_type || 'book'].text}
</div>
)
: showDocTypes
? null
: (
<div className={s.documentTypeShow}>
{metadataParams.documentType && (
<>
<TypeIcon iconName={metadataMap[metadataParams.documentType || 'book'].iconName || ''} className={s.iconShow} />
{metadataMap[metadataParams.documentType || 'book'].text}
{editStatus && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div
onClick={() => { setShowDocTypes(true) }}
className="cursor-pointer hover:text-text-accent"
>
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)}
{(!doc_type && showDocTypes) ? null : <Divider />}
{showDocTypes ? renderSelectDocType() : renderFieldInfos({ mainField: metadataParams.documentType, canEdit: editStatus })}
{/* show fixed fields */}
<Divider />
{renderFieldInfos({ mainField: 'originInfo', canEdit: false })}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
{renderFieldInfos({ mainField: 'technicalParameters', canEdit: false })}
</>
)}
{/* Fixed fields: origin info */}
<Divider />
<MetadataFieldList mainField="originInfo" docDetail={docDetail} />
{/* Fixed fields: technical parameters */}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
<MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
</div>
)
}

View File

@@ -28,7 +28,6 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { env } from '@/env'
import { useLogout } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import AccountAbout from '../account-about'
@@ -179,7 +178,7 @@ export default function AppSelector() {
</Link>
</MenuItem>
{
env.NEXT_PUBLIC_SITE_ABOUT !== 'hide' && (
document?.body?.getAttribute('data-public-site-about') !== 'hide' && (
<MenuItem>
<div
className={cn(itemClassName, 'justify-between', 'data-[active]:bg-state-base-hover')}

View File

@@ -104,7 +104,7 @@ const MembersPage = () => {
<UpgradeBtn className="mr-2" loc="member-invite" />
)}
<div className="shrink-0">
{isCurrentWorkspaceManager && <InviteButton disabled={isMemberFull} onClick={() => setInviteModalVisible(true)} />}
<InviteButton disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)} />
</div>
</div>
<div className="overflow-visible lg:overflow-visible">

View File

@@ -38,7 +38,7 @@ const DeprecationNotice: FC<DeprecationNoticeProps> = ({
iconWrapperClassName,
textClassName,
}) => {
const { t } = useTranslation('plugin')
const { t } = useTranslation()
const deprecatedReasonKey = useMemo(() => {
if (!deprecatedReason)

View File

@@ -0,0 +1,42 @@
'use client'
import { SerwistProvider } from '@serwist/turbopack/react'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
import { isClient } from '@/utils/client'
export function PWAProvider({ children }: { children: React.ReactNode }) {
if (IS_DEV) {
return <DisabledPWAProvider>{children}</DisabledPWAProvider>
}
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const swUrl = `${basePath}/serwist/sw.js`
return (
<SerwistProvider swUrl={swUrl}>
{children}
</SerwistProvider>
)
}
function DisabledPWAProvider({ children }: { children: React.ReactNode }) {
useEffect(() => {
if (isClient && 'serviceWorker' in navigator) {
navigator.serviceWorker.getRegistrations()
.then((registrations) => {
registrations.forEach((registration) => {
registration.unregister()
.catch((error) => {
console.error('Error unregistering service worker:', error)
})
})
})
.catch((error) => {
console.error('Error unregistering service workers:', error)
})
}
}, [])
return <>{children}</>
}
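Note: a hypothetical wiring of the new provider in a Next.js root layout — the import path and layout shape are assumptions, not part of the diff:

import type { ReactNode } from 'react'
import { PWAProvider } from './pwa-provider' // path assumed

export default function RootLayout({ children }: { children: ReactNode }) {
  return (
    <html lang="en">
      <body>
        <PWAProvider>{children}</PWAProvider>
      </body>
    </html>
  )
}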

View File

@@ -1,6 +1,6 @@
import type { TFunction } from 'i18next'
import type { SchemaOptions } from './types'
import * as z from 'zod'
import { z } from 'zod'
import { InputTypeEnum } from '@/app/components/base/form/components/field/input-type-select/types'
import { MAX_VAR_KEY_LENGTH } from '@/config'
import { PipelineInputVarType } from '@/models/pipeline'

View File

@@ -1,6 +1,6 @@
import type { PropsWithChildren } from 'react'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'
@@ -145,6 +145,11 @@ vi.mock('@/app/components/workflow/constants', () => ({
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))
afterEach(() => {
cleanup()
vi.clearAllMocks()
})
describe('UpdateDSLModal', () => {
const mockOnCancel = vi.fn()
const mockOnBackup = vi.fn()

View File

@@ -1,17 +1,40 @@
'use client'
import type { MouseEventHandler } from 'react'
import {
RiAlertFill,
RiCloseLine,
RiFileDownloadLine,
} from '@remixicon/react'
import { memo } from 'react'
import {
memo,
useCallback,
useRef,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Uploader from '@/app/components/app/create-from-dsl-modal/uploader'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import { useUpdateDSLModal } from '../hooks/use-update-dsl-modal'
import VersionMismatchModal from './version-mismatch-modal'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
initialEdges,
initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import {
useImportPipelineDSL,
useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
type UpdateDSLModalProps = {
onCancel: () => void
@@ -25,17 +48,146 @@ const UpdateDSLModal = ({
onImport,
}: UpdateDSLModalProps) => {
const { t } = useTranslation()
const {
currentFile,
handleFile,
show,
showErrorModal,
setShowErrorModal,
loading,
versions,
handleImport,
onUpdateDSLConfirm,
} = useUpdateDSLModal({ onCancel, onImport })
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [loading, setLoading] = useState(false)
const { eventEmitter } = useEventEmitterContextContext()
const [show, setShow] = useState(true)
const [showErrorModal, setShowErrorModal] = useState(false)
const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
const [importId, setImportId] = useState<string>()
const { handleCheckPluginDependencies } = usePluginDependencies()
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const workflowStore = useWorkflowStore()
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = function (event) {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const handleWorkflowUpdate = useCallback(async (pipelineId: string) => {
const {
graph,
hash,
rag_pipeline_variables,
} = await fetchWorkflowDraft(`/rag/pipelines/${pipelineId}/workflows/draft`)
const { nodes, edges, viewport } = graph
eventEmitter?.emit({
type: WORKFLOW_DATA_UPDATE,
payload: {
nodes: initialNodes(nodes, edges),
edges: initialEdges(edges, nodes),
viewport,
hash,
rag_pipeline_variables: rag_pipeline_variables || [],
},
} as any)
}, [eventEmitter])
const isCreatingRef = useRef(false)
const handleImport: MouseEventHandler = useCallback(async () => {
const { pipelineId } = workflowStore.getState()
if (isCreatingRef.current)
return
isCreatingRef.current = true
if (!currentFile)
return
try {
if (pipelineId && fileContent) {
setLoading(true)
const response = await importDSL({ mode: DSLImportMode.YAML_CONTENT, yaml_content: fileContent, pipeline_id: pipelineId })
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
if (onImport)
onImport()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'common.importSuccess' : 'common.importWarning', { ns: 'workflow' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('common.importWarningDetails', { ns: 'workflow' }),
})
await handleCheckPluginDependencies(pipeline_id, true)
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.PENDING) {
setShow(false)
setTimeout(() => {
setShowErrorModal(true)
}, 300)
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
setImportId(id)
}
else {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
isCreatingRef.current = false
}, [currentFile, fileContent, onCancel, notify, t, onImport, handleWorkflowUpdate, handleCheckPluginDependencies, workflowStore, importDSL])
const onUpdateDSLConfirm: MouseEventHandler = async () => {
try {
if (!importId)
return
const response = await importDSLConfirm(importId)
const { status, pipeline_id } = response
if (status === DSLImportStatus.COMPLETED) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
if (onImport)
onImport()
notify({ type: 'success', message: t('common.importSuccess', { ns: 'workflow' }) })
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.FAILED) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
return (
<>
@@ -98,12 +250,32 @@ const UpdateDSLModal = ({
</Button>
</div>
</Modal>
<VersionMismatchModal
<Modal
isShow={showErrorModal}
versions={versions}
onClose={() => setShowErrorModal(false)}
onConfirm={onUpdateDSLConfirm}
/>
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onUpdateDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
</>
)
}
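Note: a condensed view of the status handling the restored handleImport / onUpdateDSLConfirm pair implements — an illustrative summary function, not part of the diff:

import { DSLImportStatus } from '@/models/app'

const describeImportOutcome = (status: DSLImportStatus): string => {
  switch (status) {
    case DSLImportStatus.COMPLETED:
    case DSLImportStatus.COMPLETED_WITH_WARNINGS:
      return 'refresh draft graph, check plugin deps, notify, close modal'
    case DSLImportStatus.PENDING:
      return 'version mismatch: hide modal, open confirm dialog, keep importId'
    default:
      return 'notify import failure' // FAILED and anything unexpected
  }
}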

View File

@@ -1,117 +0,0 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import VersionMismatchModal from './version-mismatch-modal'
describe('VersionMismatchModal', () => {
const mockOnClose = vi.fn()
const mockOnConfirm = vi.fn()
const defaultVersions = {
importedVersion: '0.8.0',
systemVersion: '1.0.0',
}
const defaultProps = {
isShow: true,
versions: defaultVersions,
onClose: mockOnClose,
onConfirm: mockOnConfirm,
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render dialog when isShow is true', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('dialog')).toBeInTheDocument()
})
it('should not render dialog when isShow is false', () => {
render(<VersionMismatchModal {...defaultProps} isShow={false} />)
expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
})
it('should render error title', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should render all error description parts', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorPart1')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart2')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart3')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart4')).toBeInTheDocument()
})
it('should display imported and system version numbers', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('0.8.0')).toBeInTheDocument()
expect(screen.getByText('1.0.0')).toBeInTheDocument()
})
it('should render cancel and confirm buttons', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('button', { name: /app\.newApp\.Cancel/ })).toBeInTheDocument()
expect(screen.getByRole('button', { name: /app\.newApp\.Confirm/ })).toBeInTheDocument()
})
})
describe('user interactions', () => {
it('should call onClose when cancel button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Cancel/ }))
expect(mockOnClose).toHaveBeenCalledTimes(1)
})
it('should call onConfirm when confirm button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Confirm/ }))
expect(mockOnConfirm).toHaveBeenCalledTimes(1)
})
})
describe('button variants', () => {
it('should render cancel button with secondary variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const cancelBtn = screen.getByRole('button', { name: /app\.newApp\.Cancel/ })
expect(cancelBtn).toHaveClass('btn-secondary')
})
it('should render confirm button with primary destructive variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const confirmBtn = screen.getByRole('button', { name: /app\.newApp\.Confirm/ })
expect(confirmBtn).toHaveClass('btn-primary')
expect(confirmBtn).toHaveClass('btn-destructive')
})
})
describe('edge cases', () => {
it('should handle undefined versions gracefully', () => {
render(<VersionMismatchModal {...defaultProps} versions={undefined} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should handle empty version strings', () => {
const emptyVersions = { importedVersion: '', systemVersion: '' }
render(<VersionMismatchModal {...defaultProps} versions={emptyVersions} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
})
})

View File

@@ -1,54 +0,0 @@
import type { MouseEventHandler } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
type VersionMismatchModalProps = {
isShow: boolean
versions?: {
importedVersion: string
systemVersion: string
}
onClose: () => void
onConfirm: MouseEventHandler
}
const VersionMismatchModal = ({
isShow,
versions,
onClose,
onConfirm,
}: VersionMismatchModalProps) => {
const { t } = useTranslation()
return (
<Modal
isShow={isShow}
onClose={onClose}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={onClose}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
)
}
export default VersionMismatchModal
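For reference, a hedged usage sketch of the removed component. The caller, state names, and confirm handler below are hypothetical, inferred only from the prop types above:

// Hypothetical caller; names are illustrative, not taken from this diff.
import { useState } from 'react'

const ImportDSLFlow = () => {
  const [showMismatch, setShowMismatch] = useState(false)
  return (
    <VersionMismatchModal
      isShow={showMismatch}
      versions={{ importedVersion: '0.8.0', systemVersion: '1.0.0' }}
      onClose={() => setShowMismatch(false)}
      onConfirm={() => { /* hypothetical: retry the import with a force flag */ }}
    />
  )
}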

View File

@@ -68,20 +68,23 @@ vi.mock('@/config', () => ({
API_PREFIX: '/api',
}))
// Mock postWithKeepalive from service/fetch
const mockPostWithKeepalive = vi.fn()
vi.mock('@/service/fetch', () => ({
postWithKeepalive: (...args: unknown[]) => mockPostWithKeepalive(...args),
}))
// ============================================================================
// Tests
// ============================================================================
describe('useNodesSyncDraft', () => {
const mockSendBeacon = vi.fn()
beforeEach(() => {
vi.clearAllMocks()
// Set up the navigator.sendBeacon mock
Object.defineProperty(navigator, 'sendBeacon', {
value: mockSendBeacon,
writable: true,
configurable: true,
})
// Default store state
mockStoreGetState.mockReturnValue({
getNodes: mockGetNodes,
@@ -131,7 +134,7 @@ describe('useNodesSyncDraft', () => {
})
describe('syncWorkflowDraftWhenPageClose', () => {
it('should not call postWithKeepalive when nodes are read only', () => {
it('should not call sendBeacon when nodes are read only', () => {
mockGetNodesReadOnly.mockReturnValue(true)
const { result } = renderHook(() => useNodesSyncDraft())
@@ -140,10 +143,10 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
expect(mockSendBeacon).not.toHaveBeenCalled()
})
it('should call postWithKeepalive with correct URL and params', () => {
it('should call sendBeacon with correct URL and params', () => {
mockGetNodesReadOnly.mockReturnValue(false)
mockGetNodes.mockReturnValue([
{ id: 'node-1', data: { type: 'start' }, position: { x: 0, y: 0 } },
@@ -155,16 +158,13 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).toHaveBeenCalledWith(
expect(mockSendBeacon).toHaveBeenCalledWith(
'/api/rag/pipelines/test-pipeline-id/workflows/draft',
expect.objectContaining({
graph: expect.any(Object),
hash: 'test-hash',
}),
expect.any(String),
)
})
it('should not call postWithKeepalive when pipelineId is missing', () => {
it('should not call sendBeacon when pipelineId is missing', () => {
mockWorkflowStoreGetState.mockReturnValue({
pipelineId: undefined,
environmentVariables: [],
@@ -178,10 +178,10 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
expect(mockSendBeacon).not.toHaveBeenCalled()
})
it('should not call postWithKeepalive when nodes array is empty', () => {
it('should not call sendBeacon when nodes array is empty', () => {
mockGetNodes.mockReturnValue([])
const { result } = renderHook(() => useNodesSyncDraft())
@@ -190,7 +190,7 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
expect(mockSendBeacon).not.toHaveBeenCalled()
})
it('should filter out temp nodes', () => {
@@ -204,8 +204,8 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
// Should not call postWithKeepalive because after filtering temp nodes, array is empty
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
// Should not call sendBeacon because after filtering temp nodes, array is empty
expect(mockSendBeacon).not.toHaveBeenCalled()
})
it('should remove underscore-prefixed data keys from nodes', () => {
@@ -219,9 +219,9 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).toHaveBeenCalled()
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.nodes[0].data._privateData).toBeUndefined()
expect(mockSendBeacon).toHaveBeenCalled()
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.nodes[0].data._privateData).toBeUndefined()
})
})
@@ -395,8 +395,8 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
})
it('should include environment variables in params', () => {
@@ -418,8 +418,8 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
})
it('should include rag pipeline variables in params', () => {
@@ -441,8 +441,8 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
})
it('should remove underscore-prefixed keys from edges', () => {
@@ -461,9 +461,9 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.edges[0].data._hidden).toBeUndefined()
expect(sentParams.graph.edges[0].data.visible).toBe(false)
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.edges[0].data._hidden).toBeUndefined()
expect(sentData.graph.edges[0].data.visible).toBe(false)
})
})
})
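Note that the updated assertions recover the payload with JSON.parse: the hook now invokes sendBeacon with a pre-serialized string, so the mock's second argument can be parsed directly. A minimal sketch of that pattern, under the mocked navigator set up in beforeEach:

// The hook serializes params before sending, so tests can do:
const [url, payload] = mockSendBeacon.mock.calls[0]
expect(url).toBe('/api/rag/pipelines/test-pipeline-id/workflows/draft')
const sentData = JSON.parse(payload as string)
expect(sentData.hash).toBe('test-hash')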

View File

@@ -9,7 +9,6 @@ import {
useWorkflowStore,
} from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { postWithKeepalive } from '@/service/fetch'
import { syncWorkflowDraft } from '@/service/workflow'
import { usePipelineRefreshDraft } from '.'
@@ -77,8 +76,12 @@ export const useNodesSyncDraft = () => {
return
const postParams = getPostParams()
if (postParams)
postWithKeepalive(`${API_PREFIX}${postParams.url}`, postParams.params)
if (postParams) {
navigator.sendBeacon(
`${API_PREFIX}${postParams.url}`,
JSON.stringify(postParams.params),
)
}
}, [getPostParams, getNodesReadOnly])
const performSync = useCallback(async (
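One caveat with this swap: when handed a string, navigator.sendBeacon transmits it with a text/plain content type, whereas the removed postWithKeepalive helper presumably sent JSON. If the draft endpoint rejects non-JSON bodies — an assumption, since the server contract is not shown in this diff — the payload would need to be wrapped in a typed Blob:

// Hedged sketch: only needed if the endpoint requires application/json.
const body = new Blob([JSON.stringify(postParams.params)], {
  type: 'application/json',
})
navigator.sendBeacon(`${API_PREFIX}${postParams.url}`, body)

Browsers apply CORS preflight rules to non-safelisted Blob types, so a same-origin deployment is assumed in this sketch.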

Some files were not shown because too many files have changed in this diff.