mirror of
https://github.com/langgenius/dify.git
synced 2026-04-13 03:59:23 +08:00
Compare commits
5 Commits
codex/run-
...
feat/docum
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd1d4047e8 | ||
|
|
6b20cebdda | ||
|
|
ce6f36fea9 | ||
|
|
3d2f61ec33 | ||
|
|
82ead9556c |
@@ -26,20 +26,13 @@ import userEvent from '@testing-library/user-event'
|
||||
// WHY: Mocks must be hoisted to top of file (Jest requirement).
|
||||
// They run BEFORE imports, so keep them before component imports.
|
||||
|
||||
// i18n (automatically mocked)
|
||||
// WHY: Shared mock at web/__mocks__/react-i18next.ts is auto-loaded by Jest
|
||||
// No explicit mock needed - it returns translation keys as-is
|
||||
// Override only if custom translations are required:
|
||||
// jest.mock('react-i18next', () => ({
|
||||
// useTranslation: () => ({
|
||||
// t: (key: string) => {
|
||||
// const customTranslations: Record<string, string> = {
|
||||
// 'my.custom.key': 'Custom Translation',
|
||||
// }
|
||||
// return customTranslations[key] || key
|
||||
// },
|
||||
// }),
|
||||
// }))
|
||||
// i18n (always required in Dify)
|
||||
// WHY: Returns key instead of translation so tests don't depend on i18n files
|
||||
jest.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Router (if component uses useRouter, usePathname, useSearchParams)
|
||||
// WHY: Isolates tests from Next.js routing, enables testing navigation behavior
|
||||
|
||||
10
.github/workflows/api-tests.yml
vendored
10
.github/workflows/api-tests.yml
vendored
@@ -93,12 +93,4 @@ jobs:
|
||||
# Create a detailed coverage summary
|
||||
echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
|
||||
{
|
||||
echo ""
|
||||
echo "<details><summary>File-level coverage (click to expand)</summary>"
|
||||
echo ""
|
||||
echo '```'
|
||||
uv run --project api coverage report -m
|
||||
echo '```'
|
||||
echo "</details>"
|
||||
} >> $GITHUB_STEP_SUMMARY
|
||||
uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
@@ -543,25 +543,6 @@ APP_MAX_EXECUTION_TIME=1200
|
||||
APP_DEFAULT_ACTIVE_REQUESTS=0
|
||||
APP_MAX_ACTIVE_REQUESTS=0
|
||||
|
||||
# Aliyun SLS Logstore Configuration
|
||||
# Aliyun Access Key ID
|
||||
ALIYUN_SLS_ACCESS_KEY_ID=
|
||||
# Aliyun Access Key Secret
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET=
|
||||
# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com)
|
||||
ALIYUN_SLS_ENDPOINT=
|
||||
# Aliyun SLS Region (e.g., cn-hangzhou)
|
||||
ALIYUN_SLS_REGION=
|
||||
# Aliyun SLS Project Name
|
||||
ALIYUN_SLS_PROJECT_NAME=
|
||||
# Number of days to retain workflow run logs (default: 365 days, 3650 for permanent storage)
|
||||
ALIYUN_SLS_LOGSTORE_TTL=365
|
||||
# Enable dual-write to both SLS LogStore and SQL database (default: false)
|
||||
LOGSTORE_DUAL_WRITE_ENABLED=false
|
||||
# Enable dual-read fallback to SQL database when LogStore returns no results (default: true)
|
||||
# Useful for migration scenarios where historical data exists only in SQL database
|
||||
LOGSTORE_DUAL_READ_ENABLED=true
|
||||
|
||||
# Celery beat configuration
|
||||
CELERY_BEAT_SCHEDULER_TIME=1
|
||||
|
||||
@@ -689,4 +670,4 @@ ANNOTATION_IMPORT_MIN_RECORDS=1
|
||||
ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5
|
||||
ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
|
||||
# Maximum number of concurrent annotation import tasks per tenant
|
||||
ANNOTATION_IMPORT_MAX_CONCURRENT=5
|
||||
ANNOTATION_IMPORT_MAX_CONCURRENT=5
|
||||
@@ -75,7 +75,6 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_import_modules,
|
||||
ext_logging,
|
||||
ext_login,
|
||||
ext_logstore,
|
||||
ext_mail,
|
||||
ext_migrate,
|
||||
ext_orjson,
|
||||
@@ -106,7 +105,6 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_migrate,
|
||||
ext_redis,
|
||||
ext_storage,
|
||||
ext_logstore, # Initialize logstore after storage, before celery
|
||||
ext_celery,
|
||||
ext_login,
|
||||
ext_mail,
|
||||
|
||||
@@ -22,12 +22,7 @@ from controllers.console.error import (
|
||||
NotAllowedCreateWorkspace,
|
||||
WorkspacesLimitExceeded,
|
||||
)
|
||||
from controllers.console.wraps import (
|
||||
decrypt_code_field,
|
||||
decrypt_password_field,
|
||||
email_password_login_enabled,
|
||||
setup_required,
|
||||
)
|
||||
from controllers.console.wraps import email_password_login_enabled, setup_required
|
||||
from events.tenant_event import tenant_was_created
|
||||
from libs.helper import EmailStr, extract_remote_ip
|
||||
from libs.login import current_account_with_tenant
|
||||
@@ -84,7 +79,6 @@ class LoginApi(Resource):
|
||||
@setup_required
|
||||
@email_password_login_enabled
|
||||
@console_ns.expect(console_ns.models[LoginPayload.__name__])
|
||||
@decrypt_password_field
|
||||
def post(self):
|
||||
"""Authenticate user and login."""
|
||||
args = LoginPayload.model_validate(console_ns.payload)
|
||||
@@ -224,7 +218,6 @@ class EmailCodeLoginSendEmailApi(Resource):
|
||||
class EmailCodeLoginApi(Resource):
|
||||
@setup_required
|
||||
@console_ns.expect(console_ns.models[EmailCodeLoginPayload.__name__])
|
||||
@decrypt_code_field
|
||||
def post(self):
|
||||
args = EmailCodeLoginPayload.model_validate(console_ns.payload)
|
||||
|
||||
|
||||
@@ -140,18 +140,6 @@ class DataSourceNotionListApi(Resource):
|
||||
credential_id = request.args.get("credential_id", default=None, type=str)
|
||||
if not credential_id:
|
||||
raise ValueError("Credential id is required.")
|
||||
|
||||
# Get datasource_parameters from query string (optional, for GitHub and other datasources)
|
||||
datasource_parameters_str = request.args.get("datasource_parameters", default=None, type=str)
|
||||
datasource_parameters = {}
|
||||
if datasource_parameters_str:
|
||||
try:
|
||||
datasource_parameters = json.loads(datasource_parameters_str)
|
||||
if not isinstance(datasource_parameters, dict):
|
||||
raise ValueError("datasource_parameters must be a JSON object.")
|
||||
except json.JSONDecodeError:
|
||||
raise ValueError("Invalid datasource_parameters JSON format.")
|
||||
|
||||
datasource_provider_service = DatasourceProviderService()
|
||||
credential = datasource_provider_service.get_datasource_credentials(
|
||||
tenant_id=current_tenant_id,
|
||||
@@ -199,7 +187,7 @@ class DataSourceNotionListApi(Resource):
|
||||
online_document_result: Generator[OnlineDocumentPagesMessage, None, None] = (
|
||||
datasource_runtime.get_online_document_pages(
|
||||
user_id=current_user.id,
|
||||
datasource_parameters=datasource_parameters,
|
||||
datasource_parameters={},
|
||||
provider_type=datasource_runtime.datasource_provider_type(),
|
||||
)
|
||||
)
|
||||
@@ -230,14 +218,14 @@ class DataSourceNotionListApi(Resource):
|
||||
|
||||
|
||||
@console_ns.route(
|
||||
"/notion/pages/<uuid:page_id>/<string:page_type>/preview",
|
||||
"/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
|
||||
"/datasets/notion-indexing-estimate",
|
||||
)
|
||||
class DataSourceNotionApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self, page_id, page_type):
|
||||
def get(self, workspace_id, page_id, page_type):
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
credential_id = request.args.get("credential_id", default=None, type=str)
|
||||
@@ -251,10 +239,11 @@ class DataSourceNotionApi(Resource):
|
||||
plugin_id="langgenius/notion_datasource",
|
||||
)
|
||||
|
||||
workspace_id = str(workspace_id)
|
||||
page_id = str(page_id)
|
||||
|
||||
extractor = NotionExtractor(
|
||||
notion_workspace_id="",
|
||||
notion_workspace_id=workspace_id,
|
||||
notion_obj_id=page_id,
|
||||
notion_page_type=page_type,
|
||||
notion_access_token=credential.get("integration_secret"),
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any, Literal, cast
|
||||
from uuid import UUID
|
||||
|
||||
from flask import abort, request
|
||||
from flask_restx import Resource, marshal_with, reqparse # type: ignore
|
||||
from flask_restx import Resource, marshal_with # type: ignore
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.orm import Session
|
||||
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
|
||||
@@ -975,11 +975,6 @@ class RagPipelineRecommendedPluginApi(Resource):
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self):
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("type", type=str, location="args", required=False, default="all")
|
||||
args = parser.parse_args()
|
||||
type = args["type"]
|
||||
|
||||
rag_pipeline_service = RagPipelineService()
|
||||
recommended_plugins = rag_pipeline_service.get_recommended_plugins(type)
|
||||
recommended_plugins = rag_pipeline_service.get_recommended_plugins()
|
||||
return recommended_plugins
|
||||
|
||||
@@ -9,12 +9,10 @@ from typing import ParamSpec, TypeVar
|
||||
from flask import abort, request
|
||||
|
||||
from configs import dify_config
|
||||
from controllers.console.auth.error import AuthenticationFailedError, EmailCodeError
|
||||
from controllers.console.workspace.error import AccountNotInitializedError
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from libs.encryption import FieldEncryption
|
||||
from libs.login import current_account_with_tenant
|
||||
from models.account import AccountStatus
|
||||
from models.dataset import RateLimitLog
|
||||
@@ -27,14 +25,6 @@ from .error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogo
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
|
||||
# Field names for decryption
|
||||
FIELD_NAME_PASSWORD = "password"
|
||||
FIELD_NAME_CODE = "code"
|
||||
|
||||
# Error messages for decryption failures
|
||||
ERROR_MSG_INVALID_ENCRYPTED_DATA = "Invalid encrypted data"
|
||||
ERROR_MSG_INVALID_ENCRYPTED_CODE = "Invalid encrypted code"
|
||||
|
||||
|
||||
def account_initialization_required(view: Callable[P, R]):
|
||||
@wraps(view)
|
||||
@@ -429,75 +419,3 @@ def annotation_import_concurrency_limit(view: Callable[P, R]):
|
||||
return view(*args, **kwargs)
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def _decrypt_field(field_name: str, error_class: type[Exception], error_message: str) -> None:
|
||||
"""
|
||||
Helper to decode a Base64 encoded field in the request payload.
|
||||
|
||||
Args:
|
||||
field_name: Name of the field to decode
|
||||
error_class: Exception class to raise on decoding failure
|
||||
error_message: Error message to include in the exception
|
||||
"""
|
||||
if not request or not request.is_json:
|
||||
return
|
||||
# Get the payload dict - it's cached and mutable
|
||||
payload = request.get_json()
|
||||
if not payload or field_name not in payload:
|
||||
return
|
||||
encoded_value = payload[field_name]
|
||||
decoded_value = FieldEncryption.decrypt_field(encoded_value)
|
||||
|
||||
# If decoding failed, raise error immediately
|
||||
if decoded_value is None:
|
||||
raise error_class(error_message)
|
||||
|
||||
# Update payload dict in-place with decoded value
|
||||
# Since payload is a mutable dict and get_json() returns the cached reference,
|
||||
# modifying it will affect all subsequent accesses including console_ns.payload
|
||||
payload[field_name] = decoded_value
|
||||
|
||||
|
||||
def decrypt_password_field(view: Callable[P, R]):
|
||||
"""
|
||||
Decorator to decrypt password field in request payload.
|
||||
|
||||
Automatically decrypts the 'password' field if encryption is enabled.
|
||||
If decryption fails, raises AuthenticationFailedError.
|
||||
|
||||
Usage:
|
||||
@decrypt_password_field
|
||||
def post(self):
|
||||
args = LoginPayload.model_validate(console_ns.payload)
|
||||
# args.password is now decrypted
|
||||
"""
|
||||
|
||||
@wraps(view)
|
||||
def decorated(*args: P.args, **kwargs: P.kwargs):
|
||||
_decrypt_field(FIELD_NAME_PASSWORD, AuthenticationFailedError, ERROR_MSG_INVALID_ENCRYPTED_DATA)
|
||||
return view(*args, **kwargs)
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def decrypt_code_field(view: Callable[P, R]):
|
||||
"""
|
||||
Decorator to decrypt verification code field in request payload.
|
||||
|
||||
Automatically decrypts the 'code' field if encryption is enabled.
|
||||
If decryption fails, raises EmailCodeError.
|
||||
|
||||
Usage:
|
||||
@decrypt_code_field
|
||||
def post(self):
|
||||
args = EmailCodeLoginPayload.model_validate(console_ns.payload)
|
||||
# args.code is now decrypted
|
||||
"""
|
||||
|
||||
@wraps(view)
|
||||
def decorated(*args: P.args, **kwargs: P.kwargs):
|
||||
_decrypt_field(FIELD_NAME_CODE, EmailCodeError, ERROR_MSG_INVALID_ENCRYPTED_CODE)
|
||||
return view(*args, **kwargs)
|
||||
|
||||
return decorated
|
||||
|
||||
@@ -10,7 +10,7 @@ class NotionInfo(BaseModel):
|
||||
"""
|
||||
|
||||
credential_id: str | None = None
|
||||
notion_workspace_id: str | None = ""
|
||||
notion_workspace_id: str
|
||||
notion_obj_id: str
|
||||
notion_page_type: str
|
||||
document: Document | None = None
|
||||
|
||||
@@ -166,7 +166,7 @@ class ExtractProcessor:
|
||||
elif extract_setting.datasource_type == DatasourceType.NOTION:
|
||||
assert extract_setting.notion_info is not None, "notion_info is required"
|
||||
extractor = NotionExtractor(
|
||||
notion_workspace_id=extract_setting.notion_info.notion_workspace_id or "",
|
||||
notion_workspace_id=extract_setting.notion_info.notion_workspace_id,
|
||||
notion_obj_id=extract_setting.notion_info.notion_obj_id,
|
||||
notion_page_type=extract_setting.notion_info.notion_page_type,
|
||||
document_model=extract_setting.notion_info.document,
|
||||
|
||||
@@ -45,6 +45,6 @@ def detect_file_encodings(file_path: str, timeout: int = 5, sample_size: int = 1
|
||||
except concurrent.futures.TimeoutError:
|
||||
raise TimeoutError(f"Timeout reached while detecting encoding for {file_path}")
|
||||
|
||||
if all(encoding.encoding is None for encoding in encodings):
|
||||
if all(encoding["encoding"] is None for encoding in encodings):
|
||||
raise RuntimeError(f"Could not detect encoding for {file_path}")
|
||||
return [enc for enc in encodings if enc.encoding is not None]
|
||||
return [FileEncoding(**enc) for enc in encodings if enc["encoding"] is not None]
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
"""
|
||||
Logstore extension for Dify application.
|
||||
|
||||
This extension initializes the logstore (Aliyun SLS) on application startup,
|
||||
creating necessary projects, logstores, and indexes if they don't exist.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from dify_app import DifyApp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_enabled() -> bool:
|
||||
"""
|
||||
Check if logstore extension is enabled.
|
||||
|
||||
Returns:
|
||||
True if all required Aliyun SLS environment variables are set, False otherwise
|
||||
"""
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
|
||||
required_vars = [
|
||||
"ALIYUN_SLS_ACCESS_KEY_ID",
|
||||
"ALIYUN_SLS_ACCESS_KEY_SECRET",
|
||||
"ALIYUN_SLS_ENDPOINT",
|
||||
"ALIYUN_SLS_REGION",
|
||||
"ALIYUN_SLS_PROJECT_NAME",
|
||||
]
|
||||
|
||||
all_set = all(os.environ.get(var) for var in required_vars)
|
||||
|
||||
if not all_set:
|
||||
logger.info("Logstore extension disabled: required Aliyun SLS environment variables not set")
|
||||
|
||||
return all_set
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
"""
|
||||
Initialize logstore on application startup.
|
||||
|
||||
This function:
|
||||
1. Creates Aliyun SLS project if it doesn't exist
|
||||
2. Creates logstores (workflow_execution, workflow_node_execution) if they don't exist
|
||||
3. Creates indexes with field configurations based on PostgreSQL table structures
|
||||
|
||||
This operation is idempotent and only executes once during application startup.
|
||||
|
||||
Args:
|
||||
app: The Dify application instance
|
||||
"""
|
||||
try:
|
||||
from extensions.logstore.aliyun_logstore import AliyunLogStore
|
||||
|
||||
logger.info("Initializing logstore...")
|
||||
|
||||
# Create logstore client and initialize project/logstores/indexes
|
||||
logstore_client = AliyunLogStore()
|
||||
logstore_client.init_project_logstore()
|
||||
|
||||
# Attach to app for potential later use
|
||||
app.extensions["logstore"] = logstore_client
|
||||
|
||||
logger.info("Logstore initialized successfully")
|
||||
except Exception:
|
||||
logger.exception("Failed to initialize logstore")
|
||||
# Don't raise - allow application to continue even if logstore init fails
|
||||
# This ensures that the application can still run if logstore is misconfigured
|
||||
@@ -1,890 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from typing import Any
|
||||
|
||||
import sqlalchemy as sa
|
||||
from aliyun.log import ( # type: ignore[import-untyped]
|
||||
GetLogsRequest,
|
||||
IndexConfig,
|
||||
IndexKeyConfig,
|
||||
IndexLineConfig,
|
||||
LogClient,
|
||||
LogItem,
|
||||
PutLogsRequest,
|
||||
)
|
||||
from aliyun.log.auth import AUTH_VERSION_4 # type: ignore[import-untyped]
|
||||
from aliyun.log.logexception import LogException # type: ignore[import-untyped]
|
||||
from dotenv import load_dotenv
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
|
||||
from configs import dify_config
|
||||
from extensions.logstore.aliyun_logstore_pg import AliyunLogStorePG
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AliyunLogStore:
|
||||
"""
|
||||
Singleton class for Aliyun SLS LogStore operations.
|
||||
|
||||
Ensures only one instance exists to prevent multiple PG connection pools.
|
||||
"""
|
||||
|
||||
_instance: "AliyunLogStore | None" = None
|
||||
_initialized: bool = False
|
||||
|
||||
# Track delayed PG connection for newly created projects
|
||||
_pg_connection_timer: threading.Timer | None = None
|
||||
_pg_connection_delay: int = 90 # delay seconds
|
||||
|
||||
# Default tokenizer for text/json fields and full-text index
|
||||
# Common delimiters: comma, space, quotes, punctuation, operators, brackets, special chars
|
||||
DEFAULT_TOKEN_LIST = [
|
||||
",",
|
||||
" ",
|
||||
'"',
|
||||
'"',
|
||||
";",
|
||||
"=",
|
||||
"(",
|
||||
")",
|
||||
"[",
|
||||
"]",
|
||||
"{",
|
||||
"}",
|
||||
"?",
|
||||
"@",
|
||||
"&",
|
||||
"<",
|
||||
">",
|
||||
"/",
|
||||
":",
|
||||
"\n",
|
||||
"\t",
|
||||
]
|
||||
|
||||
def __new__(cls) -> "AliyunLogStore":
|
||||
"""Implement singleton pattern."""
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
return cls._instance
|
||||
|
||||
project_des = "dify"
|
||||
|
||||
workflow_execution_logstore = "workflow_execution"
|
||||
|
||||
workflow_node_execution_logstore = "workflow_node_execution"
|
||||
|
||||
@staticmethod
|
||||
def _sqlalchemy_type_to_logstore_type(column: Any) -> str:
|
||||
"""
|
||||
Map SQLAlchemy column type to Aliyun LogStore index type.
|
||||
|
||||
Args:
|
||||
column: SQLAlchemy column object
|
||||
|
||||
Returns:
|
||||
LogStore index type: 'text', 'long', 'double', or 'json'
|
||||
"""
|
||||
column_type = column.type
|
||||
|
||||
# Integer types -> long
|
||||
if isinstance(column_type, (sa.Integer, sa.BigInteger, sa.SmallInteger)):
|
||||
return "long"
|
||||
|
||||
# Float types -> double
|
||||
if isinstance(column_type, (sa.Float, sa.Numeric)):
|
||||
return "double"
|
||||
|
||||
# String and Text types -> text
|
||||
if isinstance(column_type, (sa.String, sa.Text)):
|
||||
return "text"
|
||||
|
||||
# DateTime -> text (stored as ISO format string in logstore)
|
||||
if isinstance(column_type, sa.DateTime):
|
||||
return "text"
|
||||
|
||||
# Boolean -> long (stored as 0/1)
|
||||
if isinstance(column_type, sa.Boolean):
|
||||
return "long"
|
||||
|
||||
# JSON -> json
|
||||
if isinstance(column_type, sa.JSON):
|
||||
return "json"
|
||||
|
||||
# Default to text for unknown types
|
||||
return "text"
|
||||
|
||||
@staticmethod
|
||||
def _generate_index_keys_from_model(model_class: type[DeclarativeBase]) -> dict[str, IndexKeyConfig]:
|
||||
"""
|
||||
Automatically generate LogStore field index configuration from SQLAlchemy model.
|
||||
|
||||
This method introspects the SQLAlchemy model's column definitions and creates
|
||||
corresponding LogStore index configurations. When the PG schema is updated via
|
||||
Flask-Migrate, this method will automatically pick up the new fields on next startup.
|
||||
|
||||
Args:
|
||||
model_class: SQLAlchemy model class (e.g., WorkflowRun, WorkflowNodeExecutionModel)
|
||||
|
||||
Returns:
|
||||
Dictionary mapping field names to IndexKeyConfig objects
|
||||
"""
|
||||
index_keys = {}
|
||||
|
||||
# Iterate over all mapped columns in the model
|
||||
if hasattr(model_class, "__mapper__"):
|
||||
for column_name, column_property in model_class.__mapper__.columns.items():
|
||||
# Skip relationship properties and other non-column attributes
|
||||
if not hasattr(column_property, "type"):
|
||||
continue
|
||||
|
||||
# Map SQLAlchemy type to LogStore type
|
||||
logstore_type = AliyunLogStore._sqlalchemy_type_to_logstore_type(column_property)
|
||||
|
||||
# Create index configuration
|
||||
# - text fields: case_insensitive for better search, with tokenizer and Chinese support
|
||||
# - all fields: doc_value=True for analytics
|
||||
if logstore_type == "text":
|
||||
index_keys[column_name] = IndexKeyConfig(
|
||||
index_type="text",
|
||||
case_sensitive=False,
|
||||
doc_value=True,
|
||||
token_list=AliyunLogStore.DEFAULT_TOKEN_LIST,
|
||||
chinese=True,
|
||||
)
|
||||
else:
|
||||
index_keys[column_name] = IndexKeyConfig(index_type=logstore_type, doc_value=True)
|
||||
|
||||
# Add log_version field (not in PG model, but used in logstore for versioning)
|
||||
index_keys["log_version"] = IndexKeyConfig(index_type="long", doc_value=True)
|
||||
|
||||
return index_keys
|
||||
|
||||
def __init__(self) -> None:
|
||||
# Skip initialization if already initialized (singleton pattern)
|
||||
if self.__class__._initialized:
|
||||
return
|
||||
|
||||
load_dotenv()
|
||||
|
||||
self.access_key_id: str = os.environ.get("ALIYUN_SLS_ACCESS_KEY_ID", "")
|
||||
self.access_key_secret: str = os.environ.get("ALIYUN_SLS_ACCESS_KEY_SECRET", "")
|
||||
self.endpoint: str = os.environ.get("ALIYUN_SLS_ENDPOINT", "")
|
||||
self.region: str = os.environ.get("ALIYUN_SLS_REGION", "")
|
||||
self.project_name: str = os.environ.get("ALIYUN_SLS_PROJECT_NAME", "")
|
||||
self.logstore_ttl: int = int(os.environ.get("ALIYUN_SLS_LOGSTORE_TTL", 365))
|
||||
self.log_enabled: bool = os.environ.get("SQLALCHEMY_ECHO", "false").lower() == "true"
|
||||
self.pg_mode_enabled: bool = os.environ.get("LOGSTORE_PG_MODE_ENABLED", "true").lower() == "true"
|
||||
|
||||
# Initialize SDK client
|
||||
self.client = LogClient(
|
||||
self.endpoint, self.access_key_id, self.access_key_secret, auth_version=AUTH_VERSION_4, region=self.region
|
||||
)
|
||||
|
||||
# Append Dify identification to the existing user agent
|
||||
original_user_agent = self.client._user_agent # pyright: ignore[reportPrivateUsage]
|
||||
dify_version = dify_config.project.version
|
||||
enhanced_user_agent = f"Dify,Dify-{dify_version},{original_user_agent}"
|
||||
self.client.set_user_agent(enhanced_user_agent)
|
||||
|
||||
# PG client will be initialized in init_project_logstore
|
||||
self._pg_client: AliyunLogStorePG | None = None
|
||||
self._use_pg_protocol: bool = False
|
||||
|
||||
self.__class__._initialized = True
|
||||
|
||||
@property
|
||||
def supports_pg_protocol(self) -> bool:
|
||||
"""Check if PG protocol is supported and enabled."""
|
||||
return self._use_pg_protocol
|
||||
|
||||
def _attempt_pg_connection_init(self) -> bool:
|
||||
"""
|
||||
Attempt to initialize PG connection.
|
||||
|
||||
This method tries to establish PG connection and performs necessary checks.
|
||||
It's used both for immediate connection (existing projects) and delayed connection (new projects).
|
||||
|
||||
Returns:
|
||||
True if PG connection was successfully established, False otherwise.
|
||||
"""
|
||||
if not self.pg_mode_enabled or not self._pg_client:
|
||||
return False
|
||||
|
||||
try:
|
||||
self._use_pg_protocol = self._pg_client.init_connection()
|
||||
if self._use_pg_protocol:
|
||||
logger.info("Successfully connected to project %s using PG protocol", self.project_name)
|
||||
# Check if scan_index is enabled for all logstores
|
||||
self._check_and_disable_pg_if_scan_index_disabled()
|
||||
return True
|
||||
else:
|
||||
logger.info("PG connection failed for project %s. Will use SDK mode.", self.project_name)
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to establish PG connection for project %s: %s. Will use SDK mode.",
|
||||
self.project_name,
|
||||
str(e),
|
||||
)
|
||||
self._use_pg_protocol = False
|
||||
return False
|
||||
|
||||
def _delayed_pg_connection_init(self) -> None:
|
||||
"""
|
||||
Delayed initialization of PG connection for newly created projects.
|
||||
|
||||
This method is called by a background timer 3 minutes after project creation.
|
||||
"""
|
||||
# Double check conditions in case state changed
|
||||
if self._use_pg_protocol:
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"Attempting delayed PG connection for newly created project %s ...",
|
||||
self.project_name,
|
||||
)
|
||||
self._attempt_pg_connection_init()
|
||||
self.__class__._pg_connection_timer = None
|
||||
|
||||
def init_project_logstore(self):
|
||||
"""
|
||||
Initialize project, logstore, index, and PG connection.
|
||||
|
||||
This method should be called once during application startup to ensure
|
||||
all required resources exist and connections are established.
|
||||
"""
|
||||
# Step 1: Ensure project and logstore exist
|
||||
project_is_new = False
|
||||
if not self.is_project_exist():
|
||||
self.create_project()
|
||||
project_is_new = True
|
||||
|
||||
self.create_logstore_if_not_exist()
|
||||
|
||||
# Step 2: Initialize PG client and connection (if enabled)
|
||||
if not self.pg_mode_enabled:
|
||||
logger.info("PG mode is disabled. Will use SDK mode.")
|
||||
return
|
||||
|
||||
# Create PG client if not already created
|
||||
if self._pg_client is None:
|
||||
logger.info("Initializing PG client for project %s...", self.project_name)
|
||||
self._pg_client = AliyunLogStorePG(
|
||||
self.access_key_id, self.access_key_secret, self.endpoint, self.project_name
|
||||
)
|
||||
|
||||
# Step 3: Establish PG connection based on project status
|
||||
if project_is_new:
|
||||
# For newly created projects, schedule delayed PG connection
|
||||
self._use_pg_protocol = False
|
||||
logger.info(
|
||||
"Project %s is newly created. Will use SDK mode and schedule PG connection attempt in %d seconds.",
|
||||
self.project_name,
|
||||
self.__class__._pg_connection_delay,
|
||||
)
|
||||
if self.__class__._pg_connection_timer is not None:
|
||||
self.__class__._pg_connection_timer.cancel()
|
||||
self.__class__._pg_connection_timer = threading.Timer(
|
||||
self.__class__._pg_connection_delay,
|
||||
self._delayed_pg_connection_init,
|
||||
)
|
||||
self.__class__._pg_connection_timer.daemon = True # Don't block app shutdown
|
||||
self.__class__._pg_connection_timer.start()
|
||||
else:
|
||||
# For existing projects, attempt PG connection immediately
|
||||
logger.info("Project %s already exists. Attempting PG connection...", self.project_name)
|
||||
self._attempt_pg_connection_init()
|
||||
|
||||
def _check_and_disable_pg_if_scan_index_disabled(self) -> None:
|
||||
"""
|
||||
Check if scan_index is enabled for all logstores.
|
||||
If any logstore has scan_index=false, disable PG protocol.
|
||||
|
||||
This is necessary because PG protocol requires scan_index to be enabled.
|
||||
"""
|
||||
logstore_name_list = [
|
||||
AliyunLogStore.workflow_execution_logstore,
|
||||
AliyunLogStore.workflow_node_execution_logstore,
|
||||
]
|
||||
|
||||
for logstore_name in logstore_name_list:
|
||||
existing_config = self.get_existing_index_config(logstore_name)
|
||||
if existing_config and not existing_config.scan_index:
|
||||
logger.info(
|
||||
"Logstore %s has scan_index=false, USE SDK mode for read/write operations. "
|
||||
"PG protocol requires scan_index to be enabled.",
|
||||
logstore_name,
|
||||
)
|
||||
self._use_pg_protocol = False
|
||||
# Close PG connection if it was initialized
|
||||
if self._pg_client:
|
||||
self._pg_client.close()
|
||||
self._pg_client = None
|
||||
return
|
||||
|
||||
def is_project_exist(self) -> bool:
|
||||
try:
|
||||
self.client.get_project(self.project_name)
|
||||
return True
|
||||
except Exception as e:
|
||||
if e.args[0] == "ProjectNotExist":
|
||||
return False
|
||||
else:
|
||||
raise e
|
||||
|
||||
def create_project(self):
|
||||
try:
|
||||
self.client.create_project(self.project_name, AliyunLogStore.project_des)
|
||||
logger.info("Project %s created successfully", self.project_name)
|
||||
except LogException as e:
|
||||
logger.exception(
|
||||
"Failed to create project %s: errorCode=%s, errorMessage=%s, requestId=%s",
|
||||
self.project_name,
|
||||
e.get_error_code(),
|
||||
e.get_error_message(),
|
||||
e.get_request_id(),
|
||||
)
|
||||
raise
|
||||
|
||||
def is_logstore_exist(self, logstore_name: str) -> bool:
|
||||
try:
|
||||
_ = self.client.get_logstore(self.project_name, logstore_name)
|
||||
return True
|
||||
except Exception as e:
|
||||
if e.args[0] == "LogStoreNotExist":
|
||||
return False
|
||||
else:
|
||||
raise e
|
||||
|
||||
def create_logstore_if_not_exist(self) -> None:
|
||||
logstore_name_list = [
|
||||
AliyunLogStore.workflow_execution_logstore,
|
||||
AliyunLogStore.workflow_node_execution_logstore,
|
||||
]
|
||||
|
||||
for logstore_name in logstore_name_list:
|
||||
if not self.is_logstore_exist(logstore_name):
|
||||
try:
|
||||
self.client.create_logstore(
|
||||
project_name=self.project_name, logstore_name=logstore_name, ttl=self.logstore_ttl
|
||||
)
|
||||
logger.info("logstore %s created successfully", logstore_name)
|
||||
except LogException as e:
|
||||
logger.exception(
|
||||
"Failed to create logstore %s: errorCode=%s, errorMessage=%s, requestId=%s",
|
||||
logstore_name,
|
||||
e.get_error_code(),
|
||||
e.get_error_message(),
|
||||
e.get_request_id(),
|
||||
)
|
||||
raise
|
||||
|
||||
# Ensure index contains all Dify-required fields
|
||||
# This intelligently merges with existing config, preserving custom indexes
|
||||
self.ensure_index_config(logstore_name)
|
||||
|
||||
def is_index_exist(self, logstore_name: str) -> bool:
|
||||
try:
|
||||
_ = self.client.get_index_config(self.project_name, logstore_name)
|
||||
return True
|
||||
except Exception as e:
|
||||
if e.args[0] == "IndexConfigNotExist":
|
||||
return False
|
||||
else:
|
||||
raise e
|
||||
|
||||
def get_existing_index_config(self, logstore_name: str) -> IndexConfig | None:
|
||||
"""
|
||||
Get existing index configuration from logstore.
|
||||
|
||||
Args:
|
||||
logstore_name: Name of the logstore
|
||||
|
||||
Returns:
|
||||
IndexConfig object if index exists, None otherwise
|
||||
"""
|
||||
try:
|
||||
response = self.client.get_index_config(self.project_name, logstore_name)
|
||||
return response.get_index_config()
|
||||
except Exception as e:
|
||||
if e.args[0] == "IndexConfigNotExist":
|
||||
return None
|
||||
else:
|
||||
logger.exception("Failed to get index config for logstore %s", logstore_name)
|
||||
raise e
|
||||
|
||||
def _get_workflow_execution_index_keys(self) -> dict[str, IndexKeyConfig]:
    """
    Get field index configuration for workflow_execution logstore.

    The index keys are generated from the WorkflowRun SQLAlchemy model, so a
    Flask-Migrate schema change is picked up automatically on the next
    application startup. Two repository-layer fields that exist only in the
    logstore are appended on top of the generated keys.
    """
    from models.workflow import WorkflowRun

    index_keys = self._generate_index_keys_from_model(WorkflowRun)

    # Fields written by the repository layer that have no direct PG column.
    def _text_key() -> IndexKeyConfig:
        return IndexKeyConfig(
            index_type="text",
            case_sensitive=False,
            doc_value=True,
            token_list=self.DEFAULT_TOKEN_LIST,
            chinese=True,
        )

    index_keys["error_message"] = _text_key()  # Maps to 'error' in PG
    index_keys["started_at"] = _text_key()  # Maps to 'created_at' in PG

    logger.info("Generated %d index keys for workflow_execution from WorkflowRun model", len(index_keys))
    return index_keys
|
||||
|
||||
def _get_workflow_node_execution_index_keys(self) -> dict[str, IndexKeyConfig]:
    """
    Get field index configuration for workflow_node_execution logstore.

    The keys are derived from the WorkflowNodeExecutionModel SQLAlchemy model,
    so PG schema changes made via Flask-Migrate are reflected automatically on
    the next application startup.
    """
    from models.workflow import WorkflowNodeExecutionModel

    keys = self._generate_index_keys_from_model(WorkflowNodeExecutionModel)
    logger.debug(
        "Generated %d index keys for workflow_node_execution from WorkflowNodeExecutionModel", len(keys)
    )
    return keys
|
||||
|
||||
def _get_index_config(self, logstore_name: str) -> IndexConfig:
    """
    Build the index configuration for the specified logstore.

    Args:
        logstore_name: Name of the logstore

    Returns:
        IndexConfig combining a full-text (line) index with the field indexes
        that match the logstore; unknown logstores get only the line index.
    """
    # Full-text index with the default tokenizer.
    line_config = IndexLineConfig(token_list=self.DEFAULT_TOKEN_LIST, case_sensitive=False, chinese=True)

    # Pick the field-key builder for this logstore; anything else gets no field keys.
    builders = {
        AliyunLogStore.workflow_execution_logstore: self._get_workflow_execution_index_keys,
        AliyunLogStore.workflow_node_execution_logstore: self._get_workflow_node_execution_index_keys,
    }
    builder = builders.get(logstore_name)
    field_keys = builder() if builder else {}

    # key_config_list should be a dict, not a list.
    return IndexConfig(line_config=line_config, key_config_list=field_keys, scan_index=True)
|
||||
|
||||
def create_index(self, logstore_name: str) -> None:
    """
    Create the index for a logstore with both full-text and field indexes.

    Field indexes are generated from the corresponding SQLAlchemy model via
    _get_index_config.

    Args:
        logstore_name: Name of the logstore to index.

    Raises:
        LogException: If the SLS create_index call fails.
    """
    index_config = self._get_index_config(logstore_name)
    field_count = len(index_config.key_config_list or {})

    try:
        self.client.create_index(self.project_name, logstore_name, index_config)
    except LogException as e:
        logger.exception(
            "Failed to create index for logstore %s: errorCode=%s, errorMessage=%s, requestId=%s",
            logstore_name,
            e.get_error_code(),
            e.get_error_message(),
            e.get_request_id(),
        )
        raise

    logger.info(
        "index for %s created successfully with %d field indexes",
        logstore_name,
        field_count,
    )
|
||||
|
||||
def _merge_index_configs(
    self, existing_config: IndexConfig, required_keys: dict[str, IndexKeyConfig], logstore_name: str
) -> tuple[IndexConfig, bool]:
    """
    Intelligently merge existing index config with Dify's required field indexes.

    This method:
    1. Preserves all existing field indexes in logstore (including custom fields)
    2. Adds missing Dify-required fields
    3. Updates fields where type doesn't match (with json/text compatibility)
    4. Corrects case mismatches (e.g., if Dify needs 'status' but logstore has 'Status')

    Type compatibility rules:
    - json and text types are considered compatible (users can manually choose either)
    - All other type mismatches will be corrected to match Dify requirements

    Note: Logstore is case-sensitive and doesn't allow duplicate fields with different cases.
    Case mismatch means: existing field name differs from required name only in case.

    Args:
        existing_config: Current index configuration from logstore
        required_keys: Dify's required field index configurations
        logstore_name: Name of the logstore (for logging)

    Returns:
        Tuple of (merged_config, needs_update)
    """
    # key_config_list is already a dict in the SDK.
    # Make a copy to avoid modifying the original.
    existing_keys = dict(existing_config.key_config_list) if existing_config.key_config_list else {}

    # Track changes so we can both decide whether an update call is needed
    # and log a concise summary of what changed.
    needs_update = False
    case_corrections = []  # Fields that need case correction (e.g., 'Status' -> 'status')
    missing_fields = []
    type_mismatches = []

    # First pass: Check for and resolve case mismatches with required fields.
    # Note: Logstore itself doesn't allow duplicate fields with different cases,
    # so we only need to check if the existing case matches the required case.
    for required_name in required_keys:
        lower_name = required_name.lower()
        # Find key that matches case-insensitively but not exactly.
        wrong_case_key = None
        for existing_key in existing_keys:
            if existing_key.lower() == lower_name and existing_key != required_name:
                wrong_case_key = existing_key
                break

        if wrong_case_key:
            # Field exists but with wrong case (e.g., 'Status' when we need 'status').
            # Remove the wrong-case key; the second pass will add it back with
            # the correct case because it will look missing by then.
            case_corrections.append((wrong_case_key, required_name))
            del existing_keys[wrong_case_key]
            needs_update = True

    # Second pass: Check each required field against what remains.
    for required_name, required_config in required_keys.items():
        # Check for exact match (case-sensitive).
        if required_name in existing_keys:
            existing_type = existing_keys[required_name].index_type
            required_type = required_config.index_type

            # Check if type matches.
            # Special case: json and text are interchangeable for JSON content fields.
            # Allow users to manually configure text instead of json (or vice versa) without forcing updates.
            is_compatible = existing_type == required_type or ({existing_type, required_type} == {"json", "text"})

            if not is_compatible:
                type_mismatches.append((required_name, existing_type, required_type))
                # Update with correct type.
                existing_keys[required_name] = required_config
                needs_update = True
            # else: field exists with compatible type, no action needed.
        else:
            # Field doesn't exist (may have been removed in first pass due to case conflict).
            missing_fields.append(required_name)
            existing_keys[required_name] = required_config
            needs_update = True

    # Log a truncated summary of each category of change.
    if missing_fields:
        logger.info(
            "Logstore %s: Adding %d missing Dify-required fields: %s",
            logstore_name,
            len(missing_fields),
            ", ".join(missing_fields[:10]) + ("..." if len(missing_fields) > 10 else ""),
        )

    if type_mismatches:
        logger.info(
            "Logstore %s: Fixing %d type mismatches: %s",
            logstore_name,
            len(type_mismatches),
            ", ".join([f"{name}({old}->{new})" for name, old, new in type_mismatches[:5]])
            + ("..." if len(type_mismatches) > 5 else ""),
        )

    if case_corrections:
        logger.info(
            "Logstore %s: Correcting %d field name cases: %s",
            logstore_name,
            len(case_corrections),
            ", ".join([f"'{old}' -> '{new}'" for old, new in case_corrections[:5]])
            + ("..." if len(case_corrections) > 5 else ""),
        )

    # Create merged config.
    # key_config_list should be a dict, not a list.
    # Preserve the original scan_index value - don't force it to True.
    merged_config = IndexConfig(
        line_config=existing_config.line_config
        or IndexLineConfig(token_list=self.DEFAULT_TOKEN_LIST, case_sensitive=False, chinese=True),
        key_config_list=existing_keys,
        scan_index=existing_config.scan_index,
    )

    return merged_config, needs_update
|
||||
|
||||
def ensure_index_config(self, logstore_name: str) -> None:
    """
    Ensure index configuration includes all Dify-required fields.

    This method intelligently manages index configuration:
    1. If index doesn't exist, create it with Dify's required fields
    2. If index exists:
       - Check if all Dify-required fields are present
       - Check if field types match requirements
       - Only update if fields are missing or types are incorrect
       - Preserve any additional custom index configurations

    This approach allows users to add their own custom indexes without being overwritten.

    Args:
        logstore_name: Name of the logstore whose index should be verified.

    Raises:
        LogException: If updating an existing index fails.
    """
    # Get Dify's required field indexes; unknown logstore names get an empty set.
    required_keys = {}
    if logstore_name == AliyunLogStore.workflow_execution_logstore:
        required_keys = self._get_workflow_execution_index_keys()
    elif logstore_name == AliyunLogStore.workflow_node_execution_logstore:
        required_keys = self._get_workflow_node_execution_index_keys()

    # Check if index exists (None means no index configured yet).
    existing_config = self.get_existing_index_config(logstore_name)

    if existing_config is None:
        # Index doesn't exist, create it from scratch.
        logger.info(
            "Logstore %s: Index doesn't exist, creating with %d required fields",
            logstore_name,
            len(required_keys),
        )
        self.create_index(logstore_name)
    else:
        # Merge existing config with required keys; only call update_index
        # when the merge reports an actual difference.
        merged_config, needs_update = self._merge_index_configs(existing_config, required_keys, logstore_name)

        if needs_update:
            logger.info("Logstore %s: Updating index to include Dify-required fields", logstore_name)
            try:
                self.client.update_index(self.project_name, logstore_name, merged_config)
                logger.info(
                    "Logstore %s: Index updated successfully, now has %d total field indexes",
                    logstore_name,
                    len(merged_config.key_config_list or {}),
                )
            except LogException as e:
                logger.exception(
                    "Failed to update index for logstore %s: errorCode=%s, errorMessage=%s, requestId=%s",
                    logstore_name,
                    e.get_error_code(),
                    e.get_error_message(),
                    e.get_request_id(),
                )
                raise
        else:
            logger.info(
                "Logstore %s: Index already contains all %d Dify-required fields with correct types, "
                "no update needed",
                logstore_name,
                len(required_keys),
            )
|
||||
|
||||
def put_log(self, logstore: str, contents: Sequence[tuple[str, str]]) -> None:
    """Write one log entry to a logstore, via PG protocol when available.

    Args:
        logstore: Target logstore name.
        contents: Sequence of (field_name, value) pairs forming one log item.

    Raises:
        LogException: If the SDK write fails (SDK mode only).
    """
    # Prefer the PG protocol when it was successfully initialized; otherwise
    # fall back to the SLS SDK.
    if self._use_pg_protocol and self._pg_client:
        self._pg_client.put_log(logstore, contents, self.log_enabled)
        return

    if self.log_enabled:
        logger.info(
            "[LogStore-SDK] PUT_LOG | logstore=%s | project=%s | items_count=%d",
            logstore,
            self.project_name,
            len(contents),
        )

    request = PutLogsRequest(
        project=self.project_name, logstore=logstore, logitems=[LogItem(contents=contents)]
    )
    try:
        self.client.put_logs(request)
    except LogException as e:
        logger.exception(
            "Failed to put logs to logstore %s: errorCode=%s, errorMessage=%s, requestId=%s",
            logstore,
            e.get_error_code(),
            e.get_error_message(),
            e.get_request_id(),
        )
        raise
|
||||
|
||||
def get_logs(
    self,
    logstore: str,
    from_time: int,
    to_time: int,
    topic: str = "",
    query: str = "",
    line: int = 100,
    offset: int = 0,
    reverse: bool = True,
) -> list[dict]:
    """Fetch raw log entries from a logstore via the SLS SDK.

    Args:
        logstore: Name of the logstore to read from.
        from_time: Start of the time range (Unix timestamp).
        to_time: End of the time range (Unix timestamp).
        topic: Log topic filter (empty matches all topics).
        query: Search/filter query string.
        line: Maximum number of entries to return.
        offset: Number of entries to skip (for pagination).
        reverse: When True, newest entries come first.

    Returns:
        A list of dictionaries, one per log entry.

    Raises:
        LogException: If the SLS request fails.
    """
    # Emit query details when verbose logging is enabled (SQLALCHEMY_ECHO).
    if self.log_enabled:
        logger.info(
            "[LogStore] GET_LOGS | logstore=%s | project=%s | query=%s | "
            "from_time=%d | to_time=%d | line=%d | offset=%d | reverse=%s",
            logstore,
            self.project_name,
            query,
            from_time,
            to_time,
            line,
            offset,
            reverse,
        )

    request = GetLogsRequest(
        project=self.project_name,
        logstore=logstore,
        fromTime=from_time,
        toTime=to_time,
        topic=topic,
        query=query,
        line=line,
        offset=offset,
        reverse=reverse,
    )

    try:
        response = self.client.get_logs(request)
        entries = response.get_logs() if response else []
        result = [entry.get_contents() for entry in entries]

        if self.log_enabled:
            logger.info(
                "[LogStore] GET_LOGS RESULT | logstore=%s | returned_count=%d",
                logstore,
                len(result),
            )

        return result
    except LogException as e:
        logger.exception(
            "Failed to get logs from logstore %s with query '%s': errorCode=%s, errorMessage=%s, requestId=%s",
            logstore,
            query,
            e.get_error_code(),
            e.get_error_message(),
            e.get_request_id(),
        )
        raise
|
||||
|
||||
def execute_sql(
    self,
    sql: str,
    logstore: str | None = None,
    query: str = "*",
    from_time: int | None = None,
    to_time: int | None = None,
    power_sql: bool = False,
) -> list[dict]:
    """
    Execute SQL query for aggregation and analysis.

    Args:
        sql: SQL query string (SELECT statement)
        logstore: Name of the logstore (required)
        query: Search/filter query for SDK mode (default: "*" for all logs).
               Only used in SDK mode. PG mode ignores this parameter.
        from_time: Start time (Unix timestamp) - only used in SDK mode
        to_time: End time (Unix timestamp) - only used in SDK mode
        power_sql: Whether to use enhanced SQL mode (default: False)

    Returns:
        List of result rows as dictionaries

    Raises:
        ValueError: If logstore is not provided.
        LogException: If the SLS request fails (SDK mode).

    Note:
        - PG mode: Only executes the SQL directly
        - SDK mode: Combines query and sql as "query | sql"
    """
    # Logstore is required
    if not logstore:
        raise ValueError("logstore parameter is required for execute_sql")

    # Route to PG or SDK based on protocol availability
    if self._use_pg_protocol and self._pg_client:
        # PG mode: execute SQL directly (ignore query parameter)
        return self._pg_client.execute_sql(sql, logstore, self.log_enabled)
    else:
        # SDK mode: combine query and sql as "query | sql"
        full_query = f"{query} | {sql}"

        # Provide default time range if not specified
        if from_time is None:
            from_time = 0

        if to_time is None:
            to_time = int(time.time())  # now

        request = GetLogsRequest(
            project=self.project_name,
            logstore=logstore,
            fromTime=from_time,
            toTime=to_time,
            query=full_query,
        )

        # Log query info if SQLALCHEMY_ECHO is enabled
        if self.log_enabled:
            # BUGFIX: the format string has five placeholders but was given six
            # arguments (query, sql) — pass the single combined full_query so
            # logging does not raise a formatting error.
            logger.info(
                "[LogStore-SDK] EXECUTE_SQL | logstore=%s | project=%s | from_time=%d | to_time=%d | full_query=%s",
                logstore,
                self.project_name,
                from_time,
                to_time,
                full_query,
            )

        try:
            response = self.client.get_logs(request)

            result = []
            logs = response.get_logs() if response else []
            for log in logs:
                result.append(log.get_contents())

            # Log result count if SQLALCHEMY_ECHO is enabled
            if self.log_enabled:
                logger.info(
                    "[LogStore-SDK] EXECUTE_SQL RESULT | logstore=%s | returned_count=%d",
                    logstore,
                    len(result),
                )

            return result
        except LogException as e:
            logger.exception(
                "Failed to execute SQL, logstore %s: errorCode=%s, errorMessage=%s, requestId=%s, full_query=%s",
                logstore,
                e.get_error_code(),
                e.get_error_message(),
                e.get_request_id(),
                full_query,
            )
            raise
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: writes one log item to the workflow-execution logstore.
    # NOTE(review): requires valid Aliyun SLS credentials/configuration at
    # runtime — confirm before running outside a development environment.
    aliyun_logstore = AliyunLogStore()
    # aliyun_logstore.init_project_logstore()
    aliyun_logstore.put_log(AliyunLogStore.workflow_execution_logstore, [("key1", "value1")])
|
||||
@@ -1,407 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from contextlib import contextmanager
|
||||
from typing import Any
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.pool
|
||||
from psycopg2 import InterfaceError, OperationalError
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AliyunLogStorePG:
    """
    PostgreSQL protocol support for Aliyun SLS LogStore.

    Handles PG connection pooling and operations for regions that support PG protocol.

    Notes:
        - SLS's PG endpoint supports only a SQL subset (SELECT/INSERT on actual
          tables) and no transactions, so connections always run in autocommit
          mode and writes are append-only.
    """

    def __init__(self, access_key_id: str, access_key_secret: str, endpoint: str, project_name: str):
        """
        Initialize PG connection for SLS.

        Args:
            access_key_id: Aliyun access key ID
            access_key_secret: Aliyun access key secret
            endpoint: SLS endpoint
            project_name: SLS project name
        """
        self._access_key_id = access_key_id
        self._access_key_secret = access_key_secret
        self._endpoint = endpoint
        self.project_name = project_name
        # Pool is created lazily by init_connection(); None means SDK-only mode.
        self._pg_pool: psycopg2.pool.SimpleConnectionPool | None = None
        self._use_pg_protocol = False

    def _check_port_connectivity(self, host: str, port: int, timeout: float = 2.0) -> bool:
        """
        Check if a TCP port is reachable using socket connection.

        This provides a fast check before attempting full database connection,
        preventing long waits when connecting to unsupported regions.

        Args:
            host: Hostname or IP address
            port: Port number
            timeout: Connection timeout in seconds (default: 2.0)

        Returns:
            True if port is reachable, False otherwise
        """
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(timeout)
            # connect_ex returns 0 on success instead of raising.
            result = sock.connect_ex((host, port))
            sock.close()
            return result == 0
        except Exception as e:
            logger.debug("Port connectivity check failed for %s:%d: %s", host, port, str(e))
            return False

    def init_connection(self) -> bool:
        """
        Initialize PostgreSQL connection pool for SLS PG protocol support.

        Attempts to connect to SLS using PostgreSQL protocol. If successful, sets
        _use_pg_protocol to True and creates a connection pool. If connection fails
        (region doesn't support PG protocol or other errors), returns False.

        Returns:
            True if PG protocol is supported and initialized, False otherwise
        """
        try:
            # Extract hostname from endpoint (remove protocol if present)
            pg_host = self._endpoint.replace("http://", "").replace("https://", "")

            # Get pool configuration
            pg_max_connections = int(os.environ.get("ALIYUN_SLS_PG_MAX_CONNECTIONS", 10))

            logger.debug(
                "Check PG protocol connection to SLS: host=%s, project=%s",
                pg_host,
                self.project_name,
            )

            # Fast port connectivity check before attempting full connection
            # This prevents long waits when connecting to unsupported regions
            if not self._check_port_connectivity(pg_host, 5432, timeout=1.0):
                logger.info(
                    "USE SDK mode for read/write operations, host=%s",
                    pg_host,
                )
                return False

            # Create connection pool
            self._pg_pool = psycopg2.pool.SimpleConnectionPool(
                minconn=1,
                maxconn=pg_max_connections,
                host=pg_host,
                port=5432,
                database=self.project_name,
                user=self._access_key_id,
                password=self._access_key_secret,
                sslmode="require",
                connect_timeout=5,
                application_name=f"Dify-{dify_config.project.version}",
            )

            # Note: Skip test query because SLS PG protocol only supports SELECT/INSERT on actual tables
            # Connection pool creation success already indicates connectivity

            self._use_pg_protocol = True
            logger.info(
                "PG protocol initialized successfully for SLS project=%s. Will use PG for read/write operations.",
                self.project_name,
            )
            return True

        except Exception as e:
            # PG connection failed - fallback to SDK mode
            self._use_pg_protocol = False
            if self._pg_pool:
                try:
                    self._pg_pool.closeall()
                except Exception:
                    logger.debug("Failed to close PG connection pool during cleanup, ignoring")
                self._pg_pool = None

            logger.info(
                "PG protocol connection failed (region may not support PG protocol): %s. "
                "Falling back to SDK mode for read/write operations.",
                str(e),
            )
            return False

    def _is_connection_valid(self, conn: Any) -> bool:
        """
        Check if a connection is still valid.

        Args:
            conn: psycopg2 connection object

        Returns:
            True if connection is valid, False otherwise
        """
        try:
            # Check if connection is closed
            if conn.closed:
                return False

            # Quick ping test - execute a lightweight query.
            # NOTE(review): the original comment claimed "SELECT 1" without FROM is
            # not supported by the SLS PG protocol, yet the code executes exactly
            # that and relies on the except clause if it fails — confirm intent.
            with conn.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
                return True
        except Exception:
            return False

    @contextmanager
    def _get_connection(self):
        """
        Context manager to get a PostgreSQL connection from the pool.

        Automatically validates and refreshes stale connections.

        Note: Aliyun SLS PG protocol does not support transactions, so we always
        use autocommit mode.

        Yields:
            psycopg2 connection object

        Raises:
            RuntimeError: If PG pool is not initialized
        """
        if not self._pg_pool:
            raise RuntimeError("PG connection pool is not initialized")

        conn = self._pg_pool.getconn()
        try:
            # Validate connection and get a fresh one if needed
            if not self._is_connection_valid(conn):
                logger.debug("Connection is stale, marking as bad and getting a new one")
                # Mark connection as bad (close=True) and get a new one
                self._pg_pool.putconn(conn, close=True)
                conn = self._pg_pool.getconn()

            # Aliyun SLS PG protocol does not support transactions, always use autocommit
            conn.autocommit = True
            yield conn
        finally:
            # Return connection to pool (or close if it's bad)
            if self._is_connection_valid(conn):
                self._pg_pool.putconn(conn)
            else:
                self._pg_pool.putconn(conn, close=True)

    def close(self) -> None:
        """Close the PostgreSQL connection pool."""
        if self._pg_pool:
            try:
                self._pg_pool.closeall()
                logger.info("PG connection pool closed")
            except Exception:
                logger.exception("Failed to close PG connection pool")

    def _is_retriable_error(self, error: Exception) -> bool:
        """
        Check if an error is retriable (connection-related issues).

        Args:
            error: Exception to check

        Returns:
            True if the error is retriable, False otherwise
        """
        # Retry on connection-related errors
        if isinstance(error, (OperationalError, InterfaceError)):
            return True

        # Check error message for specific connection issues
        error_msg = str(error).lower()
        retriable_patterns = [
            "connection",
            "timeout",
            "closed",
            "broken pipe",
            "reset by peer",
            "no route to host",
            "network",
        ]
        return any(pattern in error_msg for pattern in retriable_patterns)

    def put_log(self, logstore: str, contents: Sequence[tuple[str, str]], log_enabled: bool = False) -> None:
        """
        Write log to SLS using PostgreSQL protocol with automatic retry.

        Note: SLS PG protocol only supports INSERT (not UPDATE). This uses append-only
        writes with log_version field for versioning, same as SDK implementation.

        Args:
            logstore: Name of the logstore table
            contents: List of (field_name, value) tuples
            log_enabled: Whether to enable logging

        Raises:
            psycopg2.Error: If database operation fails after all retries
        """
        if not contents:
            return

        # Extract field names and values from contents
        fields = [field_name for field_name, _ in contents]
        values = [value for _, value in contents]

        # Build INSERT statement with literal values
        # Note: Aliyun SLS PG protocol doesn't support parameterized queries,
        # so we need to use mogrify to safely create literal values
        field_list = ", ".join([f'"{field}"' for field in fields])

        if log_enabled:
            logger.info(
                "[LogStore-PG] PUT_LOG | logstore=%s | project=%s | items_count=%d",
                logstore,
                self.project_name,
                len(contents),
            )

        # Retry configuration: up to 3 attempts with exponential backoff.
        max_retries = 3
        retry_delay = 0.1  # Start with 100ms

        for attempt in range(max_retries):
            try:
                with self._get_connection() as conn:
                    with conn.cursor() as cursor:
                        # Use mogrify to safely convert values to SQL literals
                        placeholders = ", ".join(["%s"] * len(fields))
                        values_literal = cursor.mogrify(f"({placeholders})", values).decode("utf-8")
                        insert_sql = f'INSERT INTO "{logstore}" ({field_list}) VALUES {values_literal}'
                        cursor.execute(insert_sql)
                        # Success - exit retry loop
                        return

            except psycopg2.Error as e:
                # Check if error is retriable
                if not self._is_retriable_error(e):
                    # Not a retriable error (e.g., data validation error), fail immediately
                    logger.exception(
                        "Failed to put logs to logstore %s via PG protocol (non-retriable error)",
                        logstore,
                    )
                    raise

                # Retriable error - log and retry if we have attempts left
                if attempt < max_retries - 1:
                    logger.warning(
                        "Failed to put logs to logstore %s via PG protocol (attempt %d/%d): %s. Retrying...",
                        logstore,
                        attempt + 1,
                        max_retries,
                        str(e),
                    )
                    time.sleep(retry_delay)
                    retry_delay *= 2  # Exponential backoff
                else:
                    # Last attempt failed
                    logger.exception(
                        "Failed to put logs to logstore %s via PG protocol after %d attempts",
                        logstore,
                        max_retries,
                    )
                    raise

    def execute_sql(self, sql: str, logstore: str, log_enabled: bool = False) -> list[dict[str, Any]]:
        """
        Execute SQL query using PostgreSQL protocol with automatic retry.

        Args:
            sql: SQL query string
            logstore: Name of the logstore (for logging purposes)
            log_enabled: Whether to enable logging

        Returns:
            List of result rows as dictionaries

        Raises:
            psycopg2.Error: If database operation fails after all retries
        """
        if log_enabled:
            logger.info(
                "[LogStore-PG] EXECUTE_SQL | logstore=%s | project=%s | sql=%s",
                logstore,
                self.project_name,
                sql,
            )

        # Retry configuration: up to 3 attempts with exponential backoff.
        max_retries = 3
        retry_delay = 0.1  # Start with 100ms

        for attempt in range(max_retries):
            try:
                with self._get_connection() as conn:
                    with conn.cursor() as cursor:
                        cursor.execute(sql)

                        # Get column names from cursor description
                        columns = [desc[0] for desc in cursor.description]

                        # Fetch all results and convert to list of dicts.
                        # NULLs become empty strings; all values are stringified.
                        result = []
                        for row in cursor.fetchall():
                            row_dict = {}
                            for col, val in zip(columns, row):
                                row_dict[col] = "" if val is None else str(val)
                            result.append(row_dict)

                        if log_enabled:
                            logger.info(
                                "[LogStore-PG] EXECUTE_SQL RESULT | logstore=%s | returned_count=%d",
                                logstore,
                                len(result),
                            )

                        return result

            except psycopg2.Error as e:
                # Check if error is retriable
                if not self._is_retriable_error(e):
                    # Not a retriable error (e.g., SQL syntax error), fail immediately
                    logger.exception(
                        "Failed to execute SQL query on logstore %s via PG protocol (non-retriable error): sql=%s",
                        logstore,
                        sql,
                    )
                    raise

                # Retriable error - log and retry if we have attempts left
                if attempt < max_retries - 1:
                    logger.warning(
                        "Failed to execute SQL query on logstore %s via PG protocol (attempt %d/%d): %s. Retrying...",
                        logstore,
                        attempt + 1,
                        max_retries,
                        str(e),
                    )
                    time.sleep(retry_delay)
                    retry_delay *= 2  # Exponential backoff
                else:
                    # Last attempt failed
                    logger.exception(
                        "Failed to execute SQL query on logstore %s via PG protocol after %d attempts: sql=%s",
                        logstore,
                        max_retries,
                        sql,
                    )
                    raise

        # This line should never be reached due to raise above, but makes type checker happy
        return []
|
||||
@@ -1,365 +0,0 @@
|
||||
"""
|
||||
LogStore implementation of DifyAPIWorkflowNodeExecutionRepository.
|
||||
|
||||
This module provides the LogStore-based implementation for service-layer
|
||||
WorkflowNodeExecutionModel operations using Aliyun SLS LogStore.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from extensions.logstore.aliyun_logstore import AliyunLogStore
|
||||
from models.workflow import WorkflowNodeExecutionModel
|
||||
from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _dict_to_workflow_node_execution_model(data: dict[str, Any]) -> WorkflowNodeExecutionModel:
    """
    Convert a LogStore result dictionary to a WorkflowNodeExecutionModel instance.

    Args:
        data: Dictionary from a LogStore query result. Values may arrive as
            strings or numbers depending on the query path (PG protocol vs SDK),
            so all numeric/datetime fields are coerced defensively.

    Returns:
        WorkflowNodeExecutionModel instance (detached from session).

    Note:
        The returned model is not attached to any SQLAlchemy session.
        Relationship fields (like offload_data) are not loaded from LogStore.
    """
    logger.debug("_dict_to_workflow_node_execution_model: data keys=%s", list(data.keys())[:5])

    def _parse_dt(value: Any) -> datetime | None:
        # LogStore may return datetimes as ISO strings, epoch numbers, or
        # already-parsed datetime objects; normalize all three forms.
        if not value:
            return None
        if isinstance(value, str):
            return datetime.fromisoformat(value)
        if isinstance(value, (int, float)):
            # NOTE(review): fromtimestamp() uses the local timezone — confirm
            # this matches how timestamps were written to LogStore.
            return datetime.fromtimestamp(value)
        return value

    def _to_int(value: Any, default: int = 0) -> int:
        # Guard against empty-string or malformed numeric fields from LogStore.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    def _to_float(value: Any, default: float = 0.0) -> float:
        try:
            return float(value)
        except (TypeError, ValueError):
            return default

    # Create model instance without session
    model = WorkflowNodeExecutionModel()

    # Map all required fields with validation
    # Critical fields - must not be None
    model.id = data.get("id") or ""
    model.tenant_id = data.get("tenant_id") or ""
    model.app_id = data.get("app_id") or ""
    model.workflow_id = data.get("workflow_id") or ""
    model.triggered_from = data.get("triggered_from") or ""
    model.node_id = data.get("node_id") or ""
    model.node_type = data.get("node_type") or ""
    model.status = data.get("status") or "running"  # Default status if missing
    model.title = data.get("title") or ""
    model.created_by_role = data.get("created_by_role") or ""
    model.created_by = data.get("created_by") or ""

    # Numeric fields with defaults (coerced defensively; see helpers above)
    model.index = _to_int(data.get("index", 0))
    model.elapsed_time = _to_float(data.get("elapsed_time", 0))

    # Optional fields
    model.workflow_run_id = data.get("workflow_run_id")
    model.predecessor_node_id = data.get("predecessor_node_id")
    model.node_execution_id = data.get("node_execution_id")
    model.inputs = data.get("inputs")
    model.process_data = data.get("process_data")
    model.outputs = data.get("outputs")
    model.error = data.get("error")
    model.execution_metadata = data.get("execution_metadata")

    # Datetime fields: created_at falls back to "now" when missing so the
    # model is always usable; finished_at stays unset for running executions.
    model.created_at = _parse_dt(data.get("created_at")) or datetime.now()

    finished_at = _parse_dt(data.get("finished_at"))
    if finished_at:
        model.finished_at = finished_at

    return model
|
||||
|
||||
|
||||
class LogstoreAPIWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecutionRepository):
    """
    LogStore implementation of DifyAPIWorkflowNodeExecutionRepository.

    Provides service-layer database operations for WorkflowNodeExecutionModel
    using LogStore SQL queries with optimized deduplication strategies.

    Records in LogStore are append-only: each update writes a new row with an
    incremented ``log_version``, so every read path must deduplicate by ``id``
    and keep only the row with the highest ``log_version``.

    NOTE(review): query parameters (tenant_id, app_id, ...) are interpolated
    into SQL / query strings via f-strings. These values appear to come from
    internal, trusted context, but confirm they can never carry user-controlled
    quotes before exposing any of these code paths to external input.
    """

    def __init__(self, session_maker: sessionmaker | None = None):
        """
        Initialize the repository with LogStore client.

        Args:
            session_maker: SQLAlchemy sessionmaker (unused, for compatibility with factory pattern)
        """
        logger.debug("LogstoreAPIWorkflowNodeExecutionRepository.__init__: initializing")
        self.logstore_client = AliyunLogStore()

    def get_node_last_execution(
        self,
        tenant_id: str,
        app_id: str,
        workflow_id: str,
        node_id: str,
    ) -> WorkflowNodeExecutionModel | None:
        """
        Get the most recent execution for a specific node.

        Uses query syntax to get raw logs and selects the one with max log_version.
        Returns the most recent execution ordered by created_at.

        Args:
            tenant_id: Tenant identifier for multi-tenant isolation.
            app_id: Application identifier.
            workflow_id: Workflow identifier.
            node_id: Node identifier within the workflow graph.

        Returns:
            The latest WorkflowNodeExecutionModel for the node, or None if no
            execution exists.

        Raises:
            Exception: re-raised from the underlying LogStore client on failure.
        """
        logger.debug(
            "get_node_last_execution: tenant_id=%s, app_id=%s, workflow_id=%s, node_id=%s",
            tenant_id,
            app_id,
            workflow_id,
            node_id,
        )
        try:
            # Check if PG protocol is supported
            if self.logstore_client.supports_pg_protocol:
                # Use PG protocol with SQL query (get latest version of each record)
                # via ROW_NUMBER() partitioned by id, ordered by log_version DESC.
                sql_query = f"""
                SELECT * FROM (
                    SELECT *,
                           ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn
                    FROM "{AliyunLogStore.workflow_node_execution_logstore}"
                    WHERE tenant_id = '{tenant_id}'
                      AND app_id = '{app_id}'
                      AND workflow_id = '{workflow_id}'
                      AND node_id = '{node_id}'
                      AND __time__ > 0
                ) AS subquery WHERE rn = 1
                LIMIT 100
                """
                results = self.logstore_client.execute_sql(
                    sql=sql_query,
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                )
            else:
                # Use SDK with LogStore query syntax
                query = (
                    f"tenant_id: {tenant_id} and app_id: {app_id} and workflow_id: {workflow_id} and node_id: {node_id}"
                )
                from_time = 0
                to_time = int(time.time())  # now

                results = self.logstore_client.get_logs(
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                    from_time=from_time,
                    to_time=to_time,
                    query=query,
                    line=100,
                    reverse=False,
                )

            if not results:
                return None

            # For SDK mode, group by id and select the one with max log_version for each group
            # For PG mode, this is already done by the SQL query
            if not self.logstore_client.supports_pg_protocol:
                id_to_results: dict[str, list[dict[str, Any]]] = {}
                for row in results:
                    row_id = row.get("id")
                    if row_id:
                        if row_id not in id_to_results:
                            id_to_results[row_id] = []
                        id_to_results[row_id].append(row)

                # For each id, select the row with max log_version
                deduplicated_results = []
                for rows in id_to_results.values():
                    if len(rows) > 1:
                        max_row = max(rows, key=lambda x: int(x.get("log_version", 0)))
                    else:
                        max_row = rows[0]
                    deduplicated_results.append(max_row)
            else:
                # For PG mode, results are already deduplicated by the SQL query
                deduplicated_results = results

            # Sort by created_at DESC and return the most recent one
            # NOTE(review): the sort key maps any non-numeric created_at to 0,
            # so ISO-string timestamps do NOT participate in the ordering —
            # confirm created_at is numeric in LogStore rows, otherwise the
            # "most recent" pick is effectively arbitrary for string values.
            deduplicated_results.sort(
                key=lambda x: x.get("created_at", 0) if isinstance(x.get("created_at"), (int, float)) else 0,
                reverse=True,
            )

            if deduplicated_results:
                return _dict_to_workflow_node_execution_model(deduplicated_results[0])

            return None

        except Exception:
            logger.exception("Failed to get node last execution from LogStore")
            raise

    def get_executions_by_workflow_run(
        self,
        tenant_id: str,
        app_id: str,
        workflow_run_id: str,
    ) -> Sequence[WorkflowNodeExecutionModel]:
        """
        Get all node executions for a specific workflow run.

        Uses query syntax to get raw logs and selects the one with max log_version for each node execution.
        Ordered by index DESC for trace visualization.

        Args:
            tenant_id: Tenant identifier for multi-tenant isolation.
            app_id: Application identifier.
            workflow_run_id: Workflow run whose node executions are fetched.

        Returns:
            Sequence of WorkflowNodeExecutionModel, sorted by index descending.

        Raises:
            Exception: re-raised from the underlying LogStore client on failure.
        """
        logger.debug(
            "[LogStore] get_executions_by_workflow_run: tenant_id=%s, app_id=%s, workflow_run_id=%s",
            tenant_id,
            app_id,
            workflow_run_id,
        )
        try:
            # Check if PG protocol is supported
            if self.logstore_client.supports_pg_protocol:
                # Use PG protocol with SQL query (get latest version of each record)
                sql_query = f"""
                SELECT * FROM (
                    SELECT *,
                           ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn
                    FROM "{AliyunLogStore.workflow_node_execution_logstore}"
                    WHERE tenant_id = '{tenant_id}'
                      AND app_id = '{app_id}'
                      AND workflow_run_id = '{workflow_run_id}'
                      AND __time__ > 0
                ) AS subquery WHERE rn = 1
                LIMIT 1000
                """
                results = self.logstore_client.execute_sql(
                    sql=sql_query,
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                )
            else:
                # Use SDK with LogStore query syntax
                query = f"tenant_id: {tenant_id} and app_id: {app_id} and workflow_run_id: {workflow_run_id}"
                from_time = 0
                to_time = int(time.time())  # now

                results = self.logstore_client.get_logs(
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                    from_time=from_time,
                    to_time=to_time,
                    query=query,
                    line=1000,  # Get more results for node executions
                    reverse=False,
                )

            if not results:
                return []

            # For SDK mode, group by id and select the one with max log_version for each group
            # For PG mode, this is already done by the SQL query
            models = []
            if not self.logstore_client.supports_pg_protocol:
                id_to_results: dict[str, list[dict[str, Any]]] = {}
                for row in results:
                    row_id = row.get("id")
                    if row_id:
                        if row_id not in id_to_results:
                            id_to_results[row_id] = []
                        id_to_results[row_id].append(row)

                # For each id, select the row with max log_version
                for rows in id_to_results.values():
                    if len(rows) > 1:
                        max_row = max(rows, key=lambda x: int(x.get("log_version", 0)))
                    else:
                        max_row = rows[0]

                    model = _dict_to_workflow_node_execution_model(max_row)
                    if model and model.id:  # Ensure model is valid
                        models.append(model)
            else:
                # For PG mode, results are already deduplicated by the SQL query
                for row in results:
                    model = _dict_to_workflow_node_execution_model(row)
                    if model and model.id:  # Ensure model is valid
                        models.append(model)

            # Sort by index DESC for trace visualization
            models.sort(key=lambda x: x.index, reverse=True)

            return models

        except Exception:
            logger.exception("Failed to get executions by workflow run from LogStore")
            raise

    def get_execution_by_id(
        self,
        execution_id: str,
        tenant_id: str | None = None,
    ) -> WorkflowNodeExecutionModel | None:
        """
        Get a workflow node execution by its ID.

        Uses query syntax to get raw logs and selects the one with max log_version.

        Args:
            execution_id: Primary identifier of the node execution.
            tenant_id: Optional tenant filter; when None the lookup is global.

        Returns:
            The WorkflowNodeExecutionModel, or None if not found.

        Raises:
            Exception: re-raised from the underlying LogStore client on failure.
        """
        logger.debug("get_execution_by_id: execution_id=%s, tenant_id=%s", execution_id, tenant_id)
        try:
            # Check if PG protocol is supported
            if self.logstore_client.supports_pg_protocol:
                # Use PG protocol with SQL query (get latest version of record)
                # tenant filter is optional here, unlike the other read paths.
                tenant_filter = f"AND tenant_id = '{tenant_id}'" if tenant_id else ""
                sql_query = f"""
                SELECT * FROM (
                    SELECT *,
                           ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn
                    FROM "{AliyunLogStore.workflow_node_execution_logstore}"
                    WHERE id = '{execution_id}' {tenant_filter} AND __time__ > 0
                ) AS subquery WHERE rn = 1
                LIMIT 1
                """
                results = self.logstore_client.execute_sql(
                    sql=sql_query,
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                )
            else:
                # Use SDK with LogStore query syntax
                if tenant_id:
                    query = f"id: {execution_id} and tenant_id: {tenant_id}"
                else:
                    query = f"id: {execution_id}"

                from_time = 0
                to_time = int(time.time())  # now

                results = self.logstore_client.get_logs(
                    logstore=AliyunLogStore.workflow_node_execution_logstore,
                    from_time=from_time,
                    to_time=to_time,
                    query=query,
                    line=100,
                    reverse=False,
                )

            if not results:
                return None

            # For PG mode, result is already the latest version
            # For SDK mode, if multiple results, select the one with max log_version
            if self.logstore_client.supports_pg_protocol or len(results) == 1:
                return _dict_to_workflow_node_execution_model(results[0])
            else:
                max_result = max(results, key=lambda x: int(x.get("log_version", 0)))
                return _dict_to_workflow_node_execution_model(max_result)

        except Exception:
            logger.exception("Failed to get execution by ID from LogStore: execution_id=%s", execution_id)
            raise
|
||||
@@ -1,757 +0,0 @@
|
||||
"""
|
||||
LogStore API WorkflowRun Repository Implementation
|
||||
|
||||
This module provides the LogStore-based implementation of the APIWorkflowRunRepository
|
||||
protocol. It handles service-layer WorkflowRun database operations using Aliyun SLS LogStore
|
||||
with optimized queries for statistics and pagination.
|
||||
|
||||
Key Features:
|
||||
- LogStore SQL queries for aggregation and statistics
|
||||
- Optimized deduplication using finished_at IS NOT NULL filter
|
||||
- Window functions only when necessary (running status queries)
|
||||
- Multi-tenant data isolation and security
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime
|
||||
from typing import Any, cast
|
||||
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from extensions.logstore.aliyun_logstore import AliyunLogStore
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
from models.workflow import WorkflowRun
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.types import (
|
||||
AverageInteractionStats,
|
||||
DailyRunsStats,
|
||||
DailyTerminalsStats,
|
||||
DailyTokenCostStats,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _dict_to_workflow_run(data: dict[str, Any]) -> WorkflowRun:
    """
    Convert a LogStore result dictionary to a WorkflowRun instance.

    Args:
        data: Dictionary from a LogStore query result. Values may arrive as
            strings or numbers depending on the query path (PG protocol vs SDK),
            so all numeric/datetime fields are coerced defensively.

    Returns:
        WorkflowRun instance (detached from any SQLAlchemy session).
    """
    logger.debug("_dict_to_workflow_run: data keys=%s", list(data.keys())[:5])

    def _parse_dt(value: Any) -> datetime | None:
        # LogStore may return datetimes as ISO strings, epoch numbers, or
        # already-parsed datetime objects; normalize all three forms.
        if not value:
            return None
        if isinstance(value, str):
            return datetime.fromisoformat(value)
        if isinstance(value, (int, float)):
            # NOTE(review): fromtimestamp() uses the local timezone — confirm
            # this matches how timestamps were written to LogStore.
            return datetime.fromtimestamp(value)
        return value

    def _to_int(value: Any, default: int = 0) -> int:
        # Guard against empty-string or malformed numeric fields from LogStore.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    def _to_float(value: Any, default: float = 0.0) -> float:
        try:
            return float(value)
        except (TypeError, ValueError):
            return default

    # Create model instance without session
    model = WorkflowRun()

    # Map all required fields with validation
    # Critical fields - must not be None
    model.id = data.get("id") or ""
    model.tenant_id = data.get("tenant_id") or ""
    model.app_id = data.get("app_id") or ""
    model.workflow_id = data.get("workflow_id") or ""
    model.type = data.get("type") or ""
    model.triggered_from = data.get("triggered_from") or ""
    model.version = data.get("version") or ""
    model.status = data.get("status") or "running"  # Default status if missing
    model.created_by_role = data.get("created_by_role") or ""
    model.created_by = data.get("created_by") or ""

    # Numeric fields with defaults (coerced defensively; see helpers above)
    model.total_tokens = _to_int(data.get("total_tokens", 0))
    model.total_steps = _to_int(data.get("total_steps", 0))
    model.exceptions_count = _to_int(data.get("exceptions_count", 0))

    # Optional fields
    model.graph = data.get("graph")
    model.inputs = data.get("inputs")
    model.outputs = data.get("outputs")
    # LogStore rows may carry the error under either key.
    model.error = data.get("error_message") or data.get("error")

    # Datetime fields: started_at is preferred, falling back to created_at,
    # then to "now" so the model is always usable.
    model.created_at = _parse_dt(data.get("started_at") or data.get("created_at")) or datetime.now()

    finished_at = _parse_dt(data.get("finished_at"))
    if finished_at:
        model.finished_at = finished_at

    # Compute elapsed_time from started_at and finished_at.
    # LogStore doesn't store elapsed_time, it's computed in WorkflowExecution domain entity.
    if model.finished_at and model.created_at:
        model.elapsed_time = (model.finished_at - model.created_at).total_seconds()
    else:
        model.elapsed_time = _to_float(data.get("elapsed_time", 0))

    return model
|
||||
|
||||
|
||||
class LogstoreAPIWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
"""
|
||||
LogStore implementation of APIWorkflowRunRepository.
|
||||
|
||||
Provides service-layer WorkflowRun database operations using LogStore SQL
|
||||
with optimized query strategies:
|
||||
- Use finished_at IS NOT NULL for deduplication (10-100x faster)
|
||||
- Use window functions only when running status is required
|
||||
- Proper time range filtering for LogStore queries
|
||||
"""
|
||||
|
||||
def __init__(self, session_maker: sessionmaker | None = None):
|
||||
"""
|
||||
Initialize the repository with LogStore client.
|
||||
|
||||
Args:
|
||||
session_maker: SQLAlchemy sessionmaker (unused, for compatibility with factory pattern)
|
||||
"""
|
||||
logger.debug("LogstoreAPIWorkflowRunRepository.__init__: initializing")
|
||||
self.logstore_client = AliyunLogStore()
|
||||
|
||||
# Control flag for dual-read (fallback to PostgreSQL when LogStore returns no results)
|
||||
# Set to True to enable fallback for safe migration from PostgreSQL to LogStore
|
||||
# Set to False for new deployments without legacy data in PostgreSQL
|
||||
self._enable_dual_read = os.environ.get("LOGSTORE_DUAL_READ_ENABLED", "true").lower() == "true"
|
||||
|
||||
    def get_paginated_workflow_runs(
        self,
        tenant_id: str,
        app_id: str,
        triggered_from: WorkflowRunTriggeredFrom | Sequence[WorkflowRunTriggeredFrom],
        limit: int = 20,
        last_id: str | None = None,
        status: str | None = None,
    ) -> InfiniteScrollPagination:
        """
        Get paginated workflow runs with filtering.

        Uses window function for deduplication to support both running and finished states.

        Args:
            tenant_id: Tenant identifier for multi-tenant isolation
            app_id: Application identifier
            triggered_from: Filter by trigger source(s)
            limit: Maximum number of records to return (default: 20)
            last_id: Cursor for pagination - ID of the last record from previous page
                (currently NOT applied as a filter; see TODO below)
            status: Optional filter by status

        Returns:
            InfiniteScrollPagination object

        Raises:
            Exception: re-raised from the underlying LogStore client on failure.
        """
        logger.debug(
            "get_paginated_workflow_runs: tenant_id=%s, app_id=%s, limit=%d, status=%s",
            tenant_id,
            app_id,
            limit,
            status,
        )
        # Convert triggered_from to list if needed
        if isinstance(triggered_from, WorkflowRunTriggeredFrom):
            triggered_from_list = [triggered_from]
        else:
            triggered_from_list = list(triggered_from)

        # Build triggered_from filter (OR across all requested trigger sources)
        triggered_from_filter = " OR ".join([f"triggered_from='{tf.value}'" for tf in triggered_from_list])

        # Build status filter
        status_filter = f"AND status='{status}'" if status else ""

        # Build last_id filter for pagination
        # Note: This is simplified. In production, you'd need to track created_at from last record
        last_id_filter = ""
        if last_id:
            # TODO: Implement proper cursor-based pagination with created_at
            # Until then, passing last_id only logs a warning and the first
            # page is returned again.
            logger.warning("last_id pagination not fully implemented for LogStore")

        # Use window function to get latest log_version of each workflow run.
        # LIMIT is limit + 1 so we can detect whether another page exists.
        sql = f"""
        SELECT * FROM (
            SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
            FROM {AliyunLogStore.workflow_execution_logstore}
            WHERE tenant_id='{tenant_id}'
              AND app_id='{app_id}'
              AND ({triggered_from_filter})
              {status_filter}
              {last_id_filter}
        ) t
        WHERE rn = 1
        ORDER BY created_at DESC
        LIMIT {limit + 1}
        """

        try:
            results = self.logstore_client.execute_sql(
                sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore, from_time=None, to_time=None
            )

            # Check if there are more records (we fetched limit + 1 above)
            has_more = len(results) > limit
            if has_more:
                results = results[:limit]

            # Convert results to WorkflowRun models
            workflow_runs = [_dict_to_workflow_run(row) for row in results]
            return InfiniteScrollPagination(data=workflow_runs, limit=limit, has_more=has_more)

        except Exception:
            logger.exception("Failed to get paginated workflow runs from LogStore")
            raise
|
||||
|
||||
    def get_workflow_run_by_id(
        self,
        tenant_id: str,
        app_id: str,
        run_id: str,
    ) -> WorkflowRun | None:
        """
        Get a specific workflow run by ID with tenant and app isolation.

        Uses query syntax to get raw logs and selects the one with max log_version in code.
        Falls back to PostgreSQL if not found in LogStore (for data consistency during migration).

        Args:
            tenant_id: Tenant identifier for multi-tenant isolation.
            app_id: Application identifier.
            run_id: Workflow run identifier.

        Returns:
            The WorkflowRun, or None if not found in LogStore (and dual-read
            is disabled or PostgreSQL also has no row).

        Raises:
            Exception: the original LogStore error is re-raised when dual-read
            is disabled, or when the PostgreSQL fallback also fails.
        """
        logger.debug("get_workflow_run_by_id: tenant_id=%s, app_id=%s, run_id=%s", tenant_id, app_id, run_id)

        try:
            # Check if PG protocol is supported
            if self.logstore_client.supports_pg_protocol:
                # Use PG protocol with SQL query (get latest version of record)
                sql_query = f"""
                SELECT * FROM (
                    SELECT *,
                           ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn
                    FROM "{AliyunLogStore.workflow_execution_logstore}"
                    WHERE id = '{run_id}' AND tenant_id = '{tenant_id}' AND app_id = '{app_id}' AND __time__ > 0
                ) AS subquery WHERE rn = 1
                LIMIT 100
                """
                results = self.logstore_client.execute_sql(
                    sql=sql_query,
                    logstore=AliyunLogStore.workflow_execution_logstore,
                )
            else:
                # Use SDK with LogStore query syntax
                query = f"id: {run_id} and tenant_id: {tenant_id} and app_id: {app_id}"
                from_time = 0
                to_time = int(time.time())  # now

                results = self.logstore_client.get_logs(
                    logstore=AliyunLogStore.workflow_execution_logstore,
                    from_time=from_time,
                    to_time=to_time,
                    query=query,
                    line=100,
                    reverse=False,
                )

            if not results:
                # Fallback to PostgreSQL for records created before LogStore migration
                if self._enable_dual_read:
                    logger.debug(
                        "WorkflowRun not found in LogStore, falling back to PostgreSQL: "
                        "run_id=%s, tenant_id=%s, app_id=%s",
                        run_id,
                        tenant_id,
                        app_id,
                    )
                    return self._fallback_get_workflow_run_by_id_with_tenant(run_id, tenant_id, app_id)
                return None

            # For PG mode, results are already deduplicated by the SQL query
            # For SDK mode, if multiple results, select the one with max log_version
            if self.logstore_client.supports_pg_protocol or len(results) == 1:
                return _dict_to_workflow_run(results[0])
            else:
                max_result = max(results, key=lambda x: int(x.get("log_version", 0)))
                return _dict_to_workflow_run(max_result)

        except Exception:
            logger.exception("Failed to get workflow run by ID from LogStore: run_id=%s", run_id)
            # Try PostgreSQL fallback on any error (only if dual-read is enabled)
            if self._enable_dual_read:
                try:
                    return self._fallback_get_workflow_run_by_id_with_tenant(run_id, tenant_id, app_id)
                except Exception:
                    logger.exception(
                        "PostgreSQL fallback also failed: run_id=%s, tenant_id=%s, app_id=%s", run_id, tenant_id, app_id
                    )
            # Re-raise the original LogStore error.
            raise
|
||||
|
||||
def _fallback_get_workflow_run_by_id_with_tenant(
|
||||
self, run_id: str, tenant_id: str, app_id: str
|
||||
) -> WorkflowRun | None:
|
||||
"""Fallback to PostgreSQL query for records not in LogStore (with tenant isolation)."""
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
with Session(db.engine) as session:
|
||||
stmt = select(WorkflowRun).where(
|
||||
WorkflowRun.id == run_id, WorkflowRun.tenant_id == tenant_id, WorkflowRun.app_id == app_id
|
||||
)
|
||||
return session.scalar(stmt)
|
||||
|
||||
    def get_workflow_run_by_id_without_tenant(
        self,
        run_id: str,
    ) -> WorkflowRun | None:
        """
        Get a specific workflow run by ID without tenant/app context.

        Uses query syntax to get raw logs and selects the one with max log_version.
        Falls back to PostgreSQL if not found in LogStore (controlled by LOGSTORE_DUAL_READ_ENABLED).

        Args:
            run_id: Workflow run identifier.

        Returns:
            The WorkflowRun, or None if not found anywhere.

        Raises:
            Exception: the original LogStore error is re-raised when dual-read
            is disabled, or when the PostgreSQL fallback also fails.
        """
        logger.debug("get_workflow_run_by_id_without_tenant: run_id=%s", run_id)

        try:
            # Check if PG protocol is supported
            if self.logstore_client.supports_pg_protocol:
                # Use PG protocol with SQL query (get latest version of record)
                sql_query = f"""
                SELECT * FROM (
                    SELECT *,
                           ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) as rn
                    FROM "{AliyunLogStore.workflow_execution_logstore}"
                    WHERE id = '{run_id}' AND __time__ > 0
                ) AS subquery WHERE rn = 1
                LIMIT 100
                """
                results = self.logstore_client.execute_sql(
                    sql=sql_query,
                    logstore=AliyunLogStore.workflow_execution_logstore,
                )
            else:
                # Use SDK with LogStore query syntax
                query = f"id: {run_id}"
                from_time = 0
                to_time = int(time.time())  # now

                results = self.logstore_client.get_logs(
                    logstore=AliyunLogStore.workflow_execution_logstore,
                    from_time=from_time,
                    to_time=to_time,
                    query=query,
                    line=100,
                    reverse=False,
                )

            if not results:
                # Fallback to PostgreSQL for records created before LogStore migration
                if self._enable_dual_read:
                    logger.debug("WorkflowRun not found in LogStore, falling back to PostgreSQL: run_id=%s", run_id)
                    return self._fallback_get_workflow_run_by_id(run_id)
                return None

            # For PG mode, results are already deduplicated by the SQL query
            # For SDK mode, if multiple results, select the one with max log_version
            if self.logstore_client.supports_pg_protocol or len(results) == 1:
                return _dict_to_workflow_run(results[0])
            else:
                max_result = max(results, key=lambda x: int(x.get("log_version", 0)))
                return _dict_to_workflow_run(max_result)

        except Exception:
            logger.exception("Failed to get workflow run without tenant: run_id=%s", run_id)
            # Try PostgreSQL fallback on any error (only if dual-read is enabled)
            if self._enable_dual_read:
                try:
                    return self._fallback_get_workflow_run_by_id(run_id)
                except Exception:
                    logger.exception("PostgreSQL fallback also failed: run_id=%s", run_id)
            # Re-raise the original LogStore error.
            raise
|
||||
|
||||
def _fallback_get_workflow_run_by_id(self, run_id: str) -> WorkflowRun | None:
|
||||
"""Fallback to PostgreSQL query for records not in LogStore."""
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
with Session(db.engine) as session:
|
||||
stmt = select(WorkflowRun).where(WorkflowRun.id == run_id)
|
||||
return session.scalar(stmt)
|
||||
|
||||
    def get_workflow_runs_count(
        self,
        tenant_id: str,
        app_id: str,
        triggered_from: str,
        status: str | None = None,
        time_range: str | None = None,
    ) -> dict[str, int]:
        """
        Get workflow runs count statistics grouped by status.

        Optimization: Use finished_at IS NOT NULL for completed runs (10-50x faster)

        Args:
            tenant_id: Tenant identifier for multi-tenant isolation.
            app_id: Application identifier.
            triggered_from: Trigger source to filter on.
            status: Optional single status to count; when given, all other
                status buckets in the result are zero.
            time_range: Currently ignored (see TODO below).

        Returns:
            Dict with keys "total", "running", "succeeded", "failed",
            "stopped", "partial-succeeded".

        Raises:
            Exception: re-raised from the underlying LogStore client on failure.
        """
        logger.debug(
            "get_workflow_runs_count: tenant_id=%s, app_id=%s, triggered_from=%s, status=%s",
            tenant_id,
            app_id,
            triggered_from,
            status,
        )
        # Build time range filter
        time_filter = ""
        if time_range:
            # TODO: Parse time_range and convert to from_time/to_time
            logger.warning("time_range filter not implemented")

        # If status is provided, simple count
        if status:
            if status == "running":
                # Running status requires window function: a run is "running"
                # only if its LATEST version still says running, so we must
                # deduplicate by log_version first.
                sql = f"""
                SELECT COUNT(*) as count
                FROM (
                    SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
                    FROM {AliyunLogStore.workflow_execution_logstore}
                    WHERE tenant_id='{tenant_id}'
                      AND app_id='{app_id}'
                      AND triggered_from='{triggered_from}'
                      AND status='running'
                      {time_filter}
                ) t
                WHERE rn = 1
                """
            else:
                # Finished status uses optimized filter: finished_at is only
                # ever set on the final version of a run, so COUNT(DISTINCT id)
                # suffices without a window function.
                sql = f"""
                SELECT COUNT(DISTINCT id) as count
                FROM {AliyunLogStore.workflow_execution_logstore}
                WHERE tenant_id='{tenant_id}'
                  AND app_id='{app_id}'
                  AND triggered_from='{triggered_from}'
                  AND status='{status}'
                  AND finished_at IS NOT NULL
                  {time_filter}
                """

            try:
                results = self.logstore_client.execute_sql(
                    sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
                )
                # NOTE(review): "count" may come back as a string depending on
                # the client's SQL result typing — confirm execute_sql returns
                # ints here; callers receive this value unconverted.
                count = results[0]["count"] if results and len(results) > 0 else 0

                return {
                    "total": count,
                    "running": count if status == "running" else 0,
                    "succeeded": count if status == "succeeded" else 0,
                    "failed": count if status == "failed" else 0,
                    "stopped": count if status == "stopped" else 0,
                    "partial-succeeded": count if status == "partial-succeeded" else 0,
                }
            except Exception:
                logger.exception("Failed to get workflow runs count")
                raise

        # No status filter - get counts grouped by status
        # Use optimized query for finished runs, separate query for running
        try:
            # Count finished runs grouped by status
            finished_sql = f"""
            SELECT status, COUNT(DISTINCT id) as count
            FROM {AliyunLogStore.workflow_execution_logstore}
            WHERE tenant_id='{tenant_id}'
              AND app_id='{app_id}'
              AND triggered_from='{triggered_from}'
              AND finished_at IS NOT NULL
              {time_filter}
            GROUP BY status
            """

            # Count running runs (requires window function; see note above)
            running_sql = f"""
            SELECT COUNT(*) as count
            FROM (
                SELECT *, ROW_NUMBER() OVER (PARTITION BY id ORDER BY log_version DESC) AS rn
                FROM {AliyunLogStore.workflow_execution_logstore}
                WHERE tenant_id='{tenant_id}'
                  AND app_id='{app_id}'
                  AND triggered_from='{triggered_from}'
                  AND status='running'
                  {time_filter}
            ) t
            WHERE rn = 1
            """

            finished_results = self.logstore_client.execute_sql(
                sql=finished_sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
            )
            running_results = self.logstore_client.execute_sql(
                sql=running_sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
            )

            # Build response
            status_counts = {
                "running": 0,
                "succeeded": 0,
                "failed": 0,
                "stopped": 0,
                "partial-succeeded": 0,
            }

            total = 0
            for result in finished_results:
                status_val = result.get("status")
                # NOTE(review): as above, "count" may be a string — confirm
                # execute_sql returns ints, otherwise `total += count` breaks.
                count = result.get("count", 0)
                if status_val in status_counts:
                    status_counts[status_val] = count
                total += count

            # Add running count
            running_count = running_results[0]["count"] if running_results and len(running_results) > 0 else 0
            status_counts["running"] = running_count
            total += running_count

            # Merge: "total" first, then per-status buckets.
            return {"total": total} | status_counts

        except Exception:
            logger.exception("Failed to get workflow runs count")
            raise
|
||||
|
||||
def get_daily_runs_statistics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
triggered_from: str,
|
||||
start_date: datetime | None = None,
|
||||
end_date: datetime | None = None,
|
||||
timezone: str = "UTC",
|
||||
) -> list[DailyRunsStats]:
|
||||
"""
|
||||
Get daily runs statistics using optimized query.
|
||||
|
||||
Optimization: Use finished_at IS NOT NULL + COUNT(DISTINCT id) (20-100x faster)
|
||||
"""
|
||||
logger.debug(
|
||||
"get_daily_runs_statistics: tenant_id=%s, app_id=%s, triggered_from=%s", tenant_id, app_id, triggered_from
|
||||
)
|
||||
# Build time range filter
|
||||
time_filter = ""
|
||||
if start_date:
|
||||
time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
|
||||
if end_date:
|
||||
time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
|
||||
|
||||
# Optimized query: Use finished_at filter to avoid window function
|
||||
sql = f"""
|
||||
SELECT DATE(from_unixtime(__time__)) as date, COUNT(DISTINCT id) as runs
|
||||
FROM {AliyunLogStore.workflow_execution_logstore}
|
||||
WHERE tenant_id='{tenant_id}'
|
||||
AND app_id='{app_id}'
|
||||
AND triggered_from='{triggered_from}'
|
||||
AND finished_at IS NOT NULL
|
||||
{time_filter}
|
||||
GROUP BY date
|
||||
ORDER BY date
|
||||
"""
|
||||
|
||||
try:
|
||||
results = self.logstore_client.execute_sql(
|
||||
sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
|
||||
)
|
||||
|
||||
response_data = []
|
||||
for row in results:
|
||||
response_data.append({"date": str(row.get("date", "")), "runs": row.get("runs", 0)})
|
||||
|
||||
return cast(list[DailyRunsStats], response_data)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Failed to get daily runs statistics")
|
||||
raise
|
||||
|
||||
def get_daily_terminals_statistics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
triggered_from: str,
|
||||
start_date: datetime | None = None,
|
||||
end_date: datetime | None = None,
|
||||
timezone: str = "UTC",
|
||||
) -> list[DailyTerminalsStats]:
|
||||
"""
|
||||
Get daily terminals statistics using optimized query.
|
||||
|
||||
Optimization: Use finished_at IS NOT NULL + COUNT(DISTINCT created_by) (20-100x faster)
|
||||
"""
|
||||
logger.debug(
|
||||
"get_daily_terminals_statistics: tenant_id=%s, app_id=%s, triggered_from=%s",
|
||||
tenant_id,
|
||||
app_id,
|
||||
triggered_from,
|
||||
)
|
||||
# Build time range filter
|
||||
time_filter = ""
|
||||
if start_date:
|
||||
time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
|
||||
if end_date:
|
||||
time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
|
||||
|
||||
sql = f"""
|
||||
SELECT DATE(from_unixtime(__time__)) as date, COUNT(DISTINCT created_by) as terminal_count
|
||||
FROM {AliyunLogStore.workflow_execution_logstore}
|
||||
WHERE tenant_id='{tenant_id}'
|
||||
AND app_id='{app_id}'
|
||||
AND triggered_from='{triggered_from}'
|
||||
AND finished_at IS NOT NULL
|
||||
{time_filter}
|
||||
GROUP BY date
|
||||
ORDER BY date
|
||||
"""
|
||||
|
||||
try:
|
||||
results = self.logstore_client.execute_sql(
|
||||
sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
|
||||
)
|
||||
|
||||
response_data = []
|
||||
for row in results:
|
||||
response_data.append({"date": str(row.get("date", "")), "terminal_count": row.get("terminal_count", 0)})
|
||||
|
||||
return cast(list[DailyTerminalsStats], response_data)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Failed to get daily terminals statistics")
|
||||
raise
|
||||
|
||||
def get_daily_token_cost_statistics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
triggered_from: str,
|
||||
start_date: datetime | None = None,
|
||||
end_date: datetime | None = None,
|
||||
timezone: str = "UTC",
|
||||
) -> list[DailyTokenCostStats]:
|
||||
"""
|
||||
Get daily token cost statistics using optimized query.
|
||||
|
||||
Optimization: Use finished_at IS NOT NULL + SUM(total_tokens) (20-100x faster)
|
||||
"""
|
||||
logger.debug(
|
||||
"get_daily_token_cost_statistics: tenant_id=%s, app_id=%s, triggered_from=%s",
|
||||
tenant_id,
|
||||
app_id,
|
||||
triggered_from,
|
||||
)
|
||||
# Build time range filter
|
||||
time_filter = ""
|
||||
if start_date:
|
||||
time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
|
||||
if end_date:
|
||||
time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
|
||||
|
||||
sql = f"""
|
||||
SELECT DATE(from_unixtime(__time__)) as date, SUM(total_tokens) as token_count
|
||||
FROM {AliyunLogStore.workflow_execution_logstore}
|
||||
WHERE tenant_id='{tenant_id}'
|
||||
AND app_id='{app_id}'
|
||||
AND triggered_from='{triggered_from}'
|
||||
AND finished_at IS NOT NULL
|
||||
{time_filter}
|
||||
GROUP BY date
|
||||
ORDER BY date
|
||||
"""
|
||||
|
||||
try:
|
||||
results = self.logstore_client.execute_sql(
|
||||
sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
|
||||
)
|
||||
|
||||
response_data = []
|
||||
for row in results:
|
||||
response_data.append({"date": str(row.get("date", "")), "token_count": row.get("token_count", 0)})
|
||||
|
||||
return cast(list[DailyTokenCostStats], response_data)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Failed to get daily token cost statistics")
|
||||
raise
|
||||
|
||||
def get_average_app_interaction_statistics(
|
||||
self,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
triggered_from: str,
|
||||
start_date: datetime | None = None,
|
||||
end_date: datetime | None = None,
|
||||
timezone: str = "UTC",
|
||||
) -> list[AverageInteractionStats]:
|
||||
"""
|
||||
Get average app interaction statistics using optimized query.
|
||||
|
||||
Optimization: Use finished_at IS NOT NULL + AVG (20-100x faster)
|
||||
"""
|
||||
logger.debug(
|
||||
"get_average_app_interaction_statistics: tenant_id=%s, app_id=%s, triggered_from=%s",
|
||||
tenant_id,
|
||||
app_id,
|
||||
triggered_from,
|
||||
)
|
||||
# Build time range filter
|
||||
time_filter = ""
|
||||
if start_date:
|
||||
time_filter += f" AND __time__ >= to_unixtime(from_iso8601_timestamp('{start_date.isoformat()}'))"
|
||||
if end_date:
|
||||
time_filter += f" AND __time__ < to_unixtime(from_iso8601_timestamp('{end_date.isoformat()}'))"
|
||||
|
||||
sql = f"""
|
||||
SELECT
|
||||
AVG(sub.interactions) AS interactions,
|
||||
sub.date
|
||||
FROM (
|
||||
SELECT
|
||||
DATE(from_unixtime(__time__)) AS date,
|
||||
created_by,
|
||||
COUNT(DISTINCT id) AS interactions
|
||||
FROM {AliyunLogStore.workflow_execution_logstore}
|
||||
WHERE tenant_id='{tenant_id}'
|
||||
AND app_id='{app_id}'
|
||||
AND triggered_from='{triggered_from}'
|
||||
AND finished_at IS NOT NULL
|
||||
{time_filter}
|
||||
GROUP BY date, created_by
|
||||
) sub
|
||||
GROUP BY sub.date
|
||||
"""
|
||||
|
||||
try:
|
||||
results = self.logstore_client.execute_sql(
|
||||
sql=sql, query="*", logstore=AliyunLogStore.workflow_execution_logstore
|
||||
)
|
||||
|
||||
response_data = []
|
||||
for row in results:
|
||||
response_data.append(
|
||||
{
|
||||
"date": str(row.get("date", "")),
|
||||
"interactions": float(row.get("interactions", 0)),
|
||||
}
|
||||
)
|
||||
|
||||
return cast(list[AverageInteractionStats], response_data)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Failed to get average app interaction statistics")
|
||||
raise
|
||||
@@ -1,164 +0,0 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from typing import Union
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
|
||||
from core.workflow.entities import WorkflowExecution
|
||||
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
|
||||
from extensions.logstore.aliyun_logstore import AliyunLogStore
|
||||
from libs.helper import extract_tenant_id
|
||||
from models import (
|
||||
Account,
|
||||
CreatorUserRole,
|
||||
EndUser,
|
||||
)
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository):
|
||||
def __init__(
|
||||
self,
|
||||
session_factory: sessionmaker | Engine,
|
||||
user: Union[Account, EndUser],
|
||||
app_id: str | None,
|
||||
triggered_from: WorkflowRunTriggeredFrom | None,
|
||||
):
|
||||
"""
|
||||
Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.
|
||||
|
||||
Args:
|
||||
session_factory: SQLAlchemy sessionmaker or engine for creating sessions
|
||||
user: Account or EndUser object containing tenant_id, user ID, and role information
|
||||
app_id: App ID for filtering by application (can be None)
|
||||
triggered_from: Source of the execution trigger (DEBUGGING or APP_RUN)
|
||||
"""
|
||||
logger.debug(
|
||||
"LogstoreWorkflowExecutionRepository.__init__: app_id=%s, triggered_from=%s", app_id, triggered_from
|
||||
)
|
||||
# Initialize LogStore client
|
||||
# Note: Project/logstore/index initialization is done at app startup via ext_logstore
|
||||
self.logstore_client = AliyunLogStore()
|
||||
|
||||
# Extract tenant_id from user
|
||||
tenant_id = extract_tenant_id(user)
|
||||
if not tenant_id:
|
||||
raise ValueError("User must have a tenant_id or current_tenant_id")
|
||||
self._tenant_id = tenant_id
|
||||
|
||||
# Store app context
|
||||
self._app_id = app_id
|
||||
|
||||
# Extract user context
|
||||
self._triggered_from = triggered_from
|
||||
self._creator_user_id = user.id
|
||||
|
||||
# Determine user role based on user type
|
||||
self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER
|
||||
|
||||
# Initialize SQL repository for dual-write support
|
||||
self.sql_repository = SQLAlchemyWorkflowExecutionRepository(session_factory, user, app_id, triggered_from)
|
||||
|
||||
# Control flag for dual-write (write to both LogStore and SQL database)
|
||||
# Set to True to enable dual-write for safe migration, False to use LogStore only
|
||||
self._enable_dual_write = os.environ.get("LOGSTORE_DUAL_WRITE_ENABLED", "true").lower() == "true"
|
||||
|
||||
def _to_logstore_model(self, domain_model: WorkflowExecution) -> list[tuple[str, str]]:
|
||||
"""
|
||||
Convert a domain model to a logstore model (List[Tuple[str, str]]).
|
||||
|
||||
Args:
|
||||
domain_model: The domain model to convert
|
||||
|
||||
Returns:
|
||||
The logstore model as a list of key-value tuples
|
||||
"""
|
||||
logger.debug(
|
||||
"_to_logstore_model: id=%s, workflow_id=%s, status=%s",
|
||||
domain_model.id_,
|
||||
domain_model.workflow_id,
|
||||
domain_model.status.value,
|
||||
)
|
||||
# Use values from constructor if provided
|
||||
if not self._triggered_from:
|
||||
raise ValueError("triggered_from is required in repository constructor")
|
||||
if not self._creator_user_id:
|
||||
raise ValueError("created_by is required in repository constructor")
|
||||
if not self._creator_user_role:
|
||||
raise ValueError("created_by_role is required in repository constructor")
|
||||
|
||||
# Generate log_version as nanosecond timestamp for record versioning
|
||||
log_version = str(time.time_ns())
|
||||
|
||||
logstore_model = [
|
||||
("id", domain_model.id_),
|
||||
("log_version", log_version), # Add log_version field for append-only writes
|
||||
("tenant_id", self._tenant_id),
|
||||
("app_id", self._app_id or ""),
|
||||
("workflow_id", domain_model.workflow_id),
|
||||
(
|
||||
"triggered_from",
|
||||
self._triggered_from.value if hasattr(self._triggered_from, "value") else str(self._triggered_from),
|
||||
),
|
||||
("type", domain_model.workflow_type.value),
|
||||
("version", domain_model.workflow_version),
|
||||
("graph", json.dumps(domain_model.graph, ensure_ascii=False) if domain_model.graph else "{}"),
|
||||
("inputs", json.dumps(domain_model.inputs, ensure_ascii=False) if domain_model.inputs else "{}"),
|
||||
("outputs", json.dumps(domain_model.outputs, ensure_ascii=False) if domain_model.outputs else "{}"),
|
||||
("status", domain_model.status.value),
|
||||
("error_message", domain_model.error_message or ""),
|
||||
("total_tokens", str(domain_model.total_tokens)),
|
||||
("total_steps", str(domain_model.total_steps)),
|
||||
("exceptions_count", str(domain_model.exceptions_count)),
|
||||
(
|
||||
"created_by_role",
|
||||
self._creator_user_role.value
|
||||
if hasattr(self._creator_user_role, "value")
|
||||
else str(self._creator_user_role),
|
||||
),
|
||||
("created_by", self._creator_user_id),
|
||||
("started_at", domain_model.started_at.isoformat() if domain_model.started_at else ""),
|
||||
("finished_at", domain_model.finished_at.isoformat() if domain_model.finished_at else ""),
|
||||
]
|
||||
|
||||
return logstore_model
|
||||
|
||||
def save(self, execution: WorkflowExecution) -> None:
|
||||
"""
|
||||
Save or update a WorkflowExecution domain entity to the logstore.
|
||||
|
||||
This method serves as a domain-to-logstore adapter that:
|
||||
1. Converts the domain entity to its logstore representation
|
||||
2. Persists the logstore model using Aliyun SLS
|
||||
3. Maintains proper multi-tenancy by including tenant context during conversion
|
||||
4. Optionally writes to SQL database for dual-write support (controlled by LOGSTORE_DUAL_WRITE_ENABLED)
|
||||
|
||||
Args:
|
||||
execution: The WorkflowExecution domain entity to persist
|
||||
"""
|
||||
logger.debug(
|
||||
"save: id=%s, workflow_id=%s, status=%s", execution.id_, execution.workflow_id, execution.status.value
|
||||
)
|
||||
try:
|
||||
logstore_model = self._to_logstore_model(execution)
|
||||
self.logstore_client.put_log(AliyunLogStore.workflow_execution_logstore, logstore_model)
|
||||
|
||||
logger.debug("Saved workflow execution to logstore: id=%s", execution.id_)
|
||||
except Exception:
|
||||
logger.exception("Failed to save workflow execution to logstore: id=%s", execution.id_)
|
||||
raise
|
||||
|
||||
# Dual-write to SQL database if enabled (for safe migration)
|
||||
if self._enable_dual_write:
|
||||
try:
|
||||
self.sql_repository.save(execution)
|
||||
logger.debug("Dual-write: saved workflow execution to SQL database: id=%s", execution.id_)
|
||||
except Exception:
|
||||
logger.exception("Failed to dual-write workflow execution to SQL database: id=%s", execution.id_)
|
||||
# Don't raise - LogStore write succeeded, SQL is just a backup
|
||||
@@ -1,366 +0,0 @@
|
||||
"""
|
||||
LogStore implementation of the WorkflowNodeExecutionRepository.
|
||||
|
||||
This module provides a LogStore-based repository for WorkflowNodeExecution entities,
|
||||
using Aliyun SLS LogStore with append-only writes and version control.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime
|
||||
from typing import Any, Union
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from core.model_runtime.utils.encoders import jsonable_encoder
|
||||
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
|
||||
from core.workflow.entities import WorkflowNodeExecution
|
||||
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
|
||||
from core.workflow.enums import NodeType
|
||||
from core.workflow.repositories.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository
|
||||
from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter
|
||||
from extensions.logstore.aliyun_logstore import AliyunLogStore
|
||||
from libs.helper import extract_tenant_id
|
||||
from models import (
|
||||
Account,
|
||||
CreatorUserRole,
|
||||
EndUser,
|
||||
WorkflowNodeExecutionTriggeredFrom,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _dict_to_workflow_node_execution(data: dict[str, Any]) -> WorkflowNodeExecution:
|
||||
"""
|
||||
Convert LogStore result dictionary to WorkflowNodeExecution domain model.
|
||||
|
||||
Args:
|
||||
data: Dictionary from LogStore query result
|
||||
|
||||
Returns:
|
||||
WorkflowNodeExecution domain model instance
|
||||
"""
|
||||
logger.debug("_dict_to_workflow_node_execution: data keys=%s", list(data.keys())[:5])
|
||||
# Parse JSON fields
|
||||
inputs = json.loads(data.get("inputs", "{}"))
|
||||
process_data = json.loads(data.get("process_data", "{}"))
|
||||
outputs = json.loads(data.get("outputs", "{}"))
|
||||
metadata = json.loads(data.get("execution_metadata", "{}"))
|
||||
|
||||
# Convert metadata to domain enum keys
|
||||
domain_metadata = {}
|
||||
for k, v in metadata.items():
|
||||
try:
|
||||
domain_metadata[WorkflowNodeExecutionMetadataKey(k)] = v
|
||||
except ValueError:
|
||||
# Skip invalid metadata keys
|
||||
continue
|
||||
|
||||
# Convert status to domain enum
|
||||
status = WorkflowNodeExecutionStatus(data.get("status", "running"))
|
||||
|
||||
# Parse datetime fields
|
||||
created_at = datetime.fromisoformat(data.get("created_at", "")) if data.get("created_at") else datetime.now()
|
||||
finished_at = datetime.fromisoformat(data.get("finished_at", "")) if data.get("finished_at") else None
|
||||
|
||||
return WorkflowNodeExecution(
|
||||
id=data.get("id", ""),
|
||||
node_execution_id=data.get("node_execution_id"),
|
||||
workflow_id=data.get("workflow_id", ""),
|
||||
workflow_execution_id=data.get("workflow_run_id"),
|
||||
index=int(data.get("index", 0)),
|
||||
predecessor_node_id=data.get("predecessor_node_id"),
|
||||
node_id=data.get("node_id", ""),
|
||||
node_type=NodeType(data.get("node_type", "start")),
|
||||
title=data.get("title", ""),
|
||||
inputs=inputs,
|
||||
process_data=process_data,
|
||||
outputs=outputs,
|
||||
status=status,
|
||||
error=data.get("error"),
|
||||
elapsed_time=float(data.get("elapsed_time", 0.0)),
|
||||
metadata=domain_metadata,
|
||||
created_at=created_at,
|
||||
finished_at=finished_at,
|
||||
)
|
||||
|
||||
|
||||
class LogstoreWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
|
||||
"""
|
||||
LogStore implementation of the WorkflowNodeExecutionRepository interface.
|
||||
|
||||
This implementation uses Aliyun SLS LogStore with an append-only write strategy:
|
||||
- Each save() operation appends a new record with a version timestamp
|
||||
- Updates are simulated by writing new records with higher version numbers
|
||||
- Queries retrieve the latest version using finished_at IS NOT NULL filter
|
||||
- Multi-tenancy is maintained through tenant_id filtering
|
||||
|
||||
Version Strategy:
|
||||
version = time.time_ns() # Nanosecond timestamp for unique ordering
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
session_factory: sessionmaker | Engine,
|
||||
user: Union[Account, EndUser],
|
||||
app_id: str | None,
|
||||
triggered_from: WorkflowNodeExecutionTriggeredFrom | None,
|
||||
):
|
||||
"""
|
||||
Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.
|
||||
|
||||
Args:
|
||||
session_factory: SQLAlchemy sessionmaker or engine for creating sessions
|
||||
user: Account or EndUser object containing tenant_id, user ID, and role information
|
||||
app_id: App ID for filtering by application (can be None)
|
||||
triggered_from: Source of the execution trigger (SINGLE_STEP or WORKFLOW_RUN)
|
||||
"""
|
||||
logger.debug(
|
||||
"LogstoreWorkflowNodeExecutionRepository.__init__: app_id=%s, triggered_from=%s", app_id, triggered_from
|
||||
)
|
||||
# Initialize LogStore client
|
||||
self.logstore_client = AliyunLogStore()
|
||||
|
||||
# Extract tenant_id from user
|
||||
tenant_id = extract_tenant_id(user)
|
||||
if not tenant_id:
|
||||
raise ValueError("User must have a tenant_id or current_tenant_id")
|
||||
self._tenant_id = tenant_id
|
||||
|
||||
# Store app context
|
||||
self._app_id = app_id
|
||||
|
||||
# Extract user context
|
||||
self._triggered_from = triggered_from
|
||||
self._creator_user_id = user.id
|
||||
|
||||
# Determine user role based on user type
|
||||
self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER
|
||||
|
||||
# Initialize SQL repository for dual-write support
|
||||
self.sql_repository = SQLAlchemyWorkflowNodeExecutionRepository(session_factory, user, app_id, triggered_from)
|
||||
|
||||
# Control flag for dual-write (write to both LogStore and SQL database)
|
||||
# Set to True to enable dual-write for safe migration, False to use LogStore only
|
||||
self._enable_dual_write = os.environ.get("LOGSTORE_DUAL_WRITE_ENABLED", "true").lower() == "true"
|
||||
|
||||
def _to_logstore_model(self, domain_model: WorkflowNodeExecution) -> Sequence[tuple[str, str]]:
|
||||
logger.debug(
|
||||
"_to_logstore_model: id=%s, node_id=%s, status=%s",
|
||||
domain_model.id,
|
||||
domain_model.node_id,
|
||||
domain_model.status.value,
|
||||
)
|
||||
if not self._triggered_from:
|
||||
raise ValueError("triggered_from is required in repository constructor")
|
||||
if not self._creator_user_id:
|
||||
raise ValueError("created_by is required in repository constructor")
|
||||
if not self._creator_user_role:
|
||||
raise ValueError("created_by_role is required in repository constructor")
|
||||
|
||||
# Generate log_version as nanosecond timestamp for record versioning
|
||||
log_version = str(time.time_ns())
|
||||
|
||||
json_converter = WorkflowRuntimeTypeConverter()
|
||||
|
||||
logstore_model = [
|
||||
("id", domain_model.id),
|
||||
("log_version", log_version), # Add log_version field for append-only writes
|
||||
("tenant_id", self._tenant_id),
|
||||
("app_id", self._app_id or ""),
|
||||
("workflow_id", domain_model.workflow_id),
|
||||
(
|
||||
"triggered_from",
|
||||
self._triggered_from.value if hasattr(self._triggered_from, "value") else str(self._triggered_from),
|
||||
),
|
||||
("workflow_run_id", domain_model.workflow_execution_id or ""),
|
||||
("index", str(domain_model.index)),
|
||||
("predecessor_node_id", domain_model.predecessor_node_id or ""),
|
||||
("node_execution_id", domain_model.node_execution_id or ""),
|
||||
("node_id", domain_model.node_id),
|
||||
("node_type", domain_model.node_type.value),
|
||||
("title", domain_model.title),
|
||||
(
|
||||
"inputs",
|
||||
json.dumps(json_converter.to_json_encodable(domain_model.inputs), ensure_ascii=False)
|
||||
if domain_model.inputs
|
||||
else "{}",
|
||||
),
|
||||
(
|
||||
"process_data",
|
||||
json.dumps(json_converter.to_json_encodable(domain_model.process_data), ensure_ascii=False)
|
||||
if domain_model.process_data
|
||||
else "{}",
|
||||
),
|
||||
(
|
||||
"outputs",
|
||||
json.dumps(json_converter.to_json_encodable(domain_model.outputs), ensure_ascii=False)
|
||||
if domain_model.outputs
|
||||
else "{}",
|
||||
),
|
||||
("status", domain_model.status.value),
|
||||
("error", domain_model.error or ""),
|
||||
("elapsed_time", str(domain_model.elapsed_time)),
|
||||
(
|
||||
"execution_metadata",
|
||||
json.dumps(jsonable_encoder(domain_model.metadata), ensure_ascii=False)
|
||||
if domain_model.metadata
|
||||
else "{}",
|
||||
),
|
||||
("created_at", domain_model.created_at.isoformat() if domain_model.created_at else ""),
|
||||
("created_by_role", self._creator_user_role.value),
|
||||
("created_by", self._creator_user_id),
|
||||
("finished_at", domain_model.finished_at.isoformat() if domain_model.finished_at else ""),
|
||||
]
|
||||
|
||||
return logstore_model
|
||||
|
||||
def save(self, execution: WorkflowNodeExecution) -> None:
|
||||
"""
|
||||
Save or update a NodeExecution domain entity to LogStore.
|
||||
|
||||
This method serves as a domain-to-logstore adapter that:
|
||||
1. Converts the domain entity to its logstore representation
|
||||
2. Appends a new record with a log_version timestamp
|
||||
3. Maintains proper multi-tenancy by including tenant context during conversion
|
||||
4. Optionally writes to SQL database for dual-write support (controlled by LOGSTORE_DUAL_WRITE_ENABLED)
|
||||
|
||||
Each save operation creates a new record. Updates are simulated by writing
|
||||
new records with higher log_version numbers.
|
||||
|
||||
Args:
|
||||
execution: The NodeExecution domain entity to persist
|
||||
"""
|
||||
logger.debug(
|
||||
"save: id=%s, node_execution_id=%s, status=%s",
|
||||
execution.id,
|
||||
execution.node_execution_id,
|
||||
execution.status.value,
|
||||
)
|
||||
try:
|
||||
logstore_model = self._to_logstore_model(execution)
|
||||
self.logstore_client.put_log(AliyunLogStore.workflow_node_execution_logstore, logstore_model)
|
||||
|
||||
logger.debug(
|
||||
"Saved node execution to LogStore: id=%s, node_execution_id=%s, status=%s",
|
||||
execution.id,
|
||||
execution.node_execution_id,
|
||||
execution.status.value,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to save node execution to LogStore: id=%s, node_execution_id=%s",
|
||||
execution.id,
|
||||
execution.node_execution_id,
|
||||
)
|
||||
raise
|
||||
|
||||
# Dual-write to SQL database if enabled (for safe migration)
|
||||
if self._enable_dual_write:
|
||||
try:
|
||||
self.sql_repository.save(execution)
|
||||
logger.debug("Dual-write: saved node execution to SQL database: id=%s", execution.id)
|
||||
except Exception:
|
||||
logger.exception("Failed to dual-write node execution to SQL database: id=%s", execution.id)
|
||||
# Don't raise - LogStore write succeeded, SQL is just a backup
|
||||
|
||||
def save_execution_data(self, execution: WorkflowNodeExecution) -> None:
|
||||
"""
|
||||
Save or update the inputs, process_data, or outputs associated with a specific
|
||||
node_execution record.
|
||||
|
||||
For LogStore implementation, this is similar to save() since we always write
|
||||
complete records. We append a new record with updated data fields.
|
||||
|
||||
Args:
|
||||
execution: The NodeExecution instance with data to save
|
||||
"""
|
||||
logger.debug("save_execution_data: id=%s, node_execution_id=%s", execution.id, execution.node_execution_id)
|
||||
# In LogStore, we simply write a new complete record with the data
|
||||
# The log_version timestamp will ensure this is treated as the latest version
|
||||
self.save(execution)
|
||||
|
||||
def get_by_workflow_run(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
order_config: OrderConfig | None = None,
|
||||
) -> Sequence[WorkflowNodeExecution]:
|
||||
"""
|
||||
Retrieve all NodeExecution instances for a specific workflow run.
|
||||
Uses LogStore SQL query with finished_at IS NOT NULL filter for deduplication.
|
||||
This ensures we only get the final version of each node execution.
|
||||
Args:
|
||||
workflow_run_id: The workflow run ID
|
||||
order_config: Optional configuration for ordering results
|
||||
order_config.order_by: List of fields to order by (e.g., ["index", "created_at"])
|
||||
order_config.order_direction: Direction to order ("asc" or "desc")
|
||||
|
||||
Returns:
|
||||
A list of NodeExecution instances
|
||||
|
||||
Note:
|
||||
This method filters by finished_at IS NOT NULL to avoid duplicates from
|
||||
version updates. For complete history including intermediate states,
|
||||
a different query strategy would be needed.
|
||||
"""
|
||||
logger.debug("get_by_workflow_run: workflow_run_id=%s, order_config=%s", workflow_run_id, order_config)
|
||||
# Build SQL query with deduplication using finished_at IS NOT NULL
|
||||
# This optimization avoids window functions for common case where we only
|
||||
# want the final state of each node execution
|
||||
|
||||
# Build ORDER BY clause
|
||||
order_clause = ""
|
||||
if order_config and order_config.order_by:
|
||||
order_fields = []
|
||||
for field in order_config.order_by:
|
||||
# Map domain field names to logstore field names if needed
|
||||
field_name = field
|
||||
if order_config.order_direction == "desc":
|
||||
order_fields.append(f"{field_name} DESC")
|
||||
else:
|
||||
order_fields.append(f"{field_name} ASC")
|
||||
if order_fields:
|
||||
order_clause = "ORDER BY " + ", ".join(order_fields)
|
||||
|
||||
sql = f"""
|
||||
SELECT *
|
||||
FROM {AliyunLogStore.workflow_node_execution_logstore}
|
||||
WHERE workflow_run_id='{workflow_run_id}'
|
||||
AND tenant_id='{self._tenant_id}'
|
||||
AND finished_at IS NOT NULL
|
||||
"""
|
||||
|
||||
if self._app_id:
|
||||
sql += f" AND app_id='{self._app_id}'"
|
||||
|
||||
if order_clause:
|
||||
sql += f" {order_clause}"
|
||||
|
||||
try:
|
||||
# Execute SQL query
|
||||
results = self.logstore_client.execute_sql(
|
||||
sql=sql,
|
||||
query="*",
|
||||
logstore=AliyunLogStore.workflow_node_execution_logstore,
|
||||
)
|
||||
|
||||
# Convert LogStore results to WorkflowNodeExecution domain models
|
||||
executions = []
|
||||
for row in results:
|
||||
try:
|
||||
execution = _dict_to_workflow_node_execution(row)
|
||||
executions.append(execution)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to convert row to WorkflowNodeExecution: %s, row=%s", e, row)
|
||||
continue
|
||||
|
||||
return executions
|
||||
|
||||
except Exception:
|
||||
logger.exception("Failed to retrieve node executions from LogStore: workflow_run_id=%s", workflow_run_id)
|
||||
raise
|
||||
@@ -1,66 +0,0 @@
|
||||
"""
|
||||
Field Encoding/Decoding Utilities
|
||||
|
||||
Provides Base64 decoding for sensitive fields (password, verification code)
|
||||
received from the frontend.
|
||||
|
||||
Note: This uses Base64 encoding for obfuscation, not cryptographic encryption.
|
||||
Real security relies on HTTPS for transport layer encryption.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FieldEncryption:
|
||||
"""Handle decoding of sensitive fields during transmission"""
|
||||
|
||||
@classmethod
|
||||
def decrypt_field(cls, encoded_text: str) -> str | None:
|
||||
"""
|
||||
Decode Base64 encoded field from frontend.
|
||||
|
||||
Args:
|
||||
encoded_text: Base64 encoded text from frontend
|
||||
|
||||
Returns:
|
||||
Decoded plaintext, or None if decoding fails
|
||||
"""
|
||||
try:
|
||||
# Decode base64
|
||||
decoded_bytes = base64.b64decode(encoded_text)
|
||||
decoded_text = decoded_bytes.decode("utf-8")
|
||||
logger.debug("Field decoding successful")
|
||||
return decoded_text
|
||||
|
||||
except Exception:
|
||||
# Decoding failed - return None to trigger error in caller
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def decrypt_password(cls, encrypted_password: str) -> str | None:
|
||||
"""
|
||||
Decrypt password field
|
||||
|
||||
Args:
|
||||
encrypted_password: Encrypted password from frontend
|
||||
|
||||
Returns:
|
||||
Decrypted password or None if decryption fails
|
||||
"""
|
||||
return cls.decrypt_field(encrypted_password)
|
||||
|
||||
@classmethod
|
||||
def decrypt_verification_code(cls, encrypted_code: str) -> str | None:
|
||||
"""
|
||||
Decrypt verification code field
|
||||
|
||||
Args:
|
||||
encrypted_code: Encrypted code from frontend
|
||||
|
||||
Returns:
|
||||
Decrypted code or None if decryption fails
|
||||
"""
|
||||
return cls.decrypt_field(encrypted_code)
|
||||
@@ -1,31 +0,0 @@
|
||||
"""add type column not null default tool
|
||||
|
||||
Revision ID: 03ea244985ce
|
||||
Revises: d57accd375ae
|
||||
Create Date: 2025-12-16 18:17:12.193877
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '03ea244985ce'
|
||||
down_revision = 'd57accd375ae'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('type', sa.String(length=50), server_default=sa.text("'tool'"), nullable=False))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op:
|
||||
batch_op.drop_column('type')
|
||||
# ### end Alembic commands ###
|
||||
@@ -1532,7 +1532,6 @@ class PipelineRecommendedPlugin(TypeBase):
|
||||
)
|
||||
plugin_id: Mapped[str] = mapped_column(LongText, nullable=False)
|
||||
provider_name: Mapped[str] = mapped_column(LongText, nullable=False)
|
||||
type: Mapped[str] = mapped_column(sa.String(50), nullable=False, server_default=sa.text("'tool'"))
|
||||
position: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
|
||||
active: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
|
||||
@@ -4,7 +4,6 @@ version = "1.11.1"
|
||||
requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
"aliyun-log-python-sdk~=0.9.37",
|
||||
"arize-phoenix-otel~=0.9.2",
|
||||
"azure-identity==1.16.1",
|
||||
"beautifulsoup4==4.12.2",
|
||||
@@ -12,7 +11,7 @@ dependencies = [
|
||||
"bs4~=0.0.1",
|
||||
"cachetools~=5.3.0",
|
||||
"celery~=5.5.2",
|
||||
"chardet~=5.1.0",
|
||||
"charset-normalizer>=3.4.4",
|
||||
"flask~=3.1.2",
|
||||
"flask-compress>=1.17,<1.18",
|
||||
"flask-cors~=6.0.0",
|
||||
@@ -92,6 +91,7 @@ dependencies = [
|
||||
"weaviate-client==4.17.0",
|
||||
"apscheduler>=3.11.0",
|
||||
"weave>=0.52.16",
|
||||
"jsonschema>=4.25.1",
|
||||
]
|
||||
# Before adding new dependency, consider place it in
|
||||
# alphabet order (a-z) and suitable group.
|
||||
|
||||
@@ -1248,13 +1248,14 @@ class RagPipelineService:
|
||||
session.commit()
|
||||
return workflow_node_execution_db_model
|
||||
|
||||
def get_recommended_plugins(self, type: str) -> dict:
|
||||
def get_recommended_plugins(self) -> dict:
|
||||
# Query active recommended plugins
|
||||
query = db.session.query(PipelineRecommendedPlugin).where(PipelineRecommendedPlugin.active == True)
|
||||
if type and type != "all":
|
||||
query = query.where(PipelineRecommendedPlugin.type == type)
|
||||
|
||||
pipeline_recommended_plugins = query.order_by(PipelineRecommendedPlugin.position.asc()).all()
|
||||
pipeline_recommended_plugins = (
|
||||
db.session.query(PipelineRecommendedPlugin)
|
||||
.where(PipelineRecommendedPlugin.active == True)
|
||||
.order_by(PipelineRecommendedPlugin.position.asc())
|
||||
.all()
|
||||
)
|
||||
|
||||
if not pipeline_recommended_plugins:
|
||||
return {
|
||||
|
||||
@@ -410,12 +410,9 @@ class VariableTruncator(BaseTruncator):
|
||||
@overload
|
||||
def _truncate_json_primitives(self, val: None, target_size: int) -> _PartResult[None]: ...
|
||||
|
||||
@overload
|
||||
def _truncate_json_primitives(self, val: File, target_size: int) -> _PartResult[File]: ...
|
||||
|
||||
def _truncate_json_primitives(
|
||||
self,
|
||||
val: UpdatedVariable | File | str | list[object] | dict[str, object] | bool | int | float | None,
|
||||
val: UpdatedVariable | str | list[object] | dict[str, object] | bool | int | float | None,
|
||||
target_size: int,
|
||||
) -> _PartResult[Any]:
|
||||
"""Truncate a value within an object to fit within budget."""
|
||||
@@ -428,9 +425,6 @@ class VariableTruncator(BaseTruncator):
|
||||
return self._truncate_array(val, target_size)
|
||||
elif isinstance(val, dict):
|
||||
return self._truncate_object(val, target_size)
|
||||
elif isinstance(val, File):
|
||||
# File objects should not be truncated, return as-is
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
elif val is None or isinstance(val, (bool, int, float)):
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
else:
|
||||
|
||||
@@ -113,31 +113,16 @@ class TestShardedRedisBroadcastChannelIntegration:
|
||||
topic = broadcast_channel.topic(topic_name)
|
||||
producer = topic.as_producer()
|
||||
subscriptions = [topic.subscribe() for _ in range(subscriber_count)]
|
||||
ready_events = [threading.Event() for _ in range(subscriber_count)]
|
||||
|
||||
def producer_thread():
|
||||
deadline = time.time() + 5.0
|
||||
for ev in ready_events:
|
||||
remaining = deadline - time.time()
|
||||
if remaining <= 0:
|
||||
break
|
||||
if not ev.wait(timeout=max(0.0, remaining)):
|
||||
pytest.fail("subscriber did not become ready before publish deadline")
|
||||
time.sleep(0.2) # Allow all subscribers to connect
|
||||
producer.publish(message)
|
||||
time.sleep(0.2)
|
||||
for sub in subscriptions:
|
||||
sub.close()
|
||||
|
||||
def consumer_thread(subscription: Subscription, ready_event: threading.Event) -> list[bytes]:
|
||||
def consumer_thread(subscription: Subscription) -> list[bytes]:
|
||||
received_msgs = []
|
||||
# Prime subscription so the underlying Pub/Sub listener thread starts before publishing
|
||||
try:
|
||||
_ = subscription.receive(0.01)
|
||||
except SubscriptionClosedError:
|
||||
return received_msgs
|
||||
finally:
|
||||
ready_event.set()
|
||||
|
||||
while True:
|
||||
try:
|
||||
msg = subscription.receive(0.1)
|
||||
@@ -152,10 +137,7 @@ class TestShardedRedisBroadcastChannelIntegration:
|
||||
|
||||
with ThreadPoolExecutor(max_workers=subscriber_count + 1) as executor:
|
||||
producer_future = executor.submit(producer_thread)
|
||||
consumer_futures = [
|
||||
executor.submit(consumer_thread, subscription, ready_events[idx])
|
||||
for idx, subscription in enumerate(subscriptions)
|
||||
]
|
||||
consumer_futures = [executor.submit(consumer_thread, subscription) for subscription in subscriptions]
|
||||
|
||||
producer_future.result(timeout=10.0)
|
||||
msgs_by_consumers = []
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Test authentication security to prevent user enumeration."""
|
||||
|
||||
import base64
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -12,11 +11,6 @@ from controllers.console.auth.error import AuthenticationFailedError
|
||||
from controllers.console.auth.login import LoginApi
|
||||
|
||||
|
||||
def encode_password(password: str) -> str:
|
||||
"""Helper to encode password as Base64 for testing."""
|
||||
return base64.b64encode(password.encode("utf-8")).decode()
|
||||
|
||||
|
||||
class TestAuthenticationSecurity:
|
||||
"""Test authentication endpoints for security against user enumeration."""
|
||||
|
||||
@@ -48,9 +42,7 @@ class TestAuthenticationSecurity:
|
||||
|
||||
# Act
|
||||
with self.app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={"email": "nonexistent@example.com", "password": encode_password("WrongPass123!")},
|
||||
"/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
|
||||
@@ -80,9 +72,7 @@ class TestAuthenticationSecurity:
|
||||
|
||||
# Act
|
||||
with self.app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={"email": "existing@example.com", "password": encode_password("WrongPass123!")},
|
||||
"/login", method="POST", json={"email": "existing@example.com", "password": "WrongPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
|
||||
@@ -114,9 +104,7 @@ class TestAuthenticationSecurity:
|
||||
|
||||
# Act
|
||||
with self.app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={"email": "nonexistent@example.com", "password": encode_password("WrongPass123!")},
|
||||
"/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ This module tests the email code login mechanism including:
|
||||
- Workspace creation for new users
|
||||
"""
|
||||
|
||||
import base64
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -26,11 +25,6 @@ from controllers.console.error import (
|
||||
from services.errors.account import AccountRegisterError
|
||||
|
||||
|
||||
def encode_code(code: str) -> str:
|
||||
"""Helper to encode verification code as Base64 for testing."""
|
||||
return base64.b64encode(code.encode("utf-8")).decode()
|
||||
|
||||
|
||||
class TestEmailCodeLoginSendEmailApi:
|
||||
"""Test cases for sending email verification codes."""
|
||||
|
||||
@@ -296,7 +290,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "code": encode_code("123456"), "token": "valid_token"},
|
||||
json={"email": "test@example.com", "code": "123456", "token": "valid_token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
response = api.post()
|
||||
@@ -345,12 +339,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={
|
||||
"email": "newuser@example.com",
|
||||
"code": encode_code("123456"),
|
||||
"token": "valid_token",
|
||||
"language": "en-US",
|
||||
},
|
||||
json={"email": "newuser@example.com", "code": "123456", "token": "valid_token", "language": "en-US"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
response = api.post()
|
||||
@@ -376,7 +365,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "code": encode_code("123456"), "token": "invalid_token"},
|
||||
json={"email": "test@example.com", "code": "123456", "token": "invalid_token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
with pytest.raises(InvalidTokenError):
|
||||
@@ -399,7 +388,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "different@example.com", "code": encode_code("123456"), "token": "token"},
|
||||
json={"email": "different@example.com", "code": "123456", "token": "token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
with pytest.raises(InvalidEmailError):
|
||||
@@ -422,7 +411,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "code": encode_code("wrong_code"), "token": "token"},
|
||||
json={"email": "test@example.com", "code": "wrong_code", "token": "token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
with pytest.raises(EmailCodeError):
|
||||
@@ -508,7 +497,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "code": encode_code("123456"), "token": "token"},
|
||||
json={"email": "test@example.com", "code": "123456", "token": "token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
with pytest.raises(WorkspacesLimitExceeded):
|
||||
@@ -550,7 +539,7 @@ class TestEmailCodeLoginApi:
|
||||
with app.test_request_context(
|
||||
"/email-code-login/validity",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "code": encode_code("123456"), "token": "token"},
|
||||
json={"email": "test@example.com", "code": "123456", "token": "token"},
|
||||
):
|
||||
api = EmailCodeLoginApi()
|
||||
with pytest.raises(NotAllowedCreateWorkspace):
|
||||
|
||||
@@ -8,7 +8,6 @@ This module tests the core authentication endpoints including:
|
||||
- Account status validation
|
||||
"""
|
||||
|
||||
import base64
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
@@ -29,11 +28,6 @@ from controllers.console.error import (
|
||||
from services.errors.account import AccountLoginError, AccountPasswordError
|
||||
|
||||
|
||||
def encode_password(password: str) -> str:
|
||||
"""Helper to encode password as Base64 for testing."""
|
||||
return base64.b64encode(password.encode("utf-8")).decode()
|
||||
|
||||
|
||||
class TestLoginApi:
|
||||
"""Test cases for the LoginApi endpoint."""
|
||||
|
||||
@@ -112,9 +106,7 @@ class TestLoginApi:
|
||||
|
||||
# Act
|
||||
with app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={"email": "test@example.com", "password": encode_password("ValidPass123!")},
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": "ValidPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
response = login_api.post()
|
||||
@@ -166,11 +158,7 @@ class TestLoginApi:
|
||||
with app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={
|
||||
"email": "test@example.com",
|
||||
"password": encode_password("ValidPass123!"),
|
||||
"invite_token": "valid_token",
|
||||
},
|
||||
json={"email": "test@example.com", "password": "ValidPass123!", "invite_token": "valid_token"},
|
||||
):
|
||||
login_api = LoginApi()
|
||||
response = login_api.post()
|
||||
@@ -198,7 +186,7 @@ class TestLoginApi:
|
||||
|
||||
# Act & Assert
|
||||
with app.test_request_context(
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": encode_password("password")}
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": "password"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(EmailPasswordLoginLimitError):
|
||||
@@ -221,7 +209,7 @@ class TestLoginApi:
|
||||
|
||||
# Act & Assert
|
||||
with app.test_request_context(
|
||||
"/login", method="POST", json={"email": "frozen@example.com", "password": encode_password("password")}
|
||||
"/login", method="POST", json={"email": "frozen@example.com", "password": "password"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(AccountInFreezeError):
|
||||
@@ -258,7 +246,7 @@ class TestLoginApi:
|
||||
|
||||
# Act & Assert
|
||||
with app.test_request_context(
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": encode_password("WrongPass123!")}
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": "WrongPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(AuthenticationFailedError):
|
||||
@@ -289,7 +277,7 @@ class TestLoginApi:
|
||||
|
||||
# Act & Assert
|
||||
with app.test_request_context(
|
||||
"/login", method="POST", json={"email": "banned@example.com", "password": encode_password("ValidPass123!")}
|
||||
"/login", method="POST", json={"email": "banned@example.com", "password": "ValidPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(AccountBannedError):
|
||||
@@ -334,7 +322,7 @@ class TestLoginApi:
|
||||
|
||||
# Act & Assert
|
||||
with app.test_request_context(
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": encode_password("ValidPass123!")}
|
||||
"/login", method="POST", json={"email": "test@example.com", "password": "ValidPass123!"}
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(WorkspacesLimitExceeded):
|
||||
@@ -361,11 +349,7 @@ class TestLoginApi:
|
||||
with app.test_request_context(
|
||||
"/login",
|
||||
method="POST",
|
||||
json={
|
||||
"email": "different@example.com",
|
||||
"password": encode_password("ValidPass123!"),
|
||||
"invite_token": "token",
|
||||
},
|
||||
json={"email": "different@example.com", "password": "ValidPass123!", "invite_token": "token"},
|
||||
):
|
||||
login_api = LoginApi()
|
||||
with pytest.raises(InvalidEmailError):
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
import tempfile
|
||||
|
||||
from core.rag.extractor.helpers import FileEncoding, detect_file_encodings
|
||||
|
||||
|
||||
def test_detect_file_encodings() -> None:
|
||||
with tempfile.NamedTemporaryFile(mode="w+t", suffix=".txt") as temp:
|
||||
temp.write("Shared data")
|
||||
temp_path = temp.name
|
||||
assert detect_file_encodings(temp_path) == [FileEncoding(encoding="utf_8", confidence=0.0, language="Unknown")]
|
||||
@@ -1,150 +0,0 @@
|
||||
"""
|
||||
Unit tests for field encoding/decoding utilities.
|
||||
|
||||
These tests verify Base64 encoding/decoding functionality and
|
||||
proper error handling and fallback behavior.
|
||||
"""
|
||||
|
||||
import base64
|
||||
|
||||
from libs.encryption import FieldEncryption
|
||||
|
||||
|
||||
class TestDecodeField:
|
||||
"""Test cases for field decoding functionality."""
|
||||
|
||||
def test_decode_valid_base64(self):
|
||||
"""Test decoding a valid Base64 encoded string."""
|
||||
plaintext = "password123"
|
||||
encoded = base64.b64encode(plaintext.encode("utf-8")).decode()
|
||||
|
||||
result = FieldEncryption.decrypt_field(encoded)
|
||||
assert result == plaintext
|
||||
|
||||
def test_decode_non_base64_returns_none(self):
|
||||
"""Test that non-base64 input returns None."""
|
||||
non_base64 = "plain-password-!@#"
|
||||
result = FieldEncryption.decrypt_field(non_base64)
|
||||
# Should return None (decoding failed)
|
||||
assert result is None
|
||||
|
||||
def test_decode_unicode_text(self):
|
||||
"""Test decoding Base64 encoded Unicode text."""
|
||||
plaintext = "密码Test123"
|
||||
encoded = base64.b64encode(plaintext.encode("utf-8")).decode()
|
||||
|
||||
result = FieldEncryption.decrypt_field(encoded)
|
||||
assert result == plaintext
|
||||
|
||||
def test_decode_empty_string(self):
|
||||
"""Test decoding an empty string returns empty string."""
|
||||
result = FieldEncryption.decrypt_field("")
|
||||
# Empty string base64 decodes to empty string
|
||||
assert result == ""
|
||||
|
||||
def test_decode_special_characters(self):
|
||||
"""Test decoding with special characters."""
|
||||
plaintext = "P@ssw0rd!#$%^&*()"
|
||||
encoded = base64.b64encode(plaintext.encode("utf-8")).decode()
|
||||
|
||||
result = FieldEncryption.decrypt_field(encoded)
|
||||
assert result == plaintext
|
||||
|
||||
|
||||
class TestDecodePassword:
|
||||
"""Test cases for password decoding."""
|
||||
|
||||
def test_decode_password_base64(self):
|
||||
"""Test decoding a Base64 encoded password."""
|
||||
password = "SecureP@ssw0rd!"
|
||||
encoded = base64.b64encode(password.encode("utf-8")).decode()
|
||||
|
||||
result = FieldEncryption.decrypt_password(encoded)
|
||||
assert result == password
|
||||
|
||||
def test_decode_password_invalid_returns_none(self):
|
||||
"""Test that invalid base64 passwords return None."""
|
||||
invalid = "PlainPassword!@#"
|
||||
result = FieldEncryption.decrypt_password(invalid)
|
||||
# Should return None (decoding failed)
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestDecodeVerificationCode:
|
||||
"""Test cases for verification code decoding."""
|
||||
|
||||
def test_decode_code_base64(self):
|
||||
"""Test decoding a Base64 encoded verification code."""
|
||||
code = "789012"
|
||||
encoded = base64.b64encode(code.encode("utf-8")).decode()
|
||||
|
||||
result = FieldEncryption.decrypt_verification_code(encoded)
|
||||
assert result == code
|
||||
|
||||
def test_decode_code_invalid_returns_none(self):
|
||||
"""Test that invalid base64 codes return None."""
|
||||
invalid = "123456" # Plain 6-digit code, not base64
|
||||
result = FieldEncryption.decrypt_verification_code(invalid)
|
||||
# Should return None (decoding failed)
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestRoundTripEncodingDecoding:
|
||||
"""
|
||||
Integration tests for complete encoding-decoding cycle.
|
||||
These tests simulate the full frontend-to-backend flow using Base64.
|
||||
"""
|
||||
|
||||
def test_roundtrip_password(self):
|
||||
"""Test encoding and decoding a password."""
|
||||
original_password = "SecureP@ssw0rd!"
|
||||
|
||||
# Simulate frontend encoding (Base64)
|
||||
encoded = base64.b64encode(original_password.encode("utf-8")).decode()
|
||||
|
||||
# Backend decoding
|
||||
decoded = FieldEncryption.decrypt_password(encoded)
|
||||
|
||||
assert decoded == original_password
|
||||
|
||||
def test_roundtrip_verification_code(self):
|
||||
"""Test encoding and decoding a verification code."""
|
||||
original_code = "123456"
|
||||
|
||||
# Simulate frontend encoding
|
||||
encoded = base64.b64encode(original_code.encode("utf-8")).decode()
|
||||
|
||||
# Backend decoding
|
||||
decoded = FieldEncryption.decrypt_verification_code(encoded)
|
||||
|
||||
assert decoded == original_code
|
||||
|
||||
def test_roundtrip_unicode_password(self):
|
||||
"""Test encoding and decoding password with Unicode characters."""
|
||||
original_password = "密码Test123!@#"
|
||||
|
||||
# Frontend encoding
|
||||
encoded = base64.b64encode(original_password.encode("utf-8")).decode()
|
||||
|
||||
# Backend decoding
|
||||
decoded = FieldEncryption.decrypt_password(encoded)
|
||||
|
||||
assert decoded == original_password
|
||||
|
||||
def test_roundtrip_long_password(self):
|
||||
"""Test encoding and decoding a long password."""
|
||||
original_password = "ThisIsAVeryLongPasswordWithLotsOfCharacters123!@#$%^&*()"
|
||||
|
||||
encoded = base64.b64encode(original_password.encode("utf-8")).decode()
|
||||
decoded = FieldEncryption.decrypt_password(encoded)
|
||||
|
||||
assert decoded == original_password
|
||||
|
||||
def test_roundtrip_with_whitespace(self):
|
||||
"""Test encoding and decoding with whitespace."""
|
||||
original_password = "pass word with spaces"
|
||||
|
||||
encoded = base64.b64encode(original_password.encode("utf-8")).decode()
|
||||
decoded = FieldEncryption.decrypt_field(encoded)
|
||||
|
||||
assert decoded == original_password
|
||||
@@ -518,55 +518,6 @@ class TestEdgeCases:
|
||||
assert isinstance(result.result, StringSegment)
|
||||
|
||||
|
||||
class TestTruncateJsonPrimitives:
|
||||
"""Test _truncate_json_primitives method with different data types."""
|
||||
|
||||
@pytest.fixture
|
||||
def truncator(self):
|
||||
return VariableTruncator()
|
||||
|
||||
def test_truncate_json_primitives_file_type(self, truncator, file):
|
||||
"""Test that File objects are handled correctly in _truncate_json_primitives."""
|
||||
# Test File object is returned as-is without truncation
|
||||
result = truncator._truncate_json_primitives(file, 1000)
|
||||
|
||||
assert result.value == file
|
||||
assert result.truncated is False
|
||||
# Size should be calculated correctly
|
||||
expected_size = VariableTruncator.calculate_json_size(file)
|
||||
assert result.value_size == expected_size
|
||||
|
||||
def test_truncate_json_primitives_file_type_small_budget(self, truncator, file):
|
||||
"""Test that File objects are returned as-is even with small budget."""
|
||||
# Even with a small size budget, File objects should not be truncated
|
||||
result = truncator._truncate_json_primitives(file, 10)
|
||||
|
||||
assert result.value == file
|
||||
assert result.truncated is False
|
||||
|
||||
def test_truncate_json_primitives_file_type_in_array(self, truncator, file):
|
||||
"""Test File objects in arrays are handled correctly."""
|
||||
array_with_files = [file, file]
|
||||
result = truncator._truncate_json_primitives(array_with_files, 1000)
|
||||
|
||||
assert isinstance(result.value, list)
|
||||
assert len(result.value) == 2
|
||||
assert result.value[0] == file
|
||||
assert result.value[1] == file
|
||||
assert result.truncated is False
|
||||
|
||||
def test_truncate_json_primitives_file_type_in_object(self, truncator, file):
|
||||
"""Test File objects in objects are handled correctly."""
|
||||
obj_with_files = {"file1": file, "file2": file}
|
||||
result = truncator._truncate_json_primitives(obj_with_files, 1000)
|
||||
|
||||
assert isinstance(result.value, dict)
|
||||
assert len(result.value) == 2
|
||||
assert result.value["file1"] == file
|
||||
assert result.value["file2"] == file
|
||||
assert result.truncated is False
|
||||
|
||||
|
||||
class TestIntegrationScenarios:
|
||||
"""Test realistic integration scenarios."""
|
||||
|
||||
|
||||
4651
api/uv.lock
generated
4651
api/uv.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1044,25 +1044,6 @@ WORKFLOW_LOG_RETENTION_DAYS=30
|
||||
# Batch size for workflow log cleanup operations (default: 100)
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
|
||||
|
||||
# Aliyun SLS Logstore Configuration
|
||||
# Aliyun Access Key ID
|
||||
ALIYUN_SLS_ACCESS_KEY_ID=
|
||||
# Aliyun Access Key Secret
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET=
|
||||
# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com)
|
||||
ALIYUN_SLS_ENDPOINT=
|
||||
# Aliyun SLS Region (e.g., cn-hangzhou)
|
||||
ALIYUN_SLS_REGION=
|
||||
# Aliyun SLS Project Name
|
||||
ALIYUN_SLS_PROJECT_NAME=
|
||||
# Number of days to retain workflow run logs (default: 365 days, 3650 for permanent storage)
|
||||
ALIYUN_SLS_LOGSTORE_TTL=365
|
||||
# Enable dual-write to both SLS LogStore and SQL database (default: false)
|
||||
LOGSTORE_DUAL_WRITE_ENABLED=false
|
||||
# Enable dual-read fallback to SQL database when LogStore returns no results (default: true)
|
||||
# Useful for migration scenarios where historical data exists only in SQL database
|
||||
LOGSTORE_DUAL_READ_ENABLED=true
|
||||
|
||||
# HTTP request node in workflow configuration
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
|
||||
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
|
||||
@@ -1248,7 +1229,7 @@ NGINX_SSL_PORT=443
|
||||
# and modify the env vars below accordingly.
|
||||
NGINX_SSL_CERT_FILENAME=dify.crt
|
||||
NGINX_SSL_CERT_KEY_FILENAME=dify.key
|
||||
NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3
|
||||
NGINX_SSL_PROTOCOLS=TLSv1.1 TLSv1.2 TLSv1.3
|
||||
|
||||
# Nginx performance tuning
|
||||
NGINX_WORKER_PROCESSES=auto
|
||||
@@ -1440,7 +1421,7 @@ QUEUE_MONITOR_ALERT_EMAILS=
|
||||
QUEUE_MONITOR_INTERVAL=30
|
||||
|
||||
# Swagger UI configuration
|
||||
SWAGGER_UI_ENABLED=false
|
||||
SWAGGER_UI_ENABLED=true
|
||||
SWAGGER_UI_PATH=/swagger-ui.html
|
||||
|
||||
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
|
||||
@@ -1479,4 +1460,4 @@ ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
|
||||
ANNOTATION_IMPORT_MAX_CONCURRENT=5
|
||||
|
||||
# The API key of amplitude
|
||||
AMPLITUDE_API_KEY=
|
||||
AMPLITUDE_API_KEY=
|
||||
|
||||
@@ -414,7 +414,7 @@ services:
|
||||
# and modify the env vars below in .env if HTTPS_ENABLED is true.
|
||||
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
|
||||
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
|
||||
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
|
||||
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
|
||||
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
|
||||
|
||||
@@ -455,14 +455,6 @@ x-shared-env: &shared-api-worker-env
|
||||
WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
|
||||
WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
|
||||
ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-}
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-}
|
||||
ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-}
|
||||
ALIYUN_SLS_REGION: ${ALIYUN_SLS_REGION:-}
|
||||
ALIYUN_SLS_PROJECT_NAME: ${ALIYUN_SLS_PROJECT_NAME:-}
|
||||
ALIYUN_SLS_LOGSTORE_TTL: ${ALIYUN_SLS_LOGSTORE_TTL:-365}
|
||||
LOGSTORE_DUAL_WRITE_ENABLED: ${LOGSTORE_DUAL_WRITE_ENABLED:-false}
|
||||
LOGSTORE_DUAL_READ_ENABLED: ${LOGSTORE_DUAL_READ_ENABLED:-true}
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760}
|
||||
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576}
|
||||
HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True}
|
||||
@@ -536,7 +528,7 @@ x-shared-env: &shared-api-worker-env
|
||||
NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443}
|
||||
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
|
||||
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
|
||||
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
|
||||
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
|
||||
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
|
||||
@@ -639,7 +631,7 @@ x-shared-env: &shared-api-worker-env
|
||||
QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200}
|
||||
QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-}
|
||||
QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30}
|
||||
SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-false}
|
||||
SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-true}
|
||||
SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html}
|
||||
DSL_EXPORT_ENCRYPT_DATASET_ID: ${DSL_EXPORT_ENCRYPT_DATASET_ID:-true}
|
||||
DATASET_MAX_SEGMENTS_PER_REQUEST: ${DATASET_MAX_SEGMENTS_PER_REQUEST:-0}
|
||||
@@ -1079,7 +1071,7 @@ services:
|
||||
# and modify the env vars below in .env if HTTPS_ENABLED is true.
|
||||
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
|
||||
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
|
||||
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.1 TLSv1.2 TLSv1.3}
|
||||
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
|
||||
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
|
||||
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
|
||||
|
||||
@@ -213,24 +213,3 @@ PLUGIN_VOLCENGINE_TOS_ENDPOINT=
|
||||
PLUGIN_VOLCENGINE_TOS_ACCESS_KEY=
|
||||
PLUGIN_VOLCENGINE_TOS_SECRET_KEY=
|
||||
PLUGIN_VOLCENGINE_TOS_REGION=
|
||||
|
||||
# ------------------------------
|
||||
# Environment Variables for Aliyun SLS (Simple Log Service)
|
||||
# ------------------------------
|
||||
# Aliyun SLS Access Key ID
|
||||
ALIYUN_SLS_ACCESS_KEY_ID=
|
||||
# Aliyun SLS Access Key Secret
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET=
|
||||
# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com)
|
||||
ALIYUN_SLS_ENDPOINT=
|
||||
# Aliyun SLS Region (e.g., cn-hangzhou)
|
||||
ALIYUN_SLS_REGION=
|
||||
# Aliyun SLS Project Name
|
||||
ALIYUN_SLS_PROJECT_NAME=
|
||||
# Aliyun SLS Logstore TTL (default: 365 days, 3650 for permanent storage)
|
||||
ALIYUN_SLS_LOGSTORE_TTL=365
|
||||
# Enable dual-write to both LogStore and SQL database (default: true)
|
||||
LOGSTORE_DUAL_WRITE_ENABLED=true
|
||||
# Enable dual-read fallback to SQL database when LogStore returns no results (default: true)
|
||||
# Useful for migration scenarios where historical data exists only in SQL database
|
||||
LOGSTORE_DUAL_READ_ENABLED=true
|
||||
@@ -19,13 +19,7 @@
|
||||
*/
|
||||
|
||||
export const useTranslation = () => ({
|
||||
t: (key: string, options?: Record<string, unknown>) => {
|
||||
if (options?.returnObjects)
|
||||
return [`${key}-feature-1`, `${key}-feature-2`]
|
||||
if (options)
|
||||
return `${key}:${JSON.stringify(options)}`
|
||||
return key
|
||||
},
|
||||
t: (key: string) => key,
|
||||
i18n: {
|
||||
language: 'en',
|
||||
changeLanguage: jest.fn(),
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import EditItem, { EditItemType } from './index'
|
||||
|
||||
jest.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('AddAnnotationModal/EditItem', () => {
|
||||
test('should render query inputs with user avatar and placeholder strings', () => {
|
||||
render(
|
||||
<EditItem
|
||||
type={EditItemType.Query}
|
||||
content="Why?"
|
||||
onChange={jest.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('appAnnotation.addModal.queryName')).toBeInTheDocument()
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.queryPlaceholder')).toBeInTheDocument()
|
||||
expect(screen.getByText('Why?')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should render answer name and placeholder text', () => {
|
||||
render(
|
||||
<EditItem
|
||||
type={EditItemType.Answer}
|
||||
content="Existing answer"
|
||||
onChange={jest.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('appAnnotation.addModal.answerName')).toBeInTheDocument()
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.answerPlaceholder')).toBeInTheDocument()
|
||||
expect(screen.getByDisplayValue('Existing answer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should propagate changes when answer content updates', () => {
|
||||
const handleChange = jest.fn()
|
||||
render(
|
||||
<EditItem
|
||||
type={EditItemType.Answer}
|
||||
content=""
|
||||
onChange={handleChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.change(screen.getByPlaceholderText('appAnnotation.addModal.answerPlaceholder'), { target: { value: 'Because' } })
|
||||
expect(handleChange).toHaveBeenCalledWith('Because')
|
||||
})
|
||||
})
|
||||
@@ -1,155 +0,0 @@
|
||||
import React from 'react'
|
||||
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import AddAnnotationModal from './index'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
|
||||
jest.mock('@/context/provider-context', () => ({
|
||||
useProviderContext: jest.fn(),
|
||||
}))
|
||||
|
||||
const mockToastNotify = jest.fn()
|
||||
jest.mock('@/app/components/base/toast', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
notify: jest.fn(args => mockToastNotify(args)),
|
||||
},
|
||||
}))
|
||||
|
||||
jest.mock('@/app/components/billing/annotation-full', () => () => <div data-testid="annotation-full" />)
|
||||
|
||||
const mockUseProviderContext = useProviderContext as jest.Mock
|
||||
|
||||
const getProviderContext = ({ usage = 0, total = 10, enableBilling = false } = {}) => ({
|
||||
plan: {
|
||||
usage: { annotatedResponse: usage },
|
||||
total: { annotatedResponse: total },
|
||||
},
|
||||
enableBilling,
|
||||
})
|
||||
|
||||
describe('AddAnnotationModal', () => {
|
||||
const baseProps = {
|
||||
isShow: true,
|
||||
onHide: jest.fn(),
|
||||
onAdd: jest.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockUseProviderContext.mockReturnValue(getProviderContext())
|
||||
})
|
||||
|
||||
const typeQuestion = (value: string) => {
|
||||
fireEvent.change(screen.getByPlaceholderText('appAnnotation.addModal.queryPlaceholder'), {
|
||||
target: { value },
|
||||
})
|
||||
}
|
||||
|
||||
const typeAnswer = (value: string) => {
|
||||
fireEvent.change(screen.getByPlaceholderText('appAnnotation.addModal.answerPlaceholder'), {
|
||||
target: { value },
|
||||
})
|
||||
}
|
||||
|
||||
test('should render modal title when drawer is visible', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
|
||||
expect(screen.getByText('appAnnotation.addModal.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should capture query input text when typing', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
typeQuestion('Sample question')
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.queryPlaceholder')).toHaveValue('Sample question')
|
||||
})
|
||||
|
||||
test('should capture answer input text when typing', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
typeAnswer('Sample answer')
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.answerPlaceholder')).toHaveValue('Sample answer')
|
||||
})
|
||||
|
||||
test('should show annotation full notice and disable submit when quota exceeded', () => {
|
||||
mockUseProviderContext.mockReturnValue(getProviderContext({ usage: 10, total: 10, enableBilling: true }))
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
|
||||
expect(screen.getByTestId('annotation-full')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.add' })).toBeDisabled()
|
||||
})
|
||||
|
||||
test('should call onAdd with form values when create next enabled', async () => {
|
||||
const onAdd = jest.fn().mockResolvedValue(undefined)
|
||||
render(<AddAnnotationModal {...baseProps} onAdd={onAdd} />)
|
||||
|
||||
typeQuestion('Question value')
|
||||
typeAnswer('Answer value')
|
||||
fireEvent.click(screen.getByTestId('checkbox-create-next-checkbox'))
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.add' }))
|
||||
})
|
||||
|
||||
expect(onAdd).toHaveBeenCalledWith({ question: 'Question value', answer: 'Answer value' })
|
||||
})
|
||||
|
||||
test('should reset fields after saving when create next enabled', async () => {
|
||||
const onAdd = jest.fn().mockResolvedValue(undefined)
|
||||
render(<AddAnnotationModal {...baseProps} onAdd={onAdd} />)
|
||||
|
||||
typeQuestion('Question value')
|
||||
typeAnswer('Answer value')
|
||||
const createNextToggle = screen.getByText('appAnnotation.addModal.createNext').previousElementSibling as HTMLElement
|
||||
fireEvent.click(createNextToggle)
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.add' }))
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.queryPlaceholder')).toHaveValue('')
|
||||
expect(screen.getByPlaceholderText('appAnnotation.addModal.answerPlaceholder')).toHaveValue('')
|
||||
})
|
||||
})
|
||||
|
||||
test('should show toast when validation fails for missing question', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.add' }))
|
||||
expect(mockToastNotify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'error',
|
||||
message: 'appAnnotation.errorMessage.queryRequired',
|
||||
}))
|
||||
})
|
||||
|
||||
test('should show toast when validation fails for missing answer', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
typeQuestion('Filled question')
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.add' }))
|
||||
|
||||
expect(mockToastNotify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'error',
|
||||
message: 'appAnnotation.errorMessage.answerRequired',
|
||||
}))
|
||||
})
|
||||
|
||||
test('should close modal when save completes and create next unchecked', async () => {
|
||||
const onAdd = jest.fn().mockResolvedValue(undefined)
|
||||
render(<AddAnnotationModal {...baseProps} onAdd={onAdd} />)
|
||||
|
||||
typeQuestion('Q')
|
||||
typeAnswer('A')
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.add' }))
|
||||
})
|
||||
|
||||
expect(baseProps.onHide).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('should allow cancel button to close the drawer', () => {
|
||||
render(<AddAnnotationModal {...baseProps} />)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.cancel' }))
|
||||
expect(baseProps.onHide).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -101,7 +101,7 @@ const AddAnnotationModal: FC<Props> = ({
|
||||
<div
|
||||
className='flex items-center space-x-2'
|
||||
>
|
||||
<Checkbox id='create-next-checkbox' checked={isCreateNext} onCheck={() => setIsCreateNext(!isCreateNext)} />
|
||||
<Checkbox checked={isCreateNext} onCheck={() => setIsCreateNext(!isCreateNext)} />
|
||||
<div>{t('appAnnotation.addModal.createNext')}</div>
|
||||
</div>
|
||||
<div className='mt-2 flex space-x-2'>
|
||||
|
||||
@@ -1,397 +0,0 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import EditItem, { EditItemType, EditTitle } from './index'
|
||||
|
||||
describe('EditTitle', () => {
|
||||
it('should render title content correctly', () => {
|
||||
// Arrange
|
||||
const props = { title: 'Test Title' }
|
||||
|
||||
// Act
|
||||
render(<EditTitle {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/test title/i)).toBeInTheDocument()
|
||||
// Should contain edit icon (svg element)
|
||||
expect(document.querySelector('svg')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should apply custom className when provided', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
title: 'Test Title',
|
||||
className: 'custom-class',
|
||||
}
|
||||
|
||||
// Act
|
||||
const { container } = render(<EditTitle {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/test title/i)).toBeInTheDocument()
|
||||
expect(container.querySelector('.custom-class')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('EditItem', () => {
|
||||
const defaultProps = {
|
||||
type: EditItemType.Query,
|
||||
content: 'Test content',
|
||||
onSave: jest.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Rendering tests (REQUIRED)
|
||||
describe('Rendering', () => {
|
||||
it('should render content correctly', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/test content/i)).toBeInTheDocument()
|
||||
// Should show item name (query or answer)
|
||||
expect(screen.getByText('appAnnotation.editModal.queryName')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render different item types correctly', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
type: EditItemType.Answer,
|
||||
content: 'Answer content',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/answer content/i)).toBeInTheDocument()
|
||||
expect(screen.getByText('appAnnotation.editModal.answerName')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show edit controls when not readonly', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('common.operation.edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should hide edit controls when readonly', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
readonly: true,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByText('common.operation.edit')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Props tests (REQUIRED)
|
||||
describe('Props', () => {
|
||||
it('should respect readonly prop for edit functionality', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
readonly: true,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/test content/i)).toBeInTheDocument()
|
||||
expect(screen.queryByText('common.operation.edit')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display provided content', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
content: 'Custom content for testing',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/custom content for testing/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render appropriate content based on type', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
type: EditItemType.Query,
|
||||
content: 'Question content',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/question content/i)).toBeInTheDocument()
|
||||
expect(screen.getByText('appAnnotation.editModal.queryName')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// User Interactions
|
||||
describe('User Interactions', () => {
|
||||
it('should activate edit mode when edit button is clicked', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
// Assert
|
||||
expect(screen.getByRole('textbox')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.save' })).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.cancel' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should save new content when save button is clicked', async () => {
|
||||
// Arrange
|
||||
const mockSave = jest.fn().mockResolvedValue(undefined)
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onSave: mockSave,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
// Type new content
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'Updated content')
|
||||
|
||||
// Save
|
||||
await user.click(screen.getByRole('button', { name: 'common.operation.save' }))
|
||||
|
||||
// Assert
|
||||
expect(mockSave).toHaveBeenCalledWith('Updated content')
|
||||
})
|
||||
|
||||
it('should exit edit mode when cancel button is clicked', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
await user.click(screen.getByRole('button', { name: 'common.operation.cancel' }))
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByRole('textbox')).not.toBeInTheDocument()
|
||||
expect(screen.getByText(/test content/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show content preview while typing', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.type(textarea, 'New content')
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(/new content/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onSave with correct content when saving', async () => {
|
||||
// Arrange
|
||||
const mockSave = jest.fn().mockResolvedValue(undefined)
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onSave: mockSave,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'Test save content')
|
||||
|
||||
// Save
|
||||
await user.click(screen.getByRole('button', { name: 'common.operation.save' }))
|
||||
|
||||
// Assert
|
||||
expect(mockSave).toHaveBeenCalledWith('Test save content')
|
||||
})
|
||||
|
||||
it('should show delete option when content changes', async () => {
|
||||
// Arrange
|
||||
const mockSave = jest.fn().mockResolvedValue(undefined)
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onSave: mockSave,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Enter edit mode and change content
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'Modified content')
|
||||
|
||||
// Save to trigger content change
|
||||
await user.click(screen.getByRole('button', { name: 'common.operation.save' }))
|
||||
|
||||
// Assert
|
||||
expect(mockSave).toHaveBeenCalledWith('Modified content')
|
||||
})
|
||||
|
||||
it('should handle keyboard interactions in edit mode', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
const textarea = screen.getByRole('textbox')
|
||||
|
||||
// Test typing
|
||||
await user.type(textarea, 'Keyboard test')
|
||||
|
||||
// Assert
|
||||
expect(textarea).toHaveValue('Keyboard test')
|
||||
expect(screen.getByText(/keyboard test/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// State Management
|
||||
describe('State Management', () => {
|
||||
it('should reset newContent when content prop changes', async () => {
|
||||
// Arrange
|
||||
const { rerender } = render(<EditItem {...defaultProps} />)
|
||||
|
||||
// Act - Enter edit mode and type something
|
||||
const user = userEvent.setup()
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'New content')
|
||||
|
||||
// Rerender with new content prop
|
||||
rerender(<EditItem {...defaultProps} content="Updated content" />)
|
||||
|
||||
// Assert - Textarea value should be reset due to useEffect
|
||||
expect(textarea).toHaveValue('')
|
||||
})
|
||||
|
||||
it('should preserve edit state across content changes', async () => {
|
||||
// Arrange
|
||||
const { rerender } = render(<EditItem {...defaultProps} />)
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act - Enter edit mode
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
|
||||
// Rerender with new content
|
||||
rerender(<EditItem {...defaultProps} content="Updated content" />)
|
||||
|
||||
// Assert - Should still be in edit mode
|
||||
expect(screen.getByRole('textbox')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Edge Cases (REQUIRED)
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty content', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
content: '',
|
||||
}
|
||||
|
||||
// Act
|
||||
const { container } = render(<EditItem {...props} />)
|
||||
|
||||
// Assert - Should render without crashing
|
||||
// Check that the component renders properly with empty content
|
||||
expect(container.querySelector('.grow')).toBeInTheDocument()
|
||||
// Should still show edit button
|
||||
expect(screen.getByText('common.operation.edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle very long content', () => {
|
||||
// Arrange
|
||||
const longContent = 'A'.repeat(1000)
|
||||
const props = {
|
||||
...defaultProps,
|
||||
content: longContent,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(longContent)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle content with special characters', () => {
|
||||
// Arrange
|
||||
const specialContent = 'Content with & < > " \' characters'
|
||||
const props = {
|
||||
...defaultProps,
|
||||
content: specialContent,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(specialContent)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle rapid edit/cancel operations', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditItem {...props} />)
|
||||
|
||||
// Rapid edit/cancel operations
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
await user.click(screen.getByText('common.operation.cancel'))
|
||||
await user.click(screen.getByText('common.operation.edit'))
|
||||
await user.click(screen.getByText('common.operation.cancel'))
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByRole('textbox')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('Test content')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,408 +0,0 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import Toast, { type IToastProps, type ToastHandle } from '@/app/components/base/toast'
|
||||
import EditAnnotationModal from './index'
|
||||
|
||||
// Mock only external dependencies
|
||||
jest.mock('@/service/annotation', () => ({
|
||||
addAnnotation: jest.fn(),
|
||||
editAnnotation: jest.fn(),
|
||||
}))
|
||||
|
||||
jest.mock('@/context/provider-context', () => ({
|
||||
useProviderContext: () => ({
|
||||
plan: {
|
||||
usage: { annotatedResponse: 5 },
|
||||
total: { annotatedResponse: 10 },
|
||||
},
|
||||
enableBilling: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
jest.mock('@/hooks/use-timestamp', () => ({
|
||||
__esModule: true,
|
||||
default: () => ({
|
||||
formatTime: () => '2023-12-01 10:30:00',
|
||||
}),
|
||||
}))
|
||||
|
||||
// Note: i18n is automatically mocked by Jest via __mocks__/react-i18next.ts
|
||||
|
||||
jest.mock('@/app/components/billing/annotation-full', () => ({
|
||||
__esModule: true,
|
||||
default: () => <div data-testid="annotation-full" />,
|
||||
}))
|
||||
|
||||
type ToastNotifyProps = Pick<IToastProps, 'type' | 'size' | 'message' | 'duration' | 'className' | 'customComponent' | 'onClose'>
|
||||
type ToastWithNotify = typeof Toast & { notify: (props: ToastNotifyProps) => ToastHandle }
|
||||
const toastWithNotify = Toast as unknown as ToastWithNotify
|
||||
const toastNotifySpy = jest.spyOn(toastWithNotify, 'notify').mockReturnValue({ clear: jest.fn() })
|
||||
|
||||
const { addAnnotation: mockAddAnnotation, editAnnotation: mockEditAnnotation } = jest.requireMock('@/service/annotation') as {
|
||||
addAnnotation: jest.Mock
|
||||
editAnnotation: jest.Mock
|
||||
}
|
||||
|
||||
describe('EditAnnotationModal', () => {
|
||||
const defaultProps = {
|
||||
isShow: true,
|
||||
onHide: jest.fn(),
|
||||
appId: 'test-app-id',
|
||||
query: 'Test query',
|
||||
answer: 'Test answer',
|
||||
onEdited: jest.fn(),
|
||||
onAdded: jest.fn(),
|
||||
onRemove: jest.fn(),
|
||||
}
|
||||
|
||||
afterAll(() => {
|
||||
toastNotifySpy.mockRestore()
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockAddAnnotation.mockResolvedValue({
|
||||
id: 'test-id',
|
||||
account: { name: 'Test User' },
|
||||
})
|
||||
mockEditAnnotation.mockResolvedValue({})
|
||||
})
|
||||
|
||||
// Rendering tests (REQUIRED)
|
||||
describe('Rendering', () => {
|
||||
it('should render modal when isShow is true', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Check for modal title as it appears in the mock
|
||||
expect(screen.getByText('appAnnotation.editModal.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render modal when isShow is false', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps, isShow: false }
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByText('appAnnotation.editModal.title')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display query and answer sections', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Look for query and answer content
|
||||
expect(screen.getByText('Test query')).toBeInTheDocument()
|
||||
expect(screen.getByText('Test answer')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Props tests (REQUIRED)
|
||||
describe('Props', () => {
|
||||
it('should handle different query and answer content', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
query: 'Custom query content',
|
||||
answer: 'Custom answer content',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Check content is displayed
|
||||
expect(screen.getByText('Custom query content')).toBeInTheDocument()
|
||||
expect(screen.getByText('Custom answer content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show remove option when annotationId is provided', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
annotationId: 'test-annotation-id',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Remove option should be present (using pattern)
|
||||
expect(screen.getByText('appAnnotation.editModal.removeThisCache')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// User Interactions
|
||||
describe('User Interactions', () => {
|
||||
it('should enable editing for query and answer sections', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Edit links should be visible (using text content)
|
||||
const editLinks = screen.getAllByText(/common\.operation\.edit/i)
|
||||
expect(editLinks).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should show remove option when annotationId is provided', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
annotationId: 'test-annotation-id',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appAnnotation.editModal.removeThisCache')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should save content when edited', async () => {
|
||||
// Arrange
|
||||
const mockOnAdded = jest.fn()
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onAdded: mockOnAdded,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Mock API response
|
||||
mockAddAnnotation.mockResolvedValueOnce({
|
||||
id: 'test-annotation-id',
|
||||
account: { name: 'Test User' },
|
||||
})
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Find and click edit link for query
|
||||
const editLinks = screen.getAllByText(/common\.operation\.edit/i)
|
||||
await user.click(editLinks[0])
|
||||
|
||||
// Find textarea and enter new content
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'New query content')
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByRole('button', { name: 'common.operation.save' })
|
||||
await user.click(saveButton)
|
||||
|
||||
// Assert
|
||||
expect(mockAddAnnotation).toHaveBeenCalledWith('test-app-id', {
|
||||
question: 'New query content',
|
||||
answer: 'Test answer',
|
||||
message_id: undefined,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// API Calls
|
||||
describe('API Calls', () => {
|
||||
it('should call addAnnotation when saving new annotation', async () => {
|
||||
// Arrange
|
||||
const mockOnAdded = jest.fn()
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onAdded: mockOnAdded,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Mock the API response
|
||||
mockAddAnnotation.mockResolvedValueOnce({
|
||||
id: 'test-annotation-id',
|
||||
account: { name: 'Test User' },
|
||||
})
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Edit query content
|
||||
const editLinks = screen.getAllByText(/common\.operation\.edit/i)
|
||||
await user.click(editLinks[0])
|
||||
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'Updated query')
|
||||
|
||||
const saveButton = screen.getByRole('button', { name: 'common.operation.save' })
|
||||
await user.click(saveButton)
|
||||
|
||||
// Assert
|
||||
expect(mockAddAnnotation).toHaveBeenCalledWith('test-app-id', {
|
||||
question: 'Updated query',
|
||||
answer: 'Test answer',
|
||||
message_id: undefined,
|
||||
})
|
||||
})
|
||||
|
||||
it('should call editAnnotation when updating existing annotation', async () => {
|
||||
// Arrange
|
||||
const mockOnEdited = jest.fn()
|
||||
const props = {
|
||||
...defaultProps,
|
||||
annotationId: 'test-annotation-id',
|
||||
messageId: 'test-message-id',
|
||||
onEdited: mockOnEdited,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Edit query content
|
||||
const editLinks = screen.getAllByText(/common\.operation\.edit/i)
|
||||
await user.click(editLinks[0])
|
||||
|
||||
const textarea = screen.getByRole('textbox')
|
||||
await user.clear(textarea)
|
||||
await user.type(textarea, 'Modified query')
|
||||
|
||||
const saveButton = screen.getByRole('button', { name: 'common.operation.save' })
|
||||
await user.click(saveButton)
|
||||
|
||||
// Assert
|
||||
expect(mockEditAnnotation).toHaveBeenCalledWith(
|
||||
'test-app-id',
|
||||
'test-annotation-id',
|
||||
{
|
||||
message_id: 'test-message-id',
|
||||
question: 'Modified query',
|
||||
answer: 'Test answer',
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
// State Management
|
||||
describe('State Management', () => {
|
||||
it('should initialize with closed confirm modal', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Confirm dialog should not be visible initially
|
||||
expect(screen.queryByText('appDebug.feature.annotation.removeConfirm')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show confirm modal when remove is clicked', async () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
annotationId: 'test-annotation-id',
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
await user.click(screen.getByText('appAnnotation.editModal.removeThisCache'))
|
||||
|
||||
// Assert - Confirmation dialog should appear
|
||||
expect(screen.getByText('appDebug.feature.annotation.removeConfirm')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onRemove when removal is confirmed', async () => {
|
||||
// Arrange
|
||||
const mockOnRemove = jest.fn()
|
||||
const props = {
|
||||
...defaultProps,
|
||||
annotationId: 'test-annotation-id',
|
||||
onRemove: mockOnRemove,
|
||||
}
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Click remove
|
||||
await user.click(screen.getByText('appAnnotation.editModal.removeThisCache'))
|
||||
|
||||
// Click confirm
|
||||
const confirmButton = screen.getByRole('button', { name: 'common.operation.confirm' })
|
||||
await user.click(confirmButton)
|
||||
|
||||
// Assert
|
||||
expect(mockOnRemove).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
// Edge Cases (REQUIRED)
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty query and answer', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
query: '',
|
||||
answer: '',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appAnnotation.editModal.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle very long content', () => {
|
||||
// Arrange
|
||||
const longQuery = 'Q'.repeat(1000)
|
||||
const longAnswer = 'A'.repeat(1000)
|
||||
const props = {
|
||||
...defaultProps,
|
||||
query: longQuery,
|
||||
answer: longAnswer,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(longQuery)).toBeInTheDocument()
|
||||
expect(screen.getByText(longAnswer)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle special characters in content', () => {
|
||||
// Arrange
|
||||
const specialQuery = 'Query with & < > " \' characters'
|
||||
const specialAnswer = 'Answer with & < > " \' characters'
|
||||
const props = {
|
||||
...defaultProps,
|
||||
query: specialQuery,
|
||||
answer: specialAnswer,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText(specialQuery)).toBeInTheDocument()
|
||||
expect(screen.getByText(specialAnswer)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle onlyEditResponse prop', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
onlyEditResponse: true,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<EditAnnotationModal {...props} />)
|
||||
|
||||
// Assert - Query should be readonly, answer should be editable
|
||||
const editLinks = screen.queryAllByText(/common\.operation\.edit/i)
|
||||
expect(editLinks).toHaveLength(1) // Only answer should have edit button
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,22 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import CannotQueryDataset from './cannot-query-dataset'
|
||||
|
||||
describe('CannotQueryDataset WarningMask', () => {
|
||||
test('should render dataset warning copy and action button', () => {
|
||||
const onConfirm = jest.fn()
|
||||
render(<CannotQueryDataset onConfirm={onConfirm} />)
|
||||
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.unableToQueryDataSet')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.unableToQueryDataSetTip')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'appDebug.feature.dataSet.queryVariable.ok' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should invoke onConfirm when OK button clicked', () => {
|
||||
const onConfirm = jest.fn()
|
||||
render(<CannotQueryDataset onConfirm={onConfirm} />)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'appDebug.feature.dataSet.queryVariable.ok' }))
|
||||
expect(onConfirm).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
@@ -1,39 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import FormattingChanged from './formatting-changed'
|
||||
|
||||
describe('FormattingChanged WarningMask', () => {
|
||||
test('should display translation text and both actions', () => {
|
||||
const onConfirm = jest.fn()
|
||||
const onCancel = jest.fn()
|
||||
|
||||
render(
|
||||
<FormattingChanged
|
||||
onConfirm={onConfirm}
|
||||
onCancel={onCancel}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('appDebug.formattingChangedTitle')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.formattingChangedText')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.cancel' })).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: /common\.operation\.refresh/ })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should call callbacks when buttons are clicked', () => {
|
||||
const onConfirm = jest.fn()
|
||||
const onCancel = jest.fn()
|
||||
render(
|
||||
<FormattingChanged
|
||||
onConfirm={onConfirm}
|
||||
onCancel={onCancel}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /common\.operation\.refresh/ }))
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.cancel' }))
|
||||
|
||||
expect(onConfirm).toHaveBeenCalledTimes(1)
|
||||
expect(onCancel).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
@@ -1,26 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import HasNotSetAPI from './has-not-set-api'
|
||||
|
||||
describe('HasNotSetAPI WarningMask', () => {
|
||||
test('should show default title when trial not finished', () => {
|
||||
render(<HasNotSetAPI isTrailFinished={false} onSetting={jest.fn()} />)
|
||||
|
||||
expect(screen.getByText('appDebug.notSetAPIKey.title')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.notSetAPIKey.description')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should show trail finished title when flag is true', () => {
|
||||
render(<HasNotSetAPI isTrailFinished onSetting={jest.fn()} />)
|
||||
|
||||
expect(screen.getByText('appDebug.notSetAPIKey.trailFinished')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should call onSetting when primary button clicked', () => {
|
||||
const onSetting = jest.fn()
|
||||
render(<HasNotSetAPI isTrailFinished={false} onSetting={onSetting} />)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'appDebug.notSetAPIKey.settingBtn' }))
|
||||
expect(onSetting).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
@@ -1,25 +0,0 @@
|
||||
import React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import WarningMask from './index'
|
||||
|
||||
describe('WarningMask', () => {
|
||||
// Rendering of title, description, and footer content
|
||||
describe('Rendering', () => {
|
||||
test('should display provided title, description, and footer node', () => {
|
||||
const footer = <button type="button">Retry</button>
|
||||
// Arrange
|
||||
render(
|
||||
<WarningMask
|
||||
title="Access Restricted"
|
||||
description="Only workspace owners may modify this section."
|
||||
footer={footer}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('Access Restricted')).toBeInTheDocument()
|
||||
expect(screen.getByText('Only workspace owners may modify this section.')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'Retry' })).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,121 +0,0 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import ConfigString, { type IConfigStringProps } from './index'
|
||||
|
||||
const renderConfigString = (props?: Partial<IConfigStringProps>) => {
|
||||
const onChange = jest.fn()
|
||||
const defaultProps: IConfigStringProps = {
|
||||
value: 5,
|
||||
maxLength: 10,
|
||||
modelId: 'model-id',
|
||||
onChange,
|
||||
}
|
||||
|
||||
render(<ConfigString {...defaultProps} {...props} />)
|
||||
|
||||
return { onChange }
|
||||
}
|
||||
|
||||
describe('ConfigString', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render numeric input with bounds', () => {
|
||||
renderConfigString({ value: 3, maxLength: 8 })
|
||||
|
||||
const input = screen.getByRole('spinbutton')
|
||||
|
||||
expect(input).toHaveValue(3)
|
||||
expect(input).toHaveAttribute('min', '1')
|
||||
expect(input).toHaveAttribute('max', '8')
|
||||
})
|
||||
|
||||
it('should render empty input when value is undefined', () => {
|
||||
const { onChange } = renderConfigString({ value: undefined })
|
||||
|
||||
expect(screen.getByRole('spinbutton')).toHaveValue(null)
|
||||
expect(onChange).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Effect behavior', () => {
|
||||
it('should clamp initial value to maxLength when it exceeds limit', async () => {
|
||||
const onChange = jest.fn()
|
||||
render(
|
||||
<ConfigString
|
||||
value={15}
|
||||
maxLength={10}
|
||||
modelId="model-id"
|
||||
onChange={onChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onChange).toHaveBeenCalledWith(10)
|
||||
})
|
||||
expect(onChange).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should clamp when updated prop value exceeds maxLength', async () => {
|
||||
const onChange = jest.fn()
|
||||
const { rerender } = render(
|
||||
<ConfigString
|
||||
value={4}
|
||||
maxLength={6}
|
||||
modelId="model-id"
|
||||
onChange={onChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
rerender(
|
||||
<ConfigString
|
||||
value={9}
|
||||
maxLength={6}
|
||||
modelId="model-id"
|
||||
onChange={onChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onChange).toHaveBeenCalledWith(6)
|
||||
})
|
||||
expect(onChange).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('User interactions', () => {
|
||||
it('should clamp entered value above maxLength', () => {
|
||||
const { onChange } = renderConfigString({ maxLength: 7 })
|
||||
|
||||
fireEvent.change(screen.getByRole('spinbutton'), { target: { value: '12' } })
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith(7)
|
||||
})
|
||||
|
||||
it('should raise value below minimum to one', () => {
|
||||
const { onChange } = renderConfigString()
|
||||
|
||||
fireEvent.change(screen.getByRole('spinbutton'), { target: { value: '0' } })
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith(1)
|
||||
})
|
||||
|
||||
it('should forward parsed value when within bounds', () => {
|
||||
const { onChange } = renderConfigString({ maxLength: 9 })
|
||||
|
||||
fireEvent.change(screen.getByRole('spinbutton'), { target: { value: '7' } })
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith(7)
|
||||
})
|
||||
|
||||
it('should pass through NaN when input is cleared', () => {
|
||||
const { onChange } = renderConfigString()
|
||||
|
||||
fireEvent.change(screen.getByRole('spinbutton'), { target: { value: '' } })
|
||||
|
||||
expect(onChange).toHaveBeenCalledTimes(1)
|
||||
expect(onChange.mock.calls[0][0]).toBeNaN()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,45 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import SelectTypeItem from './index'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
|
||||
describe('SelectTypeItem', () => {
|
||||
// Rendering pathways based on type and selection state
|
||||
describe('Rendering', () => {
|
||||
test('should render ok', () => {
|
||||
// Arrange
|
||||
const { container } = render(
|
||||
<SelectTypeItem
|
||||
type={InputVarType.textInput}
|
||||
selected={false}
|
||||
onClick={jest.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appDebug.variableConfig.text-input')).toBeInTheDocument()
|
||||
expect(container.querySelector('svg')).not.toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
// User interaction outcomes
|
||||
describe('Interactions', () => {
|
||||
test('should trigger onClick when item is pressed', () => {
|
||||
const handleClick = jest.fn()
|
||||
// Arrange
|
||||
render(
|
||||
<SelectTypeItem
|
||||
type={InputVarType.paragraph}
|
||||
selected={false}
|
||||
onClick={handleClick}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
fireEvent.click(screen.getByText('appDebug.variableConfig.paragraph'))
|
||||
|
||||
// Assert
|
||||
expect(handleClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,242 +0,0 @@
|
||||
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import Item from './index'
|
||||
import type React from 'react'
|
||||
import type { DataSet } from '@/models/datasets'
|
||||
import { ChunkingMode, DataSourceType, DatasetPermission } from '@/models/datasets'
|
||||
import type { IndexingType } from '@/app/components/datasets/create/step-two'
|
||||
import type { RetrievalConfig } from '@/types/app'
|
||||
import { RETRIEVE_METHOD } from '@/types/app'
|
||||
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
|
||||
jest.mock('../settings-modal', () => ({
|
||||
__esModule: true,
|
||||
default: ({ onSave, onCancel, currentDataset }: any) => (
|
||||
<div>
|
||||
<div>Mock settings modal</div>
|
||||
<button onClick={() => onSave({ ...currentDataset, name: 'Updated dataset' })}>Save changes</button>
|
||||
<button onClick={onCancel}>Close</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
jest.mock('@/hooks/use-breakpoints', () => {
|
||||
const actual = jest.requireActual('@/hooks/use-breakpoints')
|
||||
return {
|
||||
__esModule: true,
|
||||
...actual,
|
||||
default: jest.fn(() => actual.MediaType.pc),
|
||||
}
|
||||
})
|
||||
|
||||
const mockedUseBreakpoints = useBreakpoints as jest.MockedFunction<typeof useBreakpoints>
|
||||
|
||||
const baseRetrievalConfig: RetrievalConfig = {
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: {
|
||||
reranking_provider_name: 'provider',
|
||||
reranking_model_name: 'rerank-model',
|
||||
},
|
||||
top_k: 4,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
}
|
||||
|
||||
const defaultIndexingTechnique: IndexingType = 'high_quality' as IndexingType
|
||||
|
||||
const createDataset = (overrides: Partial<DataSet> = {}): DataSet => {
|
||||
const {
|
||||
retrieval_model,
|
||||
retrieval_model_dict,
|
||||
icon_info,
|
||||
...restOverrides
|
||||
} = overrides
|
||||
|
||||
const resolvedRetrievalModelDict = {
|
||||
...baseRetrievalConfig,
|
||||
...retrieval_model_dict,
|
||||
}
|
||||
const resolvedRetrievalModel = {
|
||||
...baseRetrievalConfig,
|
||||
...(retrieval_model ?? retrieval_model_dict),
|
||||
}
|
||||
|
||||
const defaultIconInfo = {
|
||||
icon: '📘',
|
||||
icon_type: 'emoji',
|
||||
icon_background: '#FFEAD5',
|
||||
icon_url: '',
|
||||
}
|
||||
|
||||
const resolvedIconInfo = ('icon_info' in overrides)
|
||||
? icon_info
|
||||
: defaultIconInfo
|
||||
|
||||
return {
|
||||
id: 'dataset-id',
|
||||
name: 'Dataset Name',
|
||||
indexing_status: 'completed',
|
||||
icon_info: resolvedIconInfo as DataSet['icon_info'],
|
||||
description: 'A test dataset',
|
||||
permission: DatasetPermission.onlyMe,
|
||||
data_source_type: DataSourceType.FILE,
|
||||
indexing_technique: defaultIndexingTechnique,
|
||||
author_name: 'author',
|
||||
created_by: 'creator',
|
||||
updated_by: 'updater',
|
||||
updated_at: 0,
|
||||
app_count: 0,
|
||||
doc_form: ChunkingMode.text,
|
||||
document_count: 0,
|
||||
total_document_count: 0,
|
||||
total_available_documents: 0,
|
||||
word_count: 0,
|
||||
provider: 'dify',
|
||||
embedding_model: 'text-embedding',
|
||||
embedding_model_provider: 'openai',
|
||||
embedding_available: true,
|
||||
retrieval_model_dict: resolvedRetrievalModelDict,
|
||||
retrieval_model: resolvedRetrievalModel,
|
||||
tags: [],
|
||||
external_knowledge_info: {
|
||||
external_knowledge_id: 'external-id',
|
||||
external_knowledge_api_id: 'api-id',
|
||||
external_knowledge_api_name: 'api-name',
|
||||
external_knowledge_api_endpoint: 'https://endpoint',
|
||||
},
|
||||
external_retrieval_model: {
|
||||
top_k: 2,
|
||||
score_threshold: 0.5,
|
||||
score_threshold_enabled: true,
|
||||
},
|
||||
built_in_field_enabled: true,
|
||||
doc_metadata: [],
|
||||
keyword_number: 3,
|
||||
pipeline_id: 'pipeline-id',
|
||||
is_published: true,
|
||||
runtime_mode: 'general',
|
||||
enable_api: true,
|
||||
is_multimodal: false,
|
||||
...restOverrides,
|
||||
}
|
||||
}
|
||||
|
||||
const renderItem = (config: DataSet, props?: Partial<React.ComponentProps<typeof Item>>) => {
|
||||
const onSave = jest.fn()
|
||||
const onRemove = jest.fn()
|
||||
|
||||
render(
|
||||
<Item
|
||||
config={config}
|
||||
onSave={onSave}
|
||||
onRemove={onRemove}
|
||||
{...props}
|
||||
/>,
|
||||
)
|
||||
|
||||
return { onSave, onRemove }
|
||||
}
|
||||
|
||||
describe('dataset-config/card-item', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockedUseBreakpoints.mockReturnValue(MediaType.pc)
|
||||
})
|
||||
|
||||
it('should render dataset details with indexing and external badges', () => {
|
||||
const dataset = createDataset({
|
||||
provider: 'external',
|
||||
retrieval_model_dict: {
|
||||
...baseRetrievalConfig,
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
},
|
||||
})
|
||||
|
||||
renderItem(dataset)
|
||||
|
||||
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
|
||||
const actionButtons = within(card).getAllByRole('button', { hidden: true })
|
||||
|
||||
expect(screen.getByText(dataset.name)).toBeInTheDocument()
|
||||
expect(screen.getByText('dataset.indexingTechnique.high_quality · dataset.indexingMethod.semantic_search')).toBeInTheDocument()
|
||||
expect(screen.getByText('dataset.externalTag')).toBeInTheDocument()
|
||||
expect(actionButtons).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should open settings drawer from edit action and close after saving', async () => {
|
||||
const user = userEvent.setup()
|
||||
const dataset = createDataset()
|
||||
const { onSave } = renderItem(dataset)
|
||||
|
||||
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
|
||||
const [editButton] = within(card).getAllByRole('button', { hidden: true })
|
||||
await user.click(editButton)
|
||||
|
||||
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('dialog')).toBeVisible()
|
||||
})
|
||||
|
||||
await user.click(screen.getByText('Save changes'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onSave).toHaveBeenCalledWith(expect.objectContaining({ name: 'Updated dataset' }))
|
||||
})
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Mock settings modal')).not.toBeVisible()
|
||||
})
|
||||
})
|
||||
|
||||
it('should call onRemove and toggle destructive state on hover', async () => {
|
||||
const user = userEvent.setup()
|
||||
const dataset = createDataset()
|
||||
const { onRemove } = renderItem(dataset)
|
||||
|
||||
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
|
||||
const buttons = within(card).getAllByRole('button', { hidden: true })
|
||||
const deleteButton = buttons[buttons.length - 1]
|
||||
|
||||
expect(deleteButton.className).not.toContain('action-btn-destructive')
|
||||
|
||||
fireEvent.mouseEnter(deleteButton)
|
||||
expect(deleteButton.className).toContain('action-btn-destructive')
|
||||
expect(card.className).toContain('border-state-destructive-border')
|
||||
|
||||
fireEvent.mouseLeave(deleteButton)
|
||||
expect(deleteButton.className).not.toContain('action-btn-destructive')
|
||||
|
||||
await user.click(deleteButton)
|
||||
expect(onRemove).toHaveBeenCalledWith(dataset.id)
|
||||
})
|
||||
|
||||
it('should use default icon information when icon details are missing', () => {
|
||||
const dataset = createDataset({ icon_info: undefined })
|
||||
|
||||
renderItem(dataset)
|
||||
|
||||
const nameElement = screen.getByText(dataset.name)
|
||||
const iconElement = nameElement.parentElement?.firstElementChild as HTMLElement
|
||||
|
||||
expect(iconElement).toHaveStyle({ background: '#FFF4ED' })
|
||||
expect(iconElement.querySelector('em-emoji')).toHaveAttribute('id', '📙')
|
||||
})
|
||||
|
||||
it('should apply mask overlay on mobile when drawer is open', async () => {
|
||||
mockedUseBreakpoints.mockReturnValue(MediaType.mobile)
|
||||
const user = userEvent.setup()
|
||||
const dataset = createDataset()
|
||||
|
||||
renderItem(dataset)
|
||||
|
||||
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
|
||||
const [editButton] = within(card).getAllByRole('button', { hidden: true })
|
||||
await user.click(editButton)
|
||||
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
|
||||
|
||||
const overlay = Array.from(document.querySelectorAll('[class]'))
|
||||
.find(element => element.className.toString().includes('bg-black/30'))
|
||||
|
||||
expect(overlay).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
@@ -1,299 +0,0 @@
|
||||
import * as React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import ContextVar from './index'
|
||||
import type { Props } from './var-picker'
|
||||
|
||||
// Mock external dependencies only
|
||||
jest.mock('next/navigation', () => ({
|
||||
useRouter: () => ({ push: jest.fn() }),
|
||||
usePathname: () => '/test',
|
||||
}))
|
||||
|
||||
type PortalToFollowElemProps = {
|
||||
children: React.ReactNode
|
||||
open?: boolean
|
||||
onOpenChange?: (open: boolean) => void
|
||||
}
|
||||
type PortalToFollowElemTriggerProps = React.HTMLAttributes<HTMLElement> & { children?: React.ReactNode; asChild?: boolean }
|
||||
type PortalToFollowElemContentProps = React.HTMLAttributes<HTMLDivElement> & { children?: React.ReactNode }
|
||||
|
||||
jest.mock('@/app/components/base/portal-to-follow-elem', () => {
|
||||
const PortalContext = React.createContext({ open: false })
|
||||
|
||||
const PortalToFollowElem = ({ children, open }: PortalToFollowElemProps) => {
|
||||
return (
|
||||
<PortalContext.Provider value={{ open: !!open }}>
|
||||
<div data-testid="portal">{children}</div>
|
||||
</PortalContext.Provider>
|
||||
)
|
||||
}
|
||||
|
||||
const PortalToFollowElemContent = ({ children, ...props }: PortalToFollowElemContentProps) => {
|
||||
const { open } = React.useContext(PortalContext)
|
||||
if (!open) return null
|
||||
return (
|
||||
<div data-testid="portal-content" {...props}>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const PortalToFollowElemTrigger = ({ children, asChild, ...props }: PortalToFollowElemTriggerProps) => {
|
||||
if (asChild && React.isValidElement(children)) {
|
||||
return React.cloneElement(children, {
|
||||
...props,
|
||||
'data-testid': 'portal-trigger',
|
||||
} as React.HTMLAttributes<HTMLElement>)
|
||||
}
|
||||
return (
|
||||
<div data-testid="portal-trigger" {...props}>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
PortalToFollowElem,
|
||||
PortalToFollowElemContent,
|
||||
PortalToFollowElemTrigger,
|
||||
}
|
||||
})
|
||||
|
||||
describe('ContextVar', () => {
|
||||
const mockOptions: Props['options'] = [
|
||||
{ name: 'Variable 1', value: 'var1', type: 'string' },
|
||||
{ name: 'Variable 2', value: 'var2', type: 'number' },
|
||||
]
|
||||
|
||||
const defaultProps: Props = {
|
||||
value: 'var1',
|
||||
options: mockOptions,
|
||||
onChange: jest.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Rendering tests (REQUIRED)
|
||||
describe('Rendering', () => {
|
||||
it('should display query variable selector when options are provided', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show selected variable with proper formatting when value is provided', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('var1')).toBeInTheDocument()
|
||||
expect(screen.getByText('{{')).toBeInTheDocument()
|
||||
expect(screen.getByText('}}')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Props tests (REQUIRED)
|
||||
describe('Props', () => {
|
||||
it('should display selected variable when value prop is provided', () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps, value: 'var2' }
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert - Should display the selected value
|
||||
expect(screen.getByText('var2')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show placeholder text when no value is selected', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
value: undefined,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert - Should show placeholder instead of variable
|
||||
expect(screen.queryByText('var1')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display custom tip message when notSelectedVarTip is provided', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
value: undefined,
|
||||
notSelectedVarTip: 'Select a variable',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('Select a variable')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should apply custom className to VarPicker when provided', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
className: 'custom-class',
|
||||
}
|
||||
|
||||
// Act
|
||||
const { container } = render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(container.querySelector('.custom-class')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// User Interactions
|
||||
describe('User Interactions', () => {
|
||||
it('should call onChange when user selects a different variable', async () => {
|
||||
// Arrange
|
||||
const onChange = jest.fn()
|
||||
const props = { ...defaultProps, onChange }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
const triggers = screen.getAllByTestId('portal-trigger')
|
||||
const varPickerTrigger = triggers[triggers.length - 1]
|
||||
|
||||
await user.click(varPickerTrigger)
|
||||
expect(screen.getByTestId('portal-content')).toBeInTheDocument()
|
||||
|
||||
// Select a different option
|
||||
const options = screen.getAllByText('var2')
|
||||
expect(options.length).toBeGreaterThan(0)
|
||||
await user.click(options[0])
|
||||
|
||||
// Assert
|
||||
expect(onChange).toHaveBeenCalledWith('var2')
|
||||
expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should toggle dropdown when clicking the trigger button', async () => {
|
||||
// Arrange
|
||||
const props = { ...defaultProps }
|
||||
const user = userEvent.setup()
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
const triggers = screen.getAllByTestId('portal-trigger')
|
||||
const varPickerTrigger = triggers[triggers.length - 1]
|
||||
|
||||
// Open dropdown
|
||||
await user.click(varPickerTrigger)
|
||||
expect(screen.getByTestId('portal-content')).toBeInTheDocument()
|
||||
|
||||
// Close dropdown
|
||||
await user.click(varPickerTrigger)
|
||||
expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Edge Cases (REQUIRED)
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle undefined value gracefully', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
value: undefined,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.title')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
|
||||
expect(screen.queryByText('var1')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle empty options array', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
options: [],
|
||||
value: undefined,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.title')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle null value without crashing', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
value: undefined,
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.title')).toBeInTheDocument()
|
||||
expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle options with different data types', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
options: [
|
||||
{ name: 'String Var', value: 'strVar', type: 'string' },
|
||||
{ name: 'Number Var', value: '42', type: 'number' },
|
||||
{ name: 'Boolean Var', value: 'true', type: 'boolean' },
|
||||
],
|
||||
value: 'strVar',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('strVar')).toBeInTheDocument()
|
||||
expect(screen.getByText('{{')).toBeInTheDocument()
|
||||
expect(screen.getByText('}}')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render variable names with special characters safely', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
...defaultProps,
|
||||
options: [
|
||||
{ name: 'Variable with & < > " \' characters', value: 'specialVar', type: 'string' },
|
||||
],
|
||||
value: 'specialVar',
|
||||
}
|
||||
|
||||
// Act
|
||||
render(<ContextVar {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('specialVar')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,392 +0,0 @@
|
||||
import * as React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import VarPicker, { type Props } from './var-picker'
|
||||
|
||||
// Mock external dependencies only
|
||||
jest.mock('next/navigation', () => ({
|
||||
useRouter: () => ({ push: jest.fn() }),
|
||||
usePathname: () => '/test',
|
||||
}))
|
||||
|
||||
type PortalToFollowElemProps = {
|
||||
children: React.ReactNode
|
||||
open?: boolean
|
||||
onOpenChange?: (open: boolean) => void
|
||||
}
|
||||
type PortalToFollowElemTriggerProps = React.HTMLAttributes<HTMLElement> & { children?: React.ReactNode; asChild?: boolean }
|
||||
type PortalToFollowElemContentProps = React.HTMLAttributes<HTMLDivElement> & { children?: React.ReactNode }
|
||||
|
||||
jest.mock('@/app/components/base/portal-to-follow-elem', () => {
|
||||
const PortalContext = React.createContext({ open: false })
|
||||
|
||||
const PortalToFollowElem = ({ children, open }: PortalToFollowElemProps) => {
|
||||
return (
|
||||
<PortalContext.Provider value={{ open: !!open }}>
|
||||
<div data-testid="portal">{children}</div>
|
||||
</PortalContext.Provider>
|
||||
)
|
||||
}
|
||||
|
||||
const PortalToFollowElemContent = ({ children, ...props }: PortalToFollowElemContentProps) => {
|
||||
const { open } = React.useContext(PortalContext)
|
||||
if (!open) return null
|
||||
return (
|
||||
<div data-testid="portal-content" {...props}>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const PortalToFollowElemTrigger = ({ children, asChild, ...props }: PortalToFollowElemTriggerProps) => {
|
||||
if (asChild && React.isValidElement(children)) {
|
||||
return React.cloneElement(children, {
|
||||
...props,
|
||||
'data-testid': 'portal-trigger',
|
||||
} as React.HTMLAttributes<HTMLElement>)
|
||||
}
|
||||
return (
|
||||
<div data-testid="portal-trigger" {...props}>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
PortalToFollowElem,
|
||||
PortalToFollowElemContent,
|
||||
PortalToFollowElemTrigger,
|
||||
}
|
||||
})
|
||||
|
||||
// Behavioral tests for VarPicker. The portal is mocked (see factory above),
// so open/closed state is observed via the 'portal-content' test id, and
// translations echo their keys, so placeholders are asserted as raw i18n keys.
describe('VarPicker', () => {
  // One option per supported variable type; `var1` is the default selection.
  const mockOptions: Props['options'] = [
    { name: 'Variable 1', value: 'var1', type: 'string' },
    { name: 'Variable 2', value: 'var2', type: 'number' },
    { name: 'Variable 3', value: 'var3', type: 'boolean' },
  ]

  const defaultProps: Props = {
    value: 'var1',
    options: mockOptions,
    onChange: jest.fn(),
  }

  beforeEach(() => {
    jest.clearAllMocks()
  })

  // Rendering tests (REQUIRED)
  describe('Rendering', () => {
    it('should render variable picker with dropdown trigger', () => {
      // Arrange
      const props = { ...defaultProps }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByTestId('portal-trigger')).toBeInTheDocument()
      expect(screen.getByText('var1')).toBeInTheDocument()
    })

    it('should display selected variable with type icon when value is provided', () => {
      // Arrange
      const props = { ...defaultProps }

      // Act
      render(<VarPicker {...props} />)

      // Assert — the selected value renders wrapped in template-style braces
      expect(screen.getByText('var1')).toBeInTheDocument()
      expect(screen.getByText('{{')).toBeInTheDocument()
      expect(screen.getByText('}}')).toBeInTheDocument()
      // IconTypeIcon should be rendered (check for svg icon)
      expect(document.querySelector('svg')).toBeInTheDocument()
    })

    it('should show placeholder text when no value is selected', () => {
      // Arrange
      const props = {
        ...defaultProps,
        value: undefined,
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.queryByText('var1')).not.toBeInTheDocument()
      expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
    })

    it('should display custom tip message when notSelectedVarTip is provided', () => {
      // Arrange
      const props = {
        ...defaultProps,
        value: undefined,
        notSelectedVarTip: 'Select a variable',
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByText('Select a variable')).toBeInTheDocument()
    })

    it('should render dropdown indicator icon', () => {
      // Arrange
      const props = { ...defaultProps }

      // Act
      render(<VarPicker {...props} />)

      // Assert - Trigger should be present
      expect(screen.getByTestId('portal-trigger')).toBeInTheDocument()
    })
  })

  // Props tests (REQUIRED)
  describe('Props', () => {
    it('should apply custom className to wrapper', () => {
      // Arrange
      const props = {
        ...defaultProps,
        className: 'custom-class',
      }

      // Act
      const { container } = render(<VarPicker {...props} />)

      // Assert
      expect(container.querySelector('.custom-class')).toBeInTheDocument()
    })

    it('should apply custom triggerClassName to trigger button', () => {
      // Arrange
      const props = {
        ...defaultProps,
        triggerClassName: 'custom-trigger-class',
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByTestId('portal-trigger')).toHaveClass('custom-trigger-class')
    })

    it('should display selected value with proper formatting', () => {
      // Arrange
      const props = {
        ...defaultProps,
        value: 'customVar',
        options: [
          { name: 'Custom Variable', value: 'customVar', type: 'string' },
        ],
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByText('customVar')).toBeInTheDocument()
      expect(screen.getByText('{{')).toBeInTheDocument()
      expect(screen.getByText('}}')).toBeInTheDocument()
    })
  })

  // User Interactions
  describe('User Interactions', () => {
    it('should open dropdown when clicking the trigger button', async () => {
      // Arrange
      const onChange = jest.fn()
      const props = { ...defaultProps, onChange }
      const user = userEvent.setup()

      // Act
      render(<VarPicker {...props} />)
      await user.click(screen.getByTestId('portal-trigger'))

      // Assert — the mocked portal mounts its content only while open
      expect(screen.getByTestId('portal-content')).toBeInTheDocument()
    })

    it('should call onChange and close dropdown when selecting an option', async () => {
      // Arrange
      const onChange = jest.fn()
      const props = { ...defaultProps, onChange }
      const user = userEvent.setup()

      // Act
      render(<VarPicker {...props} />)

      // Open dropdown
      await user.click(screen.getByTestId('portal-trigger'))
      expect(screen.getByTestId('portal-content')).toBeInTheDocument()

      // Select a different option (getAllByText: the value may appear more than once)
      const options = screen.getAllByText('var2')
      expect(options.length).toBeGreaterThan(0)
      await user.click(options[0])

      // Assert
      expect(onChange).toHaveBeenCalledWith('var2')
      expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
    })

    // Overlaps with 'should toggle dropdown state on trigger click' below;
    // kept so the User Interactions group covers the toggle path on its own.
    it('should toggle dropdown when clicking trigger button multiple times', async () => {
      // Arrange
      const props = { ...defaultProps }
      const user = userEvent.setup()

      // Act
      render(<VarPicker {...props} />)

      const trigger = screen.getByTestId('portal-trigger')

      // Open dropdown
      await user.click(trigger)
      expect(screen.getByTestId('portal-content')).toBeInTheDocument()

      // Close dropdown
      await user.click(trigger)
      expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
    })
  })

  // State Management
  describe('State Management', () => {
    it('should initialize with closed dropdown', () => {
      // Arrange
      const props = { ...defaultProps }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
    })

    it('should toggle dropdown state on trigger click', async () => {
      // Arrange
      const props = { ...defaultProps }
      const user = userEvent.setup()

      // Act
      render(<VarPicker {...props} />)

      const trigger = screen.getByTestId('portal-trigger')
      expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()

      // Open dropdown
      await user.click(trigger)
      expect(screen.getByTestId('portal-content')).toBeInTheDocument()

      // Close dropdown
      await user.click(trigger)
      expect(screen.queryByTestId('portal-content')).not.toBeInTheDocument()
    })

    it('should preserve selected value when dropdown is closed without selection', async () => {
      // Arrange
      const props = { ...defaultProps }
      const user = userEvent.setup()

      // Act
      render(<VarPicker {...props} />)

      // Open and close dropdown without selecting anything
      const trigger = screen.getByTestId('portal-trigger')
      await user.click(trigger)
      await user.click(trigger)

      // Assert
      expect(screen.getByText('var1')).toBeInTheDocument() // Original value still displayed
    })
  })

  // Edge Cases (REQUIRED)
  describe('Edge Cases', () => {
    it('should handle undefined value gracefully', () => {
      // Arrange
      const props = {
        ...defaultProps,
        value: undefined,
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
      expect(screen.getByTestId('portal-trigger')).toBeInTheDocument()
    })

    it('should handle empty options array', () => {
      // Arrange
      const props = {
        ...defaultProps,
        options: [],
        value: undefined,
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByTestId('portal-trigger')).toBeInTheDocument()
      expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
    })

    it('should handle null value without crashing', () => {
      // Arrange — feed a literal `null` (invalid per the Props type, hence the
      // cast) to prove the component falls back to the placeholder at runtime.
      // FIX: this test previously passed `undefined`, duplicating the test
      // above and never exercising the null path its title promised.
      const props = {
        ...defaultProps,
        value: null as unknown as Props['value'],
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByText('appDebug.feature.dataSet.queryVariable.choosePlaceholder')).toBeInTheDocument()
    })

    it('should handle variable names with special characters safely', () => {
      // Arrange
      const props = {
        ...defaultProps,
        options: [
          { name: 'Variable with & < > " \' characters', value: 'specialVar', type: 'string' },
        ],
        value: 'specialVar',
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert — the trigger shows the value, not the (special-char) name
      expect(screen.getByText('specialVar')).toBeInTheDocument()
    })

    it('should handle long variable names', () => {
      // Arrange
      const props = {
        ...defaultProps,
        options: [
          { name: 'A very long variable name that should be truncated', value: 'longVar', type: 'string' },
        ],
        value: 'longVar',
      }

      // Act
      render(<VarPicker {...props} />)

      // Assert
      expect(screen.getByText('longVar')).toBeInTheDocument()
      expect(screen.getByTestId('portal-trigger')).toBeInTheDocument()
    })
  })
})
|
||||
@@ -1,347 +0,0 @@
|
||||
import { render, screen, within } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import AppCard from './index'
|
||||
import type { AppIconType } from '@/types/app'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import type { App } from '@/models/explore'
|
||||
|
||||
jest.mock('@heroicons/react/20/solid', () => ({
|
||||
PlusIcon: ({ className }: any) => <div data-testid="plus-icon" className={className} aria-label="Add icon">+</div>,
|
||||
}))
|
||||
|
||||
// Fully-populated explore-App fixture. The nested `app` record holds what the
// card reads for name/icon/mode; the outer fields are explore-listing metadata.
const mockApp: App = {
  app: {
    id: 'test-app-id',
    mode: AppModeEnum.CHAT,
    icon_type: 'emoji' as AppIconType,
    icon: '🤖',
    icon_background: '#FFEAD5',
    icon_url: '',
    name: 'Test Chat App',
    description: 'A test chat application for demonstration purposes',
    use_icon_as_answer_icon: false,
  },
  app_id: 'test-app-id',
  // The card displays THIS description, not app.description — see the
  // 'should display app description from app level' test below.
  description: 'A comprehensive chat application template',
  copyright: 'Test Corp',
  privacy_policy: null,
  custom_disclaimer: null,
  category: 'Assistant',
  position: 1,
  is_listed: true,
  install_count: 100,
  installed: false,
  editable: true,
  is_agent: false,
}
|
||||
|
||||
// Behavioral tests for the explore AppCard.
// Conventions used throughout:
// - Emoji app icons render as an <em-emoji> custom element, so presence is
//   checked via container.querySelector('em-emoji').
// - Labels are asserted as raw i18n keys (e.g. 'app.typeSelector.chatbot'),
//   which only works because translation is mocked to echo keys.
describe('AppCard', () => {
  const defaultProps = {
    app: mockApp,
    canCreate: true,
    onCreate: jest.fn(),
  }

  beforeEach(() => {
    jest.clearAllMocks()
  })

  describe('Rendering', () => {
    it('should render without crashing', () => {
      const { container } = render(<AppCard {...defaultProps} />)

      expect(container.querySelector('em-emoji')).toBeInTheDocument()
      expect(screen.getByText('Test Chat App')).toBeInTheDocument()
      // Outer (explore-level) description, not app.description.
      expect(screen.getByText(mockApp.description)).toBeInTheDocument()
    })

    it('should render app type icon and label', () => {
      const { container } = render(<AppCard {...defaultProps} />)

      expect(container.querySelector('svg')).toBeInTheDocument()
      expect(screen.getByText('app.typeSelector.chatbot')).toBeInTheDocument()
    })
  })

  describe('Props', () => {
    describe('canCreate behavior', () => {
      it('should show create button when canCreate is true', () => {
        render(<AppCard {...defaultProps} canCreate={true} />)

        const button = screen.getByRole('button', { name: /app\.newApp\.useTemplate/ })
        expect(button).toBeInTheDocument()
      })

      it('should hide create button when canCreate is false', () => {
        render(<AppCard {...defaultProps} canCreate={false} />)

        const button = screen.queryByRole('button', { name: /app\.newApp\.useTemplate/ })
        expect(button).not.toBeInTheDocument()
      })
    })

    it('should display app name from appBasicInfo', () => {
      const customApp = {
        ...mockApp,
        app: {
          ...mockApp.app,
          name: 'Custom App Name',
        },
      }
      render(<AppCard {...defaultProps} app={customApp} />)

      expect(screen.getByText('Custom App Name')).toBeInTheDocument()
    })

    it('should display app description from app level', () => {
      const customApp = {
        ...mockApp,
        description: 'Custom description for the app',
      }
      render(<AppCard {...defaultProps} app={customApp} />)

      expect(screen.getByText('Custom description for the app')).toBeInTheDocument()
    })

    it('should truncate long app names', () => {
      const longNameApp = {
        ...mockApp,
        app: {
          ...mockApp.app,
          name: 'This is a very long app name that should be truncated with line-clamp-1',
        },
      }
      render(<AppCard {...defaultProps} app={longNameApp} />)

      // Truncation itself is CSS; we assert the full name survives in `title`.
      const nameElement = screen.getByTitle('This is a very long app name that should be truncated with line-clamp-1')
      expect(nameElement).toBeInTheDocument()
    })
  })

  // One case per AppModeEnum value, checking the mode → label mapping.
  describe('App Modes - Data Driven Tests', () => {
    const testCases = [
      {
        mode: AppModeEnum.CHAT,
        expectedLabel: 'app.typeSelector.chatbot',
        description: 'Chat application mode',
      },
      {
        mode: AppModeEnum.AGENT_CHAT,
        expectedLabel: 'app.typeSelector.agent',
        description: 'Agent chat mode',
      },
      {
        mode: AppModeEnum.COMPLETION,
        expectedLabel: 'app.typeSelector.completion',
        description: 'Completion mode',
      },
      {
        mode: AppModeEnum.ADVANCED_CHAT,
        expectedLabel: 'app.typeSelector.advanced',
        description: 'Advanced chat mode',
      },
      {
        mode: AppModeEnum.WORKFLOW,
        expectedLabel: 'app.typeSelector.workflow',
        description: 'Workflow mode',
      },
    ]

    testCases.forEach(({ mode, expectedLabel, description }) => {
      it(`should display correct type label for ${description}`, () => {
        const appWithMode = {
          ...mockApp,
          app: {
            ...mockApp.app,
            mode,
          },
        }
        render(<AppCard {...defaultProps} app={appWithMode} />)

        expect(screen.getByText(expectedLabel)).toBeInTheDocument()
      })
    })
  })

  describe('Icon Type Tests', () => {
    it('should render emoji icon without image element', () => {
      const appWithIcon = {
        ...mockApp,
        app: {
          ...mockApp.app,
          icon_type: 'emoji' as AppIconType,
          icon: '🤖',
        },
      }
      const { container } = render(<AppCard {...defaultProps} app={appWithIcon} />)

      const card = container.firstElementChild as HTMLElement
      // Emoji mode must NOT produce an <img>; it renders the em-emoji element.
      expect(within(card).queryByRole('img', { name: 'app icon' })).not.toBeInTheDocument()
      expect(card.querySelector('em-emoji')).toBeInTheDocument()
    })

    it('should prioritize icon_url when both icon and icon_url are provided', () => {
      const appWithImageUrl = {
        ...mockApp,
        app: {
          ...mockApp.app,
          icon_type: 'image' as AppIconType,
          icon: 'local-icon.png',
          icon_url: 'https://example.com/remote-icon.png',
        },
      }
      render(<AppCard {...defaultProps} app={appWithImageUrl} />)

      expect(screen.getByRole('img', { name: 'app icon' })).toHaveAttribute('src', 'https://example.com/remote-icon.png')
    })
  })

  describe('User Interactions', () => {
    it('should call onCreate when create button is clicked', async () => {
      const mockOnCreate = jest.fn()
      render(<AppCard {...defaultProps} onCreate={mockOnCreate} />)

      const button = screen.getByRole('button', { name: /app\.newApp\.useTemplate/ })
      await userEvent.click(button)
      expect(mockOnCreate).toHaveBeenCalledTimes(1)
    })

    it('should handle click on card itself', async () => {
      const mockOnCreate = jest.fn()
      const { container } = render(<AppCard {...defaultProps} onCreate={mockOnCreate} />)

      const card = container.firstElementChild as HTMLElement
      await userEvent.click(card)
      // Note: Card click doesn't trigger onCreate, only the button does
      expect(mockOnCreate).not.toHaveBeenCalled()
    })
  })

  describe('Keyboard Accessibility', () => {
    it('should allow the create button to be focused', async () => {
      const mockOnCreate = jest.fn()
      render(<AppCard {...defaultProps} onCreate={mockOnCreate} />)

      // Tab from document body; the create button is expected to receive focus.
      await userEvent.tab()
      const button = screen.getByRole('button', { name: /app\.newApp\.useTemplate/ }) as HTMLButtonElement

      // Test that button can be focused
      expect(button).toHaveFocus()

      // Test click event works (keyboard events on buttons typically trigger click)
      await userEvent.click(button)
      expect(mockOnCreate).toHaveBeenCalledTimes(1)
    })
  })

  describe('Edge Cases', () => {
    it('should handle app with null icon_type', () => {
      const appWithNullIcon = {
        ...mockApp,
        app: {
          ...mockApp.app,
          icon_type: null,
        },
      }
      const { container } = render(<AppCard {...defaultProps} app={appWithNullIcon} />)

      const appIcon = container.querySelector('em-emoji')
      expect(appIcon).toBeInTheDocument()
      // AppIcon component should handle null icon_type gracefully
    })

    it('should handle app with empty description', () => {
      const appWithEmptyDesc = {
        ...mockApp,
        description: '',
      }
      const { container } = render(<AppCard {...defaultProps} app={appWithEmptyDesc} />)

      // The description node is located by its truncation class.
      const descriptionContainer = container.querySelector('.line-clamp-3')
      expect(descriptionContainer).toBeInTheDocument()
      expect(descriptionContainer).toHaveTextContent('')
    })

    it('should handle app with very long description', () => {
      const longDescription = 'This is a very long description that should be truncated with line-clamp-3. '.repeat(5)
      const appWithLongDesc = {
        ...mockApp,
        description: longDescription,
      }
      render(<AppCard {...defaultProps} app={appWithLongDesc} />)

      expect(screen.getByText(/This is a very long description/)).toBeInTheDocument()
    })

    it('should handle app with special characters in name', () => {
      const appWithSpecialChars = {
        ...mockApp,
        app: {
          ...mockApp.app,
          name: 'App <script>alert("test")</script> & Special "Chars"',
        },
      }
      render(<AppCard {...defaultProps} app={appWithSpecialChars} />)

      // Rendered as text (React escapes by default), not executed markup.
      expect(screen.getByText('App <script>alert("test")</script> & Special "Chars"')).toBeInTheDocument()
    })

    it('should handle onCreate function throwing error', async () => {
      const errorOnCreate = jest.fn(() => {
        throw new Error('Create failed')
      })

      // Mock console.error to avoid test output noise
      const consoleSpy = jest.spyOn(console, 'error').mockImplementation(jest.fn())

      render(<AppCard {...defaultProps} onCreate={errorOnCreate} />)

      const button = screen.getByRole('button', { name: /app\.newApp\.useTemplate/ })
      // userEvent may or may not re-throw the handler's error; capture either way.
      let capturedError: unknown
      try {
        await userEvent.click(button)
      }
      catch (err) {
        capturedError = err
      }
      expect(errorOnCreate).toHaveBeenCalledTimes(1)
      expect(consoleSpy).toHaveBeenCalled()
      if (capturedError instanceof Error)
        expect(capturedError.message).toContain('Create failed')

      consoleSpy.mockRestore()
    })
  })

  describe('Accessibility', () => {
    it('should have proper elements for accessibility', () => {
      const { container } = render(<AppCard {...defaultProps} />)

      expect(container.querySelector('em-emoji')).toBeInTheDocument()
      expect(container.querySelector('svg')).toBeInTheDocument()
    })

    it('should have title attribute for app name when truncated', () => {
      render(<AppCard {...defaultProps} />)

      const nameElement = screen.getByText('Test Chat App')
      expect(nameElement).toHaveAttribute('title', 'Test Chat App')
    })

    it('should have accessible button with proper label', () => {
      render(<AppCard {...defaultProps} />)

      const button = screen.getByRole('button', { name: /app\.newApp\.useTemplate/ })
      expect(button).toBeEnabled()
      expect(button).toHaveTextContent('app.newApp.useTemplate')
    })
  })

  describe('User-Visible Behavior Tests', () => {
    it('should show plus icon in create button', () => {
      render(<AppCard {...defaultProps} />)

      expect(screen.getByTestId('plus-icon')).toBeInTheDocument()
    })
  })
})
|
||||
@@ -15,7 +15,6 @@ export type AppCardProps = {
|
||||
|
||||
const AppCard = ({
|
||||
app,
|
||||
canCreate,
|
||||
onCreate,
|
||||
}: AppCardProps) => {
|
||||
const { t } = useTranslation()
|
||||
@@ -46,16 +45,14 @@ const AppCard = ({
|
||||
{app.description}
|
||||
</div>
|
||||
</div>
|
||||
{canCreate && (
|
||||
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
|
||||
<div className={cn('flex h-8 w-full items-center space-x-2')}>
|
||||
<Button variant='primary' className='grow' onClick={() => onCreate()}>
|
||||
<PlusIcon className='mr-1 h-4 w-4' />
|
||||
<span className='text-xs'>{t('app.newApp.useTemplate')}</span>
|
||||
</Button>
|
||||
</div>
|
||||
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
|
||||
<div className={cn('flex h-8 w-full items-center space-x-2')}>
|
||||
<Button variant='primary' className='grow' onClick={() => onCreate()}>
|
||||
<PlusIcon className='mr-1 h-4 w-4' />
|
||||
<span className='text-xs'>{t('app.newApp.useTemplate')}</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,209 +0,0 @@
|
||||
import type { RenderOptions } from '@testing-library/react'
|
||||
import { fireEvent, render } from '@testing-library/react'
|
||||
import { defaultPlan } from '@/app/components/billing/config'
|
||||
import { noop } from 'lodash-es'
|
||||
import type { ModalContextState } from '@/context/modal-context'
|
||||
import APIKeyInfoPanel from './index'
|
||||
|
||||
// Mock the modules before importing the functions
// (jest hoists jest.mock calls, so the hooks below resolve to these factories).
jest.mock('@/context/provider-context', () => ({
  useProviderContext: jest.fn(),
}))

jest.mock('@/context/modal-context', () => ({
  useModalContext: jest.fn(),
}))

import { useProviderContext as actualUseProviderContext } from '@/context/provider-context'
import { useModalContext as actualUseModalContext } from '@/context/modal-context'

// Type casting for mocks — exposes mockReturnValue etc. with proper typing.
const mockUseProviderContext = actualUseProviderContext as jest.MockedFunction<typeof actualUseProviderContext>
const mockUseModalContext = actualUseModalContext as jest.MockedFunction<typeof actualUseModalContext>
|
||||
|
||||
// Default mock data
// Baseline provider-context return value: API key unset, billing/education
// features disabled, empty model lists. Tests overlay per-case overrides via
// setupMocks below.
const defaultProviderContext = {
  modelProviders: [],
  refreshModelProviders: noop,
  textGenerationModelList: [],
  supportRetrievalMethods: [],
  isAPIKeySet: false,
  plan: defaultPlan,
  isFetchedPlan: false,
  enableBilling: false,
  onPlanInfoChanged: noop,
  enableReplaceWebAppLogo: false,
  modelLoadBalancingEnabled: false,
  datasetOperatorEnabled: false,
  enableEducationPlan: false,
  isEducationWorkspace: false,
  isEducationAccount: false,
  allowRefreshEducationVerify: false,
  educationAccountExpireAt: null,
  isLoadingEducationAccountInfo: false,
  isFetchingEducationAccountInfo: false,
  webappCopyrightEnabled: false,
  licenseLimit: {
    workspace_members: {
      size: 0,
      limit: 0,
    },
  },
  refreshLicenseLimit: noop,
  isAllowTransferWorkspace: false,
  isAllowPublishAsCustomKnowledgePipelineTemplate: false,
}
|
||||
|
||||
// Baseline modal-context value: every modal opener is a no-op. Individual
// tests substitute jest.fn() for the setter they want to observe.
const defaultModalContext: ModalContextState = {
  setShowAccountSettingModal: noop,
  setShowApiBasedExtensionModal: noop,
  setShowModerationSettingModal: noop,
  setShowExternalDataToolModal: noop,
  setShowPricingModal: noop,
  setShowAnnotationFullModal: noop,
  setShowModelModal: noop,
  setShowExternalKnowledgeAPIModal: noop,
  setShowModelLoadBalancingModal: noop,
  setShowOpeningModal: noop,
  setShowUpdatePluginModal: noop,
  setShowEducationExpireNoticeModal: noop,
  setShowTriggerEventsLimitModal: noop,
}
|
||||
|
||||
// Per-test overrides for the two mocked contexts; shallow-merged over the
// defaults by setupMocks.
export type MockOverrides = {
  providerContext?: Partial<typeof defaultProviderContext>
  modalContext?: Partial<typeof defaultModalContext>
}

// RTL render options (minus `wrapper`, which these utils do not support)
// plus the mock overrides above.
export type APIKeyInfoPanelRenderOptions = {
  mockOverrides?: MockOverrides
} & Omit<RenderOptions, 'wrapper'>
|
||||
|
||||
// Setup function to configure mocks
|
||||
export function setupMocks(overrides: MockOverrides = {}) {
|
||||
mockUseProviderContext.mockReturnValue({
|
||||
...defaultProviderContext,
|
||||
...overrides.providerContext,
|
||||
})
|
||||
|
||||
mockUseModalContext.mockReturnValue({
|
||||
...defaultModalContext,
|
||||
...overrides.modalContext,
|
||||
})
|
||||
}
|
||||
|
||||
// Render APIKeyInfoPanel with the context mocks applied first; any remaining
// RTL options are forwarded to render() untouched.
export function renderAPIKeyInfoPanel(options: APIKeyInfoPanelRenderOptions = {}) {
  const { mockOverrides, ...rtlOptions } = options
  setupMocks(mockOverrides)
  return render(<APIKeyInfoPanel />, rtlOptions)
}
|
||||
|
||||
// Helper functions for common test scenarios
// NOTE(review): in each helper, a caller-supplied key inside `overrides`
// (e.g. providerContext) REPLACES the preset object entirely via the shallow
// spread below — it is not merged with it. Confirm callers rely on that.
export const scenarios = {
  // Render with API key not set (default)
  withAPIKeyNotSet: (overrides: MockOverrides = {}) =>
    renderAPIKeyInfoPanel({
      mockOverrides: {
        providerContext: { isAPIKeySet: false },
        ...overrides,
      },
    }),

  // Render with API key already set
  withAPIKeySet: (overrides: MockOverrides = {}) =>
    renderAPIKeyInfoPanel({
      mockOverrides: {
        providerContext: { isAPIKeySet: true },
        ...overrides,
      },
    }),

  // Render with mock modal function
  withMockModal: (mockSetShowAccountSettingModal: jest.Mock, overrides: MockOverrides = {}) =>
    renderAPIKeyInfoPanel({
      mockOverrides: {
        modalContext: { setShowAccountSettingModal: mockSetShowAccountSettingModal },
        ...overrides,
      },
    }),
}
|
||||
|
||||
// Common test assertions
export const assertions = {
  // Should render main button
  // Queries document-wide (not a scoped container) for the primary button.
  shouldRenderMainButton: () => {
    const button = document.querySelector('button.btn-primary')
    expect(button).toBeInTheDocument()
    return button
  },

  // Should not render at all
  shouldNotRender: (container: HTMLElement) => {
    expect(container.firstChild).toBeNull()
  },

  // Should have correct panel styling
  shouldHavePanelStyling: (panel: HTMLElement) => {
    expect(panel).toHaveClass(
      'border-components-panel-border',
      'bg-components-panel-bg',
      'relative',
      'mb-6',
      'rounded-2xl',
      'border',
      'p-8',
      'shadow-md',
    )
  },

  // Should have close button
  // The close control carries no test id; it is located by positioning classes.
  shouldHaveCloseButton: (container: HTMLElement) => {
    const closeButton = container.querySelector('.absolute.right-4.top-4')
    expect(closeButton).toBeInTheDocument()
    expect(closeButton).toHaveClass('cursor-pointer')
    return closeButton
  },
}
|
||||
|
||||
// Common user interactions
// Both helpers no-op silently when the target element is absent; callers
// should assert on the returned element (or panel state) afterwards.
export const interactions = {
  // Click the main button
  clickMainButton: () => {
    const button = document.querySelector('button.btn-primary')
    if (button) fireEvent.click(button)
    return button
  },

  // Click the close button
  clickCloseButton: (container: HTMLElement) => {
    const closeButton = container.querySelector('.absolute.right-4.top-4')
    if (closeButton) fireEvent.click(closeButton)
    return closeButton
  },
}
|
||||
|
||||
// Text content keys for assertions
// Regexes matching raw i18n keys — tests match on the key strings themselves,
// which assumes the i18n layer is mocked to echo keys (see shared test setup).
export const textKeys = {
  selfHost: {
    titleRow1: /appOverview\.apiKeyInfo\.selfHost\.title\.row1/,
    titleRow2: /appOverview\.apiKeyInfo\.selfHost\.title\.row2/,
    setAPIBtn: /appOverview\.apiKeyInfo\.setAPIBtn/,
    tryCloud: /appOverview\.apiKeyInfo\.tryCloud/,
  },
  cloud: {
    trialTitle: /appOverview\.apiKeyInfo\.cloud\.trial\.title/,
    trialDescription: /appOverview\.apiKeyInfo\.cloud\.trial\.description/,
    setAPIBtn: /appOverview\.apiKeyInfo\.setAPIBtn/,
  },
}
|
||||
|
||||
// Setup and cleanup utilities
// Convenience wrapper around jest.clearAllMocks so spec files using these
// utils reset mock state through a single import.
export function clearAllMocks() {
  jest.clearAllMocks()
}
|
||||
|
||||
// Export mock functions for external access
|
||||
export { mockUseProviderContext, mockUseModalContext, defaultModalContext }
|
||||
@@ -1,122 +0,0 @@
|
||||
import { cleanup, screen } from '@testing-library/react'
|
||||
import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
|
||||
import {
|
||||
assertions,
|
||||
clearAllMocks,
|
||||
defaultModalContext,
|
||||
interactions,
|
||||
mockUseModalContext,
|
||||
scenarios,
|
||||
textKeys,
|
||||
} from './apikey-info-panel.test-utils'
|
||||
|
||||
// Mock config for Cloud edition
jest.mock('@/config', () => ({
  IS_CE_EDITION: false, // Test Cloud edition
}))

// Unmount rendered trees between tests; this suite asserts on `container`
// DOM nodes directly, so stale trees would leak across cases.
afterEach(cleanup)
|
||||
|
||||
describe('APIKeyInfoPanel - Cloud Edition', () => {
|
||||
const mockSetShowAccountSettingModal = jest.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
clearAllMocks()
|
||||
mockUseModalContext.mockReturnValue({
|
||||
...defaultModalContext,
|
||||
setShowAccountSettingModal: mockSetShowAccountSettingModal,
|
||||
})
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing when API key is not set', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
assertions.shouldRenderMainButton()
|
||||
})
|
||||
|
||||
it('should not render when API key is already set', () => {
|
||||
const { container } = scenarios.withAPIKeySet()
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
|
||||
it('should not render when panel is hidden by user', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
interactions.clickCloseButton(container)
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Cloud Edition Content', () => {
|
||||
it('should display cloud version title', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByText(textKeys.cloud.trialTitle)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display emoji for cloud version', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
expect(container.querySelector('em-emoji')).toBeInTheDocument()
|
||||
expect(container.querySelector('em-emoji')).toHaveAttribute('id', '😀')
|
||||
})
|
||||
|
||||
it('should display cloud version description', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByText(textKeys.cloud.trialDescription)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render external link for cloud version', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
expect(container.querySelector('a[href="https://cloud.dify.ai/apps"]')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display set API button text', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByText(textKeys.cloud.setAPIBtn)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('User Interactions', () => {
|
||||
it('should call setShowAccountSettingModal when set API button is clicked', () => {
|
||||
scenarios.withMockModal(mockSetShowAccountSettingModal)
|
||||
|
||||
interactions.clickMainButton()
|
||||
|
||||
expect(mockSetShowAccountSettingModal).toHaveBeenCalledWith({
|
||||
payload: ACCOUNT_SETTING_TAB.PROVIDER,
|
||||
})
|
||||
})
|
||||
|
||||
it('should hide panel when close button is clicked', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
expect(container.firstChild).toBeInTheDocument()
|
||||
|
||||
interactions.clickCloseButton(container)
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Props and Styling', () => {
|
||||
it('should render button with primary variant', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
const button = screen.getByRole('button')
|
||||
expect(button).toHaveClass('btn-primary')
|
||||
})
|
||||
|
||||
it('should render panel container with correct classes', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
const panel = container.firstChild as HTMLElement
|
||||
assertions.shouldHavePanelStyling(panel)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Accessibility', () => {
|
||||
it('should have button with proper role', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByRole('button')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should have clickable close button', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
assertions.shouldHaveCloseButton(container)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,162 +0,0 @@
|
||||
import { cleanup, screen } from '@testing-library/react'
|
||||
import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
|
||||
import {
|
||||
assertions,
|
||||
clearAllMocks,
|
||||
defaultModalContext,
|
||||
interactions,
|
||||
mockUseModalContext,
|
||||
scenarios,
|
||||
textKeys,
|
||||
} from './apikey-info-panel.test-utils'
|
||||
|
||||
// Mock config for CE edition
|
||||
jest.mock('@/config', () => ({
|
||||
IS_CE_EDITION: true, // Test CE edition by default
|
||||
}))
|
||||
|
||||
afterEach(cleanup)
|
||||
|
||||
describe('APIKeyInfoPanel - Community Edition', () => {
|
||||
const mockSetShowAccountSettingModal = jest.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
clearAllMocks()
|
||||
mockUseModalContext.mockReturnValue({
|
||||
...defaultModalContext,
|
||||
setShowAccountSettingModal: mockSetShowAccountSettingModal,
|
||||
})
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing when API key is not set', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
assertions.shouldRenderMainButton()
|
||||
})
|
||||
|
||||
it('should not render when API key is already set', () => {
|
||||
const { container } = scenarios.withAPIKeySet()
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
|
||||
it('should not render when panel is hidden by user', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
interactions.clickCloseButton(container)
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Content Display', () => {
|
||||
it('should display self-host title content', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
|
||||
expect(screen.getByText(textKeys.selfHost.titleRow1)).toBeInTheDocument()
|
||||
expect(screen.getByText(textKeys.selfHost.titleRow2)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display set API button text', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByText(textKeys.selfHost.setAPIBtn)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render external link with correct href for self-host version', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
const link = container.querySelector('a[href="https://cloud.dify.ai/apps"]')
|
||||
|
||||
expect(link).toBeInTheDocument()
|
||||
expect(link).toHaveAttribute('target', '_blank')
|
||||
expect(link).toHaveAttribute('rel', 'noopener noreferrer')
|
||||
expect(link).toHaveTextContent(textKeys.selfHost.tryCloud)
|
||||
})
|
||||
|
||||
it('should have external link with proper styling for self-host version', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
const link = container.querySelector('a[href="https://cloud.dify.ai/apps"]')
|
||||
|
||||
expect(link).toHaveClass(
|
||||
'mt-2',
|
||||
'flex',
|
||||
'h-[26px]',
|
||||
'items-center',
|
||||
'space-x-1',
|
||||
'p-1',
|
||||
'text-xs',
|
||||
'font-medium',
|
||||
'text-[#155EEF]',
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('User Interactions', () => {
|
||||
it('should call setShowAccountSettingModal when set API button is clicked', () => {
|
||||
scenarios.withMockModal(mockSetShowAccountSettingModal)
|
||||
|
||||
interactions.clickMainButton()
|
||||
|
||||
expect(mockSetShowAccountSettingModal).toHaveBeenCalledWith({
|
||||
payload: ACCOUNT_SETTING_TAB.PROVIDER,
|
||||
})
|
||||
})
|
||||
|
||||
it('should hide panel when close button is clicked', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
expect(container.firstChild).toBeInTheDocument()
|
||||
|
||||
interactions.clickCloseButton(container)
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Props and Styling', () => {
|
||||
it('should render button with primary variant', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
const button = screen.getByRole('button')
|
||||
expect(button).toHaveClass('btn-primary')
|
||||
})
|
||||
|
||||
it('should render panel container with correct classes', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
const panel = container.firstChild as HTMLElement
|
||||
assertions.shouldHavePanelStyling(panel)
|
||||
})
|
||||
})
|
||||
|
||||
describe('State Management', () => {
|
||||
it('should start with visible panel (isShow: true)', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
assertions.shouldRenderMainButton()
|
||||
})
|
||||
|
||||
it('should toggle visibility when close button is clicked', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
expect(container.firstChild).toBeInTheDocument()
|
||||
|
||||
interactions.clickCloseButton(container)
|
||||
assertions.shouldNotRender(container)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle provider context loading state', () => {
|
||||
scenarios.withAPIKeyNotSet({
|
||||
providerContext: {
|
||||
modelProviders: [],
|
||||
textGenerationModelList: [],
|
||||
},
|
||||
})
|
||||
assertions.shouldRenderMainButton()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Accessibility', () => {
|
||||
it('should have button with proper role', () => {
|
||||
scenarios.withAPIKeyNotSet()
|
||||
expect(screen.getByRole('button')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should have clickable close button', () => {
|
||||
const { container } = scenarios.withAPIKeyNotSet()
|
||||
assertions.shouldHaveCloseButton(container)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,144 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen, within } from '@testing-library/react'
|
||||
import AppTypeSelector, { AppTypeIcon, AppTypeLabel } from './index'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
|
||||
jest.mock('react-i18next')
|
||||
|
||||
describe('AppTypeSelector', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Covers default rendering and the closed dropdown state.
|
||||
describe('Rendering', () => {
|
||||
it('should render "all types" trigger when no types selected', () => {
|
||||
render(<AppTypeSelector value={[]} onChange={jest.fn()} />)
|
||||
|
||||
expect(screen.getByText('app.typeSelector.all')).toBeInTheDocument()
|
||||
expect(screen.queryByRole('tooltip')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Covers prop-driven trigger variants (empty, single, multiple).
|
||||
describe('Props', () => {
|
||||
it('should render selected type label and clear button when a single type is selected', () => {
|
||||
render(<AppTypeSelector value={[AppModeEnum.CHAT]} onChange={jest.fn()} />)
|
||||
|
||||
expect(screen.getByText('app.typeSelector.chatbot')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.clear' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render icon-only trigger when multiple types are selected', () => {
|
||||
render(<AppTypeSelector value={[AppModeEnum.CHAT, AppModeEnum.WORKFLOW]} onChange={jest.fn()} />)
|
||||
|
||||
expect(screen.queryByText('app.typeSelector.all')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('app.typeSelector.chatbot')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('app.typeSelector.workflow')).not.toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'common.operation.clear' })).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Covers opening/closing the dropdown and selection updates.
|
||||
describe('User interactions', () => {
|
||||
it('should toggle option list when clicking the trigger', () => {
|
||||
render(<AppTypeSelector value={[]} onChange={jest.fn()} />)
|
||||
|
||||
expect(screen.queryByRole('tooltip')).not.toBeInTheDocument()
|
||||
|
||||
fireEvent.click(screen.getByText('app.typeSelector.all'))
|
||||
expect(screen.getByRole('tooltip')).toBeInTheDocument()
|
||||
|
||||
fireEvent.click(screen.getByText('app.typeSelector.all'))
|
||||
expect(screen.queryByRole('tooltip')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onChange with added type when selecting an unselected item', () => {
|
||||
const onChange = jest.fn()
|
||||
render(<AppTypeSelector value={[]} onChange={onChange} />)
|
||||
|
||||
fireEvent.click(screen.getByText('app.typeSelector.all'))
|
||||
fireEvent.click(within(screen.getByRole('tooltip')).getByText('app.typeSelector.workflow'))
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith([AppModeEnum.WORKFLOW])
|
||||
})
|
||||
|
||||
it('should call onChange with removed type when selecting an already-selected item', () => {
|
||||
const onChange = jest.fn()
|
||||
render(<AppTypeSelector value={[AppModeEnum.WORKFLOW]} onChange={onChange} />)
|
||||
|
||||
fireEvent.click(screen.getByText('app.typeSelector.workflow'))
|
||||
fireEvent.click(within(screen.getByRole('tooltip')).getByText('app.typeSelector.workflow'))
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith([])
|
||||
})
|
||||
|
||||
it('should call onChange with appended type when selecting an additional item', () => {
|
||||
const onChange = jest.fn()
|
||||
render(<AppTypeSelector value={[AppModeEnum.CHAT]} onChange={onChange} />)
|
||||
|
||||
fireEvent.click(screen.getByText('app.typeSelector.chatbot'))
|
||||
fireEvent.click(within(screen.getByRole('tooltip')).getByText('app.typeSelector.agent'))
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith([AppModeEnum.CHAT, AppModeEnum.AGENT_CHAT])
|
||||
})
|
||||
|
||||
it('should clear selection without opening the dropdown when clicking clear button', () => {
|
||||
const onChange = jest.fn()
|
||||
render(<AppTypeSelector value={[AppModeEnum.CHAT]} onChange={onChange} />)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'common.operation.clear' }))
|
||||
|
||||
expect(onChange).toHaveBeenCalledWith([])
|
||||
expect(screen.queryByRole('tooltip')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('AppTypeLabel', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Covers label mapping for each supported app type.
|
||||
it.each([
|
||||
[AppModeEnum.CHAT, 'app.typeSelector.chatbot'],
|
||||
[AppModeEnum.AGENT_CHAT, 'app.typeSelector.agent'],
|
||||
[AppModeEnum.COMPLETION, 'app.typeSelector.completion'],
|
||||
[AppModeEnum.ADVANCED_CHAT, 'app.typeSelector.advanced'],
|
||||
[AppModeEnum.WORKFLOW, 'app.typeSelector.workflow'],
|
||||
] as const)('should render label %s for type %s', (_type, expectedLabel) => {
|
||||
render(<AppTypeLabel type={_type} />)
|
||||
expect(screen.getByText(expectedLabel)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
// Covers fallback behavior for unexpected app mode values.
|
||||
it('should render empty label for unknown type', () => {
|
||||
const { container } = render(<AppTypeLabel type={'unknown' as AppModeEnum} />)
|
||||
expect(container.textContent).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('AppTypeIcon', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Covers icon rendering for each supported app type.
|
||||
it.each([
|
||||
[AppModeEnum.CHAT],
|
||||
[AppModeEnum.AGENT_CHAT],
|
||||
[AppModeEnum.COMPLETION],
|
||||
[AppModeEnum.ADVANCED_CHAT],
|
||||
[AppModeEnum.WORKFLOW],
|
||||
] as const)('should render icon for type %s', (type) => {
|
||||
const { container } = render(<AppTypeIcon type={type} />)
|
||||
expect(container.querySelector('svg')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
// Covers fallback behavior for unexpected app mode values.
|
||||
it('should render nothing for unknown type', () => {
|
||||
const { container } = render(<AppTypeIcon type={'unknown' as AppModeEnum} />)
|
||||
expect(container.firstChild).toBeNull()
|
||||
})
|
||||
})
|
||||
@@ -20,7 +20,6 @@ const allTypes: AppModeEnum[] = [AppModeEnum.WORKFLOW, AppModeEnum.ADVANCED_CHAT
|
||||
|
||||
const AppTypeSelector = ({ value, onChange }: AppSelectorProps) => {
|
||||
const [open, setOpen] = useState(false)
|
||||
const { t } = useTranslation()
|
||||
|
||||
return (
|
||||
<PortalToFollowElem
|
||||
@@ -38,21 +37,12 @@ const AppTypeSelector = ({ value, onChange }: AppSelectorProps) => {
|
||||
'flex cursor-pointer items-center justify-between space-x-1 rounded-md px-2 hover:bg-state-base-hover',
|
||||
)}>
|
||||
<AppTypeSelectTrigger values={value} />
|
||||
{value && value.length > 0 && (
|
||||
<button
|
||||
type="button"
|
||||
aria-label={t('common.operation.clear')}
|
||||
className="group h-4 w-4"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onChange([])
|
||||
}}
|
||||
>
|
||||
<RiCloseCircleFill
|
||||
className="h-3.5 w-3.5 text-text-quaternary group-hover:text-text-tertiary"
|
||||
/>
|
||||
</button>
|
||||
)}
|
||||
{value && value.length > 0 && <div className='h-4 w-4' onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onChange([])
|
||||
}}>
|
||||
<RiCloseCircleFill className='h-3.5 w-3.5 cursor-pointer text-text-quaternary hover:text-text-tertiary' />
|
||||
</div>}
|
||||
</div>
|
||||
</PortalToFollowElemTrigger>
|
||||
<PortalToFollowElemContent className='z-[1002]'>
|
||||
|
||||
@@ -33,10 +33,7 @@ const PlanUpgradeModal: FC<Props> = ({
|
||||
|
||||
const handleUpgrade = useCallback(() => {
|
||||
onClose()
|
||||
if (onUpgrade)
|
||||
onUpgrade()
|
||||
else
|
||||
setShowPricingModal()
|
||||
onUpgrade ? onUpgrade() : setShowPricingModal()
|
||||
}, [onClose, onUpgrade, setShowPricingModal])
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import Button from './button'
|
||||
import { Plan } from '../../../type'
|
||||
|
||||
describe('CloudPlanButton', () => {
|
||||
describe('Disabled state', () => {
|
||||
test('should disable button and hide arrow when plan is not available', () => {
|
||||
const handleGetPayUrl = jest.fn()
|
||||
// Arrange
|
||||
render(
|
||||
<Button
|
||||
plan={Plan.team}
|
||||
isPlanDisabled
|
||||
btnText="Get started"
|
||||
handleGetPayUrl={handleGetPayUrl}
|
||||
/>,
|
||||
)
|
||||
|
||||
const button = screen.getByRole('button', { name: /Get started/i })
|
||||
// Assert
|
||||
expect(button).toBeDisabled()
|
||||
expect(button.className).toContain('cursor-not-allowed')
|
||||
expect(handleGetPayUrl).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Enabled state', () => {
|
||||
test('should invoke handler and render arrow when plan is available', () => {
|
||||
const handleGetPayUrl = jest.fn()
|
||||
// Arrange
|
||||
render(
|
||||
<Button
|
||||
plan={Plan.sandbox}
|
||||
isPlanDisabled={false}
|
||||
btnText="Start now"
|
||||
handleGetPayUrl={handleGetPayUrl}
|
||||
/>,
|
||||
)
|
||||
|
||||
const button = screen.getByRole('button', { name: /Start now/i })
|
||||
// Act
|
||||
fireEvent.click(button)
|
||||
|
||||
// Assert
|
||||
expect(handleGetPayUrl).toHaveBeenCalledTimes(1)
|
||||
expect(button).not.toBeDisabled()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,188 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import CloudPlanItem from './index'
|
||||
import { Plan } from '../../../type'
|
||||
import { PlanRange } from '../../plan-switcher/plan-range-switcher'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
|
||||
import { fetchBillingUrl, fetchSubscriptionUrls } from '@/service/billing'
|
||||
import Toast from '../../../../base/toast'
|
||||
import { ALL_PLANS } from '../../../config'
|
||||
|
||||
jest.mock('../../../../base/toast', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
notify: jest.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
jest.mock('@/context/app-context', () => ({
|
||||
useAppContext: jest.fn(),
|
||||
}))
|
||||
|
||||
jest.mock('@/service/billing', () => ({
|
||||
fetchBillingUrl: jest.fn(),
|
||||
fetchSubscriptionUrls: jest.fn(),
|
||||
}))
|
||||
|
||||
jest.mock('@/hooks/use-async-window-open', () => ({
|
||||
useAsyncWindowOpen: jest.fn(),
|
||||
}))
|
||||
|
||||
jest.mock('../../assets', () => ({
|
||||
Sandbox: () => <div>Sandbox Icon</div>,
|
||||
Professional: () => <div>Professional Icon</div>,
|
||||
Team: () => <div>Team Icon</div>,
|
||||
}))
|
||||
|
||||
const mockUseAppContext = useAppContext as jest.Mock
|
||||
const mockUseAsyncWindowOpen = useAsyncWindowOpen as jest.Mock
|
||||
const mockFetchBillingUrl = fetchBillingUrl as jest.Mock
|
||||
const mockFetchSubscriptionUrls = fetchSubscriptionUrls as jest.Mock
|
||||
const mockToastNotify = Toast.notify as jest.Mock
|
||||
|
||||
let assignedHref = ''
|
||||
const originalLocation = window.location
|
||||
|
||||
beforeAll(() => {
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: {
|
||||
get href() {
|
||||
return assignedHref
|
||||
},
|
||||
set href(value: string) {
|
||||
assignedHref = value
|
||||
},
|
||||
} as unknown as Location,
|
||||
})
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: originalLocation,
|
||||
})
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: true })
|
||||
mockUseAsyncWindowOpen.mockReturnValue(jest.fn(async open => await open()))
|
||||
mockFetchBillingUrl.mockResolvedValue({ url: 'https://billing.example' })
|
||||
mockFetchSubscriptionUrls.mockResolvedValue({ url: 'https://subscription.example' })
|
||||
assignedHref = ''
|
||||
})
|
||||
|
||||
describe('CloudPlanItem', () => {
|
||||
// Static content for each plan
|
||||
describe('Rendering', () => {
|
||||
test('should show plan metadata and free label for sandbox plan', () => {
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.sandbox}
|
||||
currentPlan={Plan.sandbox}
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('billing.plans.sandbox.name')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plans.sandbox.description')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.free')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'billing.plansCommon.currentPlan' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should display yearly pricing with discount when planRange is yearly', () => {
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.professional}
|
||||
currentPlan={Plan.sandbox}
|
||||
planRange={PlanRange.yearly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
const professionalPlan = ALL_PLANS[Plan.professional]
|
||||
expect(screen.getByText(`$${professionalPlan.price * 12}`)).toBeInTheDocument()
|
||||
expect(screen.getByText(`$${professionalPlan.price * 10}`)).toBeInTheDocument()
|
||||
expect(screen.getByText(/billing\.plansCommon\.priceTip.*billing\.plansCommon\.year/)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should disable CTA when workspace already on higher tier', () => {
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.professional}
|
||||
currentPlan={Plan.team}
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
const button = screen.getByRole('button', { name: 'billing.plansCommon.startBuilding' })
|
||||
expect(button).toBeDisabled()
|
||||
})
|
||||
})
|
||||
|
||||
// Payment actions triggered from the CTA
|
||||
describe('Plan purchase flow', () => {
|
||||
test('should show toast when non-manager tries to buy a plan', () => {
|
||||
mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: false })
|
||||
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.professional}
|
||||
currentPlan={Plan.sandbox}
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'billing.plansCommon.startBuilding' }))
|
||||
expect(mockToastNotify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'error',
|
||||
message: 'billing.buyPermissionDeniedTip',
|
||||
}))
|
||||
expect(mockFetchBillingUrl).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('should open billing portal when upgrading current paid plan', async () => {
|
||||
const openWindow = jest.fn(async (cb: () => Promise<string>) => await cb())
|
||||
mockUseAsyncWindowOpen.mockReturnValue(openWindow)
|
||||
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.professional}
|
||||
currentPlan={Plan.professional}
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'billing.plansCommon.currentPlan' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockFetchBillingUrl).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
expect(openWindow).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('should redirect to subscription url when selecting a new paid plan', async () => {
|
||||
render(
|
||||
<CloudPlanItem
|
||||
plan={Plan.professional}
|
||||
currentPlan={Plan.sandbox}
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'billing.plansCommon.startBuilding' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockFetchSubscriptionUrls).toHaveBeenCalledWith(Plan.professional, 'month')
|
||||
expect(assignedHref).toBe('https://subscription.example')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,30 +0,0 @@
|
||||
import React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import List from './index'
|
||||
import { Plan } from '../../../../type'
|
||||
|
||||
describe('CloudPlanItem/List', () => {
|
||||
test('should show sandbox specific quotas', () => {
|
||||
render(<List plan={Plan.sandbox} />)
|
||||
|
||||
expect(screen.getByText('billing.plansCommon.messageRequest.title:{"count":200}')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.triggerEvents.sandbox:{"count":3000}')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.startNodes.limited:{"count":2}')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should show professional monthly quotas and tooltips', () => {
|
||||
render(<List plan={Plan.professional} />)
|
||||
|
||||
expect(screen.getByText('billing.plansCommon.messageRequest.titlePerMonth:{"count":5000}')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.vectorSpaceTooltip')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.workflowExecution.faster')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should show unlimited messaging details for team plan', () => {
|
||||
render(<List plan={Plan.team} />)
|
||||
|
||||
expect(screen.getByText('billing.plansCommon.triggerEvents.unlimited')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.workflowExecution.priority')).toBeInTheDocument()
|
||||
expect(screen.getByText('billing.plansCommon.unlimitedApiRate')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
@@ -1,87 +0,0 @@
|
||||
import React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import Plans from './index'
|
||||
import { Plan, type UsagePlanInfo } from '../../type'
|
||||
import { PlanRange } from '../plan-switcher/plan-range-switcher'
|
||||
|
||||
jest.mock('./cloud-plan-item', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(props => (
|
||||
<div data-testid={`cloud-plan-${props.plan}`} data-current-plan={props.currentPlan}>
|
||||
Cloud {props.plan}
|
||||
</div>
|
||||
)),
|
||||
}))
|
||||
|
||||
jest.mock('./self-hosted-plan-item', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(props => (
|
||||
<div data-testid={`self-plan-${props.plan}`}>
|
||||
Self {props.plan}
|
||||
</div>
|
||||
)),
|
||||
}))
|
||||
|
||||
const buildPlan = (type: Plan) => {
|
||||
const usage: UsagePlanInfo = {
|
||||
buildApps: 0,
|
||||
teamMembers: 0,
|
||||
annotatedResponse: 0,
|
||||
documentsUploadQuota: 0,
|
||||
apiRateLimit: 0,
|
||||
triggerEvents: 0,
|
||||
vectorSpace: 0,
|
||||
}
|
||||
return {
|
||||
type,
|
||||
usage,
|
||||
total: usage,
|
||||
}
|
||||
}
|
||||
|
||||
describe('Plans', () => {
|
||||
// Cloud plans visible only when currentPlan is cloud
|
||||
describe('Cloud plan rendering', () => {
|
||||
test('should render sandbox, professional, and team cloud plans when workspace is cloud', () => {
|
||||
render(
|
||||
<Plans
|
||||
plan={buildPlan(Plan.enterprise)}
|
||||
currentPlan="cloud"
|
||||
planRange={PlanRange.monthly}
|
||||
canPay
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('cloud-plan-sandbox')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('cloud-plan-professional')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('cloud-plan-team')).toBeInTheDocument()
|
||||
|
||||
const cloudPlanItem = jest.requireMock('./cloud-plan-item').default as jest.Mock
|
||||
const firstCallProps = cloudPlanItem.mock.calls[0][0]
|
||||
expect(firstCallProps.plan).toBe(Plan.sandbox)
|
||||
// Enterprise should be normalized to team when passed down
|
||||
expect(firstCallProps.currentPlan).toBe(Plan.team)
|
||||
})
|
||||
})
|
||||
|
||||
// Self-hosted plans visible for self-managed workspaces
|
||||
describe('Self-hosted plan rendering', () => {
|
||||
test('should render all self-hosted plans when workspace type is self-hosted', () => {
|
||||
render(
|
||||
<Plans
|
||||
plan={buildPlan(Plan.sandbox)}
|
||||
currentPlan="self"
|
||||
planRange={PlanRange.yearly}
|
||||
canPay={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('self-plan-community')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('self-plan-premium')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('self-plan-enterprise')).toBeInTheDocument()
|
||||
|
||||
const selfPlanItem = jest.requireMock('./self-hosted-plan-item').default as jest.Mock
|
||||
expect(selfPlanItem).toHaveBeenCalledTimes(3)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,61 +0,0 @@
|
||||
import React from 'react'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import Button from './button'
|
||||
import { SelfHostedPlan } from '../../../type'
|
||||
import useTheme from '@/hooks/use-theme'
|
||||
import { Theme } from '@/types/app'
|
||||
|
||||
jest.mock('@/hooks/use-theme')
|
||||
|
||||
jest.mock('@/app/components/base/icons/src/public/billing', () => ({
|
||||
AwsMarketplaceLight: () => <div>AwsMarketplaceLight</div>,
|
||||
AwsMarketplaceDark: () => <div>AwsMarketplaceDark</div>,
|
||||
}))
|
||||
|
||||
const mockUseTheme = useTheme as jest.MockedFunction<typeof useTheme>
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockUseTheme.mockReturnValue({ theme: Theme.light } as unknown as ReturnType<typeof useTheme>)
|
||||
})
|
||||
|
||||
describe('SelfHostedPlanButton', () => {
|
||||
test('should invoke handler when clicked', () => {
|
||||
const handleGetPayUrl = jest.fn()
|
||||
render(
|
||||
<Button
|
||||
plan={SelfHostedPlan.community}
|
||||
handleGetPayUrl={handleGetPayUrl}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'billing.plans.community.btnText' }))
|
||||
expect(handleGetPayUrl).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('should render AWS marketplace badge for premium plan in light theme', () => {
|
||||
const handleGetPayUrl = jest.fn()
|
||||
|
||||
render(
|
||||
<Button
|
||||
plan={SelfHostedPlan.premium}
|
||||
handleGetPayUrl={handleGetPayUrl}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('AwsMarketplaceLight')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should switch to dark AWS badge in dark theme', () => {
|
||||
mockUseTheme.mockReturnValue({ theme: Theme.dark } as unknown as ReturnType<typeof useTheme>)
|
||||
|
||||
render(
|
||||
<Button
|
||||
plan={SelfHostedPlan.premium}
|
||||
handleGetPayUrl={jest.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('AwsMarketplaceDark')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
@@ -1,143 +0,0 @@
|
||||
import React from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import SelfHostedPlanItem from './index'
import { SelfHostedPlan } from '../../../type'
import { contactSalesUrl, getStartedWithCommunityUrl, getWithPremiumUrl } from '../../../config'
import { useAppContext } from '@/context/app-context'
import Toast from '../../../../base/toast'

// Feature lists handed back when t() is called with { returnObjects: true }.
const featuresTranslations: Record<string, string[]> = {
  'billing.plans.community.features': ['community-feature-1', 'community-feature-2'],
  'billing.plans.premium.features': ['premium-feature-1'],
  'billing.plans.enterprise.features': ['enterprise-feature-1'],
}

// i18n mock: returns the key itself, except feature lists requested as arrays.
jest.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string, options?: Record<string, unknown>) => {
      if (options?.returnObjects)
        return featuresTranslations[key] || []
      return key
    },
  }),
  Trans: ({ i18nKey }: { i18nKey: string }) => <span>{i18nKey}</span>,
}))

// Toast mock so notify() calls can be asserted without rendering real toasts.
jest.mock('../../../../base/toast', () => ({
  __esModule: true,
  default: {
    notify: jest.fn(),
  },
}))

// App-context mock: the workspace-manager flag is set per test via the jest.fn().
jest.mock('@/context/app-context', () => ({
  useAppContext: jest.fn(),
}))

// Asset mocks render plain text so presence can be asserted by name.
jest.mock('../../assets', () => ({
  Community: () => <div>Community Icon</div>,
  Premium: () => <div>Premium Icon</div>,
  Enterprise: () => <div>Enterprise Icon</div>,
  PremiumNoise: () => <div>PremiumNoise</div>,
  EnterpriseNoise: () => <div>EnterpriseNoise</div>,
}))

// Billing icon mocks, same plain-text strategy as the asset mocks above.
jest.mock('@/app/components/base/icons/src/public/billing', () => ({
  Azure: () => <div>Azure</div>,
  GoogleCloud: () => <div>Google Cloud</div>,
  AwsMarketplaceDark: () => <div>AwsMarketplaceDark</div>,
  AwsMarketplaceLight: () => <div>AwsMarketplaceLight</div>,
}))
|
||||
|
||||
const mockUseAppContext = useAppContext as jest.Mock
const mockToastNotify = Toast.notify as jest.Mock

// Captures the last URL written to window.location.href so redirects can be
// asserted without actually navigating in jsdom.
let assignedHref = ''
const originalLocation = window.location

beforeAll(() => {
  // Replace window.location with a stub whose href setter records the value.
  // configurable: true keeps the property re-definable for the afterAll restore.
  Object.defineProperty(window, 'location', {
    configurable: true,
    value: {
      get href() {
        return assignedHref
      },
      set href(value: string) {
        assignedHref = value
      },
    } as unknown as Location,
  })
})

afterAll(() => {
  // Restore the real window.location so later test files are unaffected.
  Object.defineProperty(window, 'location', {
    configurable: true,
    value: originalLocation,
  })
})

beforeEach(() => {
  jest.clearAllMocks()
  // Default to a workspace manager so CTA clicks proceed; individual tests
  // override this to exercise the permission-denied path.
  mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: true })
  assignedHref = ''
})
|
||||
|
||||
describe('SelfHostedPlanItem', () => {
  // Copy rendering for each plan
  describe('Rendering', () => {
    test('should display community plan info', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)

      const visibleTexts = [
        'billing.plans.community.name',
        'billing.plans.community.description',
        'billing.plans.community.price',
        'billing.plans.community.includesTitle',
        'community-feature-1',
      ]
      for (const text of visibleTexts)
        expect(screen.getByText(text)).toBeInTheDocument()
    })

    test('should show premium extras such as cloud provider notice', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      const visibleTexts = [
        'billing.plans.premium.price',
        'billing.plans.premium.comingSoon',
        'Azure',
        'Google Cloud',
      ]
      for (const text of visibleTexts)
        expect(screen.getByText(text)).toBeInTheDocument()
    })
  })

  // CTA behavior for each plan
  describe('CTA interactions', () => {
    // Renders the plan card and clicks its CTA button.
    const clickCta = (plan: SelfHostedPlan, buttonName: string | RegExp) => {
      render(<SelfHostedPlanItem plan={plan} />)
      fireEvent.click(screen.getByRole('button', { name: buttonName }))
    }

    test('should show toast when non-manager tries to proceed', () => {
      mockUseAppContext.mockReturnValue({ isCurrentWorkspaceManager: false })

      clickCta(SelfHostedPlan.premium, /billing\.plans\.premium\.btnText/)

      expect(mockToastNotify).toHaveBeenCalledWith(expect.objectContaining({
        type: 'error',
        message: 'billing.buyPermissionDeniedTip',
      }))
    })

    test('should redirect to community url when community plan button clicked', () => {
      clickCta(SelfHostedPlan.community, 'billing.plans.community.btnText')

      expect(assignedHref).toBe(getStartedWithCommunityUrl)
    })

    test('should redirect to premium marketplace url when premium button clicked', () => {
      clickCta(SelfHostedPlan.premium, /billing\.plans\.premium\.btnText/)

      expect(assignedHref).toBe(getWithPremiumUrl)
    })

    test('should redirect to contact sales form when enterprise button clicked', () => {
      clickCta(SelfHostedPlan.enterprise, 'billing.plans.enterprise.btnText')

      expect(assignedHref).toBe(contactSalesUrl)
    })
  })
})
|
||||
@@ -1,25 +0,0 @@
|
||||
import React from 'react'
import { render, screen } from '@testing-library/react'
import List from './index'
import { SelfHostedPlan } from '@/app/components/billing/type'

// i18n mock: plain keys, plus a fixed feature array for returnObjects calls.
jest.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string, options?: Record<string, unknown>) =>
      options?.returnObjects ? ['Feature A', 'Feature B'] : key,
  }),
  Trans: ({ i18nKey }: { i18nKey: string }) => <span>{i18nKey}</span>,
}))

describe('SelfHostedPlanItem/List', () => {
  test('should render plan info', () => {
    render(<List plan={SelfHostedPlan.community} />)

    const expectedTexts = ['billing.plans.community.includesTitle', 'Feature A', 'Feature B']
    for (const text of expectedTexts)
      expect(screen.getByText(text)).toBeInTheDocument()
  })
})
|
||||
@@ -1,12 +0,0 @@
|
||||
import React from 'react'
import { render, screen } from '@testing-library/react'
import Item from './item'

describe('SelfHostedPlanItem/List/Item', () => {
  test('should display provided feature label', () => {
    const label = 'Dedicated support'
    const { container } = render(<Item label={label} />)

    expect(screen.getByText(label)).toBeInTheDocument()
    // The feature check mark renders as an inline svg next to the label.
    expect(container.querySelector('svg')).not.toBeNull()
  })
})
|
||||
@@ -0,0 +1,564 @@
|
||||
import React from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import Tab from './index'

// Define enum locally to avoid importing the whole module
enum CreateFromDSLModalTab {
  FROM_FILE = 'from-file',
  FROM_URL = 'from-url',
}

// Mock the create-from-dsl-modal module to export the enum
// (string values must stay in sync with the local enum above).
jest.mock('@/app/components/app/create-from-dsl-modal', () => ({
  CreateFromDSLModalTab: {
    FROM_FILE: 'from-file',
    FROM_URL: 'from-url',
  },
}))

// Mock react-i18next: t() returns the translation key unchanged so
// assertions can target stable keys instead of translated strings.
jest.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))
|
||||
|
||||
describe('Tab', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
// Tests for basic rendering
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('app.importFromDSLFile')).toBeInTheDocument()
|
||||
expect(screen.getByText('app.importFromDSLUrl')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render two tab items', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Should have 2 clickable tab items
|
||||
const tabItems = container.querySelectorAll('.cursor-pointer')
|
||||
expect(tabItems.length).toBe(2)
|
||||
})
|
||||
|
||||
it('should render with correct container styling', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const tabContainer = container.firstChild as HTMLElement
|
||||
expect(tabContainer).toHaveClass('flex')
|
||||
expect(tabContainer).toHaveClass('h-9')
|
||||
expect(tabContainer).toHaveClass('items-center')
|
||||
expect(tabContainer).toHaveClass('gap-x-6')
|
||||
expect(tabContainer).toHaveClass('border-b')
|
||||
expect(tabContainer).toHaveClass('border-divider-subtle')
|
||||
expect(tabContainer).toHaveClass('px-6')
|
||||
})
|
||||
|
||||
it('should render tab labels with translation keys', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('app.importFromDSLFile')).toBeInTheDocument()
|
||||
expect(screen.getByText('app.importFromDSLUrl')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for active tab indication
|
||||
describe('Active Tab Indication', () => {
|
||||
it('should show FROM_FILE tab as active when currentTab is FROM_FILE', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// getByText returns the Item element directly (text is inside it)
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
// Active tab should have text-text-primary class
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
// Inactive tab should have text-text-tertiary class
|
||||
expect(urlTab).toHaveClass('text-text-tertiary')
|
||||
expect(urlTab).not.toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should show FROM_URL tab as active when currentTab is FROM_URL', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
// Inactive tab should have text-text-tertiary class
|
||||
expect(fileTab).toHaveClass('text-text-tertiary')
|
||||
expect(fileTab).not.toHaveClass('text-text-primary')
|
||||
// Active tab should have text-text-primary class
|
||||
expect(urlTab).toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should render active indicator bar for active tab', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Active tab should have the indicator bar
|
||||
const indicatorBars = container.querySelectorAll('.bg-util-colors-blue-brand-blue-brand-600')
|
||||
expect(indicatorBars.length).toBe(1)
|
||||
})
|
||||
|
||||
it('should render active indicator bar for URL tab when active', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Should have one indicator bar
|
||||
const indicatorBars = container.querySelectorAll('.bg-util-colors-blue-brand-blue-brand-600')
|
||||
expect(indicatorBars.length).toBe(1)
|
||||
|
||||
// The indicator should be in the URL tab
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
expect(urlTab.querySelector('.bg-util-colors-blue-brand-blue-brand-600')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render indicator bar for inactive tab', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// The URL tab (inactive) should not have an indicator bar
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
expect(urlTab.querySelector('.bg-util-colors-blue-brand-blue-brand-600')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for user interactions
|
||||
describe('User Interactions', () => {
|
||||
it('should call setCurrentTab with FROM_FILE when file tab is clicked', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
fireEvent.click(fileTab)
|
||||
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(1)
|
||||
// .bind() passes tab.key as first arg, event as second
|
||||
expect(setCurrentTab).toHaveBeenCalledWith(CreateFromDSLModalTab.FROM_FILE, expect.anything())
|
||||
})
|
||||
|
||||
it('should call setCurrentTab with FROM_URL when url tab is clicked', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
fireEvent.click(urlTab)
|
||||
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(1)
|
||||
expect(setCurrentTab).toHaveBeenCalledWith(CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
})
|
||||
|
||||
it('should call setCurrentTab when clicking already active tab', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
fireEvent.click(fileTab)
|
||||
|
||||
// Should still call setCurrentTab even for active tab
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(1)
|
||||
expect(setCurrentTab).toHaveBeenCalledWith(CreateFromDSLModalTab.FROM_FILE, expect.anything())
|
||||
})
|
||||
|
||||
it('should handle multiple tab clicks', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
fireEvent.click(urlTab)
|
||||
fireEvent.click(fileTab)
|
||||
fireEvent.click(urlTab)
|
||||
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(3)
|
||||
expect(setCurrentTab).toHaveBeenNthCalledWith(1, CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
expect(setCurrentTab).toHaveBeenNthCalledWith(2, CreateFromDSLModalTab.FROM_FILE, expect.anything())
|
||||
expect(setCurrentTab).toHaveBeenNthCalledWith(3, CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for props variations
|
||||
describe('Props Variations', () => {
|
||||
it('should handle FROM_FILE as currentTab prop', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should handle FROM_URL as currentTab prop', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
expect(urlTab).toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should work with different setCurrentTab callback functions', () => {
|
||||
const setCurrentTab1 = jest.fn()
|
||||
const { rerender } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab1}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('app.importFromDSLUrl'))
|
||||
expect(setCurrentTab1).toHaveBeenCalledWith(CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
|
||||
const setCurrentTab2 = jest.fn()
|
||||
rerender(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab2}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('app.importFromDSLUrl'))
|
||||
expect(setCurrentTab2).toHaveBeenCalledWith(CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for edge cases
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle component mounting without errors', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
expect(() =>
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
),
|
||||
).not.toThrow()
|
||||
})
|
||||
|
||||
it('should handle component unmounting without errors', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { unmount } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(() => unmount()).not.toThrow()
|
||||
})
|
||||
|
||||
it('should handle currentTab prop change', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { rerender } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Initially FROM_FILE is active
|
||||
let fileTab = screen.getByText('app.importFromDSLFile')
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
|
||||
// Change to FROM_URL
|
||||
rerender(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Now FROM_URL should be active
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
fileTab = screen.getByText('app.importFromDSLFile')
|
||||
expect(urlTab).toHaveClass('text-text-primary')
|
||||
expect(fileTab).not.toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should handle multiple rerenders', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { rerender } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
rerender(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_URL}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
rerender(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should maintain DOM structure after multiple interactions', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const initialTabCount = container.querySelectorAll('.cursor-pointer').length
|
||||
|
||||
// Multiple clicks
|
||||
fireEvent.click(screen.getByText('app.importFromDSLUrl'))
|
||||
fireEvent.click(screen.getByText('app.importFromDSLFile'))
|
||||
|
||||
const afterClicksTabCount = container.querySelectorAll('.cursor-pointer').length
|
||||
expect(afterClicksTabCount).toBe(initialTabCount)
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for Item component integration
|
||||
describe('Item Component Integration', () => {
|
||||
it('should render Item components with correct cursor style', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const tabItems = container.querySelectorAll('.cursor-pointer')
|
||||
expect(tabItems.length).toBe(2)
|
||||
})
|
||||
|
||||
it('should pass correct isActive prop to Item components', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
// File tab should be active
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
// URL tab should be inactive
|
||||
expect(urlTab).not.toHaveClass('text-text-primary')
|
||||
})
|
||||
|
||||
it('should pass correct label to Item components', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('app.importFromDSLFile')).toBeInTheDocument()
|
||||
expect(screen.getByText('app.importFromDSLUrl')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should pass correct onClick handler to Item components', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
fireEvent.click(fileTab)
|
||||
fireEvent.click(urlTab)
|
||||
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(2)
|
||||
expect(setCurrentTab).toHaveBeenNthCalledWith(1, CreateFromDSLModalTab.FROM_FILE, expect.anything())
|
||||
expect(setCurrentTab).toHaveBeenNthCalledWith(2, CreateFromDSLModalTab.FROM_URL, expect.anything())
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for accessibility
|
||||
describe('Accessibility', () => {
|
||||
it('should have clickable elements for each tab', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const clickableElements = container.querySelectorAll('.cursor-pointer')
|
||||
expect(clickableElements.length).toBe(2)
|
||||
})
|
||||
|
||||
it('should have visible text labels for each tab', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileLabel = screen.getByText('app.importFromDSLFile')
|
||||
const urlLabel = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
expect(fileLabel).toBeVisible()
|
||||
expect(urlLabel).toBeVisible()
|
||||
})
|
||||
|
||||
it('should visually distinguish active tab from inactive tabs', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
const { container } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Active tab has indicator bar
|
||||
const indicatorBars = container.querySelectorAll('.bg-util-colors-blue-brand-blue-brand-600')
|
||||
expect(indicatorBars.length).toBe(1)
|
||||
|
||||
// Active tab has different text color
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
expect(fileTab).toHaveClass('text-text-primary')
|
||||
})
|
||||
})
|
||||
|
||||
// Tests for component stability
|
||||
describe('Component Stability', () => {
|
||||
it('should handle rapid mount/unmount cycles', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const { unmount } = render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
unmount()
|
||||
}
|
||||
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
|
||||
it('should handle rapid tab switching', () => {
|
||||
const setCurrentTab = jest.fn()
|
||||
render(
|
||||
<Tab
|
||||
currentTab={CreateFromDSLModalTab.FROM_FILE}
|
||||
setCurrentTab={setCurrentTab}
|
||||
/>,
|
||||
)
|
||||
|
||||
const fileTab = screen.getByText('app.importFromDSLFile')
|
||||
const urlTab = screen.getByText('app.importFromDSLUrl')
|
||||
|
||||
// Rapid clicks
|
||||
for (let i = 0; i < 10; i++)
|
||||
fireEvent.click(i % 2 === 0 ? urlTab : fileTab)
|
||||
|
||||
expect(setCurrentTab).toHaveBeenCalledTimes(10)
|
||||
})
|
||||
})
|
||||
})
|
||||
439
web/app/components/datasets/create-from-pipeline/index.spec.tsx
Normal file
439
web/app/components/datasets/create-from-pipeline/index.spec.tsx
Normal file
@@ -0,0 +1,439 @@
|
||||
import React from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import CreateFromPipeline from './index'

// Mock list component to avoid deep dependency issues
jest.mock('./list', () => ({
  __esModule: true,
  default: () => <div data-testid="list">List Component</div>,
}))

// Mock CreateFromDSLModal to avoid deep dependency chain.
// The stub renders only when `show` is true and exposes buttons wired to
// onClose/onSuccess so the modal open/close flow can be asserted.
jest.mock('./create-options/create-from-dsl-modal', () => ({
  __esModule: true,
  default: ({ show, onClose, onSuccess }: { show: boolean; onClose: () => void; onSuccess: () => void }) => (
    show
      ? (
        <div data-testid="dsl-modal">
          <button data-testid="dsl-modal-close" onClick={onClose}>Close</button>
          <button data-testid="dsl-modal-success" onClick={onSuccess}>Import Success</button>
        </div>
      )
      : null
  ),
  CreateFromDSLModalTab: {
    FROM_URL: 'from-url',
  },
}))

// Mock next/navigation: router spies plus a mutable search-params instance
// that tests can replace before rendering.
const mockReplace = jest.fn()
const mockPush = jest.fn()
let mockSearchParams = new URLSearchParams()

jest.mock('next/navigation', () => ({
  useRouter: () => ({
    replace: mockReplace,
    push: mockPush,
  }),
  useSearchParams: () => mockSearchParams,
}))

// Mock react-i18next: t() returns the key unchanged
jest.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))

// Mock useInvalidDatasetList hook so cache invalidation can be asserted
const mockInvalidDatasetList = jest.fn()
jest.mock('@/service/knowledge/use-dataset', () => ({
  useInvalidDatasetList: () => mockInvalidDatasetList,
}))
|
||||
|
||||
describe('CreateFromPipeline', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockSearchParams = new URLSearchParams()
|
||||
})
|
||||
|
||||
// Tests for basic rendering
|
||||
describe('Rendering', () => {
  it('should render without crashing', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.firstChild as HTMLElement).toBeInTheDocument()
  })

  it('should render the main container with correct className', () => {
    const { container } = render(<CreateFromPipeline />)

    const mainContainer = container.firstChild as HTMLElement
    const expectedClasses = [
      'relative', 'flex', 'h-[calc(100vh-56px)]', 'flex-col', 'overflow-hidden',
      'rounded-t-2xl', 'border-t', 'border-effects-highlight', 'bg-background-default-subtle',
    ]
    for (const cls of expectedClasses)
      expect(mainContainer).toHaveClass(cls)
  })

  it('should render Header component with back to knowledge text', () => {
    render(<CreateFromPipeline />)

    expect(screen.getByText('datasetPipeline.creation.backToKnowledge')).toBeInTheDocument()
  })

  it('should render List component', () => {
    render(<CreateFromPipeline />)

    expect(screen.getByTestId('list')).toBeInTheDocument()
  })

  it('should render Footer component with import DSL button', () => {
    render(<CreateFromPipeline />)

    expect(screen.getByText('datasetPipeline.creation.importDSL')).toBeInTheDocument()
  })

  it('should render Effect component with blur effect', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.querySelector('.blur-\\[80px\\]')).toBeInTheDocument()
  })

  it('should render Effect component with correct positioning classes', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.querySelector('.left-8.top-\\[-34px\\].opacity-20')).toBeInTheDocument()
  })
})
|
||||
|
||||
// Tests for Header component integration
|
||||
describe('Header Component Integration', () => {
  it('should render header with navigation link', () => {
    render(<CreateFromPipeline />)

    expect(screen.getByRole('link')).toHaveAttribute('href', '/datasets')
  })

  it('should render back button inside header', () => {
    render(<CreateFromPipeline />)

    // The back button is icon-only, hence the empty accessible name.
    const backButton = screen.getByRole('button', { name: '' })
    expect(backButton).toBeInTheDocument()
    expect(backButton).toHaveClass('rounded-full')
  })

  it('should render header with correct styling', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.querySelector('.px-16.pb-2.pt-5')).toBeInTheDocument()
  })
})
|
||||
|
||||
// Tests for Footer component integration
|
||||
describe('Footer Component Integration', () => {
  it('should render footer with import DSL button', () => {
    render(<CreateFromPipeline />)

    expect(screen.getByText('datasetPipeline.creation.importDSL')).toBeInTheDocument()
  })

  it('should render footer at bottom with correct positioning classes', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.querySelector('.absolute.bottom-0.left-0.right-0')).toBeInTheDocument()
  })

  it('should render footer with backdrop blur', () => {
    const { container } = render(<CreateFromPipeline />)

    expect(container.querySelector('.backdrop-blur-\\[6px\\]')).toBeInTheDocument()
  })

  it('should render divider in footer', () => {
    const { container } = render(<CreateFromPipeline />)

    // Divider renders with w-8 class
    expect(container.querySelector('.w-8')).toBeInTheDocument()
  })

  it('should open import modal when import DSL button is clicked', () => {
    render(<CreateFromPipeline />)

    fireEvent.click(screen.getByText('datasetPipeline.creation.importDSL'))

    expect(screen.getByTestId('dsl-modal')).toBeInTheDocument()
  })

  it('should not show import modal initially', () => {
    render(<CreateFromPipeline />)

    expect(screen.queryByTestId('dsl-modal')).not.toBeInTheDocument()
  })
})
|
||||
|
||||
// Tests for Effect component integration
describe('Effect Component Integration', () => {
  // Each case: [test name, CSS selector that must match the Effect element].
  const effectCases: Array<[string, string]> = [
    ['should render Effect with blur effect', '.blur-\\[80px\\]'],
    ['should render Effect with absolute positioning', '.absolute.size-\\[112px\\].rounded-full'],
    ['should render Effect with brand color', '.bg-util-colors-blue-brand-blue-brand-500'],
    ['should render Effect with custom opacity', '.opacity-20'],
  ]

  effectCases.forEach(([name, selector]) => {
    it(name, () => {
      const { container } = render(<CreateFromPipeline />)

      expect(container.querySelector(selector)).toBeInTheDocument()
    })
  })
})
|
||||
|
||||
// Tests for layout structure
describe('Layout Structure', () => {
  // Renders the page and returns the root wrapper element.
  const renderRoot = () => {
    const { container } = render(<CreateFromPipeline />)
    return container.firstChild as HTMLElement
  }

  it('should render children in correct order', () => {
    // Should have 4 children: Effect, Header, List, Footer
    expect(renderRoot().children.length).toBe(4)
  })

  it('should have flex column layout', () => {
    expect(renderRoot()).toHaveClass('flex-col')
  })

  it('should have overflow hidden on main container', () => {
    expect(renderRoot()).toHaveClass('overflow-hidden')
  })

  it('should have correct height calculation', () => {
    expect(renderRoot()).toHaveClass('h-[calc(100vh-56px)]')
  })
})
|
||||
|
||||
// Tests for styling
describe('Styling', () => {
  // Renders the page and returns the root wrapper element.
  const getMainContainer = () => {
    const { container } = render(<CreateFromPipeline />)
    return container.firstChild as HTMLElement
  }

  it('should have border styling on main container', () => {
    expect(getMainContainer()).toHaveClass('border-t', 'border-effects-highlight')
  })

  it('should have rounded top corners', () => {
    expect(getMainContainer()).toHaveClass('rounded-t-2xl')
  })

  it('should have subtle background color', () => {
    expect(getMainContainer()).toHaveClass('bg-background-default-subtle')
  })

  it('should have relative positioning for child absolute positioning', () => {
    expect(getMainContainer()).toHaveClass('relative')
  })
})
|
||||
|
||||
// Tests for edge cases
describe('Edge Cases', () => {
  it('should handle component mounting without errors', () => {
    expect(() => render(<CreateFromPipeline />)).not.toThrow()
  })

  it('should handle component unmounting without errors', () => {
    const { unmount } = render(<CreateFromPipeline />)

    expect(() => unmount()).not.toThrow()
  })

  it('should handle multiple renders without issues', () => {
    const { rerender } = render(<CreateFromPipeline />)

    for (let i = 0; i < 3; i++)
      rerender(<CreateFromPipeline />)

    expect(screen.getByText('datasetPipeline.creation.backToKnowledge')).toBeInTheDocument()
  })

  it('should maintain consistent DOM structure across rerenders', () => {
    const { container, rerender } = render(<CreateFromPipeline />)
    const countChildren = () => (container.firstChild as HTMLElement)?.children.length

    const before = countChildren()
    rerender(<CreateFromPipeline />)

    expect(countChildren()).toBe(before)
  })

  it('should handle remoteInstallUrl search param', () => {
    mockSearchParams = new URLSearchParams('remoteInstallUrl=https://example.com/dsl.yaml')

    render(<CreateFromPipeline />)

    // Should render without crashing when remoteInstallUrl is present
    expect(screen.getByText('datasetPipeline.creation.backToKnowledge')).toBeInTheDocument()
  })
})
|
||||
|
||||
// Tests for accessibility
describe('Accessibility', () => {
  it('should have accessible link for navigation', () => {
    render(<CreateFromPipeline />)

    const navLink = screen.getByRole('link')
    expect(navLink).toBeInTheDocument()
    expect(navLink).toHaveAttribute('href', '/datasets')
  })

  it('should have accessible buttons', () => {
    render(<CreateFromPipeline />)

    // back button and import DSL button
    expect(screen.getAllByRole('button').length).toBeGreaterThanOrEqual(2)
  })

  it('should use semantic structure for content', () => {
    const { container } = render(<CreateFromPipeline />)

    expect((container.firstChild as HTMLElement).tagName).toBe('DIV')
  })
})
|
||||
|
||||
// Tests for component stability
describe('Component Stability', () => {
  it('should not cause memory leaks on unmount', () => {
    const { unmount } = render(<CreateFromPipeline />)

    // WHY: the previous `expect(true).toBe(true)` was a tautology that could
    // never fail. Asserting unmount doesn't throw at least catches effect
    // cleanup errors raised during teardown.
    expect(() => unmount()).not.toThrow()
  })

  it('should handle rapid mount/unmount cycles', () => {
    // Mount/unmount repeatedly; any throw from render or cleanup fails the test.
    expect(() => {
      for (let i = 0; i < 5; i++) {
        const { unmount } = render(<CreateFromPipeline />)
        unmount()
      }
    }).not.toThrow()
  })
})
|
||||
|
||||
// Tests for user interactions
describe('User Interactions', () => {
  // Clicks the footer button that opens the import-DSL modal.
  const openImportModal = () => {
    fireEvent.click(screen.getByText('datasetPipeline.creation.importDSL'))
  }

  it('should toggle import modal when clicking import DSL button', () => {
    render(<CreateFromPipeline />)

    // Initially modal is not shown
    expect(screen.queryByTestId('dsl-modal')).not.toBeInTheDocument()

    openImportModal()

    // Modal should be shown
    expect(screen.getByTestId('dsl-modal')).toBeInTheDocument()
  })

  it('should close modal when close button is clicked', () => {
    render(<CreateFromPipeline />)

    openImportModal()
    expect(screen.getByTestId('dsl-modal')).toBeInTheDocument()

    fireEvent.click(screen.getByTestId('dsl-modal-close'))

    // Modal should be hidden
    expect(screen.queryByTestId('dsl-modal')).not.toBeInTheDocument()
  })

  it('should close modal and redirect when close button is clicked with remoteInstallUrl', () => {
    mockSearchParams = new URLSearchParams('remoteInstallUrl=https://example.com/dsl.yaml')

    render(<CreateFromPipeline />)

    openImportModal()
    fireEvent.click(screen.getByTestId('dsl-modal-close'))

    // Should call replace to remove the URL param
    expect(mockReplace).toHaveBeenCalledWith('/datasets/create-from-pipeline')
  })

  it('should call invalidDatasetList when import is successful', () => {
    render(<CreateFromPipeline />)

    openImportModal()
    fireEvent.click(screen.getByTestId('dsl-modal-success'))

    expect(mockInvalidDatasetList).toHaveBeenCalled()
  })
})
|
||||
})
|
||||
@@ -0,0 +1,842 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import List from './index'
|
||||
import type { PipelineTemplate, PipelineTemplateListResponse } from '@/models/pipeline'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
|
||||
// Mock i18n context.
// NOTE: the `mock` prefix is required so Jest permits referencing these
// mutable variables inside the hoisted jest.mock factories below; tests
// mutate them per case and beforeEach resets them.
let mockLocale = 'en-US'
jest.mock('@/context/i18n', () => ({
  useI18N: () => ({
    locale: mockLocale,
  }),
}))

// Mock global public store.
// Selector-style store: the mock invokes the selector with a minimal state
// containing only the feature flag the component under test reads.
let mockEnableMarketplace = true
jest.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: (selector: (state: { systemFeatures: { enable_marketplace: boolean } }) => boolean) =>
    selector({ systemFeatures: { enable_marketplace: mockEnableMarketplace } }),
}))

// Mock pipeline service hooks.
// Built-in and customized template queries are driven by separate mutable
// data/loading pairs so each test can shape them independently.
let mockBuiltInPipelineData: PipelineTemplateListResponse | undefined
let mockBuiltInIsLoading = false
let mockCustomizedPipelineData: PipelineTemplateListResponse | undefined
let mockCustomizedIsLoading = false

jest.mock('@/service/use-pipeline', () => ({
  usePipelineTemplateList: (params: { type: 'built-in' | 'customized'; language?: string }, enabled?: boolean) => {
    if (params.type === 'built-in') {
      return {
        // Mirrors a query hook's `enabled` option: no data when disabled.
        data: enabled !== false ? mockBuiltInPipelineData : undefined,
        isLoading: mockBuiltInIsLoading,
      }
    }
    return {
      data: mockCustomizedPipelineData,
      isLoading: mockCustomizedIsLoading,
    }
  },
}))

// Mock CreateCard component to avoid deep service dependencies.
jest.mock('./create-card', () => ({
  __esModule: true,
  default: () => (
    <div data-testid="create-card" className="h-[132px] cursor-pointer">
      <span>datasetPipeline.creation.createFromScratch.title</span>
      <span>datasetPipeline.creation.createFromScratch.description</span>
    </div>
  ),
}))

// Mock TemplateCard component to avoid deep service dependencies.
// Exposes the props it receives as data-* attributes so tests can assert on
// exactly what List passed down to each card.
jest.mock('./template-card', () => ({
  __esModule: true,
  default: ({ pipeline, type, showMoreOperations }: {
    pipeline: PipelineTemplate
    type: 'built-in' | 'customized'
    showMoreOperations?: boolean
  }) => (
    <div
      data-testid={`template-card-${pipeline.id}`}
      data-type={type}
      data-show-more={showMoreOperations}
      className="h-[132px]"
    >
      <span data-testid={`template-name-${pipeline.id}`}>{pipeline.name}</span>
      <span data-testid={`template-description-${pipeline.id}`}>{pipeline.description}</span>
      <span data-testid={`template-chunk-structure-${pipeline.id}`}>{pipeline.chunk_structure}</span>
    </div>
  ),
}))
|
||||
|
||||
// Factory function for creating mock pipeline templates
|
||||
const createMockPipelineTemplate = (overrides: Partial<PipelineTemplate> = {}): PipelineTemplate => ({
|
||||
id: 'template-1',
|
||||
name: 'Test Pipeline',
|
||||
description: 'Test pipeline description',
|
||||
icon: {
|
||||
icon_type: 'emoji',
|
||||
icon: '🔧',
|
||||
icon_background: '#FFEAD5',
|
||||
icon_url: '',
|
||||
},
|
||||
position: 1,
|
||||
chunk_structure: ChunkingMode.text,
|
||||
...overrides,
|
||||
})
|
||||
|
||||
describe('List', () => {
|
||||
beforeEach(() => {
  // Reset all mock state so every test starts from the default scenario:
  // English locale, marketplace enabled, no template data, nothing loading.
  jest.clearAllMocks()
  mockLocale = 'en-US'
  mockEnableMarketplace = true
  mockBuiltInPipelineData = undefined
  mockBuiltInIsLoading = false
  mockCustomizedPipelineData = undefined
  mockCustomizedIsLoading = false
})
|
||||
|
||||
/**
 * List Component Container
 * Tests for the main List wrapper component rendering and styling
 */
describe('List Component Container', () => {
  // Renders <List /> and returns its root wrapper element.
  const renderMainContainer = () => {
    const { container } = render(<List />)
    return container.firstChild as HTMLElement
  }

  it('should render without crashing', () => {
    expect(renderMainContainer()).toBeInTheDocument()
  })

  it('should render the main container as a div element', () => {
    expect(renderMainContainer().tagName).toBe('DIV')
  })

  // One test per structural utility class on the wrapper.
  const classCases: Array<[string, string]> = [
    ['should render the main container with grow class for flex expansion', 'grow'],
    ['should render the main container with gap-y-1 class for vertical spacing', 'gap-y-1'],
    ['should render the main container with overflow-y-auto for vertical scrolling', 'overflow-y-auto'],
    ['should render the main container with horizontal padding px-16', 'px-16'],
    ['should render the main container with bottom padding pb-[60px]', 'pb-[60px]'],
    ['should render the main container with top padding pt-1', 'pt-1'],
  ]

  classCases.forEach(([name, className]) => {
    it(name, () => {
      expect(renderMainContainer()).toHaveClass(className)
    })
  })

  it('should have all required styling classes applied', () => {
    expect(renderMainContainer()).toHaveClass('grow', 'gap-y-1', 'overflow-y-auto', 'px-16', 'pb-[60px]', 'pt-1')
  })

  it('should render both BuiltInPipelineList and CustomizedList as children when customized data exists', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'custom-child-test' })],
    }

    // BuiltInPipelineList always renders (1 child); CustomizedList adds its
    // title + grid when it has data, so at least 2 children are expected.
    expect(renderMainContainer().children.length).toBeGreaterThanOrEqual(2)
  })

  it('should render only BuiltInPipelineList when customized list is empty', () => {
    // CustomizedList returns null when empty, so only BuiltInPipelineList renders
    expect(renderMainContainer().children.length).toBe(1)
  })
})
|
||||
|
||||
/**
 * BuiltInPipelineList Integration
 * Tests for built-in pipeline templates list including CreateCard and TemplateCards
 */
describe('BuiltInPipelineList Integration', () => {
  // Shorthand for shaping the built-in query response.
  const setBuiltInTemplates = (...templates: PipelineTemplate[]) => {
    mockBuiltInPipelineData = { pipeline_templates: templates }
  }

  it('should render CreateCard component', () => {
    render(<List />)

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
    expect(screen.getByText('datasetPipeline.creation.createFromScratch.title')).toBeInTheDocument()
    expect(screen.getByText('datasetPipeline.creation.createFromScratch.description')).toBeInTheDocument()
  })

  it('should render grid container with correct responsive classes', () => {
    const { container } = render(<List />)

    const grid = container.querySelector('.grid')
    expect(grid).toBeInTheDocument()
    expect(grid).toHaveClass('grid-cols-1', 'gap-3', 'py-2', 'sm:grid-cols-2', 'md:grid-cols-3', 'lg:grid-cols-4')
  })

  it('should not render built-in template cards when loading', () => {
    mockBuiltInIsLoading = true
    setBuiltInTemplates(createMockPipelineTemplate())

    render(<List />)

    expect(screen.queryByTestId('template-card-template-1')).not.toBeInTheDocument()
  })

  it('should render built-in template cards when data is loaded', () => {
    setBuiltInTemplates(
      createMockPipelineTemplate({ id: 'built-1', name: 'Pipeline 1' }),
      createMockPipelineTemplate({ id: 'built-2', name: 'Pipeline 2' }),
    )

    render(<List />)

    expect(screen.getByTestId('template-card-built-1')).toBeInTheDocument()
    expect(screen.getByTestId('template-card-built-2')).toBeInTheDocument()
    expect(screen.getByText('Pipeline 1')).toBeInTheDocument()
    expect(screen.getByText('Pipeline 2')).toBeInTheDocument()
  })

  it('should render empty state when no built-in templates (only CreateCard visible)', () => {
    setBuiltInTemplates()

    render(<List />)

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
    expect(screen.queryByTestId(/^template-card-/)).not.toBeInTheDocument()
  })

  it('should handle undefined pipeline_templates gracefully', () => {
    // Simulates a malformed/partial API response.
    mockBuiltInPipelineData = {} as PipelineTemplateListResponse

    render(<List />)

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
  })

  it('should pass type=built-in to TemplateCard', () => {
    setBuiltInTemplates(createMockPipelineTemplate({ id: 'built-type-test' }))

    render(<List />)

    expect(screen.getByTestId('template-card-built-type-test')).toHaveAttribute('data-type', 'built-in')
  })

  it('should pass showMoreOperations=false to built-in TemplateCards', () => {
    setBuiltInTemplates(createMockPipelineTemplate({ id: 'built-ops-test' }))

    render(<List />)

    expect(screen.getByTestId('template-card-built-ops-test')).toHaveAttribute('data-show-more', 'false')
  })

  it('should render multiple built-in templates in order', () => {
    setBuiltInTemplates(
      createMockPipelineTemplate({ id: 'first', name: 'First' }),
      createMockPipelineTemplate({ id: 'second', name: 'Second' }),
      createMockPipelineTemplate({ id: 'third', name: 'Third' }),
    )

    const { container } = render(<List />)

    const cards = container.querySelector('.grid')?.querySelectorAll('[data-testid^="template-card-"]')
    expect(cards?.length).toBe(3)
  })
})
|
||||
|
||||
/**
 * CustomizedList Integration
 * Tests for customized pipeline templates list including conditional rendering.
 * CustomizedList renders nothing while loading or when there are no customized
 * templates; otherwise it renders a section title plus a card grid.
 */
describe('CustomizedList Integration', () => {
  it('should return null when loading', () => {
    mockCustomizedIsLoading = true

    render(<List />)

    // The section title is the marker for the whole customized section.
    expect(screen.queryByText('datasetPipeline.templates.customized')).not.toBeInTheDocument()
  })

  it('should return null when list is empty', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [],
    }

    render(<List />)

    expect(screen.queryByText('datasetPipeline.templates.customized')).not.toBeInTheDocument()
  })

  it('should return null when pipeline_templates is undefined', () => {
    // Simulates a malformed/partial API response.
    mockCustomizedPipelineData = {} as PipelineTemplateListResponse

    render(<List />)

    expect(screen.queryByText('datasetPipeline.templates.customized')).not.toBeInTheDocument()
  })

  it('should render customized section title when data is available', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'custom-1' })],
    }

    render(<List />)

    expect(screen.getByText('datasetPipeline.templates.customized')).toBeInTheDocument()
  })

  it('should render customized title with correct styling', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate()],
    }

    const { container } = render(<List />)

    const title = container.querySelector('.system-sm-semibold-uppercase')
    expect(title).toBeInTheDocument()
    expect(title).toHaveClass('pt-2')
    expect(title).toHaveClass('text-text-tertiary')
  })

  it('should render customized template cards', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [
        createMockPipelineTemplate({ id: 'custom-1', name: 'Custom Pipeline 1' }),
      ],
    }

    render(<List />)

    expect(screen.getByTestId('template-card-custom-1')).toBeInTheDocument()
    expect(screen.getByText('Custom Pipeline 1')).toBeInTheDocument()
  })

  it('should render multiple customized templates', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [
        createMockPipelineTemplate({ id: 'custom-1', name: 'Custom 1' }),
        createMockPipelineTemplate({ id: 'custom-2', name: 'Custom 2' }),
        createMockPipelineTemplate({ id: 'custom-3', name: 'Custom 3' }),
      ],
    }

    render(<List />)

    expect(screen.getByText('Custom 1')).toBeInTheDocument()
    expect(screen.getByText('Custom 2')).toBeInTheDocument()
    expect(screen.getByText('Custom 3')).toBeInTheDocument()
  })

  it('should pass type=customized to TemplateCard', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'custom-type-test' })],
    }

    render(<List />)

    const templateCard = screen.getByTestId('template-card-custom-type-test')
    expect(templateCard).toHaveAttribute('data-type', 'customized')
  })

  it('should not pass showMoreOperations prop to customized TemplateCards (defaults to true)', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'custom-ops-test' })],
    }

    render(<List />)

    const templateCard = screen.getByTestId('template-card-custom-ops-test')
    // showMoreOperations is not passed, so data-show-more should be undefined
    expect(templateCard).not.toHaveAttribute('data-show-more', 'false')
  })

  it('should render customized grid with responsive classes', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate()],
    }

    const { container } = render(<List />)

    // Find the second grid (customized list grid)
    const grids = container.querySelectorAll('.grid')
    expect(grids.length).toBe(2) // built-in grid and customized grid
    expect(grids[1]).toHaveClass('grid-cols-1')
    expect(grids[1]).toHaveClass('gap-3')
    expect(grids[1]).toHaveClass('py-2')
  })
})
|
||||
|
||||
/**
 * Language Handling
 * Tests for locale-based language selection in BuiltInPipelineList
 */
describe('Language Handling', () => {
  // Sets the mocked locale and renders; the suite only asserts the component
  // keeps rendering — language mapping itself is exercised indirectly.
  const renderWithLocale = (locale: string) => {
    mockLocale = locale
    render(<List />)
  }

  it('should use zh-Hans locale when set', () => {
    mockBuiltInPipelineData = { pipeline_templates: [createMockPipelineTemplate()] }
    renderWithLocale('zh-Hans')

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
  })

  it('should use ja-JP locale when set', () => {
    mockBuiltInPipelineData = { pipeline_templates: [createMockPipelineTemplate()] }
    renderWithLocale('ja-JP')

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
  })

  it('should fallback to default language for unsupported locales', () => {
    mockBuiltInPipelineData = { pipeline_templates: [createMockPipelineTemplate()] }
    renderWithLocale('fr-FR')

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
  })

  it('should handle ko-KR locale (fallback)', () => {
    renderWithLocale('ko-KR')

    expect(screen.getByTestId('create-card')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Marketplace Feature Flag
 * Tests for enable_marketplace system feature affecting built-in templates fetching
 */
describe('Marketplace Feature Flag', () => {
  it('should not fetch built-in templates when marketplace is disabled', () => {
    mockEnableMarketplace = false
    const hiddenTemplate = createMockPipelineTemplate({ name: 'Should Not Show' })
    mockBuiltInPipelineData = { pipeline_templates: [hiddenTemplate] }

    render(<List />)

    // CreateCard should render but template should not (enabled=false)
    expect(screen.getByTestId('create-card')).toBeInTheDocument()
    expect(screen.queryByText('Should Not Show')).not.toBeInTheDocument()
  })

  it('should fetch built-in templates when marketplace is enabled', () => {
    mockEnableMarketplace = true
    const visibleTemplate = createMockPipelineTemplate({ id: 'marketplace', name: 'Marketplace Template' })
    mockBuiltInPipelineData = { pipeline_templates: [visibleTemplate] }

    render(<List />)

    expect(screen.getByText('Marketplace Template')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Template Data Rendering
 * Tests for correct rendering of template properties (name, description, chunk_structure)
 */
describe('Template Data Rendering', () => {
  // Renders List with a single built-in template built from the overrides.
  const renderSingleTemplate = (overrides: Partial<PipelineTemplate>) => {
    mockBuiltInPipelineData = {
      pipeline_templates: [createMockPipelineTemplate(overrides)],
    }
    render(<List />)
  }

  it('should render template name correctly', () => {
    renderSingleTemplate({ id: 'name-test', name: 'My Custom Pipeline Name' })

    expect(screen.getByTestId('template-name-name-test')).toHaveTextContent('My Custom Pipeline Name')
  })

  it('should render template description correctly', () => {
    renderSingleTemplate({ id: 'desc-test', description: 'This is a detailed description' })

    expect(screen.getByTestId('template-description-desc-test')).toHaveTextContent('This is a detailed description')
  })

  it('should render template with text chunk structure', () => {
    renderSingleTemplate({ id: 'chunk-text', chunk_structure: ChunkingMode.text })

    expect(screen.getByTestId('template-chunk-structure-chunk-text')).toHaveTextContent(ChunkingMode.text)
  })

  it('should render template with qa chunk structure', () => {
    renderSingleTemplate({ id: 'chunk-qa', chunk_structure: ChunkingMode.qa })

    expect(screen.getByTestId('template-chunk-structure-chunk-qa')).toHaveTextContent(ChunkingMode.qa)
  })

  it('should render template with parentChild chunk structure', () => {
    renderSingleTemplate({ id: 'chunk-pc', chunk_structure: ChunkingMode.parentChild })

    expect(screen.getByTestId('template-chunk-structure-chunk-pc')).toHaveTextContent(ChunkingMode.parentChild)
  })
})
|
||||
|
||||
/**
|
||||
* Edge Cases
|
||||
* Tests for boundary conditions, special characters, and component lifecycle
|
||||
*/
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle component mounting without errors', () => {
  expect(() => render(<List />)).not.toThrow()
})

it('should handle component unmounting without errors', () => {
  const { unmount } = render(<List />)

  expect(() => unmount()).not.toThrow()
})

it('should handle multiple rerenders without issues', () => {
  const { rerender } = render(<List />)

  for (let i = 0; i < 3; i++)
    rerender(<List />)

  expect(screen.getByTestId('create-card')).toBeInTheDocument()
})

it('should maintain consistent DOM structure across rerenders', () => {
  const { container, rerender } = render(<List />)
  const childCount = () => (container.firstChild as HTMLElement)?.children.length

  const before = childCount()
  rerender(<List />)

  expect(childCount()).toBe(before)
})

it('should handle concurrent built-in and customized templates', () => {
  mockBuiltInPipelineData = {
    pipeline_templates: [createMockPipelineTemplate({ id: 'built-in-1', name: 'Built-in Template' })],
  }
  mockCustomizedPipelineData = {
    pipeline_templates: [createMockPipelineTemplate({ id: 'custom-1', name: 'Customized Template' })],
  }

  render(<List />)

  expect(screen.getByText('Built-in Template')).toBeInTheDocument()
  expect(screen.getByText('Customized Template')).toBeInTheDocument()
  expect(screen.getByText('datasetPipeline.templates.customized')).toBeInTheDocument()
})

it('should handle templates with long names gracefully', () => {
  const longName = 'A'.repeat(100)
  mockBuiltInPipelineData = {
    pipeline_templates: [createMockPipelineTemplate({ id: 'long-name', name: longName })],
  }

  render(<List />)

  expect(screen.getByTestId('template-name-long-name')).toHaveTextContent(longName)
})

it('should handle templates with empty description', () => {
  mockBuiltInPipelineData = {
    pipeline_templates: [createMockPipelineTemplate({ id: 'empty-desc', description: '' })],
  }

  render(<List />)

  expect(screen.getByTestId('template-description-empty-desc')).toHaveTextContent('')
})
|
||||
|
||||
it('should handle templates with special characters in name', () => {
|
||||
mockBuiltInPipelineData = {
|
||||
pipeline_templates: [
|
||||
createMockPipelineTemplate({ id: 'special', name: 'Test <>&"\'Pipeline' }),
|
||||
],
|
||||
}
|
||||
|
||||
render(<List />)
|
||||
|
||||
expect(screen.getByTestId('template-name-special')).toHaveTextContent('Test <>&"\'Pipeline')
|
||||
})
|
||||
|
||||
it('should handle rapid mount/unmount cycles', () => {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const { unmount } = render(<List />)
|
||||
unmount()
|
||||
}
|
||||
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
/**
 * Loading States
 * Tests for component behavior during data loading
 */
describe('Loading States', () => {
  it('should handle both lists loading simultaneously', () => {
    mockBuiltInIsLoading = true
    mockCustomizedIsLoading = true

    render(<List />)

    // The create card is always present; the customized section header is not
    // shown until customized templates have loaded.
    expect(screen.getByTestId('create-card')).toBeInTheDocument()
    expect(screen.queryByText('datasetPipeline.templates.customized')).not.toBeInTheDocument()
  })

  it('should handle built-in loading while customized is loaded', () => {
    mockBuiltInIsLoading = true
    mockCustomizedPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'custom-only', name: 'Customized Only' })],
    }

    render(<List />)

    expect(screen.getByText('Customized Only')).toBeInTheDocument()
  })

  it('should handle customized loading while built-in is loaded', () => {
    mockCustomizedIsLoading = true
    mockBuiltInPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'built-only', name: 'Built-in Only' })],
    }

    render(<List />)

    expect(screen.getByText('Built-in Only')).toBeInTheDocument()
    expect(screen.queryByText('datasetPipeline.templates.customized')).not.toBeInTheDocument()
  })

  it('should transition from loading to loaded state', () => {
    mockBuiltInIsLoading = true
    const { rerender } = render(<List />)

    expect(screen.queryByTestId('template-card-transition')).not.toBeInTheDocument()

    // Simulate data loaded
    mockBuiltInIsLoading = false
    mockBuiltInPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'transition', name: 'After Load' })],
    }

    rerender(<List />)

    expect(screen.getByText('After Load')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Component Stability
 * Tests for consistent rendering and state management
 */
describe('Component Stability', () => {
  it('should render same structure on initial render and rerender', () => {
    const { container, rerender } = render(<List />)

    const initialHTML = container.innerHTML

    rerender(<List />)

    const rerenderHTML = container.innerHTML
    expect(rerenderHTML).toBe(initialHTML)
  })

  it('should not cause memory leaks on unmount', () => {
    // FIX: replaced the vacuous `expect(true).toBe(true)` (which can never
    // fail) with an assertion that unmounting completes without throwing.
    const { unmount } = render(<List />)

    expect(() => unmount()).not.toThrow()
  })

  it('should handle state changes correctly', () => {
    mockBuiltInPipelineData = undefined

    const { rerender } = render(<List />)

    // Add data
    mockBuiltInPipelineData = {
      pipeline_templates: [createMockPipelineTemplate({ id: 'state-test', name: 'State Test' })],
    }

    rerender(<List />)

    expect(screen.getByText('State Test')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Accessibility
 * Tests for semantic structure and keyboard navigation support
 */
describe('Accessibility', () => {
  it('should use semantic div structure for main container', () => {
    const { container } = render(<List />)

    const mainContainer = container.firstChild as HTMLElement
    expect(mainContainer.tagName).toBe('DIV')
  })

  it('should have scrollable container for keyboard navigation', () => {
    const { container } = render(<List />)

    const mainContainer = container.firstChild as HTMLElement
    expect(mainContainer).toHaveClass('overflow-y-auto')
  })

  it('should have appropriate spacing for readability', () => {
    const { container } = render(<List />)

    const mainContainer = container.firstChild as HTMLElement
    expect(mainContainer).toHaveClass('gap-y-1')
    expect(mainContainer).toHaveClass('px-16')
  })

  it('should render grid structure for template cards', () => {
    mockBuiltInPipelineData = {
      pipeline_templates: [createMockPipelineTemplate()],
    }

    const { container } = render(<List />)

    const grid = container.querySelector('.grid')
    expect(grid).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Large Datasets
 * Tests for performance with many templates
 */
describe('Large Datasets', () => {
  it('should handle many built-in templates', () => {
    mockBuiltInPipelineData = {
      pipeline_templates: Array.from({ length: 50 }, (_, i) =>
        createMockPipelineTemplate({ id: `built-${i}`, name: `Pipeline ${i}` }),
      ),
    }

    render(<List />)

    // Spot-check the first and last items to confirm the full list rendered.
    expect(screen.getByText('Pipeline 0')).toBeInTheDocument()
    expect(screen.getByText('Pipeline 49')).toBeInTheDocument()
  })

  it('should handle many customized templates', () => {
    mockCustomizedPipelineData = {
      pipeline_templates: Array.from({ length: 50 }, (_, i) =>
        createMockPipelineTemplate({ id: `custom-${i}`, name: `Custom ${i}` }),
      ),
    }

    render(<List />)

    expect(screen.getByText('Custom 0')).toBeInTheDocument()
    expect(screen.getByText('Custom 49')).toBeInTheDocument()
  })
})
|
||||
})
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,786 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import Details from './index'
|
||||
import type { PipelineTemplateByIdResponse } from '@/models/pipeline'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
import type { Edge, Node, Viewport } from 'reactflow'
|
||||
|
||||
// Mock usePipelineTemplateById hook
|
||||
let mockPipelineTemplateData: PipelineTemplateByIdResponse | undefined
|
||||
let mockIsLoading = false
|
||||
|
||||
jest.mock('@/service/use-pipeline', () => ({
|
||||
usePipelineTemplateById: (params: { template_id: string; type: 'customized' | 'built-in' }, enabled: boolean) => ({
|
||||
data: enabled ? mockPipelineTemplateData : undefined,
|
||||
isLoading: mockIsLoading,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock WorkflowPreview component to avoid deep dependencies
|
||||
jest.mock('@/app/components/workflow/workflow-preview', () => ({
|
||||
__esModule: true,
|
||||
default: ({ nodes, edges, viewport, className }: {
|
||||
nodes: Node[]
|
||||
edges: Edge[]
|
||||
viewport: Viewport
|
||||
className?: string
|
||||
}) => (
|
||||
<div
|
||||
data-testid="workflow-preview"
|
||||
data-nodes-count={nodes?.length ?? 0}
|
||||
data-edges-count={edges?.length ?? 0}
|
||||
data-viewport-zoom={viewport?.zoom}
|
||||
className={className}
|
||||
>
|
||||
WorkflowPreview
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Factory function for creating mock pipeline template response
|
||||
const createMockPipelineTemplate = (
|
||||
overrides: Partial<PipelineTemplateByIdResponse> = {},
|
||||
): PipelineTemplateByIdResponse => ({
|
||||
id: 'test-template-id',
|
||||
name: 'Test Pipeline Template',
|
||||
icon_info: {
|
||||
icon_type: 'emoji',
|
||||
icon: '📙',
|
||||
icon_background: '#FFF4ED',
|
||||
icon_url: '',
|
||||
},
|
||||
description: 'Test pipeline description for testing purposes',
|
||||
chunk_structure: ChunkingMode.text,
|
||||
export_data: '{}',
|
||||
graph: {
|
||||
nodes: [
|
||||
{ id: 'node-1', type: 'custom', position: { x: 0, y: 0 }, data: {} },
|
||||
] as unknown as Node[],
|
||||
edges: [] as Edge[],
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
},
|
||||
created_by: 'Test Author',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Default props factory
|
||||
const createDefaultProps = () => ({
|
||||
id: 'test-id',
|
||||
type: 'built-in' as const,
|
||||
onApplyTemplate: jest.fn(),
|
||||
onClose: jest.fn(),
|
||||
})
|
||||
|
||||
describe('Details', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockPipelineTemplateData = undefined
|
||||
mockIsLoading = false
|
||||
})
|
||||
|
||||
/**
 * Loading State Tests
 * Tests for component behavior when data is loading or undefined
 */
describe('Loading State', () => {
  it('should render Loading component when pipelineTemplateInfo is undefined', () => {
    mockPipelineTemplateData = undefined
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    // Loading component renders a spinner SVG with spin-animation class
    const spinner = container.querySelector('.spin-animation')
    expect(spinner).toBeInTheDocument()
  })

  it('should render Loading component when data is still loading', () => {
    mockIsLoading = true
    mockPipelineTemplateData = undefined
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    // Loading component renders a spinner SVG with spin-animation class
    const spinner = container.querySelector('.spin-animation')
    expect(spinner).toBeInTheDocument()
  })

  it('should not render main content while loading', () => {
    mockPipelineTemplateData = undefined
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.queryByTestId('workflow-preview')).not.toBeInTheDocument()
    expect(screen.queryByText('datasetPipeline.operations.useTemplate')).not.toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Rendering Tests
 * Tests for correct rendering when data is available
 */
describe('Rendering', () => {
  it('should render without crashing when data is available', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    expect(container.firstChild).toBeInTheDocument()
  })

  it('should render the main container with flex layout', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    const mainContainer = container.firstChild as HTMLElement
    expect(mainContainer).toHaveClass('flex')
    expect(mainContainer).toHaveClass('h-full')
  })

  it('should render WorkflowPreview component', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should pass graph data to WorkflowPreview', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      graph: {
        nodes: [
          { id: '1', type: 'custom', position: { x: 0, y: 0 }, data: {} },
          { id: '2', type: 'custom', position: { x: 100, y: 100 }, data: {} },
        ] as unknown as Node[],
        edges: [
          { id: 'e1', source: '1', target: '2' },
        ] as unknown as Edge[],
        viewport: { x: 10, y: 20, zoom: 1.5 },
      },
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    // The WorkflowPreview mock mirrors its props onto data-* attributes.
    const preview = screen.getByTestId('workflow-preview')
    expect(preview).toHaveAttribute('data-nodes-count', '2')
    expect(preview).toHaveAttribute('data-edges-count', '1')
    expect(preview).toHaveAttribute('data-viewport-zoom', '1.5')
  })

  it('should render template name', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'My Test Pipeline' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('My Test Pipeline')).toBeInTheDocument()
  })

  it('should render template description', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ description: 'This is a test description' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('This is a test description')).toBeInTheDocument()
  })

  it('should render created_by information when available', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ created_by: 'John Doe' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    // The translation key includes the author
    expect(screen.getByText('datasetPipeline.details.createdBy')).toBeInTheDocument()
  })

  it('should not render created_by when not available', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ created_by: '' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.queryByText(/createdBy/)).not.toBeInTheDocument()
  })

  it('should render "Use Template" button', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('datasetPipeline.operations.useTemplate')).toBeInTheDocument()
  })

  it('should render close button', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    // NOTE(review): the close button is icon-only, hence the empty accessible
    // name — consider adding an aria-label to the component.
    const closeButton = screen.getByRole('button', { name: '' })
    expect(closeButton).toBeInTheDocument()
  })

  it('should render structure section title', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('datasetPipeline.details.structure')).toBeInTheDocument()
  })

  it('should render structure tooltip', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    // Tooltip component should be rendered
    expect(screen.getByText('datasetPipeline.details.structure')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Event Handler Tests
 * Tests for user interactions and callback functions
 */
describe('Event Handlers', () => {
  it('should call onApplyTemplate when "Use Template" button is clicked', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    const useTemplateButton = screen.getByText('datasetPipeline.operations.useTemplate').closest('button')
    fireEvent.click(useTemplateButton!)

    expect(props.onApplyTemplate).toHaveBeenCalledTimes(1)
  })

  it('should call onClose when close button is clicked', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    // Find the close button (the one with RiCloseLine icon)
    const closeButton = container.querySelector('button.absolute.right-4')
    fireEvent.click(closeButton!)

    expect(props.onClose).toHaveBeenCalledTimes(1)
  })

  // FIX: previous title said "should not call handlers on multiple clicks",
  // which contradicted the assertion below (3 clicks -> 3 calls). Renamed to
  // describe the behavior actually asserted.
  it('should call onApplyTemplate once per click', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    const useTemplateButton = screen.getByText('datasetPipeline.operations.useTemplate').closest('button')
    fireEvent.click(useTemplateButton!)
    fireEvent.click(useTemplateButton!)
    fireEvent.click(useTemplateButton!)

    expect(props.onApplyTemplate).toHaveBeenCalledTimes(3)
  })
})
|
||||
|
||||
/**
 * Props Variations Tests
 * Tests for different prop combinations
 */
describe('Props Variations', () => {
  it('should handle built-in type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = { ...createDefaultProps(), type: 'built-in' as const }

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle customized type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = { ...createDefaultProps(), type: 'customized' as const }

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle different template IDs', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = { ...createDefaultProps(), id: 'unique-template-123' }

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * App Icon Memoization Tests
 * Tests for the useMemo logic that computes appIcon
 */
describe('App Icon Memoization', () => {
  it('should use default emoji icon when pipelineTemplateInfo is undefined', () => {
    mockPipelineTemplateData = undefined
    const props = createDefaultProps()

    render(<Details {...props} />)

    // Loading state - no AppIcon rendered
    expect(screen.queryByTestId('workflow-preview')).not.toBeInTheDocument()
  })

  it('should handle emoji icon type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      icon_info: {
        icon_type: 'emoji',
        icon: '🚀',
        icon_background: '#E6F4FF',
        icon_url: '',
      },
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    // AppIcon should be rendered with emoji
    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle image icon type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      icon_info: {
        icon_type: 'image',
        icon: 'file-id-123',
        icon_background: '',
        icon_url: 'https://example.com/image.png',
      },
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle image icon type with empty url and icon (fallback branch)', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      icon_info: {
        icon_type: 'image',
        icon: '', // empty string - triggers || '' fallback
        icon_background: '',
        icon_url: '', // empty string - triggers || '' fallback
      },
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    // Component should still render without errors
    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle missing icon properties gracefully', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      icon_info: {
        icon_type: 'emoji',
        icon: '',
        icon_background: '',
        icon_url: '',
      },
    })
    const props = createDefaultProps()

    expect(() => render(<Details {...props} />)).not.toThrow()
  })
})
|
||||
|
||||
/**
 * Chunk Structure Tests
 * Tests for different chunk_structure values and ChunkStructureCard rendering
 */
describe('Chunk Structure', () => {
  it('should render ChunkStructureCard for text chunk structure', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      chunk_structure: ChunkingMode.text,
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    // ChunkStructureCard should be rendered
    expect(screen.getByText('datasetPipeline.details.structure')).toBeInTheDocument()
    // General option title
    expect(screen.getByText('General')).toBeInTheDocument()
  })

  it('should render ChunkStructureCard for parentChild chunk structure', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      chunk_structure: ChunkingMode.parentChild,
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('Parent-Child')).toBeInTheDocument()
  })

  it('should render ChunkStructureCard for qa chunk structure', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      chunk_structure: ChunkingMode.qa,
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('Q&A')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Edge Cases Tests
 * Tests for boundary conditions and unusual inputs
 */
describe('Edge Cases', () => {
  it('should handle empty name', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ name: '' })
    const props = createDefaultProps()

    expect(() => render(<Details {...props} />)).not.toThrow()
  })

  it('should handle empty description', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ description: '' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle very long name', () => {
    const longName = 'A'.repeat(200)
    mockPipelineTemplateData = createMockPipelineTemplate({ name: longName })
    const props = createDefaultProps()

    render(<Details {...props} />)

    const nameElement = screen.getByText(longName)
    expect(nameElement).toBeInTheDocument()
    expect(nameElement).toHaveClass('truncate')
  })

  it('should handle very long description', () => {
    const longDesc = 'B'.repeat(1000)
    mockPipelineTemplateData = createMockPipelineTemplate({ description: longDesc })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText(longDesc)).toBeInTheDocument()
  })

  it('should handle special characters in name', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      name: 'Test <>&"\'Pipeline @#$%^&*()',
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('Test <>&"\'Pipeline @#$%^&*()')).toBeInTheDocument()
  })

  it('should handle unicode characters', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      name: '测试管道 🚀 テスト',
      description: '这是一个测试描述 日本語テスト',
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    expect(screen.getByText('测试管道 🚀 テスト')).toBeInTheDocument()
    expect(screen.getByText('这是一个测试描述 日本語テスト')).toBeInTheDocument()
  })

  it('should handle empty graph nodes and edges', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({
      graph: {
        nodes: [],
        edges: [],
        viewport: { x: 0, y: 0, zoom: 1 },
      },
    })
    const props = createDefaultProps()

    render(<Details {...props} />)

    const preview = screen.getByTestId('workflow-preview')
    expect(preview).toHaveAttribute('data-nodes-count', '0')
    expect(preview).toHaveAttribute('data-edges-count', '0')
  })
})
|
||||
|
||||
/**
 * Component Memoization Tests
 * Tests for React.memo behavior
 */
describe('Component Memoization', () => {
  it('should render correctly after rerender with same props', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { rerender } = render(<Details {...props} />)

    expect(screen.getByText('Test Pipeline Template')).toBeInTheDocument()

    rerender(<Details {...props} />)

    expect(screen.getByText('Test Pipeline Template')).toBeInTheDocument()
  })

  it('should update when id prop changes', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'First Template' })
    const props = createDefaultProps()

    const { rerender } = render(<Details {...props} />)

    expect(screen.getByText('First Template')).toBeInTheDocument()

    // Change the id prop which should trigger a rerender
    // Update mock data for the new id
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'Second Template' })
    rerender(<Details {...props} id="new-id" />)

    expect(screen.getByText('Second Template')).toBeInTheDocument()
  })

  it('should handle callback reference changes', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { rerender } = render(<Details {...props} />)

    const newOnApplyTemplate = jest.fn()
    rerender(<Details {...props} onApplyTemplate={newOnApplyTemplate} />)

    const useTemplateButton = screen.getByText('datasetPipeline.operations.useTemplate').closest('button')
    fireEvent.click(useTemplateButton!)

    // Only the latest callback reference should be invoked.
    expect(newOnApplyTemplate).toHaveBeenCalledTimes(1)
    expect(props.onApplyTemplate).not.toHaveBeenCalled()
  })
})
|
||||
|
||||
/**
 * Component Structure Tests
 * Tests for DOM structure and layout
 */
describe('Component Structure', () => {
  it('should have left panel for workflow preview', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    const leftPanel = container.querySelector('.grow.items-center.justify-center')
    expect(leftPanel).toBeInTheDocument()
  })

  it('should have right panel with fixed width', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { container } = render(<Details {...props} />)

    // Tailwind arbitrary-value class needs escaped brackets in the selector.
    const rightPanel = container.querySelector('.w-\\[360px\\]')
    expect(rightPanel).toBeInTheDocument()
  })

  it('should have primary button variant for Use Template', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    render(<Details {...props} />)

    const button = screen.getByText('datasetPipeline.operations.useTemplate').closest('button')
    // Button should have primary styling
    expect(button).toBeInTheDocument()
  })

  it('should have title attribute for truncation tooltip', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'My Pipeline Name' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    const nameElement = screen.getByText('My Pipeline Name')
    expect(nameElement).toHaveAttribute('title', 'My Pipeline Name')
  })

  it('should have title attribute on created_by for truncation', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ created_by: 'Author Name' })
    const props = createDefaultProps()

    render(<Details {...props} />)

    const createdByElement = screen.getByText('datasetPipeline.details.createdBy')
    expect(createdByElement).toHaveAttribute('title', 'Author Name')
  })
})
|
||||
|
||||
/**
 * Component Lifecycle Tests
 * Tests for mount/unmount behavior
 */
describe('Component Lifecycle', () => {
  it('should mount without errors', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    expect(() => render(<Details {...props} />)).not.toThrow()
  })

  it('should unmount without errors', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    const { unmount } = render(<Details {...props} />)

    expect(() => unmount()).not.toThrow()
  })

  it('should handle rapid mount/unmount cycles', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    const props = createDefaultProps()

    // FIX: replaced the vacuous `expect(true).toBe(true)` (which can never
    // fail) with an assertion that the cycles complete without throwing.
    expect(() => {
      for (let i = 0; i < 5; i++) {
        const { unmount } = render(<Details {...props} />)
        unmount()
      }
    }).not.toThrow()
  })

  it('should transition from loading to loaded state', () => {
    mockPipelineTemplateData = undefined
    const props = createDefaultProps()

    const { rerender, container } = render(<Details {...props} />)

    // Loading component renders a spinner SVG with spin-animation class
    const spinner = container.querySelector('.spin-animation')
    expect(spinner).toBeInTheDocument()

    // Simulate data loaded - need to change props to trigger rerender with React.memo
    mockPipelineTemplateData = createMockPipelineTemplate()
    rerender(<Details {...props} id="loaded-id" />)

    expect(container.querySelector('.spin-animation')).not.toBeInTheDocument()
    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })
})
|
||||
|
||||
/**
 * Styling Tests
 * Tests for CSS classes and visual styling
 */
describe('Styling', () => {
  // Renders the component with default mock data and default props.
  const renderWithDefaults = () => {
    mockPipelineTemplateData = createMockPipelineTemplate()
    render(<Details {...createDefaultProps()} />)
  }

  it('should apply overflow-hidden rounded-2xl to WorkflowPreview container', () => {
    renderWithDefaults()

    const preview = screen.getByTestId('workflow-preview')
    expect(preview).toHaveClass('overflow-hidden', 'rounded-2xl')
  })

  it('should apply correct typography classes to template name', () => {
    renderWithDefaults()

    const name = screen.getByText('Test Pipeline Template')
    expect(name).toHaveClass('system-md-semibold', 'text-text-secondary')
  })

  it('should apply correct styling to description', () => {
    renderWithDefaults()

    const description = screen.getByText('Test pipeline description for testing purposes')
    expect(description).toHaveClass('system-sm-regular', 'text-text-secondary')
  })

  it('should apply correct styling to structure title', () => {
    renderWithDefaults()

    // The structure section heading renders its translation key.
    const heading = screen.getByText('datasetPipeline.details.structure')
    expect(heading).toHaveClass('system-sm-semibold-uppercase', 'text-text-secondary')
  })
})
|
||||
|
||||
/**
 * API Hook Integration Tests
 * Tests for usePipelineTemplateById hook behavior
 */
describe('API Hook Integration', () => {
  it('should pass correct params to usePipelineTemplateById for built-in type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()

    render(<Details {...createDefaultProps()} id="test-id-123" type="built-in" />)

    // The hook should be called with the correct parameters
    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should pass correct params to usePipelineTemplateById for customized type', () => {
    mockPipelineTemplateData = createMockPipelineTemplate()

    render(<Details {...createDefaultProps()} id="custom-id-456" type="customized" />)

    expect(screen.getByTestId('workflow-preview')).toBeInTheDocument()
  })

  it('should handle data refetch on id change', () => {
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'First Template' })
    const baseProps = createDefaultProps()

    const { rerender } = render(<Details {...baseProps} />)
    expect(screen.getByText('First Template')).toBeInTheDocument()

    // Change id and update mock data
    mockPipelineTemplateData = createMockPipelineTemplate({ name: 'Second Template' })
    rerender(<Details {...baseProps} id="new-id" />)

    expect(screen.getByText('Second Template')).toBeInTheDocument()
  })
})
|
||||
})
|
||||
@@ -0,0 +1,965 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import TemplateCard from './index'
|
||||
import type { PipelineTemplate, PipelineTemplateByIdResponse } from '@/models/pipeline'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
|
||||
// Mock Next.js router
|
||||
const mockPush = jest.fn()
|
||||
jest.mock('next/navigation', () => ({
|
||||
useRouter: () => ({
|
||||
push: mockPush,
|
||||
}),
|
||||
}))
|
||||
|
||||
let mockCreateDataset: jest.Mock
|
||||
let mockDeleteTemplate: jest.Mock
|
||||
let mockExportTemplateDSL: jest.Mock
|
||||
let mockInvalidCustomizedTemplateList: jest.Mock
|
||||
let mockInvalidDatasetList: jest.Mock
|
||||
let mockHandleCheckPluginDependencies: jest.Mock
|
||||
let mockIsExporting = false
|
||||
|
||||
// Mock service hooks
|
||||
let mockPipelineTemplateByIdData: PipelineTemplateByIdResponse | undefined
|
||||
let mockRefetch: jest.Mock
|
||||
|
||||
jest.mock('@/service/use-pipeline', () => ({
|
||||
usePipelineTemplateById: () => ({
|
||||
data: mockPipelineTemplateByIdData,
|
||||
refetch: mockRefetch,
|
||||
}),
|
||||
useDeleteTemplate: () => ({
|
||||
mutateAsync: mockDeleteTemplate,
|
||||
}),
|
||||
useExportTemplateDSL: () => ({
|
||||
mutateAsync: mockExportTemplateDSL,
|
||||
isPending: mockIsExporting,
|
||||
}),
|
||||
useInvalidCustomizedTemplateList: () => mockInvalidCustomizedTemplateList,
|
||||
}))
|
||||
|
||||
jest.mock('@/service/knowledge/use-create-dataset', () => ({
|
||||
useCreatePipelineDatasetFromCustomized: () => ({
|
||||
mutateAsync: mockCreateDataset,
|
||||
}),
|
||||
}))
|
||||
|
||||
jest.mock('@/service/knowledge/use-dataset', () => ({
|
||||
useInvalidDatasetList: () => mockInvalidDatasetList,
|
||||
}))
|
||||
|
||||
jest.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({
|
||||
usePluginDependencies: () => ({
|
||||
handleCheckPluginDependencies: mockHandleCheckPluginDependencies,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock downloadFile
|
||||
const mockDownloadFile = jest.fn()
|
||||
jest.mock('@/utils/format', () => ({
|
||||
downloadFile: (params: { data: Blob; fileName: string }) => mockDownloadFile(params),
|
||||
}))
|
||||
|
||||
// Mock trackEvent
|
||||
const mockTrackEvent = jest.fn()
|
||||
jest.mock('@/app/components/base/amplitude', () => ({
|
||||
trackEvent: (name: string, params: Record<string, unknown>) => mockTrackEvent(name, params),
|
||||
}))
|
||||
|
||||
// Mock child components to simplify testing
|
||||
jest.mock('./content', () => ({
|
||||
__esModule: true,
|
||||
default: ({ name, description, iconInfo, chunkStructure }: {
|
||||
name: string
|
||||
description: string
|
||||
iconInfo: { icon_type: string }
|
||||
chunkStructure: string
|
||||
}) => (
|
||||
<div data-testid="content">
|
||||
<span data-testid="content-name">{name}</span>
|
||||
<span data-testid="content-description">{description}</span>
|
||||
<span data-testid="content-icon-type">{iconInfo.icon_type}</span>
|
||||
<span data-testid="content-chunk-structure">{chunkStructure}</span>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
jest.mock('./actions', () => ({
|
||||
__esModule: true,
|
||||
default: ({
|
||||
onApplyTemplate,
|
||||
handleShowTemplateDetails,
|
||||
showMoreOperations,
|
||||
openEditModal,
|
||||
handleExportDSL,
|
||||
handleDelete,
|
||||
}: {
|
||||
onApplyTemplate: () => void
|
||||
handleShowTemplateDetails: () => void
|
||||
showMoreOperations: boolean
|
||||
openEditModal: () => void
|
||||
handleExportDSL: () => void
|
||||
handleDelete: () => void
|
||||
}) => (
|
||||
<div data-testid="actions" data-show-more={showMoreOperations}>
|
||||
<button data-testid="apply-template-btn" onClick={onApplyTemplate}>Apply</button>
|
||||
<button data-testid="show-details-btn" onClick={handleShowTemplateDetails}>Details</button>
|
||||
<button data-testid="edit-modal-btn" onClick={openEditModal}>Edit</button>
|
||||
<button data-testid="export-dsl-btn" onClick={handleExportDSL}>Export</button>
|
||||
<button data-testid="delete-btn" onClick={handleDelete}>Delete</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
jest.mock('./details', () => ({
|
||||
__esModule: true,
|
||||
default: ({ id, type, onClose, onApplyTemplate }: {
|
||||
id: string
|
||||
type: string
|
||||
onClose: () => void
|
||||
onApplyTemplate: () => void
|
||||
}) => (
|
||||
<div data-testid="details-modal">
|
||||
<span data-testid="details-id">{id}</span>
|
||||
<span data-testid="details-type">{type}</span>
|
||||
<button data-testid="details-close-btn" onClick={onClose}>Close</button>
|
||||
<button data-testid="details-apply-btn" onClick={onApplyTemplate}>Apply</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
jest.mock('./edit-pipeline-info', () => ({
|
||||
__esModule: true,
|
||||
default: ({ pipeline, onClose }: {
|
||||
pipeline: PipelineTemplate
|
||||
onClose: () => void
|
||||
}) => (
|
||||
<div data-testid="edit-pipeline-modal">
|
||||
<span data-testid="edit-pipeline-id">{pipeline.id}</span>
|
||||
<button data-testid="edit-close-btn" onClick={onClose}>Close</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Factory function for creating mock pipeline template
|
||||
const createMockPipeline = (overrides: Partial<PipelineTemplate> = {}): PipelineTemplate => ({
|
||||
id: 'test-pipeline-id',
|
||||
name: 'Test Pipeline',
|
||||
description: 'Test pipeline description',
|
||||
icon: {
|
||||
icon_type: 'emoji',
|
||||
icon: '📙',
|
||||
icon_background: '#FFF4ED',
|
||||
icon_url: '',
|
||||
},
|
||||
position: 1,
|
||||
chunk_structure: ChunkingMode.text,
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Factory function for creating mock pipeline template by id response
|
||||
const createMockPipelineByIdResponse = (
|
||||
overrides: Partial<PipelineTemplateByIdResponse> = {},
|
||||
): PipelineTemplateByIdResponse => ({
|
||||
id: 'test-pipeline-id',
|
||||
name: 'Test Pipeline',
|
||||
description: 'Test pipeline description',
|
||||
icon_info: {
|
||||
icon_type: 'emoji',
|
||||
icon: '📙',
|
||||
icon_background: '#FFF4ED',
|
||||
icon_url: '',
|
||||
},
|
||||
chunk_structure: ChunkingMode.text,
|
||||
export_data: 'yaml_content_here',
|
||||
graph: {
|
||||
nodes: [],
|
||||
edges: [],
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
},
|
||||
created_by: 'Test Author',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Default props factory
|
||||
const createDefaultProps = () => ({
|
||||
pipeline: createMockPipeline(),
|
||||
type: 'built-in' as const,
|
||||
showMoreOperations: true,
|
||||
})
|
||||
|
||||
describe('TemplateCard', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockPipelineTemplateByIdData = undefined
|
||||
mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
|
||||
mockCreateDataset = jest.fn()
|
||||
mockDeleteTemplate = jest.fn()
|
||||
mockExportTemplateDSL = jest.fn()
|
||||
mockInvalidCustomizedTemplateList = jest.fn()
|
||||
mockInvalidDatasetList = jest.fn()
|
||||
mockHandleCheckPluginDependencies = jest.fn()
|
||||
mockIsExporting = false
|
||||
})
|
||||
|
||||
/**
|
||||
* Rendering Tests
|
||||
* Tests for basic component rendering
|
||||
*/
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('actions')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Content component with correct props', () => {
|
||||
const pipeline = createMockPipeline({
|
||||
name: 'My Pipeline',
|
||||
description: 'My description',
|
||||
chunk_structure: ChunkingMode.qa,
|
||||
})
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('My Pipeline')
|
||||
expect(screen.getByTestId('content-description')).toHaveTextContent('My description')
|
||||
expect(screen.getByTestId('content-chunk-structure')).toHaveTextContent(ChunkingMode.qa)
|
||||
})
|
||||
|
||||
it('should render Actions component with showMoreOperations=true by default', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
const actions = screen.getByTestId('actions')
|
||||
expect(actions).toHaveAttribute('data-show-more', 'true')
|
||||
})
|
||||
|
||||
it('should render Actions component with showMoreOperations=false when specified', () => {
|
||||
const props = { ...createDefaultProps(), showMoreOperations: false }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
const actions = screen.getByTestId('actions')
|
||||
expect(actions).toHaveAttribute('data-show-more', 'false')
|
||||
})
|
||||
|
||||
it('should have correct container styling', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { container } = render(<TemplateCard {...props} />)
|
||||
|
||||
const card = container.firstChild as HTMLElement
|
||||
expect(card).toHaveClass('group')
|
||||
expect(card).toHaveClass('relative')
|
||||
expect(card).toHaveClass('flex')
|
||||
expect(card).toHaveClass('h-[132px]')
|
||||
expect(card).toHaveClass('cursor-pointer')
|
||||
expect(card).toHaveClass('rounded-xl')
|
||||
})
|
||||
})
|
||||
|
||||
/**
 * Props Variations Tests
 * Tests for different prop combinations
 */
describe('Props Variations', () => {
  it('should handle built-in type', () => {
    render(<TemplateCard {...createDefaultProps()} type="built-in" />)

    expect(screen.getByTestId('content')).toBeInTheDocument()
  })

  it('should handle customized type', () => {
    render(<TemplateCard {...createDefaultProps()} type="customized" />)

    expect(screen.getByTestId('content')).toBeInTheDocument()
  })

  it('should handle different pipeline data', () => {
    const pipeline = createMockPipeline({
      id: 'unique-id-123',
      name: 'Unique Pipeline',
      description: 'Unique description',
      chunk_structure: ChunkingMode.parentChild,
    })

    render(<TemplateCard {...createDefaultProps()} pipeline={pipeline} />)

    expect(screen.getByTestId('content-name')).toHaveTextContent('Unique Pipeline')
    expect(screen.getByTestId('content-chunk-structure')).toHaveTextContent(ChunkingMode.parentChild)
  })

  it('should handle image icon type', () => {
    const pipeline = createMockPipeline({
      icon: {
        icon_type: 'image',
        icon: 'file-id',
        icon_background: '',
        icon_url: 'https://example.com/image.png',
      },
    })

    render(<TemplateCard {...createDefaultProps()} pipeline={pipeline} />)

    expect(screen.getByTestId('content-icon-type')).toHaveTextContent('image')
  })
})
|
||||
|
||||
/**
|
||||
* State Management Tests
|
||||
* Tests for modal state (showEditModal, showDeleteConfirm, showDetailModal)
|
||||
*/
|
||||
describe('State Management', () => {
|
||||
it('should not show edit modal initially', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.queryByTestId('edit-pipeline-modal')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show edit modal when openEditModal is called', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('edit-modal-btn'))
|
||||
|
||||
expect(screen.getByTestId('edit-pipeline-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should close edit modal when onClose is called', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('edit-modal-btn'))
|
||||
expect(screen.getByTestId('edit-pipeline-modal')).toBeInTheDocument()
|
||||
|
||||
fireEvent.click(screen.getByTestId('edit-close-btn'))
|
||||
expect(screen.queryByTestId('edit-pipeline-modal')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not show delete confirm initially', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.queryByText('datasetPipeline.deletePipeline.title')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show delete confirm when handleDelete is called', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('delete-btn'))
|
||||
|
||||
expect(screen.getByText('datasetPipeline.deletePipeline.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not show details modal initially', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.queryByTestId('details-modal')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show details modal when handleShowTemplateDetails is called', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('show-details-btn'))
|
||||
|
||||
expect(screen.getByTestId('details-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should close details modal when onClose is called', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('show-details-btn'))
|
||||
expect(screen.getByTestId('details-modal')).toBeInTheDocument()
|
||||
|
||||
fireEvent.click(screen.getByTestId('details-close-btn'))
|
||||
expect(screen.queryByTestId('details-modal')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should pass correct props to details modal', () => {
|
||||
const pipeline = createMockPipeline({ id: 'detail-test-id' })
|
||||
const props = { ...createDefaultProps(), pipeline, type: 'customized' as const }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('show-details-btn'))
|
||||
|
||||
expect(screen.getByTestId('details-id')).toHaveTextContent('detail-test-id')
|
||||
expect(screen.getByTestId('details-type')).toHaveTextContent('customized')
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Event Handlers Tests
|
||||
* Tests for callback functions and user interactions
|
||||
*/
|
||||
describe('Event Handlers', () => {
|
||||
describe('handleUseTemplate', () => {
  // Renders the card and clicks the "Apply" action. Mock reassignment
  // must happen before this is called, since the mocked hooks read the
  // module-level mock variables at render time.
  const applyTemplate = (props = createDefaultProps()) => {
    render(<TemplateCard {...props} />)
    fireEvent.click(screen.getByTestId('apply-template-btn'))
  }

  it('should call getPipelineTemplateInfo when apply template is clicked', async () => {
    applyTemplate()

    await waitFor(() => {
      expect(mockRefetch).toHaveBeenCalled()
    })
  })

  it('should not call createDataset when pipelineTemplateInfo is not available', async () => {
    mockRefetch = jest.fn().mockResolvedValue({ data: null })

    applyTemplate()

    await waitFor(() => {
      expect(mockRefetch).toHaveBeenCalled()
    })

    // createDataset should not be called when pipelineTemplateInfo is null
    expect(mockCreateDataset).not.toHaveBeenCalled()
  })

  it('should call createDataset with correct yaml_content', async () => {
    mockRefetch = jest.fn().mockResolvedValue({
      data: createMockPipelineByIdResponse({ export_data: 'test-yaml-content' }),
    })

    applyTemplate()

    await waitFor(() => {
      expect(mockCreateDataset).toHaveBeenCalledWith(
        { yaml_content: 'test-yaml-content' },
        expect.any(Object),
      )
    })
  })

  it('should invalidate list, check plugin dependencies, and navigate on success', async () => {
    mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
    mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
      options.onSuccess({ dataset_id: 'new-dataset-id', pipeline_id: 'new-pipeline-id' })
    })

    applyTemplate()

    await waitFor(() => {
      expect(mockInvalidDatasetList).toHaveBeenCalled()
      expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('new-pipeline-id', true)
      expect(mockPush).toHaveBeenCalledWith('/datasets/new-dataset-id/pipeline')
    })
  })

  it('should track event on successful dataset creation', async () => {
    mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
    mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
      options.onSuccess({ dataset_id: 'new-dataset-id', pipeline_id: 'new-pipeline-id' })
    })
    const pipeline = createMockPipeline({ id: 'track-test-id', name: 'Track Test Pipeline' })

    applyTemplate({ ...createDefaultProps(), pipeline, type: 'customized' as const })

    await waitFor(() => {
      expect(mockTrackEvent).toHaveBeenCalledWith('create_datasets_with_pipeline', {
        template_name: 'Track Test Pipeline',
        template_id: 'track-test-id',
        template_type: 'customized',
      })
    })
  })

  it('should not call handleCheckPluginDependencies when pipeline_id is not present', async () => {
    mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
    mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
      options.onSuccess({ dataset_id: 'new-dataset-id', pipeline_id: null })
    })

    applyTemplate()

    await waitFor(() => {
      expect(mockHandleCheckPluginDependencies).not.toHaveBeenCalled()
    })
  })

  it('should call onError callback when createDataset fails', async () => {
    const onErrorSpy = jest.fn()
    mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
    mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
      onErrorSpy()
      options.onError()
    })

    applyTemplate()

    await waitFor(() => {
      expect(mockCreateDataset).toHaveBeenCalled()
      expect(onErrorSpy).toHaveBeenCalled()
    })

    // Should not navigate on error
    expect(mockPush).not.toHaveBeenCalled()
  })
})
|
||||
|
||||
describe('handleExportDSL', () => {
  // Renders the card and clicks the "Export" action.
  const exportDSL = (props = createDefaultProps()) => {
    render(<TemplateCard {...props} />)
    fireEvent.click(screen.getByTestId('export-dsl-btn'))
  }

  it('should call exportPipelineDSL with pipeline id', async () => {
    const pipeline = createMockPipeline({ id: 'export-test-id' })

    exportDSL({ ...createDefaultProps(), pipeline })

    await waitFor(() => {
      expect(mockExportTemplateDSL).toHaveBeenCalledWith('export-test-id', expect.any(Object))
    })
  })

  it('should not call exportPipelineDSL when already exporting', async () => {
    mockIsExporting = true

    exportDSL()

    await waitFor(() => {
      expect(mockExportTemplateDSL).not.toHaveBeenCalled()
    })
  })

  it('should download file on export success', async () => {
    mockExportTemplateDSL = jest.fn().mockImplementation((_id, options) => {
      options.onSuccess({ data: 'exported-yaml-content' })
    })
    const pipeline = createMockPipeline({ name: 'Export Pipeline' })

    exportDSL({ ...createDefaultProps(), pipeline })

    await waitFor(() => {
      expect(mockDownloadFile).toHaveBeenCalledWith({
        data: expect.any(Blob),
        fileName: 'Export Pipeline.pipeline',
      })
    })
  })

  it('should call onError callback on export failure', async () => {
    const onErrorSpy = jest.fn()
    mockExportTemplateDSL = jest.fn().mockImplementation((_id, options) => {
      onErrorSpy()
      options.onError()
    })

    exportDSL()

    await waitFor(() => {
      expect(mockExportTemplateDSL).toHaveBeenCalled()
      expect(onErrorSpy).toHaveBeenCalled()
    })

    // Should not download file on error
    expect(mockDownloadFile).not.toHaveBeenCalled()
  })
})
|
||||
|
||||
describe('handleDelete', () => {
  // Renders the card and opens the delete confirmation dialog.
  const renderAndOpenDeleteConfirm = (props = createDefaultProps()) => {
    render(<TemplateCard {...props} />)
    fireEvent.click(screen.getByTestId('delete-btn'))
  }

  it('should call deletePipeline on confirm', async () => {
    mockDeleteTemplate = jest.fn().mockImplementation((_id, options) => {
      options.onSuccess()
    })
    const pipeline = createMockPipeline({ id: 'delete-test-id' })

    renderAndOpenDeleteConfirm({ ...createDefaultProps(), pipeline })
    expect(screen.getByText('datasetPipeline.deletePipeline.title')).toBeInTheDocument()

    // Find and click confirm button
    fireEvent.click(screen.getByText('common.operation.confirm'))

    await waitFor(() => {
      expect(mockDeleteTemplate).toHaveBeenCalledWith('delete-test-id', expect.any(Object))
    })
  })

  it('should invalidate customized template list and close confirm on success', async () => {
    mockDeleteTemplate = jest.fn().mockImplementation((_id, options) => {
      options.onSuccess()
    })

    renderAndOpenDeleteConfirm()
    fireEvent.click(screen.getByText('common.operation.confirm'))

    await waitFor(() => {
      expect(mockInvalidCustomizedTemplateList).toHaveBeenCalled()
      expect(screen.queryByText('datasetPipeline.deletePipeline.title')).not.toBeInTheDocument()
    })
  })

  it('should close delete confirm on cancel', () => {
    renderAndOpenDeleteConfirm()
    expect(screen.getByText('datasetPipeline.deletePipeline.title')).toBeInTheDocument()

    fireEvent.click(screen.getByText('common.operation.cancel'))

    expect(screen.queryByText('datasetPipeline.deletePipeline.title')).not.toBeInTheDocument()
  })
})
|
||||
})
|
||||
|
||||
/**
 * Callback Stability Tests
 * Tests for useCallback memoization
 */
describe('Callback Stability', () => {
  // Opens and closes a modal, rerenders with identical props, then opens
  // it again — a stale memoized handler would fail the second open.
  const toggleAcrossRerender = (openBtn: string, closeBtn: string, modal: string) => {
    const props = createDefaultProps()
    const { rerender } = render(<TemplateCard {...props} />)

    fireEvent.click(screen.getByTestId(openBtn))
    expect(screen.getByTestId(modal)).toBeInTheDocument()

    fireEvent.click(screen.getByTestId(closeBtn))
    rerender(<TemplateCard {...props} />)

    fireEvent.click(screen.getByTestId(openBtn))
    expect(screen.getByTestId(modal)).toBeInTheDocument()
  }

  it('should maintain stable handleShowTemplateDetails reference', () => {
    toggleAcrossRerender('show-details-btn', 'details-close-btn', 'details-modal')
  })

  it('should maintain stable openEditModal reference', () => {
    toggleAcrossRerender('edit-modal-btn', 'edit-close-btn', 'edit-pipeline-modal')
  })
})
|
||||
|
||||
/**
|
||||
* Component Memoization Tests
|
||||
* Tests for React.memo behavior
|
||||
*/
|
||||
describe('Component Memoization', () => {
|
||||
it('should render correctly after rerender with same props', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { rerender } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
|
||||
rerender(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should update when pipeline prop changes', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { rerender } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('Test Pipeline')
|
||||
|
||||
const newPipeline = createMockPipeline({ name: 'Updated Pipeline' })
|
||||
rerender(<TemplateCard {...props} pipeline={newPipeline} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('Updated Pipeline')
|
||||
})
|
||||
|
||||
it('should update when type prop changes', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { rerender } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
|
||||
rerender(<TemplateCard {...props} type="customized" />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should update when showMoreOperations prop changes', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { rerender } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('actions')).toHaveAttribute('data-show-more', 'true')
|
||||
|
||||
rerender(<TemplateCard {...props} showMoreOperations={false} />)
|
||||
|
||||
expect(screen.getByTestId('actions')).toHaveAttribute('data-show-more', 'false')
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Edge Cases Tests
|
||||
* Tests for boundary conditions and error handling
|
||||
*/
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty pipeline name', () => {
|
||||
const pipeline = createMockPipeline({ name: '' })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
expect(() => render(<TemplateCard {...props} />)).not.toThrow()
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('')
|
||||
})
|
||||
|
||||
it('should handle empty pipeline description', () => {
|
||||
const pipeline = createMockPipeline({ description: '' })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
expect(() => render(<TemplateCard {...props} />)).not.toThrow()
|
||||
expect(screen.getByTestId('content-description')).toHaveTextContent('')
|
||||
})
|
||||
|
||||
it('should handle very long pipeline name', () => {
|
||||
const longName = 'A'.repeat(200)
|
||||
const pipeline = createMockPipeline({ name: longName })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent(longName)
|
||||
})
|
||||
|
||||
it('should handle special characters in name', () => {
|
||||
const pipeline = createMockPipeline({ name: 'Test <>&"\'Pipeline @#$%' })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('Test <>&"\'Pipeline @#$%')
|
||||
})
|
||||
|
||||
it('should handle unicode characters', () => {
|
||||
const pipeline = createMockPipeline({ name: '测试管道 🚀 テスト' })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-name')).toHaveTextContent('测试管道 🚀 テスト')
|
||||
})
|
||||
|
||||
it('should handle all chunk structure types', () => {
|
||||
const chunkModes = [ChunkingMode.text, ChunkingMode.parentChild, ChunkingMode.qa]
|
||||
|
||||
chunkModes.forEach((mode) => {
|
||||
const pipeline = createMockPipeline({ chunk_structure: mode })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
const { unmount } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content-chunk-structure')).toHaveTextContent(mode)
|
||||
unmount()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Component Lifecycle Tests
|
||||
* Tests for mount/unmount behavior
|
||||
*/
|
||||
describe('Component Lifecycle', () => {
|
||||
it('should mount without errors', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
expect(() => render(<TemplateCard {...props} />)).not.toThrow()
|
||||
})
|
||||
|
||||
it('should unmount without errors', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
const { unmount } = render(<TemplateCard {...props} />)
|
||||
|
||||
expect(() => unmount()).not.toThrow()
|
||||
})
|
||||
|
||||
it('should handle rapid mount/unmount cycles', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const { unmount } = render(<TemplateCard {...props} />)
|
||||
unmount()
|
||||
}
|
||||
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Modal Integration Tests
|
||||
* Tests for modal interactions and nested callbacks
|
||||
*/
|
||||
describe('Modal Integration', () => {
|
||||
it('should pass correct pipeline to edit modal', () => {
|
||||
const pipeline = createMockPipeline({ id: 'modal-test-id' })
|
||||
const props = { ...createDefaultProps(), pipeline }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('edit-modal-btn'))
|
||||
|
||||
expect(screen.getByTestId('edit-pipeline-id')).toHaveTextContent('modal-test-id')
|
||||
})
|
||||
|
||||
it('should be able to apply template from details modal', async () => {
|
||||
mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
|
||||
mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
|
||||
options.onSuccess({ dataset_id: 'new-id', pipeline_id: 'new-pipeline' })
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('show-details-btn'))
|
||||
fireEvent.click(screen.getByTestId('details-apply-btn'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockRefetch).toHaveBeenCalled()
|
||||
expect(mockCreateDataset).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle multiple modals sequentially', () => {
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
// Open edit modal
|
||||
fireEvent.click(screen.getByTestId('edit-modal-btn'))
|
||||
expect(screen.getByTestId('edit-pipeline-modal')).toBeInTheDocument()
|
||||
|
||||
// Close edit modal
|
||||
fireEvent.click(screen.getByTestId('edit-close-btn'))
|
||||
expect(screen.queryByTestId('edit-pipeline-modal')).not.toBeInTheDocument()
|
||||
|
||||
// Open details modal
|
||||
fireEvent.click(screen.getByTestId('show-details-btn'))
|
||||
expect(screen.getByTestId('details-modal')).toBeInTheDocument()
|
||||
|
||||
// Close details modal
|
||||
fireEvent.click(screen.getByTestId('details-close-btn'))
|
||||
expect(screen.queryByTestId('details-modal')).not.toBeInTheDocument()
|
||||
|
||||
// Open delete confirm
|
||||
fireEvent.click(screen.getByTestId('delete-btn'))
|
||||
expect(screen.getByText('datasetPipeline.deletePipeline.title')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* API Integration Tests
|
||||
* Tests for service hook interactions
|
||||
*/
|
||||
describe('API Integration', () => {
|
||||
it('should initialize hooks with correct parameters', () => {
|
||||
const pipeline = createMockPipeline({ id: 'hook-test-id' })
|
||||
const props = { ...createDefaultProps(), pipeline, type: 'customized' as const }
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
expect(screen.getByTestId('content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle async operations correctly', async () => {
|
||||
mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
|
||||
mockCreateDataset = jest.fn().mockImplementation(async (_req, options) => {
|
||||
await new Promise(resolve => setTimeout(resolve, 10))
|
||||
options.onSuccess({ dataset_id: 'async-test-id', pipeline_id: 'async-pipeline' })
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('apply-template-btn'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockPush).toHaveBeenCalledWith('/datasets/async-test-id/pipeline')
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle concurrent API calls gracefully', async () => {
|
||||
mockRefetch = jest.fn().mockResolvedValue({ data: createMockPipelineByIdResponse() })
|
||||
mockCreateDataset = jest.fn().mockImplementation((_req, options) => {
|
||||
options.onSuccess({ dataset_id: 'concurrent-id', pipeline_id: 'concurrent-pipeline' })
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
render(<TemplateCard {...props} />)
|
||||
|
||||
// Trigger multiple clicks
|
||||
fireEvent.click(screen.getByTestId('apply-template-btn'))
|
||||
fireEvent.click(screen.getByTestId('apply-template-btn'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockRefetch).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -29,6 +29,7 @@ const NotionPagePreview = ({
|
||||
return
|
||||
try {
|
||||
const res = await fetchNotionPagePreview({
|
||||
workspaceID: currentPage.workspace_id,
|
||||
pageID: currentPage.page_id,
|
||||
pageType: currentPage.type,
|
||||
credentialID: notionCredentialId,
|
||||
|
||||
@@ -75,17 +75,11 @@ const OnlineDocuments = ({
|
||||
|
||||
const getOnlineDocuments = useCallback(async () => {
|
||||
const { currentCredentialId } = dataSourceStore.getState()
|
||||
// Convert datasource_parameters to inputs format for the API
|
||||
const inputs = Object.entries(nodeData.datasource_parameters || {}).reduce((acc, [key, value]) => {
|
||||
acc[key] = typeof value === 'object' && value !== null && 'value' in value ? value.value : value
|
||||
return acc
|
||||
}, {} as Record<string, any>)
|
||||
|
||||
ssePost(
|
||||
datasourceNodeRunURL,
|
||||
{
|
||||
body: {
|
||||
inputs,
|
||||
inputs: {},
|
||||
credential_id: currentCredentialId,
|
||||
datasource_type: DatasourceType.onlineDocument,
|
||||
},
|
||||
@@ -103,7 +97,7 @@ const OnlineDocuments = ({
|
||||
},
|
||||
},
|
||||
)
|
||||
}, [dataSourceStore, datasourceNodeRunURL, nodeData.datasource_parameters])
|
||||
}, [dataSourceStore, datasourceNodeRunURL])
|
||||
|
||||
useEffect(() => {
|
||||
if (!currentCredentialId) return
|
||||
|
||||
@@ -62,7 +62,7 @@ type CurrChildChunkType = {
|
||||
showModal: boolean
|
||||
}
|
||||
|
||||
export type SegmentListContextValue = {
|
||||
type SegmentListContextValue = {
|
||||
isCollapsed: boolean
|
||||
fullScreen: boolean
|
||||
toggleFullScreen: (fullscreen?: boolean) => void
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -129,7 +129,6 @@ const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
|
||||
return (
|
||||
<div
|
||||
data-testid="segment-card"
|
||||
className={cn(
|
||||
'chunk-card group/card w-full rounded-xl px-3',
|
||||
isFullDocMode ? '' : 'pb-2 pt-2.5 hover:bg-dataset-chunk-detail-card-hover-bg',
|
||||
@@ -173,7 +172,6 @@ const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
popupClassName='text-text-secondary system-xs-medium'
|
||||
>
|
||||
<div
|
||||
data-testid="segment-edit-button"
|
||||
className='flex h-6 w-6 shrink-0 cursor-pointer items-center justify-center rounded-lg hover:bg-state-base-hover'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
@@ -186,9 +184,7 @@ const SegmentCard: FC<ISegmentCardProps> = ({
|
||||
popupContent='Delete'
|
||||
popupClassName='text-text-secondary system-xs-medium'
|
||||
>
|
||||
<div
|
||||
data-testid="segment-delete-button"
|
||||
className='group/delete flex h-6 w-6 shrink-0 cursor-pointer items-center justify-center rounded-lg hover:bg-state-destructive-hover'
|
||||
<div className='group/delete flex h-6 w-6 shrink-0 cursor-pointer items-center justify-center rounded-lg hover:bg-state-destructive-hover'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setShowModal(true)
|
||||
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
const ParentChunkCardSkelton = () => {
|
||||
const { t } = useTranslation()
|
||||
return (
|
||||
<div data-testid='parent-chunk-card-skeleton' className='flex flex-col pb-2'>
|
||||
<div className='flex flex-col pb-2'>
|
||||
<SkeletonContainer className='gap-y-0 p-1 pb-0'>
|
||||
<SkeletonContainer className='gap-y-0.5 px-2 pt-1.5'>
|
||||
<SkeletonRow className='py-0.5'>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { ChunkingMode, ParentMode } from '@/models/datasets'
|
||||
import { createContext, useContextSelector } from 'use-context-selector'
|
||||
|
||||
export type DocumentContextValue = {
|
||||
type DocumentContextValue = {
|
||||
datasetId?: string
|
||||
documentId?: string
|
||||
docForm?: ChunkingMode
|
||||
|
||||
@@ -1,786 +0,0 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import PipelineSettings from './index'
|
||||
import { DatasourceType } from '@/models/pipeline'
|
||||
import type { PipelineExecutionLogResponse } from '@/models/pipeline'
|
||||
|
||||
// Mock i18n
|
||||
jest.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock Next.js router
|
||||
const mockPush = jest.fn()
|
||||
const mockBack = jest.fn()
|
||||
jest.mock('next/navigation', () => ({
|
||||
useRouter: () => ({
|
||||
push: mockPush,
|
||||
back: mockBack,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock dataset detail context
|
||||
const mockPipelineId = 'pipeline-123'
|
||||
jest.mock('@/context/dataset-detail', () => ({
|
||||
useDatasetDetailContextWithSelector: (selector: (state: { dataset: { pipeline_id: string; doc_form: string } }) => unknown) =>
|
||||
selector({ dataset: { pipeline_id: mockPipelineId, doc_form: 'text_model' } }),
|
||||
}))
|
||||
|
||||
// Mock API hooks for PipelineSettings
|
||||
const mockUsePipelineExecutionLog = jest.fn()
|
||||
const mockMutateAsync = jest.fn()
|
||||
const mockUseRunPublishedPipeline = jest.fn()
|
||||
jest.mock('@/service/use-pipeline', () => ({
|
||||
usePipelineExecutionLog: (params: { dataset_id: string; document_id: string }) => mockUsePipelineExecutionLog(params),
|
||||
useRunPublishedPipeline: () => mockUseRunPublishedPipeline(),
|
||||
// For ProcessDocuments component
|
||||
usePublishedPipelineProcessingParams: () => ({
|
||||
data: { variables: [] },
|
||||
isFetching: false,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock document invalidation hooks
|
||||
const mockInvalidDocumentList = jest.fn()
|
||||
const mockInvalidDocumentDetail = jest.fn()
|
||||
jest.mock('@/service/knowledge/use-document', () => ({
|
||||
useInvalidDocumentList: () => mockInvalidDocumentList,
|
||||
useInvalidDocumentDetail: () => mockInvalidDocumentDetail,
|
||||
}))
|
||||
|
||||
// Mock Form component in ProcessDocuments - internal dependencies are too complex
|
||||
jest.mock('../../../create-from-pipeline/process-documents/form', () => {
|
||||
return function MockForm({
|
||||
ref,
|
||||
initialData,
|
||||
configurations,
|
||||
onSubmit,
|
||||
onPreview,
|
||||
isRunning,
|
||||
}: {
|
||||
ref: React.RefObject<{ submit: () => void }>
|
||||
initialData: Record<string, unknown>
|
||||
configurations: Array<{ variable: string; label: string; type: string }>
|
||||
schema: unknown
|
||||
onSubmit: (data: Record<string, unknown>) => void
|
||||
onPreview: () => void
|
||||
isRunning: boolean
|
||||
}) {
|
||||
if (ref && typeof ref === 'object' && 'current' in ref) {
|
||||
(ref as React.MutableRefObject<{ submit: () => void }>).current = {
|
||||
submit: () => onSubmit(initialData),
|
||||
}
|
||||
}
|
||||
return (
|
||||
<form
|
||||
data-testid="process-form"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault()
|
||||
onSubmit(initialData)
|
||||
}}
|
||||
>
|
||||
{configurations.map((config, index) => (
|
||||
<div key={index} data-testid={`field-${config.variable}`}>
|
||||
<label>{config.label}</label>
|
||||
</div>
|
||||
))}
|
||||
<button type="button" data-testid="preview-btn" onClick={onPreview} disabled={isRunning}>
|
||||
Preview
|
||||
</button>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
// Mock ChunkPreview - has complex internal state and many dependencies
|
||||
jest.mock('../../../create-from-pipeline/preview/chunk-preview', () => {
|
||||
return function MockChunkPreview({
|
||||
dataSourceType,
|
||||
localFiles,
|
||||
onlineDocuments,
|
||||
websitePages,
|
||||
onlineDriveFiles,
|
||||
isIdle,
|
||||
isPending,
|
||||
estimateData,
|
||||
}: {
|
||||
dataSourceType: string
|
||||
localFiles: unknown[]
|
||||
onlineDocuments: unknown[]
|
||||
websitePages: unknown[]
|
||||
onlineDriveFiles: unknown[]
|
||||
isIdle: boolean
|
||||
isPending: boolean
|
||||
estimateData: unknown
|
||||
}) {
|
||||
return (
|
||||
<div data-testid="chunk-preview">
|
||||
<span data-testid="datasource-type">{dataSourceType}</span>
|
||||
<span data-testid="local-files-count">{localFiles.length}</span>
|
||||
<span data-testid="online-documents-count">{onlineDocuments.length}</span>
|
||||
<span data-testid="website-pages-count">{websitePages.length}</span>
|
||||
<span data-testid="online-drive-files-count">{onlineDriveFiles.length}</span>
|
||||
<span data-testid="is-idle">{String(isIdle)}</span>
|
||||
<span data-testid="is-pending">{String(isPending)}</span>
|
||||
<span data-testid="has-estimate-data">{String(!!estimateData)}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
// Test utilities
|
||||
const createQueryClient = () =>
|
||||
new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
})
|
||||
|
||||
const renderWithProviders = (ui: React.ReactElement) => {
|
||||
const queryClient = createQueryClient()
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
{ui}
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
}
|
||||
|
||||
// Factory functions for test data
|
||||
const createMockExecutionLogResponse = (
|
||||
overrides: Partial<PipelineExecutionLogResponse> = {},
|
||||
): PipelineExecutionLogResponse => ({
|
||||
datasource_type: DatasourceType.localFile,
|
||||
input_data: { chunk_size: '100' },
|
||||
datasource_node_id: 'datasource-node-1',
|
||||
datasource_info: {
|
||||
related_id: 'file-1',
|
||||
name: 'test-file.pdf',
|
||||
extension: 'pdf',
|
||||
},
|
||||
...overrides,
|
||||
})
|
||||
|
||||
const createDefaultProps = () => ({
|
||||
datasetId: 'dataset-123',
|
||||
documentId: 'document-456',
|
||||
})
|
||||
|
||||
describe('PipelineSettings', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockPush.mockClear()
|
||||
mockBack.mockClear()
|
||||
mockMutateAsync.mockClear()
|
||||
mockInvalidDocumentList.mockClear()
|
||||
mockInvalidDocumentDetail.mockClear()
|
||||
|
||||
// Default: successful data fetch
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: createMockExecutionLogResponse(),
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
|
||||
// Default: useRunPublishedPipeline mock
|
||||
mockUseRunPublishedPipeline.mockReturnValue({
|
||||
mutateAsync: mockMutateAsync,
|
||||
isIdle: true,
|
||||
isPending: false,
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Rendering Tests ====================
|
||||
// Test basic rendering with real components
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing when data is loaded', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert - Real LeftHeader should render with correct content
|
||||
expect(screen.getByText('datasetPipeline.documentSettings.title')).toBeInTheDocument()
|
||||
expect(screen.getByText('datasetPipeline.addDocuments.steps.processDocuments')).toBeInTheDocument()
|
||||
// Real ProcessDocuments should render
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
// ChunkPreview should render
|
||||
expect(screen.getByTestId('chunk-preview')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Loading component when fetching data', () => {
|
||||
// Arrange
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: undefined,
|
||||
isFetching: true,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert - Loading component should be rendered, not main content
|
||||
expect(screen.queryByText('datasetPipeline.documentSettings.title')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('process-form')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render AppUnavailable when there is an error', () => {
|
||||
// Arrange
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: undefined,
|
||||
isFetching: false,
|
||||
isError: true,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert - AppUnavailable should be rendered
|
||||
expect(screen.queryByText('datasetPipeline.documentSettings.title')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render container with correct CSS classes', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
const { container } = renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
const mainContainer = container.firstChild as HTMLElement
|
||||
expect(mainContainer).toHaveClass('relative', 'flex', 'min-w-[1024px]')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== LeftHeader Integration ====================
|
||||
// Test real LeftHeader component behavior
|
||||
describe('LeftHeader Integration', () => {
|
||||
it('should render LeftHeader with title prop', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert - LeftHeader displays the title
|
||||
expect(screen.getByText('datasetPipeline.documentSettings.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render back button in LeftHeader', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert - Back button should exist with proper aria-label
|
||||
const backButton = screen.getByRole('button', { name: 'common.operation.back' })
|
||||
expect(backButton).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call router.back when back button is clicked', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
const backButton = screen.getByRole('button', { name: 'common.operation.back' })
|
||||
fireEvent.click(backButton)
|
||||
|
||||
// Assert
|
||||
expect(mockBack).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Props Testing ====================
|
||||
describe('Props', () => {
|
||||
it('should pass datasetId and documentId to usePipelineExecutionLog', () => {
|
||||
// Arrange
|
||||
const props = { datasetId: 'custom-dataset', documentId: 'custom-document' }
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(mockUsePipelineExecutionLog).toHaveBeenCalledWith({
|
||||
dataset_id: 'custom-dataset',
|
||||
document_id: 'custom-document',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Memoization - Data Transformation ====================
|
||||
describe('Memoization - Data Transformation', () => {
|
||||
it('should transform localFile datasource correctly', () => {
|
||||
// Arrange
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: DatasourceType.localFile,
|
||||
datasource_info: {
|
||||
related_id: 'file-123',
|
||||
name: 'document.pdf',
|
||||
extension: 'pdf',
|
||||
},
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('local-files-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('datasource-type')).toHaveTextContent(DatasourceType.localFile)
|
||||
})
|
||||
|
||||
it('should transform websiteCrawl datasource correctly', () => {
|
||||
// Arrange
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: DatasourceType.websiteCrawl,
|
||||
datasource_info: {
|
||||
content: 'Page content',
|
||||
description: 'Page description',
|
||||
source_url: 'https://example.com/page',
|
||||
title: 'Page Title',
|
||||
},
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('website-pages-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('local-files-count')).toHaveTextContent('0')
|
||||
})
|
||||
|
||||
it('should transform onlineDocument datasource correctly', () => {
|
||||
// Arrange
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: DatasourceType.onlineDocument,
|
||||
datasource_info: {
|
||||
workspace_id: 'workspace-1',
|
||||
page: { page_id: 'page-1', page_name: 'Notion Page' },
|
||||
},
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('online-documents-count')).toHaveTextContent('1')
|
||||
})
|
||||
|
||||
it('should transform onlineDrive datasource correctly', () => {
|
||||
// Arrange
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: DatasourceType.onlineDrive,
|
||||
datasource_info: { id: 'drive-1', type: 'doc', name: 'Google Doc', size: 1024 },
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('online-drive-files-count')).toHaveTextContent('1')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== User Interactions - Process ====================
|
||||
describe('User Interactions - Process', () => {
|
||||
it('should trigger form submit when process button is clicked', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
// Find the "Save and Process" button (from real ProcessDocuments > Actions)
|
||||
const processButton = screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' })
|
||||
fireEvent.click(processButton)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('should call handleProcess with is_preview=false', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
is_preview: false,
|
||||
pipeline_id: mockPipelineId,
|
||||
original_document_id: 'document-456',
|
||||
}),
|
||||
expect.any(Object),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should navigate to documents list after successful process', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockImplementation((_request, options) => {
|
||||
options?.onSuccess?.()
|
||||
return Promise.resolve({})
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-123/documents')
|
||||
})
|
||||
})
|
||||
|
||||
it('should invalidate document cache after successful process', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockImplementation((_request, options) => {
|
||||
options?.onSuccess?.()
|
||||
return Promise.resolve({})
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockInvalidDocumentList).toHaveBeenCalled()
|
||||
expect(mockInvalidDocumentDetail).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== User Interactions - Preview ====================
|
||||
describe('User Interactions - Preview', () => {
|
||||
it('should trigger preview when preview button is clicked', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({ data: { outputs: {} } })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('should call handlePreviewChunks with is_preview=true', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({ data: { outputs: {} } })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
is_preview: true,
|
||||
pipeline_id: mockPipelineId,
|
||||
}),
|
||||
expect.any(Object),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should update estimateData on successful preview', async () => {
|
||||
// Arrange
|
||||
const mockOutputs = { chunks: [], total_tokens: 50 }
|
||||
mockMutateAsync.mockImplementation((_req, opts) => {
|
||||
opts?.onSuccess?.({ data: { outputs: mockOutputs } })
|
||||
return Promise.resolve({ data: { outputs: mockOutputs } })
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('has-estimate-data')).toHaveTextContent('true')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== API Integration ====================
|
||||
describe('API Integration', () => {
|
||||
it('should pass correct parameters for preview', async () => {
|
||||
// Arrange
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: DatasourceType.localFile,
|
||||
datasource_node_id: 'node-xyz',
|
||||
datasource_info: { related_id: 'file-1', name: 'test.pdf', extension: 'pdf' },
|
||||
input_data: {},
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
mockMutateAsync.mockResolvedValue({ data: { outputs: {} } })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert - inputs come from initialData which is transformed by useInitialData
|
||||
// Since usePublishedPipelineProcessingParams returns empty variables, inputs is {}
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith(
|
||||
{
|
||||
pipeline_id: mockPipelineId,
|
||||
inputs: {},
|
||||
start_node_id: 'node-xyz',
|
||||
datasource_type: DatasourceType.localFile,
|
||||
datasource_info_list: [{ related_id: 'file-1', name: 'test.pdf', extension: 'pdf' }],
|
||||
is_preview: true,
|
||||
},
|
||||
expect.any(Object),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Edge Cases ====================
|
||||
describe('Edge Cases', () => {
|
||||
it.each([
|
||||
[DatasourceType.localFile, 'local-files-count', '1'],
|
||||
[DatasourceType.websiteCrawl, 'website-pages-count', '1'],
|
||||
[DatasourceType.onlineDocument, 'online-documents-count', '1'],
|
||||
[DatasourceType.onlineDrive, 'online-drive-files-count', '1'],
|
||||
])('should handle %s datasource type correctly', (datasourceType, testId, expectedCount) => {
|
||||
// Arrange
|
||||
const datasourceInfoMap: Record<DatasourceType, Record<string, unknown>> = {
|
||||
[DatasourceType.localFile]: { related_id: 'f1', name: 'file.pdf', extension: 'pdf' },
|
||||
[DatasourceType.websiteCrawl]: { content: 'c', description: 'd', source_url: 'u', title: 't' },
|
||||
[DatasourceType.onlineDocument]: { workspace_id: 'w1', page: { page_id: 'p1' } },
|
||||
[DatasourceType.onlineDrive]: { id: 'd1', type: 'doc', name: 'n', size: 100 },
|
||||
}
|
||||
|
||||
const mockData = createMockExecutionLogResponse({
|
||||
datasource_type: datasourceType,
|
||||
datasource_info: datasourceInfoMap[datasourceType],
|
||||
})
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: mockData,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId(testId)).toHaveTextContent(expectedCount)
|
||||
})
|
||||
|
||||
it('should show loading state during initial fetch', () => {
|
||||
// Arrange
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: undefined,
|
||||
isFetching: true,
|
||||
isError: false,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByTestId('process-form')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error state when API fails', () => {
|
||||
// Arrange
|
||||
mockUsePipelineExecutionLog.mockReturnValue({
|
||||
data: undefined,
|
||||
isFetching: false,
|
||||
isError: true,
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByTestId('process-form')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== State Management ====================
|
||||
describe('State Management', () => {
|
||||
it('should initialize with undefined estimateData', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('has-estimate-data')).toHaveTextContent('false')
|
||||
})
|
||||
|
||||
it('should update estimateData after successful preview', async () => {
|
||||
// Arrange
|
||||
const mockEstimateData = { chunks: [], total_tokens: 50 }
|
||||
mockMutateAsync.mockImplementation((_req, opts) => {
|
||||
opts?.onSuccess?.({ data: { outputs: mockEstimateData } })
|
||||
return Promise.resolve({ data: { outputs: mockEstimateData } })
|
||||
})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('has-estimate-data')).toHaveTextContent('true')
|
||||
})
|
||||
})
|
||||
|
||||
it('should set isPreview ref to false when process is clicked', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({})
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ is_preview: false }),
|
||||
expect.any(Object),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should set isPreview ref to true when preview is clicked', async () => {
|
||||
// Arrange
|
||||
mockMutateAsync.mockResolvedValue({ data: { outputs: {} } })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ is_preview: true }),
|
||||
expect.any(Object),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should pass isPending=true to ChunkPreview when preview is pending', async () => {
|
||||
// Arrange - Start with isPending=false so buttons are enabled
|
||||
let isPendingState = false
|
||||
mockUseRunPublishedPipeline.mockImplementation(() => ({
|
||||
mutateAsync: mockMutateAsync,
|
||||
isIdle: !isPendingState,
|
||||
isPending: isPendingState,
|
||||
}))
|
||||
|
||||
// A promise that never resolves to keep the pending state
|
||||
const pendingPromise = new Promise<void>(() => undefined)
|
||||
// When mutateAsync is called, set isPending to true and trigger rerender
|
||||
mockMutateAsync.mockImplementation(() => {
|
||||
isPendingState = true
|
||||
return pendingPromise
|
||||
})
|
||||
|
||||
const props = createDefaultProps()
|
||||
const { rerender } = renderWithProviders(<PipelineSettings {...props} />)
|
||||
|
||||
// Act - Click preview button (sets isPreview.current = true and calls mutateAsync)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Update mock and rerender to reflect isPending=true state
|
||||
mockUseRunPublishedPipeline.mockReturnValue({
|
||||
mutateAsync: mockMutateAsync,
|
||||
isIdle: false,
|
||||
isPending: true,
|
||||
})
|
||||
rerender(
|
||||
<QueryClientProvider client={createQueryClient()}>
|
||||
<PipelineSettings {...props} />
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
|
||||
// Assert - isPending && isPreview.current should both be true now
|
||||
expect(screen.getByTestId('is-pending')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should pass isPending=false to ChunkPreview when process is pending (not preview)', async () => {
|
||||
// Arrange - isPending is true but isPreview.current is false
|
||||
mockUseRunPublishedPipeline.mockReturnValue({
|
||||
mutateAsync: mockMutateAsync,
|
||||
isIdle: false,
|
||||
isPending: true,
|
||||
})
|
||||
mockMutateAsync.mockReturnValue(new Promise<void>(() => undefined))
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<PipelineSettings {...props} />)
|
||||
// Click process (not preview) to set isPreview.current = false
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert - isPending && isPreview.current should be false (true && false = false)
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('is-pending')).toHaveTextContent('false')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -31,7 +31,6 @@ const LeftHeader = ({
|
||||
variant='secondary-accent'
|
||||
className='absolute -left-11 top-3.5 size-9 rounded-full p-0'
|
||||
onClick={navigateBack}
|
||||
aria-label={t('common.operation.back')}
|
||||
>
|
||||
<RiArrowLeftLine className='size-5 ' />
|
||||
</Button>
|
||||
|
||||
@@ -1,573 +0,0 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import ProcessDocuments from './index'
|
||||
import { PipelineInputVarType } from '@/models/pipeline'
|
||||
import type { RAGPipelineVariable } from '@/models/pipeline'
|
||||
|
||||
// Mock i18n
|
||||
jest.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock dataset detail context - required for useInputVariables hook
|
||||
const mockPipelineId = 'pipeline-123'
|
||||
jest.mock('@/context/dataset-detail', () => ({
|
||||
useDatasetDetailContextWithSelector: (selector: (state: { dataset: { pipeline_id: string } }) => string) =>
|
||||
selector({ dataset: { pipeline_id: mockPipelineId } }),
|
||||
}))
|
||||
|
||||
// Mock API call for pipeline processing params
|
||||
const mockParamsConfig = jest.fn()
|
||||
jest.mock('@/service/use-pipeline', () => ({
|
||||
usePublishedPipelineProcessingParams: () => ({
|
||||
data: mockParamsConfig(),
|
||||
isFetching: false,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock Form component - internal dependencies (useAppForm, BaseField) are too complex
|
||||
// Keep the mock minimal and focused on testing the integration
|
||||
jest.mock('../../../../create-from-pipeline/process-documents/form', () => {
|
||||
return function MockForm({
|
||||
ref,
|
||||
initialData,
|
||||
configurations,
|
||||
onSubmit,
|
||||
onPreview,
|
||||
isRunning,
|
||||
}: {
|
||||
ref: React.RefObject<{ submit: () => void }>
|
||||
initialData: Record<string, unknown>
|
||||
configurations: Array<{ variable: string; label: string; type: string }>
|
||||
schema: unknown
|
||||
onSubmit: (data: Record<string, unknown>) => void
|
||||
onPreview: () => void
|
||||
isRunning: boolean
|
||||
}) {
|
||||
// Expose submit method via ref for parent component control
|
||||
if (ref && typeof ref === 'object' && 'current' in ref) {
|
||||
(ref as React.MutableRefObject<{ submit: () => void }>).current = {
|
||||
submit: () => onSubmit(initialData),
|
||||
}
|
||||
}
|
||||
return (
|
||||
<form
|
||||
data-testid="process-form"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault()
|
||||
onSubmit(initialData)
|
||||
}}
|
||||
>
|
||||
{/* Render actual field labels from configurations */}
|
||||
{configurations.map((config, index) => (
|
||||
<div key={index} data-testid={`field-${config.variable}`}>
|
||||
<label>{config.label}</label>
|
||||
<input
|
||||
name={config.variable}
|
||||
defaultValue={String(initialData[config.variable] ?? '')}
|
||||
data-testid={`input-${config.variable}`}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
<button type="button" data-testid="preview-btn" onClick={onPreview} disabled={isRunning}>
|
||||
Preview
|
||||
</button>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
// Test utilities
|
||||
const createQueryClient = () =>
|
||||
new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
})
|
||||
|
||||
const renderWithProviders = (ui: React.ReactElement) => {
|
||||
const queryClient = createQueryClient()
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
{ui}
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
}
|
||||
|
||||
// Factory function for creating mock variables - matches RAGPipelineVariable type
|
||||
const createMockVariable = (overrides: Partial<RAGPipelineVariable> = {}): RAGPipelineVariable => ({
|
||||
belong_to_node_id: 'node-123',
|
||||
type: PipelineInputVarType.textInput,
|
||||
variable: 'test_var',
|
||||
label: 'Test Variable',
|
||||
required: false,
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Default props factory
|
||||
const createDefaultProps = (overrides: Partial<{
|
||||
datasourceNodeId: string
|
||||
lastRunInputData: Record<string, unknown>
|
||||
isRunning: boolean
|
||||
ref: React.RefObject<{ submit: () => void } | null>
|
||||
onProcess: () => void
|
||||
onPreview: () => void
|
||||
onSubmit: (data: Record<string, unknown>) => void
|
||||
}> = {}) => ({
|
||||
datasourceNodeId: 'node-123',
|
||||
lastRunInputData: {},
|
||||
isRunning: false,
|
||||
ref: { current: null } as React.RefObject<{ submit: () => void } | null>,
|
||||
onProcess: jest.fn(),
|
||||
onPreview: jest.fn(),
|
||||
onSubmit: jest.fn(),
|
||||
...overrides,
|
||||
})
|
||||
|
||||
describe('ProcessDocuments', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
// Default: return empty variables
|
||||
mockParamsConfig.mockReturnValue({ variables: [] })
|
||||
})
|
||||
|
||||
// ==================== Rendering Tests ====================
|
||||
// Test basic rendering and component structure
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - verify both Form and Actions are rendered
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render with correct container structure', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
const { container } = renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const wrapper = container.firstChild as HTMLElement
|
||||
expect(wrapper).toHaveClass('flex', 'flex-col', 'gap-y-4', 'pt-4')
|
||||
})
|
||||
|
||||
it('should render form fields based on variables configuration', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'chunk_size', label: 'Chunk Size', type: PipelineInputVarType.number }),
|
||||
createMockVariable({ variable: 'separator', label: 'Separator', type: PipelineInputVarType.textInput }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - real hooks transform variables to configurations
|
||||
expect(screen.getByTestId('field-chunk_size')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('field-separator')).toBeInTheDocument()
|
||||
expect(screen.getByText('Chunk Size')).toBeInTheDocument()
|
||||
expect(screen.getByText('Separator')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Props Testing ====================
|
||||
// Test how component behaves with different prop values
|
||||
describe('Props', () => {
|
||||
describe('lastRunInputData', () => {
|
||||
it('should use lastRunInputData as initial form values', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'chunk_size', label: 'Chunk Size', type: PipelineInputVarType.number, default_value: '100' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const lastRunInputData = { chunk_size: 500 }
|
||||
const props = createDefaultProps({ lastRunInputData })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - lastRunInputData should override default_value
|
||||
const input = screen.getByTestId('input-chunk_size') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('500')
|
||||
})
|
||||
|
||||
it('should use default_value when lastRunInputData is empty', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'chunk_size', label: 'Chunk Size', type: PipelineInputVarType.number, default_value: '100' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps({ lastRunInputData: {} })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const input = screen.getByTestId('input-chunk_size') as HTMLInputElement
|
||||
expect(input.value).toBe('100')
|
||||
})
|
||||
})
|
||||
|
||||
describe('isRunning', () => {
|
||||
it('should enable Actions button when isRunning is false', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps({ isRunning: false })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const processButton = screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' })
|
||||
expect(processButton).not.toBeDisabled()
|
||||
})
|
||||
|
||||
it('should disable Actions button when isRunning is true', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps({ isRunning: true })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const processButton = screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' })
|
||||
expect(processButton).toBeDisabled()
|
||||
})
|
||||
|
||||
it('should disable preview button when isRunning is true', () => {
|
||||
// Arrange
|
||||
const props = createDefaultProps({ isRunning: true })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('preview-btn')).toBeDisabled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('ref', () => {
|
||||
it('should expose submit method via ref', () => {
|
||||
// Arrange
|
||||
const ref = { current: null } as React.RefObject<{ submit: () => void } | null>
|
||||
const onSubmit = jest.fn()
|
||||
const props = createDefaultProps({ ref, onSubmit })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(ref.current).not.toBeNull()
|
||||
expect(typeof ref.current?.submit).toBe('function')
|
||||
|
||||
// Act - call submit via ref
|
||||
ref.current?.submit()
|
||||
|
||||
// Assert - onSubmit should be called
|
||||
expect(onSubmit).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== User Interactions ====================
|
||||
// Test event handlers and user interactions
|
||||
describe('User Interactions', () => {
|
||||
describe('onProcess', () => {
|
||||
it('should call onProcess when Save and Process button is clicked', () => {
|
||||
// Arrange
|
||||
const onProcess = jest.fn()
|
||||
const props = createDefaultProps({ onProcess })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
expect(onProcess).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should not call onProcess when button is disabled due to isRunning', () => {
|
||||
// Arrange
|
||||
const onProcess = jest.fn()
|
||||
const props = createDefaultProps({ onProcess, isRunning: true })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert
|
||||
expect(onProcess).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('onPreview', () => {
|
||||
it('should call onPreview when preview button is clicked', () => {
|
||||
// Arrange
|
||||
const onPreview = jest.fn()
|
||||
const props = createDefaultProps({ onPreview })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
fireEvent.click(screen.getByTestId('preview-btn'))
|
||||
|
||||
// Assert
|
||||
expect(onPreview).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('onSubmit', () => {
|
||||
it('should call onSubmit with form data when form is submitted', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'chunk_size', label: 'Chunk Size', type: PipelineInputVarType.number, default_value: '100' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const onSubmit = jest.fn()
|
||||
const props = createDefaultProps({ onSubmit })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
fireEvent.submit(screen.getByTestId('process-form'))
|
||||
|
||||
// Assert - should submit with initial data transformed by real hooks
|
||||
// Note: default_value is string type, so the value remains as string
|
||||
expect(onSubmit).toHaveBeenCalledWith({ chunk_size: '100' })
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Data Transformation Tests ====================
|
||||
// Test real hooks transform data correctly
|
||||
describe('Data Transformation', () => {
|
||||
it('should transform text-input variable to string initial value', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'name', label: 'Name', type: PipelineInputVarType.textInput, default_value: 'default' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const input = screen.getByTestId('input-name') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('default')
|
||||
})
|
||||
|
||||
it('should transform number variable to number initial value', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'count', label: 'Count', type: PipelineInputVarType.number, default_value: '42' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const input = screen.getByTestId('input-count') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('42')
|
||||
})
|
||||
|
||||
it('should use empty string for text-input without default value', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'name', label: 'Name', type: PipelineInputVarType.textInput }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const input = screen.getByTestId('input-name') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('')
|
||||
})
|
||||
|
||||
it('should prioritize lastRunInputData over default_value', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'size', label: 'Size', type: PipelineInputVarType.number, default_value: '100' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps({ lastRunInputData: { size: 999 } })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
const input = screen.getByTestId('input-size') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('999')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Edge Cases ====================
|
||||
// Test boundary conditions and error handling
|
||||
describe('Edge Cases', () => {
|
||||
describe('Empty/Null data handling', () => {
|
||||
it('should handle undefined paramsConfig.variables', () => {
|
||||
// Arrange
|
||||
mockParamsConfig.mockReturnValue({ variables: undefined })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - should render without fields
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId(/^field-/)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle null paramsConfig', () => {
|
||||
// Arrange
|
||||
mockParamsConfig.mockReturnValue(null)
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle empty variables array', () => {
|
||||
// Arrange
|
||||
mockParamsConfig.mockReturnValue({ variables: [] })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId(/^field-/)).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple variables', () => {
|
||||
it('should handle multiple variables of different types', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'text_field', label: 'Text', type: PipelineInputVarType.textInput, default_value: 'hello' }),
|
||||
createMockVariable({ variable: 'number_field', label: 'Number', type: PipelineInputVarType.number, default_value: '123' }),
|
||||
createMockVariable({ variable: 'select_field', label: 'Select', type: PipelineInputVarType.select, default_value: 'option1' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - all fields should be rendered
|
||||
expect(screen.getByTestId('field-text_field')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('field-number_field')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('field-select_field')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should submit all variables data correctly', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'field1', label: 'Field 1', type: PipelineInputVarType.textInput, default_value: 'value1' }),
|
||||
createMockVariable({ variable: 'field2', label: 'Field 2', type: PipelineInputVarType.number, default_value: '42' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const onSubmit = jest.fn()
|
||||
const props = createDefaultProps({ onSubmit })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
fireEvent.submit(screen.getByTestId('process-form'))
|
||||
|
||||
// Assert - default_value is string type, so values remain as strings
|
||||
expect(onSubmit).toHaveBeenCalledWith({
|
||||
field1: 'value1',
|
||||
field2: '42',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Variable with options (select type)', () => {
|
||||
it('should handle select variable with options', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({
|
||||
variable: 'mode',
|
||||
label: 'Mode',
|
||||
type: PipelineInputVarType.select,
|
||||
options: ['auto', 'manual', 'custom'],
|
||||
default_value: 'auto',
|
||||
}),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByTestId('field-mode')).toBeInTheDocument()
|
||||
const input = screen.getByTestId('input-mode') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('auto')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Integration Tests ====================
|
||||
// Test Form and Actions components work together with real hooks
|
||||
describe('Integration', () => {
|
||||
it('should coordinate form submission flow correctly', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'setting', label: 'Setting', type: PipelineInputVarType.textInput, default_value: 'initial' }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const onProcess = jest.fn()
|
||||
const onSubmit = jest.fn()
|
||||
const props = createDefaultProps({ onProcess, onSubmit })
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - form is rendered with correct initial data
|
||||
const input = screen.getByTestId('input-setting') as HTMLInputElement
|
||||
expect(input.defaultValue).toBe('initial')
|
||||
|
||||
// Act - click process button
|
||||
fireEvent.click(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' }))
|
||||
|
||||
// Assert - onProcess is called
|
||||
expect(onProcess).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should render complete UI with all interactive elements', () => {
|
||||
// Arrange
|
||||
const variables: RAGPipelineVariable[] = [
|
||||
createMockVariable({ variable: 'test', label: 'Test Field', type: PipelineInputVarType.textInput }),
|
||||
]
|
||||
mockParamsConfig.mockReturnValue({ variables })
|
||||
const props = createDefaultProps()
|
||||
|
||||
// Act
|
||||
renderWithProviders(<ProcessDocuments {...props} />)
|
||||
|
||||
// Assert - all UI elements are present
|
||||
expect(screen.getByTestId('process-form')).toBeInTheDocument()
|
||||
expect(screen.getByText('Test Field')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('preview-btn')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'datasetPipeline.operations.saveAndProcess' })).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,968 +0,0 @@
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import StatusItem from './index'
|
||||
import type { DocumentDisplayStatus } from '@/models/datasets'
|
||||
|
||||
// Mock i18n - required for translation
|
||||
jest.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock ToastContext - required to verify notifications
|
||||
const mockNotify = jest.fn()
|
||||
jest.mock('use-context-selector', () => ({
|
||||
...jest.requireActual('use-context-selector'),
|
||||
useContext: () => ({ notify: mockNotify }),
|
||||
}))
|
||||
|
||||
// Mock document service hooks - required to avoid real API calls
|
||||
const mockEnableDocument = jest.fn()
|
||||
const mockDisableDocument = jest.fn()
|
||||
const mockDeleteDocument = jest.fn()
|
||||
|
||||
jest.mock('@/service/knowledge/use-document', () => ({
|
||||
useDocumentEnable: () => ({ mutateAsync: mockEnableDocument }),
|
||||
useDocumentDisable: () => ({ mutateAsync: mockDisableDocument }),
|
||||
useDocumentDelete: () => ({ mutateAsync: mockDeleteDocument }),
|
||||
}))
|
||||
|
||||
// Mock useDebounceFn to execute immediately for testing
|
||||
jest.mock('ahooks', () => ({
|
||||
...jest.requireActual('ahooks'),
|
||||
useDebounceFn: (fn: (...args: unknown[]) => void) => ({ run: fn }),
|
||||
}))
|
||||
|
||||
// Test utilities
|
||||
const createQueryClient = () =>
|
||||
new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
})
|
||||
|
||||
const renderWithProviders = (ui: React.ReactElement) => {
|
||||
const queryClient = createQueryClient()
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
{ui}
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
}
|
||||
|
||||
// Factory functions for test data
|
||||
const createDetailProps = (overrides: Partial<{
|
||||
enabled: boolean
|
||||
archived: boolean
|
||||
id: string
|
||||
}> = {}) => ({
|
||||
enabled: false,
|
||||
archived: false,
|
||||
id: 'doc-123',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
describe('StatusItem', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
mockEnableDocument.mockResolvedValue({ result: 'success' })
|
||||
mockDisableDocument.mockResolvedValue({ result: 'success' })
|
||||
mockDeleteDocument.mockResolvedValue({ result: 'success' })
|
||||
})
|
||||
|
||||
// ==================== Rendering Tests ====================
|
||||
// Test basic rendering with different status values
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" />)
|
||||
|
||||
// Assert - check indicator element exists (real Indicator component)
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it.each([
|
||||
['queuing', 'bg-components-badge-status-light-warning-bg'],
|
||||
['indexing', 'bg-components-badge-status-light-normal-bg'],
|
||||
['paused', 'bg-components-badge-status-light-warning-bg'],
|
||||
['error', 'bg-components-badge-status-light-error-bg'],
|
||||
['available', 'bg-components-badge-status-light-success-bg'],
|
||||
['enabled', 'bg-components-badge-status-light-success-bg'],
|
||||
['disabled', 'bg-components-badge-status-light-disabled-bg'],
|
||||
['archived', 'bg-components-badge-status-light-disabled-bg'],
|
||||
] as const)('should render status "%s" with correct indicator background', (status, expectedBg) => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status={status} />)
|
||||
|
||||
// Assert
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass(expectedBg)
|
||||
})
|
||||
|
||||
it('should render status text from translation', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" />)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('datasetDocuments.list.status.available')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle case-insensitive status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem status={'AVAILABLE' as DocumentDisplayStatus} />,
|
||||
)
|
||||
|
||||
// Assert
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass('bg-components-badge-status-light-success-bg')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Props Testing ====================
|
||||
// Test all prop variations and combinations
|
||||
describe('Props', () => {
|
||||
// reverse prop tests
|
||||
describe('reverse prop', () => {
|
||||
it('should apply default layout when reverse is false', () => {
|
||||
// Arrange & Act
|
||||
const { container } = renderWithProviders(<StatusItem status="available" reverse={false} />)
|
||||
|
||||
// Assert
|
||||
const wrapper = container.firstChild as HTMLElement
|
||||
expect(wrapper).not.toHaveClass('flex-row-reverse')
|
||||
})
|
||||
|
||||
it('should apply reversed layout when reverse is true', () => {
|
||||
// Arrange & Act
|
||||
const { container } = renderWithProviders(<StatusItem status="available" reverse />)
|
||||
|
||||
// Assert
|
||||
const wrapper = container.firstChild as HTMLElement
|
||||
expect(wrapper).toHaveClass('flex-row-reverse')
|
||||
})
|
||||
|
||||
it('should apply ml-2 to indicator when reversed', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" reverse />)
|
||||
|
||||
// Assert
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass('ml-2')
|
||||
})
|
||||
|
||||
it('should apply mr-2 to indicator when not reversed', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" reverse={false} />)
|
||||
|
||||
// Assert
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass('mr-2')
|
||||
})
|
||||
})
|
||||
|
||||
// scene prop tests
|
||||
describe('scene prop', () => {
|
||||
it('should not render switch in list scene', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="list"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - Switch renders as a button element
|
||||
expect(screen.queryByRole('switch')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render switch in detail scene', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
expect(screen.getByRole('switch')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should default to list scene', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
expect(screen.queryByRole('switch')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// textCls prop tests
|
||||
describe('textCls prop', () => {
|
||||
it('should apply custom text class', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem status="available" textCls="custom-text-class" />,
|
||||
)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.available')
|
||||
expect(statusText).toHaveClass('custom-text-class')
|
||||
})
|
||||
|
||||
it('should default to empty string', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.available')
|
||||
expect(statusText).toHaveClass('text-sm')
|
||||
})
|
||||
})
|
||||
|
||||
// errorMessage prop tests
|
||||
describe('errorMessage prop', () => {
|
||||
it('should render tooltip trigger when errorMessage is provided', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem status="error" errorMessage="Something went wrong" />,
|
||||
)
|
||||
|
||||
// Assert - tooltip trigger element should exist
|
||||
const tooltipTrigger = screen.getByTestId('error-tooltip-trigger')
|
||||
expect(tooltipTrigger).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error message on hover', async () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem status="error" errorMessage="Something went wrong" />,
|
||||
)
|
||||
|
||||
// Act - hover the tooltip trigger
|
||||
const tooltipTrigger = screen.getByTestId('error-tooltip-trigger')
|
||||
fireEvent.mouseEnter(tooltipTrigger)
|
||||
|
||||
// Assert - wait for tooltip content to appear
|
||||
expect(await screen.findByText('Something went wrong')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render tooltip trigger when errorMessage is not provided', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="error" />)
|
||||
|
||||
// Assert - tooltip trigger should not exist
|
||||
const tooltipTrigger = screen.queryByTestId('error-tooltip-trigger')
|
||||
expect(tooltipTrigger).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render tooltip trigger when errorMessage is empty', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="error" errorMessage="" />)
|
||||
|
||||
// Assert - tooltip trigger should not exist
|
||||
const tooltipTrigger = screen.queryByTestId('error-tooltip-trigger')
|
||||
expect(tooltipTrigger).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// detail prop tests
|
||||
describe('detail prop', () => {
|
||||
it('should use default values when detail is undefined', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem status="available" scene="detail" />,
|
||||
)
|
||||
|
||||
// Assert - switch should be unchecked (defaultValue = false when archived = false and enabled = false)
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'false')
|
||||
})
|
||||
|
||||
it('should use enabled value from detail', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'true')
|
||||
})
|
||||
|
||||
it('should set switch to false when archived regardless of enabled', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true, archived: true })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - archived overrides enabled, defaultValue becomes false
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'false')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Memoization Tests ====================
|
||||
// Test useMemo logic for embedding status (disables switch)
|
||||
describe('Memoization', () => {
|
||||
it.each([
|
||||
['queuing', true],
|
||||
['indexing', true],
|
||||
['paused', true],
|
||||
['available', false],
|
||||
['enabled', false],
|
||||
['disabled', false],
|
||||
['archived', false],
|
||||
['error', false],
|
||||
] as const)('should correctly identify embedding status for "%s" - disabled: %s', (status, isEmbedding) => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status={status}
|
||||
scene="detail"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - check if switch is visually disabled (via CSS classes)
|
||||
// The Switch component uses CSS classes for disabled state, not the native disabled attribute
|
||||
const switchEl = screen.getByRole('switch')
|
||||
if (isEmbedding)
|
||||
expect(switchEl).toHaveClass('!cursor-not-allowed', '!opacity-50')
|
||||
else
|
||||
expect(switchEl).not.toHaveClass('!cursor-not-allowed')
|
||||
})
|
||||
|
||||
it('should disable switch when archived', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ archived: true })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - visually disabled via CSS classes
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveClass('!cursor-not-allowed', '!opacity-50')
|
||||
})
|
||||
|
||||
it('should disable switch when both embedding and archived', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="indexing"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ archived: true })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - visually disabled via CSS classes
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveClass('!cursor-not-allowed', '!opacity-50')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Switch Toggle Tests ====================
|
||||
// Test Switch toggle interactions
|
||||
describe('Switch Toggle', () => {
|
||||
it('should call enable operation when switch is toggled on', async () => {
|
||||
// Arrange
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockEnableDocument).toHaveBeenCalledWith({
|
||||
datasetId: 'dataset-123',
|
||||
documentId: 'doc-123',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should call disable operation when switch is toggled off', async () => {
|
||||
// Arrange
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockDisableDocument).toHaveBeenCalledWith({
|
||||
datasetId: 'dataset-123',
|
||||
documentId: 'doc-123',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should not call any operation when archived', () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ archived: true })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
expect(mockEnableDocument).not.toHaveBeenCalled()
|
||||
expect(mockDisableDocument).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should render switch as checked when enabled is true', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - verify switch shows checked state
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'true')
|
||||
})
|
||||
|
||||
it('should render switch as unchecked when enabled is false', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - verify switch shows unchecked state
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'false')
|
||||
})
|
||||
|
||||
it('should skip enable operation when props.enabled is true (guard branch)', () => {
|
||||
// Covers guard condition: if (operationName === 'enable' && enabled) return
|
||||
// Note: The guard checks props.enabled, NOT the Switch's internal UI state.
|
||||
// This prevents redundant API calls when the UI toggles back to a state
|
||||
// that already matches the server-side data (props haven't been updated yet).
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
const switchEl = screen.getByRole('switch')
|
||||
// First click: Switch UI toggles OFF, calls disable (props.enabled=true, so allowed)
|
||||
fireEvent.click(switchEl)
|
||||
// Second click: Switch UI toggles ON, tries to call enable
|
||||
// BUT props.enabled is still true (not updated), so guard skips the API call
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert - disable was called once, enable was skipped because props.enabled=true
|
||||
expect(mockDisableDocument).toHaveBeenCalledTimes(1)
|
||||
expect(mockEnableDocument).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should skip disable operation when props.enabled is false (guard branch)', () => {
|
||||
// Covers guard condition: if (operationName === 'disable' && !enabled) return
|
||||
// Note: The guard checks props.enabled, NOT the Switch's internal UI state.
|
||||
// This prevents redundant API calls when the UI toggles back to a state
|
||||
// that already matches the server-side data (props haven't been updated yet).
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
const switchEl = screen.getByRole('switch')
|
||||
// First click: Switch UI toggles ON, calls enable (props.enabled=false, so allowed)
|
||||
fireEvent.click(switchEl)
|
||||
// Second click: Switch UI toggles OFF, tries to call disable
|
||||
// BUT props.enabled is still false (not updated), so guard skips the API call
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert - enable was called once, disable was skipped because props.enabled=false
|
||||
expect(mockEnableDocument).toHaveBeenCalledTimes(1)
|
||||
expect(mockDisableDocument).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== onUpdate Callback Tests ====================
|
||||
// Test onUpdate callback behavior
|
||||
describe('onUpdate Callback', () => {
|
||||
it('should call onUpdate with operation name on successful enable', async () => {
|
||||
// Arrange
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockOnUpdate).toHaveBeenCalledWith('enable')
|
||||
})
|
||||
})
|
||||
|
||||
it('should call onUpdate with operation name on successful disable', async () => {
|
||||
// Arrange
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockOnUpdate).toHaveBeenCalledWith('disable')
|
||||
})
|
||||
})
|
||||
|
||||
it('should not call onUpdate when operation fails', async () => {
|
||||
// Arrange
|
||||
mockEnableDocument.mockRejectedValue(new Error('API Error'))
|
||||
const mockOnUpdate = jest.fn()
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
onUpdate={mockOnUpdate}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockNotify).toHaveBeenCalledWith({
|
||||
type: 'error',
|
||||
message: 'common.actionMsg.modifiedUnsuccessfully',
|
||||
})
|
||||
})
|
||||
expect(mockOnUpdate).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not throw when onUpdate is not provided', () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
|
||||
// Assert - should not throw
|
||||
expect(() => fireEvent.click(switchEl)).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== API Calls ====================
|
||||
// Test API operations and toast notifications
|
||||
describe('API Operations', () => {
|
||||
it('should show success toast on successful operation', async () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockNotify).toHaveBeenCalledWith({
|
||||
type: 'success',
|
||||
message: 'common.actionMsg.modifiedSuccessfully',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should show error toast on failed operation', async () => {
|
||||
// Arrange
|
||||
mockDisableDocument.mockRejectedValue(new Error('Network error'))
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockNotify).toHaveBeenCalledWith({
|
||||
type: 'error',
|
||||
message: 'common.actionMsg.modifiedUnsuccessfully',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should pass correct parameters to enable API', async () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false, id: 'test-doc-id' })}
|
||||
datasetId="test-dataset-id"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockEnableDocument).toHaveBeenCalledWith({
|
||||
datasetId: 'test-dataset-id',
|
||||
documentId: 'test-doc-id',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should pass correct parameters to disable API', async () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="enabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: true, id: 'test-doc-456' })}
|
||||
datasetId="test-dataset-456"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockDisableDocument).toHaveBeenCalledWith({
|
||||
datasetId: 'test-dataset-456',
|
||||
documentId: 'test-doc-456',
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Edge Cases ====================
|
||||
// Test boundary conditions and unusual inputs
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty datasetId', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - should render without errors
|
||||
expect(screen.getByRole('switch')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle undefined detail gracefully', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={undefined}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveAttribute('aria-checked', 'false')
|
||||
})
|
||||
|
||||
it('should handle empty string id in detail', async () => {
|
||||
// Arrange
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="disabled"
|
||||
scene="detail"
|
||||
detail={createDetailProps({ enabled: false, id: '' })}
|
||||
datasetId="dataset-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act
|
||||
const switchEl = screen.getByRole('switch')
|
||||
fireEvent.click(switchEl)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockEnableDocument).toHaveBeenCalledWith({
|
||||
datasetId: 'dataset-123',
|
||||
documentId: '',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle very long error messages', async () => {
|
||||
// Arrange
|
||||
const longErrorMessage = 'A'.repeat(500)
|
||||
renderWithProviders(
|
||||
<StatusItem status="error" errorMessage={longErrorMessage} />,
|
||||
)
|
||||
|
||||
// Act - hover to show tooltip
|
||||
const tooltipTrigger = screen.getByTestId('error-tooltip-trigger')
|
||||
fireEvent.mouseEnter(tooltipTrigger)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(longErrorMessage)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle special characters in error message', async () => {
|
||||
// Arrange
|
||||
const specialChars = '<script>alert("xss")</script> & < > " \''
|
||||
renderWithProviders(
|
||||
<StatusItem status="error" errorMessage={specialChars} />,
|
||||
)
|
||||
|
||||
// Act - hover to show tooltip
|
||||
const tooltipTrigger = screen.getByTestId('error-tooltip-trigger')
|
||||
fireEvent.mouseEnter(tooltipTrigger)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(specialChars)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle all status types in sequence', () => {
|
||||
// Arrange
|
||||
const statuses: DocumentDisplayStatus[] = [
|
||||
'queuing', 'indexing', 'paused', 'error',
|
||||
'available', 'enabled', 'disabled', 'archived',
|
||||
]
|
||||
|
||||
// Act & Assert
|
||||
statuses.forEach((status) => {
|
||||
const { unmount } = renderWithProviders(<StatusItem status={status} />)
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toBeInTheDocument()
|
||||
unmount()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Component Memoization ====================
|
||||
// Test React.memo behavior
|
||||
describe('Component Memoization', () => {
|
||||
it('should be wrapped with React.memo', () => {
|
||||
// Assert
|
||||
expect(StatusItem).toHaveProperty('$$typeof', Symbol.for('react.memo'))
|
||||
})
|
||||
|
||||
it('should render correctly with same props', () => {
|
||||
// Arrange
|
||||
const props = {
|
||||
status: 'available' as const,
|
||||
scene: 'detail' as const,
|
||||
detail: createDetailProps(),
|
||||
}
|
||||
|
||||
// Act
|
||||
const { rerender } = renderWithProviders(<StatusItem {...props} />)
|
||||
rerender(
|
||||
<QueryClientProvider client={createQueryClient()}>
|
||||
<StatusItem {...props} />
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
|
||||
// Assert
|
||||
const indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should update when status prop changes', () => {
|
||||
// Arrange
|
||||
const { rerender } = renderWithProviders(<StatusItem status="available" />)
|
||||
|
||||
// Assert initial - green/success background
|
||||
let indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass('bg-components-badge-status-light-success-bg')
|
||||
|
||||
// Act
|
||||
rerender(
|
||||
<QueryClientProvider client={createQueryClient()}>
|
||||
<StatusItem status="error" />
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
|
||||
// Assert updated - red/error background
|
||||
indicator = screen.getByTestId('status-indicator')
|
||||
expect(indicator).toHaveClass('bg-components-badge-status-light-error-bg')
|
||||
})
|
||||
})
|
||||
|
||||
// ==================== Styling Tests ====================
|
||||
// Test CSS classes and styling
|
||||
describe('Styling', () => {
|
||||
it('should apply correct status text color for green status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="available" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.available')
|
||||
expect(statusText).toHaveClass('text-util-colors-green-green-600')
|
||||
})
|
||||
|
||||
it('should apply correct status text color for red status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="error" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.error')
|
||||
expect(statusText).toHaveClass('text-util-colors-red-red-600')
|
||||
})
|
||||
|
||||
it('should apply correct status text color for orange status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="queuing" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.queuing')
|
||||
expect(statusText).toHaveClass('text-util-colors-warning-warning-600')
|
||||
})
|
||||
|
||||
it('should apply correct status text color for blue status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="indexing" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.indexing')
|
||||
expect(statusText).toHaveClass('text-util-colors-blue-light-blue-light-600')
|
||||
})
|
||||
|
||||
it('should apply correct status text color for gray status', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(<StatusItem status="disabled" />)
|
||||
|
||||
// Assert
|
||||
const statusText = screen.getByText('datasetDocuments.list.status.disabled')
|
||||
expect(statusText).toHaveClass('text-text-tertiary')
|
||||
})
|
||||
|
||||
it('should render switch with md size in detail scene', () => {
|
||||
// Arrange & Act
|
||||
renderWithProviders(
|
||||
<StatusItem
|
||||
status="available"
|
||||
scene="detail"
|
||||
detail={createDetailProps()}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert - check switch has the md size class (h-4 w-7)
|
||||
const switchEl = screen.getByRole('switch')
|
||||
expect(switchEl).toHaveClass('h-4', 'w-7')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -105,7 +105,6 @@ const StatusItem = ({
|
||||
<div className='max-w-[260px] break-all'>{errorMessage}</div>
|
||||
}
|
||||
triggerClassName='ml-1 w-4 h-4'
|
||||
triggerTestId='error-tooltip-trigger'
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -47,7 +47,6 @@ export default function Indicator({
|
||||
}: IndicatorProps) {
|
||||
return (
|
||||
<div
|
||||
data-testid="status-indicator"
|
||||
className={classNames(
|
||||
'h-2 w-2 rounded-[3px] border border-solid',
|
||||
BACKGROUND_MAP[color],
|
||||
|
||||
@@ -37,7 +37,7 @@ const useRefreshPluginList = () => {
|
||||
if ((manifest && PluginCategoryEnum.tool.includes(manifest.category)) || refreshAllType) {
|
||||
invalidateAllToolProviders()
|
||||
invalidateAllBuiltInTools()
|
||||
invalidateRAGRecommendedPlugins('tool')
|
||||
invalidateRAGRecommendedPlugins()
|
||||
// TODO: update suggested tools. It's a function in hook useMarketplacePlugins,handleUpdatePlugins
|
||||
}
|
||||
|
||||
|
||||
@@ -61,8 +61,7 @@ export const pluginManifestInMarketToPluginProps = (pluginManifest: PluginManife
|
||||
}
|
||||
|
||||
export const parseGitHubUrl = (url: string): GitHubUrlInfo => {
|
||||
const githubUrlRegex = /^https:\/\/github\.com\/([^/]+)\/([^/]+)\/?$/
|
||||
const match = githubUrlRegex.exec(url)
|
||||
const match = url.match(/^https:\/\/github\.com\/([^/]+)\/([^/]+)\/?$/)
|
||||
return match ? { isValid: true, owner: match[1], repo: match[2] } : { isValid: false }
|
||||
}
|
||||
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import React from 'react'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import CSVDownload from './index'
|
||||
|
||||
const mockType = { Link: 'mock-link' }
|
||||
let capturedProps: Record<string, unknown> | undefined
|
||||
|
||||
jest.mock('react-papaparse', () => ({
|
||||
useCSVDownloader: () => {
|
||||
const CSVDownloader = ({ children, ...props }: React.PropsWithChildren<Record<string, unknown>>) => {
|
||||
capturedProps = props
|
||||
return <div data-testid="csv-downloader" className={props.className as string}>{children}</div>
|
||||
}
|
||||
return {
|
||||
CSVDownloader,
|
||||
Type: mockType,
|
||||
}
|
||||
},
|
||||
}))
|
||||
|
||||
describe('CSVDownload', () => {
|
||||
const vars = [{ name: 'prompt' }, { name: 'context' }]
|
||||
|
||||
beforeEach(() => {
|
||||
capturedProps = undefined
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
test('should render table headers and sample row for each variable', () => {
|
||||
render(<CSVDownload vars={vars} />)
|
||||
|
||||
expect(screen.getByText('share.generation.csvStructureTitle')).toBeInTheDocument()
|
||||
expect(screen.getAllByRole('row')[0].children).toHaveLength(2)
|
||||
expect(screen.getByText('prompt share.generation.field')).toBeInTheDocument()
|
||||
expect(screen.getByText('context share.generation.field')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should configure CSV downloader with template data', () => {
|
||||
render(<CSVDownload vars={vars} />)
|
||||
|
||||
expect(capturedProps?.filename).toBe('template')
|
||||
expect(capturedProps?.type).toBe(mockType.Link)
|
||||
expect(capturedProps?.bom).toBe(true)
|
||||
expect(capturedProps?.data).toEqual([
|
||||
{ prompt: '', context: '' },
|
||||
])
|
||||
expect(screen.getByText('share.generation.downloadTemplate')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
@@ -1,70 +0,0 @@
|
||||
import React from 'react'
|
||||
import { act, render, screen, waitFor } from '@testing-library/react'
|
||||
import CSVReader from './index'
|
||||
|
||||
let mockAcceptedFile: { name: string } | null = null
|
||||
let capturedHandlers: Record<string, (payload: any) => void> = {}
|
||||
|
||||
jest.mock('react-papaparse', () => ({
|
||||
useCSVReader: () => ({
|
||||
CSVReader: ({ children, ...handlers }: any) => {
|
||||
capturedHandlers = handlers
|
||||
return (
|
||||
<div data-testid="csv-reader-wrapper">
|
||||
{children({
|
||||
getRootProps: () => ({ 'data-testid': 'drop-zone' }),
|
||||
acceptedFile: mockAcceptedFile,
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('CSVReader', () => {
|
||||
beforeEach(() => {
|
||||
mockAcceptedFile = null
|
||||
capturedHandlers = {}
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
test('should display upload instructions when no file selected', async () => {
|
||||
const onParsed = jest.fn()
|
||||
render(<CSVReader onParsed={onParsed} />)
|
||||
|
||||
expect(screen.getByText('share.generation.csvUploadTitle')).toBeInTheDocument()
|
||||
expect(screen.getByText('share.generation.browse')).toBeInTheDocument()
|
||||
|
||||
await act(async () => {
|
||||
capturedHandlers.onUploadAccepted?.({ data: [['row1']] })
|
||||
})
|
||||
expect(onParsed).toHaveBeenCalledWith([['row1']])
|
||||
})
|
||||
|
||||
test('should show accepted file name without extension', () => {
|
||||
mockAcceptedFile = { name: 'batch.csv' }
|
||||
render(<CSVReader onParsed={jest.fn()} />)
|
||||
|
||||
expect(screen.getByText('batch')).toBeInTheDocument()
|
||||
expect(screen.getByText('.csv')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
test('should toggle hover styling on drag events', async () => {
|
||||
render(<CSVReader onParsed={jest.fn()} />)
|
||||
const dragEvent = { preventDefault: jest.fn() } as unknown as DragEvent
|
||||
|
||||
await act(async () => {
|
||||
capturedHandlers.onDragOver?.(dragEvent)
|
||||
})
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('drop-zone')).toHaveClass('border-components-dropzone-border-accent')
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
capturedHandlers.onDragLeave?.(dragEvent)
|
||||
})
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('drop-zone')).not.toHaveClass('border-components-dropzone-border-accent')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,88 +0,0 @@
|
||||
import React from 'react'
|
||||
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import RunBatch from './index'
|
||||
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
|
||||
|
||||
jest.mock('@/hooks/use-breakpoints', () => {
|
||||
const actual = jest.requireActual('@/hooks/use-breakpoints')
|
||||
return {
|
||||
__esModule: true,
|
||||
default: jest.fn(),
|
||||
MediaType: actual.MediaType,
|
||||
}
|
||||
})
|
||||
|
||||
let latestOnParsed: ((data: string[][]) => void) | undefined
|
||||
let receivedCSVDownloadProps: Record<string, unknown> | undefined
|
||||
|
||||
jest.mock('./csv-reader', () => (props: { onParsed: (data: string[][]) => void }) => {
|
||||
latestOnParsed = props.onParsed
|
||||
return <div data-testid="csv-reader" />
|
||||
})
|
||||
|
||||
jest.mock('./csv-download', () => (props: { vars: { name: string }[] }) => {
|
||||
receivedCSVDownloadProps = props
|
||||
return <div data-testid="csv-download" />
|
||||
})
|
||||
|
||||
const mockUseBreakpoints = useBreakpoints as jest.Mock
|
||||
|
||||
describe('RunBatch', () => {
|
||||
const vars = [{ name: 'prompt' }]
|
||||
|
||||
beforeEach(() => {
|
||||
mockUseBreakpoints.mockReturnValue(MediaType.pc)
|
||||
latestOnParsed = undefined
|
||||
receivedCSVDownloadProps = undefined
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
test('should enable run button after CSV parsed and send data', async () => {
|
||||
const onSend = jest.fn()
|
||||
render(
|
||||
<RunBatch
|
||||
vars={vars}
|
||||
onSend={onSend}
|
||||
isAllFinished
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(receivedCSVDownloadProps?.vars).toEqual(vars)
|
||||
await act(async () => {
|
||||
latestOnParsed?.([['row1']])
|
||||
})
|
||||
|
||||
const runButton = screen.getByRole('button', { name: 'share.generation.run' })
|
||||
await waitFor(() => {
|
||||
expect(runButton).not.toBeDisabled()
|
||||
})
|
||||
|
||||
fireEvent.click(runButton)
|
||||
expect(onSend).toHaveBeenCalledWith([['row1']])
|
||||
})
|
||||
|
||||
test('should keep button disabled and show spinner when results still running on mobile', async () => {
|
||||
mockUseBreakpoints.mockReturnValue(MediaType.mobile)
|
||||
const onSend = jest.fn()
|
||||
const { container } = render(
|
||||
<RunBatch
|
||||
vars={vars}
|
||||
onSend={onSend}
|
||||
isAllFinished={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
await act(async () => {
|
||||
latestOnParsed?.([['row']])
|
||||
})
|
||||
|
||||
const runButton = screen.getByRole('button', { name: 'share.generation.run' })
|
||||
await waitFor(() => {
|
||||
expect(runButton).toBeDisabled()
|
||||
})
|
||||
expect(runButton).toHaveClass('grow')
|
||||
const icon = container.querySelector('svg')
|
||||
expect(icon).toHaveClass('animate-spin')
|
||||
expect(onSend).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -52,7 +52,7 @@ const RAGToolRecommendations = ({
|
||||
data: ragRecommendedPlugins,
|
||||
isLoading: isLoadingRAGRecommendedPlugins,
|
||||
isFetching: isFetchingRAGRecommendedPlugins,
|
||||
} = useRAGRecommendedPlugins('tool')
|
||||
} = useRAGRecommendedPlugins()
|
||||
|
||||
const recommendedPlugins = useMemo(() => {
|
||||
if (ragRecommendedPlugins)
|
||||
|
||||
@@ -84,8 +84,7 @@ const CodeEditor: FC<Props> = ({
|
||||
|
||||
const getUniqVarName = (varName: string) => {
|
||||
if (varList.find(v => v.variable === varName)) {
|
||||
const varNameRegex = /_(\d+)$/
|
||||
const match = varNameRegex.exec(varName)
|
||||
const match = varName.match(/_(\d+)$/)
|
||||
|
||||
const index = (() => {
|
||||
if (match)
|
||||
|
||||
@@ -25,8 +25,7 @@ const SupportVarInput: FC<Props> = ({
|
||||
const renderSafeContent = (inputValue: string) => {
|
||||
const parts = inputValue.split(/(\{\{[^}]+\}\}|\n)/g)
|
||||
return parts.map((part, index) => {
|
||||
const variableRegex = /^\{\{([^}]+)\}\}$/
|
||||
const variableMatch = variableRegex.exec(part)
|
||||
const variableMatch = part.match(/^\{\{([^}]+)\}\}$/)
|
||||
if (variableMatch) {
|
||||
return (
|
||||
<VarHighlight
|
||||
|
||||
@@ -10,7 +10,7 @@ export const extractFunctionParams = (code: string, language: CodeLanguage) => {
|
||||
[CodeLanguage.python3]: /def\s+main\s*\((.*?)\)/,
|
||||
[CodeLanguage.javascript]: /function\s+main\s*\((.*?)\)/,
|
||||
}
|
||||
const match = patterns[language].exec(code)
|
||||
const match = code.match(patterns[language])
|
||||
const params: string[] = []
|
||||
|
||||
if (match?.[1]) {
|
||||
|
||||
@@ -75,8 +75,7 @@ const parseCurl = (curlCommand: string): { node: HttpNodeType | null; error: str
|
||||
|
||||
// To support command like `curl -F "file=@/path/to/file;type=application/zip"`
|
||||
// the `;type=application/zip` should translate to `Content-Type: application/zip`
|
||||
const typeRegex = /^(.+?);type=(.+)$/
|
||||
const typeMatch = typeRegex.exec(value)
|
||||
const typeMatch = value.match(/^(.+?);type=(.+)$/)
|
||||
if (typeMatch) {
|
||||
const [, actualValue, mimeType] = typeMatch
|
||||
value = actualValue
|
||||
|
||||
@@ -5,8 +5,7 @@ export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH' | 'HEAD'
|
||||
export type ArrayElementType = 'string' | 'number' | 'boolean' | 'object'
|
||||
|
||||
export const getArrayElementType = (arrayType: `array[${ArrayElementType}]`): ArrayElementType => {
|
||||
const arrayRegex = /^array\[(.+)\]$/
|
||||
const match = arrayRegex.exec(arrayType)
|
||||
const match = arrayType.match(/^array\[(.+)\]$/)
|
||||
return (match?.[1] as ArrayElementType) || 'string'
|
||||
}
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ export function getLoopStartNode(loopId: string): Node {
|
||||
|
||||
export const genNewNodeTitleFromOld = (oldTitle: string) => {
|
||||
const regex = /^(.+?)\s*\((\d+)\)\s*$/
|
||||
const match = regex.exec(oldTitle)
|
||||
const match = oldTitle.match(regex)
|
||||
|
||||
if (match) {
|
||||
const title = match[1]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user