Merge branch 'main' into jzh

This commit is contained in:
JzoNg
2026-03-27 14:04:10 +08:00
51 changed files with 1160 additions and 1047 deletions

View File

@@ -25,7 +25,6 @@ jobs:
strategy:
matrix:
python-version:
- "3.11"
- "3.12"
steps:

View File

@@ -10,6 +10,7 @@ on:
branches: ["main"]
permissions:
actions: write
contents: write
pull-requests: write
checks: write
@@ -20,9 +21,24 @@ concurrency:
cancel-in-progress: true
jobs:
pre_job:
name: Skip Duplicate Checks
runs-on: ubuntu-latest
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip || 'false' }}
steps:
- id: skip_check
continue-on-error: true
uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
with:
cancel_others: 'true'
concurrent_skipping: same_content_newer
# Check which paths were changed to determine which tests to run
check-changes:
name: Check Changed Files
needs: pre_job
if: needs.pre_job.outputs.should_skip != 'true'
runs-on: ubuntu-latest
outputs:
api-changed: ${{ steps.changes.outputs.api }}
@@ -56,15 +72,19 @@ jobs:
# Run tests in parallel while always emitting stable required checks.
api-tests-run:
name: Run API Tests
needs: check-changes
if: needs.check-changes.outputs.api-changed == 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed == 'true'
uses: ./.github/workflows/api-tests.yml
secrets: inherit
api-tests-skip:
name: Skip API Tests
needs: check-changes
if: needs.check-changes.outputs.api-changed != 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.api-changed != 'true'
runs-on: ubuntu-latest
steps:
- name: Report skipped API tests
@@ -74,6 +94,7 @@ jobs:
name: API Tests
if: ${{ always() }}
needs:
- pre_job
- check-changes
- api-tests-run
- api-tests-skip
@@ -81,10 +102,16 @@ jobs:
steps:
- name: Finalize API Tests status
env:
SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
TESTS_CHANGED: ${{ needs.check-changes.outputs.api-changed }}
RUN_RESULT: ${{ needs.api-tests-run.result }}
SKIP_RESULT: ${{ needs.api-tests-skip.result }}
run: |
if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
echo "API tests were skipped because this workflow run duplicated a successful or newer run."
exit 0
fi
if [[ "$TESTS_CHANGED" == 'true' ]]; then
if [[ "$RUN_RESULT" == 'success' ]]; then
echo "API tests ran successfully."
@@ -105,15 +132,19 @@ jobs:
web-tests-run:
name: Run Web Tests
needs: check-changes
if: needs.check-changes.outputs.web-changed == 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed == 'true'
uses: ./.github/workflows/web-tests.yml
secrets: inherit
web-tests-skip:
name: Skip Web Tests
needs: check-changes
if: needs.check-changes.outputs.web-changed != 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.web-changed != 'true'
runs-on: ubuntu-latest
steps:
- name: Report skipped web tests
@@ -123,6 +154,7 @@ jobs:
name: Web Tests
if: ${{ always() }}
needs:
- pre_job
- check-changes
- web-tests-run
- web-tests-skip
@@ -130,10 +162,16 @@ jobs:
steps:
- name: Finalize Web Tests status
env:
SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
TESTS_CHANGED: ${{ needs.check-changes.outputs.web-changed }}
RUN_RESULT: ${{ needs.web-tests-run.result }}
SKIP_RESULT: ${{ needs.web-tests-skip.result }}
run: |
if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
echo "Web tests were skipped because this workflow run duplicated a successful or newer run."
exit 0
fi
if [[ "$TESTS_CHANGED" == 'true' ]]; then
if [[ "$RUN_RESULT" == 'success' ]]; then
echo "Web tests ran successfully."
@@ -154,18 +192,24 @@ jobs:
style-check:
name: Style Check
needs: pre_job
if: needs.pre_job.outputs.should_skip != 'true'
uses: ./.github/workflows/style.yml
vdb-tests-run:
name: Run VDB Tests
needs: check-changes
if: needs.check-changes.outputs.vdb-changed == 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed == 'true'
uses: ./.github/workflows/vdb-tests.yml
vdb-tests-skip:
name: Skip VDB Tests
needs: check-changes
if: needs.check-changes.outputs.vdb-changed != 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.vdb-changed != 'true'
runs-on: ubuntu-latest
steps:
- name: Report skipped VDB tests
@@ -175,6 +219,7 @@ jobs:
name: VDB Tests
if: ${{ always() }}
needs:
- pre_job
- check-changes
- vdb-tests-run
- vdb-tests-skip
@@ -182,10 +227,16 @@ jobs:
steps:
- name: Finalize VDB Tests status
env:
SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
TESTS_CHANGED: ${{ needs.check-changes.outputs.vdb-changed }}
RUN_RESULT: ${{ needs.vdb-tests-run.result }}
SKIP_RESULT: ${{ needs.vdb-tests-skip.result }}
run: |
if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
echo "VDB tests were skipped because this workflow run duplicated a successful or newer run."
exit 0
fi
if [[ "$TESTS_CHANGED" == 'true' ]]; then
if [[ "$RUN_RESULT" == 'success' ]]; then
echo "VDB tests ran successfully."
@@ -206,14 +257,18 @@ jobs:
db-migration-test-run:
name: Run DB Migration Test
needs: check-changes
if: needs.check-changes.outputs.migration-changed == 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed == 'true'
uses: ./.github/workflows/db-migration-test.yml
db-migration-test-skip:
name: Skip DB Migration Test
needs: check-changes
if: needs.check-changes.outputs.migration-changed != 'true'
needs:
- pre_job
- check-changes
if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.migration-changed != 'true'
runs-on: ubuntu-latest
steps:
- name: Report skipped DB migration tests
@@ -223,6 +278,7 @@ jobs:
name: DB Migration Test
if: ${{ always() }}
needs:
- pre_job
- check-changes
- db-migration-test-run
- db-migration-test-skip
@@ -230,10 +286,16 @@ jobs:
steps:
- name: Finalize DB Migration Test status
env:
SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }}
TESTS_CHANGED: ${{ needs.check-changes.outputs.migration-changed }}
RUN_RESULT: ${{ needs.db-migration-test-run.result }}
SKIP_RESULT: ${{ needs.db-migration-test-skip.result }}
run: |
if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then
echo "DB migration tests were skipped because this workflow run duplicated a successful or newer run."
exit 0
fi
if [[ "$TESTS_CHANGED" == 'true' ]]; then
if [[ "$RUN_RESULT" == 'success' ]]; then
echo "DB migration tests ran successfully."

View File

@@ -14,7 +14,6 @@ jobs:
strategy:
matrix:
python-version:
- "3.11"
- "3.12"
steps:

View File

@@ -39,6 +39,7 @@ from core.ops.entities.trace_entity import (
)
from core.repositories import DifyCoreRepositoryFactory
from extensions.ext_database import db
from graphon.enums import WorkflowNodeExecutionStatus
from models.model import EndUser, MessageFile
from models.workflow import WorkflowNodeExecutionTriggeredFrom
@@ -300,7 +301,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
"app_name": node_execution.title,
"status": node_execution.status,
"status_message": node_execution.error or "",
"level": "ERROR" if node_execution.status == "failed" else "DEFAULT",
"level": "ERROR" if node_execution.status == WorkflowNodeExecutionStatus.FAILED else "DEFAULT",
}
)
@@ -361,7 +362,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
llm_attributes.update(self._construct_llm_attributes(process_data.get("prompts", [])))
node_span.set_attributes(llm_attributes)
finally:
if node_execution.status == "failed":
if node_execution.status == WorkflowNodeExecutionStatus.FAILED:
set_span_status(node_span, node_execution.error)
else:
set_span_status(node_span)

View File

@@ -60,7 +60,7 @@ def _dict_to_workflow_node_execution_model(data: dict[str, Any]) -> WorkflowNode
model.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN
model.node_id = data.get("node_id") or ""
model.node_type = data.get("node_type") or ""
model.status = data.get("status") or "running" # Default status if missing
model.status = WorkflowNodeExecutionStatus(data.get("status") or "running")
model.title = data.get("title") or ""
created_by_role_val = data.get("created_by_role")
try:

View File

@@ -33,7 +33,13 @@ from extensions.ext_storage import Storage
from factories.variable_factory import TypeMismatchError, build_segment_with_type
from graphon.entities.graph_config import NodeConfigDict, NodeConfigDictAdapter
from graphon.entities.pause_reason import HumanInputRequired, PauseReason, PauseReasonType, SchedulingPause
from graphon.enums import BuiltinNodeTypes, NodeType, WorkflowExecutionStatus, WorkflowNodeExecutionMetadataKey
from graphon.enums import (
BuiltinNodeTypes,
NodeType,
WorkflowExecutionStatus,
WorkflowNodeExecutionMetadataKey,
WorkflowNodeExecutionStatus,
)
from graphon.file.constants import maybe_file_object
from graphon.file.models import File
from graphon.variables import utils as variable_utils
@@ -941,7 +947,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
inputs: Mapped[str | None] = mapped_column(LongText)
process_data: Mapped[str | None] = mapped_column(LongText)
outputs: Mapped[str | None] = mapped_column(LongText)
status: Mapped[str] = mapped_column(String(255))
status: Mapped[WorkflowNodeExecutionStatus] = mapped_column(EnumText(WorkflowNodeExecutionStatus, length=255))
error: Mapped[str | None] = mapped_column(LongText)
elapsed_time: Mapped[float] = mapped_column(sa.Float, server_default=sa.text("0"))
execution_metadata: Mapped[str | None] = mapped_column(LongText)

View File

@@ -125,7 +125,7 @@ def _create_node_execution_from_domain(
else:
node_execution.execution_metadata = "{}"
node_execution.status = execution.status.value
node_execution.status = execution.status
node_execution.error = execution.error
node_execution.elapsed_time = execution.elapsed_time
node_execution.created_by_role = creator_user_role
@@ -159,7 +159,7 @@ def _update_node_execution_from_domain(node_execution: WorkflowNodeExecutionMode
node_execution.execution_metadata = "{}"
# Update other fields
node_execution.status = execution.status.value
node_execution.status = execution.status
node_execution.error = execution.error
node_execution.elapsed_time = execution.elapsed_time
node_execution.finished_at = execution.finished_at

View File

@@ -0,0 +1,289 @@
from __future__ import annotations
import json
from unittest.mock import Mock, patch
from uuid import uuid4
import pytest
from models.source import DataSourceApiKeyAuthBinding
from services.auth.api_key_auth_service import ApiKeyAuthService
class TestApiKeyAuthService:
@pytest.fixture
def tenant_id(self) -> str:
return str(uuid4())
@pytest.fixture
def category(self) -> str:
return "search"
@pytest.fixture
def provider(self) -> str:
return "google"
@pytest.fixture
def mock_credentials(self) -> dict:
return {"auth_type": "api_key", "config": {"api_key": "test_secret_key_123"}}
@pytest.fixture
def mock_args(self, category, provider, mock_credentials) -> dict:
return {"category": category, "provider": provider, "credentials": mock_credentials}
def _create_binding(self, db_session, *, tenant_id, category, provider, credentials=None, disabled=False):
binding = DataSourceApiKeyAuthBinding(
tenant_id=tenant_id,
category=category,
provider=provider,
credentials=json.dumps(credentials, ensure_ascii=False) if credentials else None,
disabled=disabled,
)
db_session.add(binding)
db_session.commit()
return binding
def test_get_provider_auth_list_success(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider
):
self._create_binding(db_session_with_containers, tenant_id=tenant_id, category=category, provider=provider)
db_session_with_containers.expire_all()
result = ApiKeyAuthService.get_provider_auth_list(tenant_id)
assert len(result) >= 1
tenant_results = [r for r in result if r.tenant_id == tenant_id]
assert len(tenant_results) == 1
assert tenant_results[0].provider == provider
def test_get_provider_auth_list_empty(self, flask_app_with_containers, db_session_with_containers, tenant_id):
result = ApiKeyAuthService.get_provider_auth_list(tenant_id)
tenant_results = [r for r in result if r.tenant_id == tenant_id]
assert tenant_results == []
def test_get_provider_auth_list_filters_disabled(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider
):
self._create_binding(
db_session_with_containers, tenant_id=tenant_id, category=category, provider=provider, disabled=True
)
db_session_with_containers.expire_all()
result = ApiKeyAuthService.get_provider_auth_list(tenant_id)
tenant_results = [r for r in result if r.tenant_id == tenant_id]
assert tenant_results == []
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
@patch("services.auth.api_key_auth_service.encrypter")
def test_create_provider_auth_success(
self, mock_encrypter, mock_factory, flask_app_with_containers, db_session_with_containers, tenant_id, mock_args
):
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = True
mock_factory.return_value = mock_auth_instance
mock_encrypter.encrypt_token.return_value = "encrypted_test_key_123"
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
mock_factory.assert_called_once()
mock_auth_instance.validate_credentials.assert_called_once()
mock_encrypter.encrypt_token.assert_called_once_with(tenant_id, "test_secret_key_123")
db_session_with_containers.expire_all()
bindings = db_session_with_containers.query(DataSourceApiKeyAuthBinding).filter_by(tenant_id=tenant_id).all()
assert len(bindings) == 1
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
def test_create_provider_auth_validation_failed(
self, mock_factory, flask_app_with_containers, db_session_with_containers, tenant_id, mock_args
):
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = False
mock_factory.return_value = mock_auth_instance
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
db_session_with_containers.expire_all()
bindings = db_session_with_containers.query(DataSourceApiKeyAuthBinding).filter_by(tenant_id=tenant_id).all()
assert len(bindings) == 0
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
@patch("services.auth.api_key_auth_service.encrypter")
def test_create_provider_auth_encrypts_api_key(
self, mock_encrypter, mock_factory, flask_app_with_containers, db_session_with_containers, tenant_id, mock_args
):
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = True
mock_factory.return_value = mock_auth_instance
mock_encrypter.encrypt_token.return_value = "encrypted_test_key_123"
original_key = mock_args["credentials"]["config"]["api_key"]
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
assert mock_args["credentials"]["config"]["api_key"] == "encrypted_test_key_123"
assert mock_args["credentials"]["config"]["api_key"] != original_key
mock_encrypter.encrypt_token.assert_called_once_with(tenant_id, original_key)
def test_get_auth_credentials_success(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider, mock_credentials
):
self._create_binding(
db_session_with_containers,
tenant_id=tenant_id,
category=category,
provider=provider,
credentials=mock_credentials,
)
db_session_with_containers.expire_all()
result = ApiKeyAuthService.get_auth_credentials(tenant_id, category, provider)
assert result == mock_credentials
def test_get_auth_credentials_not_found(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider
):
result = ApiKeyAuthService.get_auth_credentials(tenant_id, category, provider)
assert result is None
def test_get_auth_credentials_json_parsing(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider
):
special_credentials = {"auth_type": "api_key", "config": {"api_key": "key_with_中文_and_special_chars_!@#$%"}}
self._create_binding(
db_session_with_containers,
tenant_id=tenant_id,
category=category,
provider=provider,
credentials=special_credentials,
)
db_session_with_containers.expire_all()
result = ApiKeyAuthService.get_auth_credentials(tenant_id, category, provider)
assert result == special_credentials
assert result["config"]["api_key"] == "key_with_中文_and_special_chars_!@#$%"
def test_delete_provider_auth_success(
self, flask_app_with_containers, db_session_with_containers, tenant_id, category, provider
):
binding = self._create_binding(
db_session_with_containers, tenant_id=tenant_id, category=category, provider=provider
)
binding_id = binding.id
db_session_with_containers.expire_all()
ApiKeyAuthService.delete_provider_auth(tenant_id, binding_id)
db_session_with_containers.expire_all()
remaining = db_session_with_containers.query(DataSourceApiKeyAuthBinding).filter_by(id=binding_id).first()
assert remaining is None
def test_delete_provider_auth_not_found(self, flask_app_with_containers, db_session_with_containers, tenant_id):
# Should not raise when binding not found
ApiKeyAuthService.delete_provider_auth(tenant_id, str(uuid4()))
def test_validate_api_key_auth_args_success(self, mock_args):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_missing_category(self, mock_args):
del mock_args["category"]
with pytest.raises(ValueError, match="category is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_empty_category(self, mock_args):
mock_args["category"] = ""
with pytest.raises(ValueError, match="category is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_missing_provider(self, mock_args):
del mock_args["provider"]
with pytest.raises(ValueError, match="provider is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_empty_provider(self, mock_args):
mock_args["provider"] = ""
with pytest.raises(ValueError, match="provider is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_missing_credentials(self, mock_args):
del mock_args["credentials"]
with pytest.raises(ValueError, match="credentials is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_empty_credentials(self, mock_args):
mock_args["credentials"] = None
with pytest.raises(ValueError, match="credentials is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_invalid_credentials_type(self, mock_args):
mock_args["credentials"] = "not_a_dict"
with pytest.raises(ValueError, match="credentials must be a dictionary"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_missing_auth_type(self, mock_args):
del mock_args["credentials"]["auth_type"]
with pytest.raises(ValueError, match="auth_type is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
def test_validate_api_key_auth_args_empty_auth_type(self, mock_args):
mock_args["credentials"]["auth_type"] = ""
with pytest.raises(ValueError, match="auth_type is required"):
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
@pytest.mark.parametrize(
"malicious_input",
[
"<script>alert('xss')</script>",
"'; DROP TABLE users; --",
"../../../etc/passwd",
"\\x00\\x00",
"A" * 10000,
],
)
def test_validate_api_key_auth_args_malicious_input(self, malicious_input, mock_args):
mock_args["category"] = malicious_input
ApiKeyAuthService.validate_api_key_auth_args(mock_args)
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
@patch("services.auth.api_key_auth_service.encrypter")
def test_create_provider_auth_database_error_handling(
self, mock_encrypter, mock_factory, flask_app_with_containers, tenant_id, mock_args
):
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = True
mock_factory.return_value = mock_auth_instance
mock_encrypter.encrypt_token.return_value = "encrypted_key"
with patch("services.auth.api_key_auth_service.db.session") as mock_session:
mock_session.commit.side_effect = Exception("Database error")
with pytest.raises(Exception, match="Database error"):
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
def test_create_provider_auth_factory_exception(self, mock_factory, tenant_id, mock_args):
mock_factory.side_effect = Exception("Factory error")
with pytest.raises(Exception, match="Factory error"):
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
@patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
@patch("services.auth.api_key_auth_service.encrypter")
def test_create_provider_auth_encryption_exception(self, mock_encrypter, mock_factory, tenant_id, mock_args):
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = True
mock_factory.return_value = mock_auth_instance
mock_encrypter.encrypt_token.side_effect = Exception("Encryption error")
with pytest.raises(Exception, match="Encryption error"):
ApiKeyAuthService.create_provider_auth(tenant_id, mock_args)
def test_validate_api_key_auth_args_none_input(self):
with pytest.raises(TypeError):
ApiKeyAuthService.validate_api_key_auth_args(None)
def test_validate_api_key_auth_args_dict_credentials_with_list_auth_type(self, mock_args):
mock_args["credentials"]["auth_type"] = ["api_key"]
ApiKeyAuthService.validate_api_key_auth_args(mock_args)

View File

@@ -0,0 +1,264 @@
"""
API Key Authentication System Integration Tests
"""
from __future__ import annotations
from concurrent.futures import ThreadPoolExecutor
from unittest.mock import Mock, patch
from uuid import uuid4
import httpx
import pytest
from models.source import DataSourceApiKeyAuthBinding
from services.auth.api_key_auth_factory import ApiKeyAuthFactory
from services.auth.api_key_auth_service import ApiKeyAuthService
from services.auth.auth_type import AuthType
class TestAuthIntegration:
@pytest.fixture
def tenant_id_1(self) -> str:
return str(uuid4())
@pytest.fixture
def tenant_id_2(self) -> str:
return str(uuid4())
@pytest.fixture
def category(self) -> str:
return "search"
@pytest.fixture
def firecrawl_credentials(self) -> dict:
return {"auth_type": "bearer", "config": {"api_key": "fc_test_key_123"}}
@pytest.fixture
def jina_credentials(self) -> dict:
return {"auth_type": "bearer", "config": {"api_key": "jina_test_key_456"}}
@patch("services.auth.firecrawl.firecrawl.httpx.post")
@patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
def test_end_to_end_auth_flow(
self,
mock_encrypt,
mock_http,
flask_app_with_containers,
db_session_with_containers,
tenant_id_1,
category,
firecrawl_credentials,
):
mock_http.return_value = self._create_success_response()
mock_encrypt.return_value = "encrypted_fc_test_key_123"
args = {"category": category, "provider": AuthType.FIRECRAWL, "credentials": firecrawl_credentials}
ApiKeyAuthService.create_provider_auth(tenant_id_1, args)
mock_http.assert_called_once()
call_args = mock_http.call_args
assert "https://api.firecrawl.dev/v1/crawl" in call_args[0][0]
assert call_args[1]["headers"]["Authorization"] == "Bearer fc_test_key_123"
mock_encrypt.assert_called_once_with(tenant_id_1, "fc_test_key_123")
db_session_with_containers.expire_all()
bindings = db_session_with_containers.query(DataSourceApiKeyAuthBinding).filter_by(tenant_id=tenant_id_1).all()
assert len(bindings) == 1
assert bindings[0].provider == AuthType.FIRECRAWL
@patch("services.auth.firecrawl.firecrawl.httpx.post")
def test_cross_component_integration(self, mock_http, firecrawl_credentials):
mock_http.return_value = self._create_success_response()
factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, firecrawl_credentials)
result = factory.validate_credentials()
assert result is True
mock_http.assert_called_once()
@patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
@patch("services.auth.firecrawl.firecrawl.httpx.post")
@patch("services.auth.jina.jina.httpx.post")
def test_multi_tenant_isolation(
self,
mock_jina_http,
mock_fc_http,
mock_encrypt,
flask_app_with_containers,
db_session_with_containers,
tenant_id_1,
tenant_id_2,
category,
firecrawl_credentials,
jina_credentials,
):
mock_fc_http.return_value = self._create_success_response()
mock_jina_http.return_value = self._create_success_response()
mock_encrypt.return_value = "encrypted_key"
args1 = {"category": category, "provider": AuthType.FIRECRAWL, "credentials": firecrawl_credentials}
ApiKeyAuthService.create_provider_auth(tenant_id_1, args1)
args2 = {"category": category, "provider": AuthType.JINA, "credentials": jina_credentials}
ApiKeyAuthService.create_provider_auth(tenant_id_2, args2)
db_session_with_containers.expire_all()
result1 = ApiKeyAuthService.get_provider_auth_list(tenant_id_1)
result2 = ApiKeyAuthService.get_provider_auth_list(tenant_id_2)
assert len(result1) == 1
assert result1[0].tenant_id == tenant_id_1
assert len(result2) == 1
assert result2[0].tenant_id == tenant_id_2
def test_cross_tenant_access_prevention(
self, flask_app_with_containers, db_session_with_containers, tenant_id_2, category
):
result = ApiKeyAuthService.get_auth_credentials(tenant_id_2, category, AuthType.FIRECRAWL)
assert result is None
def test_sensitive_data_protection(self):
credentials_with_secrets = {
"auth_type": "bearer",
"config": {"api_key": "super_secret_key_do_not_log", "secret": "another_secret"},
}
factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, credentials_with_secrets)
factory_str = str(factory)
assert "super_secret_key_do_not_log" not in factory_str
assert "another_secret" not in factory_str
@patch("services.auth.firecrawl.firecrawl.httpx.post")
@patch("services.auth.api_key_auth_service.encrypter.encrypt_token", return_value="encrypted_key")
def test_concurrent_creation_safety(
self,
mock_encrypt,
mock_http,
flask_app_with_containers,
db_session_with_containers,
tenant_id_1,
category,
firecrawl_credentials,
):
app = flask_app_with_containers
mock_http.return_value = self._create_success_response()
results = []
exceptions = []
def create_auth():
try:
with app.app_context():
thread_args = {
"category": category,
"provider": AuthType.FIRECRAWL,
"credentials": {"auth_type": "bearer", "config": {"api_key": "fc_test_key_123"}},
}
ApiKeyAuthService.create_provider_auth(tenant_id_1, thread_args)
results.append("success")
except Exception as e:
exceptions.append(e)
with ThreadPoolExecutor(max_workers=5) as executor:
futures = [executor.submit(create_auth) for _ in range(5)]
for future in futures:
future.result()
assert len(results) == 5
assert len(exceptions) == 0
@pytest.mark.parametrize(
"invalid_input",
[
None,
{},
{"auth_type": "bearer"},
{"auth_type": "bearer", "config": {}},
],
)
def test_invalid_input_boundary(self, invalid_input):
with pytest.raises((ValueError, KeyError, TypeError, AttributeError)):
ApiKeyAuthFactory(AuthType.FIRECRAWL, invalid_input)
@patch("services.auth.firecrawl.firecrawl.httpx.post")
def test_http_error_handling(self, mock_http, firecrawl_credentials):
mock_response = Mock()
mock_response.status_code = 401
mock_response.text = '{"error": "Unauthorized"}'
mock_response.raise_for_status.side_effect = httpx.HTTPError("Unauthorized")
mock_http.return_value = mock_response
factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, firecrawl_credentials)
with pytest.raises((httpx.HTTPError, Exception)):
factory.validate_credentials()
@patch("services.auth.firecrawl.firecrawl.httpx.post")
def test_network_failure_recovery(
self,
mock_http,
flask_app_with_containers,
db_session_with_containers,
tenant_id_1,
category,
firecrawl_credentials,
):
mock_http.side_effect = httpx.RequestError("Network timeout")
args = {"category": category, "provider": AuthType.FIRECRAWL, "credentials": firecrawl_credentials}
with pytest.raises(httpx.RequestError):
ApiKeyAuthService.create_provider_auth(tenant_id_1, args)
db_session_with_containers.expire_all()
bindings = db_session_with_containers.query(DataSourceApiKeyAuthBinding).filter_by(tenant_id=tenant_id_1).all()
assert len(bindings) == 0
@pytest.mark.parametrize(
("provider", "credentials"),
[
(AuthType.FIRECRAWL, {"auth_type": "bearer", "config": {"api_key": "fc_key"}}),
(AuthType.JINA, {"auth_type": "bearer", "config": {"api_key": "jina_key"}}),
(AuthType.WATERCRAWL, {"auth_type": "x-api-key", "config": {"api_key": "wc_key"}}),
],
)
def test_all_providers_factory_creation(self, provider, credentials):
auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider)
assert auth_class is not None
factory = ApiKeyAuthFactory(provider, credentials)
assert factory.auth is not None
@patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
@patch("services.auth.firecrawl.firecrawl.httpx.post")
def test_get_auth_credentials_returns_stored_credentials(
self,
mock_http,
mock_encrypt,
flask_app_with_containers,
db_session_with_containers,
tenant_id_1,
category,
firecrawl_credentials,
):
mock_http.return_value = self._create_success_response()
mock_encrypt.return_value = "encrypted_key"
args = {"category": category, "provider": AuthType.FIRECRAWL, "credentials": firecrawl_credentials}
ApiKeyAuthService.create_provider_auth(tenant_id_1, args)
db_session_with_containers.expire_all()
result = ApiKeyAuthService.get_auth_credentials(tenant_id_1, category, AuthType.FIRECRAWL)
assert result is not None
assert result["config"]["api_key"] == "encrypted_key"
def _create_success_response(self, status_code=200):
mock_response = Mock()
mock_response.status_code = status_code
mock_response.json.return_value = {"status": "success"}
mock_response.raise_for_status.return_value = None
return mock_response

View File

@@ -8,15 +8,27 @@ verification, marketplace upgrade flows, and uninstall with credential cleanup.
from __future__ import annotations
from unittest.mock import MagicMock, patch
from uuid import uuid4
import pytest
from sqlalchemy import select
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.entities.plugin_daemon import PluginVerification
from models.provider import Provider, ProviderCredential, TenantPreferredModelProvider
from services.errors.plugin import PluginInstallationForbiddenError
from services.feature_service import PluginInstallationScope
from services.plugin.plugin_service import PluginService
from tests.unit_tests.services.plugin.conftest import make_features
def _make_features(
restrict_to_marketplace: bool = False,
scope: PluginInstallationScope = PluginInstallationScope.ALL,
) -> MagicMock:
features = MagicMock()
features.plugin_installation_permission.restrict_to_marketplace_only = restrict_to_marketplace
features.plugin_installation_permission.plugin_installation_scope = scope
return features
class TestFetchLatestPluginVersion:
@@ -80,14 +92,14 @@ class TestFetchLatestPluginVersion:
class TestCheckMarketplaceOnlyPermission:
@patch("services.plugin.plugin_service.FeatureService")
def test_raises_when_restricted(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(restrict_to_marketplace=True)
mock_fs.get_system_features.return_value = _make_features(restrict_to_marketplace=True)
with pytest.raises(PluginInstallationForbiddenError):
PluginService._check_marketplace_only_permission()
@patch("services.plugin.plugin_service.FeatureService")
def test_passes_when_not_restricted(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(restrict_to_marketplace=False)
mock_fs.get_system_features.return_value = _make_features(restrict_to_marketplace=False)
PluginService._check_marketplace_only_permission() # should not raise
@@ -95,7 +107,7 @@ class TestCheckMarketplaceOnlyPermission:
class TestCheckPluginInstallationScope:
@patch("services.plugin.plugin_service.FeatureService")
def test_official_only_allows_langgenius(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(scope=PluginInstallationScope.OFFICIAL_ONLY)
mock_fs.get_system_features.return_value = _make_features(scope=PluginInstallationScope.OFFICIAL_ONLY)
verification = MagicMock()
verification.authorized_category = PluginVerification.AuthorizedCategory.Langgenius
@@ -103,14 +115,14 @@ class TestCheckPluginInstallationScope:
@patch("services.plugin.plugin_service.FeatureService")
def test_official_only_rejects_third_party(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(scope=PluginInstallationScope.OFFICIAL_ONLY)
mock_fs.get_system_features.return_value = _make_features(scope=PluginInstallationScope.OFFICIAL_ONLY)
with pytest.raises(PluginInstallationForbiddenError):
PluginService._check_plugin_installation_scope(None)
@patch("services.plugin.plugin_service.FeatureService")
def test_official_and_partners_allows_partner(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(
mock_fs.get_system_features.return_value = _make_features(
scope=PluginInstallationScope.OFFICIAL_AND_SPECIFIC_PARTNERS
)
verification = MagicMock()
@@ -120,7 +132,7 @@ class TestCheckPluginInstallationScope:
@patch("services.plugin.plugin_service.FeatureService")
def test_official_and_partners_rejects_none(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(
mock_fs.get_system_features.return_value = _make_features(
scope=PluginInstallationScope.OFFICIAL_AND_SPECIFIC_PARTNERS
)
@@ -129,7 +141,7 @@ class TestCheckPluginInstallationScope:
@patch("services.plugin.plugin_service.FeatureService")
def test_none_scope_always_raises(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(scope=PluginInstallationScope.NONE)
mock_fs.get_system_features.return_value = _make_features(scope=PluginInstallationScope.NONE)
verification = MagicMock()
verification.authorized_category = PluginVerification.AuthorizedCategory.Langgenius
@@ -138,7 +150,7 @@ class TestCheckPluginInstallationScope:
@patch("services.plugin.plugin_service.FeatureService")
def test_all_scope_passes_any(self, mock_fs):
mock_fs.get_system_features.return_value = make_features(scope=PluginInstallationScope.ALL)
mock_fs.get_system_features.return_value = _make_features(scope=PluginInstallationScope.ALL)
PluginService._check_plugin_installation_scope(None) # should not raise
@@ -209,9 +221,9 @@ class TestUpgradePluginWithMarketplace:
@patch("services.plugin.plugin_service.dify_config")
def test_skips_download_when_already_installed(self, mock_config, mock_installer_cls, mock_fs, mock_marketplace):
mock_config.MARKETPLACE_ENABLED = True
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
installer = mock_installer_cls.return_value
installer.fetch_plugin_manifest.return_value = MagicMock() # no exception = already installed
installer.fetch_plugin_manifest.return_value = MagicMock()
installer.upgrade_plugin.return_value = MagicMock()
PluginService.upgrade_plugin_with_marketplace("t1", "old-uid", "new-uid")
@@ -225,7 +237,7 @@ class TestUpgradePluginWithMarketplace:
@patch("services.plugin.plugin_service.dify_config")
def test_downloads_when_not_installed(self, mock_config, mock_installer_cls, mock_fs, mock_download):
mock_config.MARKETPLACE_ENABLED = True
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
installer = mock_installer_cls.return_value
installer.fetch_plugin_manifest.side_effect = RuntimeError("not found")
mock_download.return_value = b"pkg-bytes"
@@ -244,7 +256,7 @@ class TestUpgradePluginWithGithub:
@patch("services.plugin.plugin_service.FeatureService")
@patch("services.plugin.plugin_service.PluginInstaller")
def test_checks_marketplace_permission_and_delegates(self, mock_installer_cls, mock_fs):
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
installer = mock_installer_cls.return_value
installer.upgrade_plugin.return_value = MagicMock()
@@ -259,7 +271,7 @@ class TestUploadPkg:
@patch("services.plugin.plugin_service.FeatureService")
@patch("services.plugin.plugin_service.PluginInstaller")
def test_runs_permission_and_scope_checks(self, mock_installer_cls, mock_fs):
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
upload_resp = MagicMock()
upload_resp.verification = None
mock_installer_cls.return_value.upload_pkg.return_value = upload_resp
@@ -283,7 +295,7 @@ class TestInstallFromMarketplacePkg:
@patch("services.plugin.plugin_service.dify_config")
def test_downloads_when_not_cached(self, mock_config, mock_installer_cls, mock_fs, mock_download):
mock_config.MARKETPLACE_ENABLED = True
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
installer = mock_installer_cls.return_value
installer.fetch_plugin_manifest.side_effect = RuntimeError("not found")
mock_download.return_value = b"pkg"
@@ -298,14 +310,14 @@ class TestInstallFromMarketplacePkg:
assert result == "task-id"
installer.install_from_identifiers.assert_called_once()
call_args = installer.install_from_identifiers.call_args[0]
assert call_args[1] == ["resolved-uid"] # uses response uid, not input
assert call_args[1] == ["resolved-uid"]
@patch("services.plugin.plugin_service.FeatureService")
@patch("services.plugin.plugin_service.PluginInstaller")
@patch("services.plugin.plugin_service.dify_config")
def test_uses_cached_when_already_downloaded(self, mock_config, mock_installer_cls, mock_fs):
mock_config.MARKETPLACE_ENABLED = True
mock_fs.get_system_features.return_value = make_features()
mock_fs.get_system_features.return_value = _make_features()
installer = mock_installer_cls.return_value
installer.fetch_plugin_manifest.return_value = MagicMock()
decode_resp = MagicMock()
@@ -317,7 +329,7 @@ class TestInstallFromMarketplacePkg:
installer.install_from_identifiers.assert_called_once()
call_args = installer.install_from_identifiers.call_args[0]
assert call_args[1] == ["uid-1"] # uses original uid
assert call_args[1] == ["uid-1"]
class TestUninstall:
@@ -332,26 +344,70 @@ class TestUninstall:
assert result is True
installer.uninstall.assert_called_once_with("t1", "install-1")
@patch("services.plugin.plugin_service.db")
@patch("services.plugin.plugin_service.PluginInstaller")
def test_cleans_credentials_when_plugin_found(self, mock_installer_cls, mock_db):
def test_cleans_credentials_when_plugin_found(
self, mock_installer_cls, flask_app_with_containers, db_session_with_containers
):
tenant_id = str(uuid4())
plugin_id = "org/myplugin"
provider_name = f"{plugin_id}/model-provider"
credential = ProviderCredential(
tenant_id=tenant_id,
provider_name=provider_name,
credential_name="default",
encrypted_config="{}",
)
db_session_with_containers.add(credential)
db_session_with_containers.flush()
credential_id = credential.id
provider = Provider(
tenant_id=tenant_id,
provider_name=provider_name,
credential_id=credential_id,
)
db_session_with_containers.add(provider)
db_session_with_containers.flush()
provider_id = provider.id
pref = TenantPreferredModelProvider(
tenant_id=tenant_id,
provider_name=provider_name,
preferred_provider_type="custom",
)
db_session_with_containers.add(pref)
db_session_with_containers.commit()
plugin = MagicMock()
plugin.installation_id = "install-1"
plugin.plugin_id = "org/myplugin"
plugin.plugin_id = plugin_id
installer = mock_installer_cls.return_value
installer.list_plugins.return_value = [plugin]
installer.uninstall.return_value = True
# Mock Session context manager
mock_session = MagicMock()
mock_db.engine = MagicMock()
mock_session.scalars.return_value.all.return_value = [] # no credentials found
with patch("services.plugin.plugin_service.Session") as mock_session_cls:
mock_session_cls.return_value.__enter__ = MagicMock(return_value=mock_session)
mock_session_cls.return_value.__exit__ = MagicMock(return_value=False)
result = PluginService.uninstall("t1", "install-1")
with patch("services.plugin.plugin_service.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
result = PluginService.uninstall(tenant_id, "install-1")
assert result is True
installer.uninstall.assert_called_once()
db_session_with_containers.expire_all()
remaining_creds = db_session_with_containers.scalars(
select(ProviderCredential).where(ProviderCredential.id == credential_id)
).all()
assert len(remaining_creds) == 0
updated_provider = db_session_with_containers.get(Provider, provider_id)
assert updated_provider is not None
assert updated_provider.credential_id is None
remaining_prefs = db_session_with_containers.scalars(
select(TenantPreferredModelProvider).where(
TenantPreferredModelProvider.tenant_id == tenant_id,
TenantPreferredModelProvider.provider_name == provider_name,
)
).all()
assert len(remaining_prefs) == 0

View File

@@ -1,80 +1,63 @@
from __future__ import annotations
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from uuid import uuid4
import pytest
from werkzeug.exceptions import Unauthorized
import services.api_token_service as api_token_service_module
from models.model import ApiToken
from services.api_token_service import ApiTokenCache, CachedApiToken
@pytest.fixture
def mock_db_session():
"""Fixture providing common DB session mocking for query_token_from_db tests."""
fake_engine = MagicMock()
session = MagicMock()
session_context = MagicMock()
session_context.__enter__.return_value = session
session_context.__exit__.return_value = None
with (
patch.object(api_token_service_module, "db", new=SimpleNamespace(engine=fake_engine)),
patch.object(api_token_service_module, "Session", return_value=session_context) as mock_session_class,
patch.object(api_token_service_module.ApiTokenCache, "set") as mock_cache_set,
patch.object(api_token_service_module, "record_token_usage") as mock_record_usage,
):
yield {
"session": session,
"mock_session_class": mock_session_class,
"mock_cache_set": mock_cache_set,
"mock_record_usage": mock_record_usage,
"fake_engine": fake_engine,
}
class TestQueryTokenFromDb:
def test_should_return_api_token_and_cache_when_token_exists(self, mock_db_session):
"""Test DB lookup success path caches token and records usage."""
# Arrange
auth_token = "token-123"
scope = "app"
api_token = MagicMock()
def test_should_return_api_token_and_cache_when_token_exists(
self, flask_app_with_containers, db_session_with_containers
):
tenant_id = str(uuid4())
app_id = str(uuid4())
token_value = f"app-test-{uuid4()}"
mock_db_session["session"].scalar.return_value = api_token
api_token = ApiToken()
api_token.id = str(uuid4())
api_token.app_id = app_id
api_token.tenant_id = tenant_id
api_token.type = "app"
api_token.token = token_value
db_session_with_containers.add(api_token)
db_session_with_containers.commit()
# Act
result = api_token_service_module.query_token_from_db(auth_token, scope)
with (
patch.object(api_token_service_module.ApiTokenCache, "set") as mock_cache_set,
patch.object(api_token_service_module, "record_token_usage") as mock_record_usage,
):
result = api_token_service_module.query_token_from_db(token_value, "app")
# Assert
assert result == api_token
mock_db_session["mock_session_class"].assert_called_once_with(
mock_db_session["fake_engine"], expire_on_commit=False
)
mock_db_session["mock_cache_set"].assert_called_once_with(auth_token, scope, api_token)
mock_db_session["mock_record_usage"].assert_called_once_with(auth_token, scope)
assert result.id == api_token.id
assert result.token == token_value
mock_cache_set.assert_called_once()
mock_record_usage.assert_called_once_with(token_value, "app")
def test_should_cache_null_and_raise_unauthorized_when_token_not_found(self, mock_db_session):
"""Test DB lookup miss path caches null marker and raises Unauthorized."""
# Arrange
auth_token = "missing-token"
scope = "app"
def test_should_cache_null_and_raise_unauthorized_when_token_not_found(
self, flask_app_with_containers, db_session_with_containers
):
with (
patch.object(api_token_service_module.ApiTokenCache, "set") as mock_cache_set,
patch.object(api_token_service_module, "record_token_usage") as mock_record_usage,
):
with pytest.raises(Unauthorized, match="Access token is invalid"):
api_token_service_module.query_token_from_db(f"missing-{uuid4()}", "app")
mock_db_session["session"].scalar.return_value = None
# Act / Assert
with pytest.raises(Unauthorized, match="Access token is invalid"):
api_token_service_module.query_token_from_db(auth_token, scope)
mock_db_session["mock_cache_set"].assert_called_once_with(auth_token, scope, None)
mock_db_session["mock_record_usage"].assert_not_called()
mock_cache_set.assert_called_once()
call_args = mock_cache_set.call_args[0]
assert call_args[2] is None # cached None
mock_record_usage.assert_not_called()
class TestRecordTokenUsage:
def test_should_write_active_key_with_iso_timestamp_and_ttl(self):
"""Test record_token_usage writes usage timestamp with one-hour TTL."""
# Arrange
auth_token = "token-123"
scope = "dataset"
fixed_time = datetime(2026, 2, 24, 12, 0, 0)
@@ -84,26 +67,18 @@ class TestRecordTokenUsage:
patch.object(api_token_service_module, "naive_utc_now", return_value=fixed_time),
patch.object(api_token_service_module, "redis_client") as mock_redis,
):
# Act
api_token_service_module.record_token_usage(auth_token, scope)
# Assert
mock_redis.set.assert_called_once_with(expected_key, fixed_time.isoformat(), ex=3600)
def test_should_not_raise_when_redis_write_fails(self):
"""Test record_token_usage swallows Redis errors."""
# Arrange
with patch.object(api_token_service_module, "redis_client") as mock_redis:
mock_redis.set.side_effect = Exception("redis unavailable")
# Act / Assert
api_token_service_module.record_token_usage("token-123", "app")
class TestFetchTokenWithSingleFlight:
def test_should_return_cached_token_when_lock_acquired_and_cache_filled(self):
"""Test single-flight returns cache when another request already populated it."""
# Arrange
auth_token = "token-123"
scope = "app"
cached_token = CachedApiToken(
@@ -115,39 +90,26 @@ class TestFetchTokenWithSingleFlight:
last_used_at=None,
created_at=None,
)
lock = MagicMock()
lock.acquire.return_value = True
with (
patch.object(api_token_service_module, "redis_client") as mock_redis,
patch.object(api_token_service_module.ApiTokenCache, "get", return_value=cached_token) as mock_cache_get,
patch.object(api_token_service_module.ApiTokenCache, "get", return_value=cached_token),
patch.object(api_token_service_module, "query_token_from_db") as mock_query_db,
):
mock_redis.lock.return_value = lock
# Act
result = api_token_service_module.fetch_token_with_single_flight(auth_token, scope)
# Assert
assert result == cached_token
mock_redis.lock.assert_called_once_with(
f"api_token_query_lock:{scope}:{auth_token}",
timeout=10,
blocking_timeout=5,
)
lock.acquire.assert_called_once_with(blocking=True)
lock.release.assert_called_once()
mock_cache_get.assert_called_once_with(auth_token, scope)
mock_query_db.assert_not_called()
def test_should_query_db_when_lock_acquired_and_cache_missed(self):
"""Test single-flight queries DB when cache remains empty after lock acquisition."""
# Arrange
auth_token = "token-123"
scope = "app"
db_token = MagicMock()
lock = MagicMock()
lock.acquire.return_value = True
@@ -157,22 +119,16 @@ class TestFetchTokenWithSingleFlight:
patch.object(api_token_service_module, "query_token_from_db", return_value=db_token) as mock_query_db,
):
mock_redis.lock.return_value = lock
# Act
result = api_token_service_module.fetch_token_with_single_flight(auth_token, scope)
# Assert
assert result == db_token
mock_query_db.assert_called_once_with(auth_token, scope)
lock.release.assert_called_once()
def test_should_query_db_directly_when_lock_not_acquired(self):
"""Test lock timeout branch falls back to direct DB query."""
# Arrange
auth_token = "token-123"
scope = "app"
db_token = MagicMock()
lock = MagicMock()
lock.acquire.return_value = False
@@ -182,19 +138,14 @@ class TestFetchTokenWithSingleFlight:
patch.object(api_token_service_module, "query_token_from_db", return_value=db_token) as mock_query_db,
):
mock_redis.lock.return_value = lock
# Act
result = api_token_service_module.fetch_token_with_single_flight(auth_token, scope)
# Assert
assert result == db_token
mock_cache_get.assert_not_called()
mock_query_db.assert_called_once_with(auth_token, scope)
lock.release.assert_not_called()
def test_should_reraise_unauthorized_from_db_query(self):
"""Test Unauthorized from DB query is propagated unchanged."""
# Arrange
auth_token = "token-123"
scope = "app"
lock = MagicMock()
@@ -210,20 +161,15 @@ class TestFetchTokenWithSingleFlight:
),
):
mock_redis.lock.return_value = lock
# Act / Assert
with pytest.raises(Unauthorized, match="Access token is invalid"):
api_token_service_module.fetch_token_with_single_flight(auth_token, scope)
lock.release.assert_called_once()
def test_should_fallback_to_db_query_when_lock_raises_exception(self):
"""Test Redis lock errors fall back to direct DB query."""
# Arrange
auth_token = "token-123"
scope = "app"
db_token = MagicMock()
lock = MagicMock()
lock.acquire.side_effect = RuntimeError("redis lock error")
@@ -232,11 +178,8 @@ class TestFetchTokenWithSingleFlight:
patch.object(api_token_service_module, "query_token_from_db", return_value=db_token) as mock_query_db,
):
mock_redis.lock.return_value = lock
# Act
result = api_token_service_module.fetch_token_with_single_flight(auth_token, scope)
# Assert
assert result == db_token
mock_query_db.assert_called_once_with(auth_token, scope)
@@ -244,8 +187,6 @@ class TestFetchTokenWithSingleFlight:
class TestApiTokenCacheTenantBranches:
@patch("services.api_token_service.redis_client")
def test_delete_with_scope_should_remove_from_tenant_index_when_tenant_found(self, mock_redis):
"""Test scoped delete removes cache key and tenant index membership."""
# Arrange
token = "token-123"
scope = "app"
cache_key = ApiTokenCache._make_cache_key(token, scope)
@@ -261,18 +202,14 @@ class TestApiTokenCacheTenantBranches:
mock_redis.get.return_value = cached_token.model_dump_json().encode("utf-8")
with patch.object(ApiTokenCache, "_remove_from_tenant_index") as mock_remove_index:
# Act
result = ApiTokenCache.delete(token, scope)
# Assert
assert result is True
mock_redis.delete.assert_called_once_with(cache_key)
mock_remove_index.assert_called_once_with("tenant-1", cache_key)
@patch("services.api_token_service.redis_client")
def test_invalidate_by_tenant_should_delete_all_indexed_cache_keys(self, mock_redis):
"""Test tenant invalidation deletes indexed cache entries and index key."""
# Arrange
tenant_id = "tenant-1"
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
mock_redis.smembers.return_value = {
@@ -280,10 +217,8 @@ class TestApiTokenCacheTenantBranches:
b"api_token:any:token-2",
}
# Act
result = ApiTokenCache.invalidate_by_tenant(tenant_id)
# Assert
assert result is True
mock_redis.smembers.assert_called_once_with(index_key)
mock_redis.delete.assert_any_call("api_token:app:token-1")
@@ -293,7 +228,6 @@ class TestApiTokenCacheTenantBranches:
class TestApiTokenCacheCoreBranches:
def test_cached_api_token_repr_should_include_id_and_type(self):
"""Test CachedApiToken __repr__ includes key identity fields."""
token = CachedApiToken(
id="id-123",
app_id="app-123",
@@ -303,11 +237,9 @@ class TestApiTokenCacheCoreBranches:
last_used_at=None,
created_at=None,
)
assert repr(token) == "<CachedApiToken id=id-123 type=app>"
def test_serialize_token_should_handle_cached_api_token_instances(self):
"""Test serialization path when input is already a CachedApiToken."""
token = CachedApiToken(
id="id-123",
app_id="app-123",
@@ -317,35 +249,25 @@ class TestApiTokenCacheCoreBranches:
last_used_at=None,
created_at=None,
)
serialized = ApiTokenCache._serialize_token(token)
assert isinstance(serialized, bytes)
assert b'"id":"id-123"' in serialized
assert b'"token":"token-123"' in serialized
def test_deserialize_token_should_return_none_for_null_markers(self):
"""Test null cache marker deserializes to None."""
assert ApiTokenCache._deserialize_token("null") is None
assert ApiTokenCache._deserialize_token(b"null") is None
def test_deserialize_token_should_return_none_for_invalid_payload(self):
"""Test invalid serialized payload returns None."""
assert ApiTokenCache._deserialize_token("not-json") is None
@patch("services.api_token_service.redis_client")
def test_get_should_return_none_on_cache_miss(self, mock_redis):
"""Test cache miss branch in ApiTokenCache.get."""
mock_redis.get.return_value = None
result = ApiTokenCache.get("token-123", "app")
assert result is None
mock_redis.get.assert_called_once_with("api_token:app:token-123")
@patch("services.api_token_service.redis_client")
def test_get_should_deserialize_cached_payload_on_cache_hit(self, mock_redis):
"""Test cache hit branch in ApiTokenCache.get."""
token = CachedApiToken(
id="id-123",
app_id="app-123",
@@ -356,48 +278,34 @@ class TestApiTokenCacheCoreBranches:
created_at=None,
)
mock_redis.get.return_value = token.model_dump_json().encode("utf-8")
result = ApiTokenCache.get("token-123", "app")
assert isinstance(result, CachedApiToken)
assert result.id == "id-123"
@patch("services.api_token_service.redis_client")
def test_add_to_tenant_index_should_skip_when_tenant_id_missing(self, mock_redis):
"""Test tenant index update exits early for missing tenant id."""
ApiTokenCache._add_to_tenant_index(None, "api_token:app:token-123")
mock_redis.sadd.assert_not_called()
mock_redis.expire.assert_not_called()
@patch("services.api_token_service.redis_client")
def test_add_to_tenant_index_should_swallow_index_update_errors(self, mock_redis):
"""Test tenant index update handles Redis write errors gracefully."""
mock_redis.sadd.side_effect = Exception("redis down")
ApiTokenCache._add_to_tenant_index("tenant-123", "api_token:app:token-123")
mock_redis.sadd.assert_called_once()
@patch("services.api_token_service.redis_client")
def test_remove_from_tenant_index_should_skip_when_tenant_id_missing(self, mock_redis):
"""Test tenant index removal exits early for missing tenant id."""
ApiTokenCache._remove_from_tenant_index(None, "api_token:app:token-123")
mock_redis.srem.assert_not_called()
@patch("services.api_token_service.redis_client")
def test_remove_from_tenant_index_should_swallow_redis_errors(self, mock_redis):
"""Test tenant index removal handles Redis errors gracefully."""
mock_redis.srem.side_effect = Exception("redis down")
ApiTokenCache._remove_from_tenant_index("tenant-123", "api_token:app:token-123")
mock_redis.srem.assert_called_once()
@patch("services.api_token_service.redis_client")
def test_set_should_return_false_when_cache_write_raises_exception(self, mock_redis):
"""Test set returns False when Redis setex fails."""
mock_redis.setex.side_effect = Exception("redis write failed")
api_token = MagicMock()
api_token.id = "id-123"
@@ -407,60 +315,41 @@ class TestApiTokenCacheCoreBranches:
api_token.token = "token-123"
api_token.last_used_at = None
api_token.created_at = None
result = ApiTokenCache.set("token-123", "app", api_token)
assert result is False
@patch("services.api_token_service.redis_client")
def test_delete_without_scope_should_return_false_when_scan_fails(self, mock_redis):
"""Test delete(scope=None) returns False when scan_iter raises."""
mock_redis.scan_iter.side_effect = Exception("scan failed")
result = ApiTokenCache.delete("token-123", None)
assert result is False
@patch("services.api_token_service.redis_client")
def test_delete_with_scope_should_continue_when_tenant_lookup_raises(self, mock_redis):
"""Test scoped delete still succeeds when tenant lookup from cache fails."""
token = "token-123"
scope = "app"
cache_key = ApiTokenCache._make_cache_key(token, scope)
mock_redis.get.side_effect = Exception("get failed")
result = ApiTokenCache.delete(token, scope)
assert result is True
mock_redis.delete.assert_called_once_with(cache_key)
@patch("services.api_token_service.redis_client")
def test_delete_with_scope_should_return_false_when_delete_raises(self, mock_redis):
"""Test scoped delete returns False when delete operation fails."""
token = "token-123"
scope = "app"
mock_redis.get.return_value = None
mock_redis.delete.side_effect = Exception("delete failed")
result = ApiTokenCache.delete(token, scope)
result = ApiTokenCache.delete("token-123", "app")
assert result is False
@patch("services.api_token_service.redis_client")
def test_invalidate_by_tenant_should_return_true_when_index_not_found(self, mock_redis):
"""Test tenant invalidation returns True when tenant index is empty."""
mock_redis.smembers.return_value = set()
result = ApiTokenCache.invalidate_by_tenant("tenant-123")
assert result is True
mock_redis.delete.assert_not_called()
@patch("services.api_token_service.redis_client")
def test_invalidate_by_tenant_should_return_false_when_redis_raises(self, mock_redis):
"""Test tenant invalidation returns False when Redis operation fails."""
mock_redis.smembers.side_effect = Exception("redis failed")
result = ApiTokenCache.invalidate_by_tenant("tenant-123")
assert result is False

View File

@@ -1,5 +1,8 @@
from __future__ import annotations
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from uuid import uuid4
import pytest
from sqlalchemy.engine import Engine
@@ -13,6 +16,7 @@ from core.workflow.human_input_compat import (
MemberRecipient,
)
from graphon.runtime import VariablePool
from models.account import Account, TenantAccountJoin
from services import human_input_delivery_test_service as service_module
from services.human_input_delivery_test_service import (
DeliveryTestContext,
@@ -28,13 +32,6 @@ from services.human_input_delivery_test_service import (
)
@pytest.fixture
def mock_db(monkeypatch):
mock_db = MagicMock()
monkeypatch.setattr(service_module, "db", mock_db)
return mock_db
def _make_valid_email_config():
return EmailDeliveryConfig(
recipients=EmailRecipients(include_bound_group=False, items=[]),
@@ -91,7 +88,7 @@ class TestDeliveryTestRegistry:
with pytest.raises(DeliveryTestUnsupportedError, match="Delivery method does not support test send."):
registry.dispatch(context=context, method=method)
def test_default(self, mock_db):
def test_default(self, flask_app_with_containers, db_session_with_containers):
registry = DeliveryTestRegistry.default()
assert len(registry._handlers) == 1
assert isinstance(registry._handlers[0], EmailDeliveryTestHandler)
@@ -250,10 +247,8 @@ class TestEmailDeliveryTestHandler:
_, kwargs = mock_mail_send.call_args
assert kwargs["subject"] == "Notice BCC:test@example.com"
def test_resolve_recipients(self):
def test_resolve_recipients_external(self):
handler = EmailDeliveryTestHandler(session_factory=MagicMock())
# Test Case 1: External Recipient
method = EmailDeliveryMethod(
config=EmailDeliveryConfig(
recipients=EmailRecipients(
@@ -265,18 +260,43 @@ class TestEmailDeliveryTestHandler:
)
assert handler._resolve_recipients(tenant_id="t1", method=method) == ["ext@example.com"]
# Test Case 2: Member Recipient
def test_resolve_recipients_member(self, flask_app_with_containers, db_session_with_containers):
tenant_id = str(uuid4())
account = Account(name="Test User", email="member@example.com")
db_session_with_containers.add(account)
db_session_with_containers.commit()
join = TenantAccountJoin(tenant_id=tenant_id, account_id=account.id)
db_session_with_containers.add(join)
db_session_with_containers.commit()
from extensions.ext_database import db
handler = EmailDeliveryTestHandler(session_factory=db.engine)
method = EmailDeliveryMethod(
config=EmailDeliveryConfig(
recipients=EmailRecipients(items=[MemberRecipient(reference_id="u1")], include_bound_group=False),
recipients=EmailRecipients(items=[MemberRecipient(reference_id=account.id)], include_bound_group=False),
subject="",
body="",
)
)
handler._query_workspace_member_emails = MagicMock(return_value={"u1": "u1@example.com"})
assert handler._resolve_recipients(tenant_id="t1", method=method) == ["u1@example.com"]
assert handler._resolve_recipients(tenant_id=tenant_id, method=method) == ["member@example.com"]
# Test Case 3: Whole Workspace
def test_resolve_recipients_whole_workspace(self, flask_app_with_containers, db_session_with_containers):
tenant_id = str(uuid4())
account1 = Account(name="User 1", email=f"u1-{uuid4()}@example.com")
account2 = Account(name="User 2", email=f"u2-{uuid4()}@example.com")
db_session_with_containers.add_all([account1, account2])
db_session_with_containers.commit()
for acc in [account1, account2]:
join = TenantAccountJoin(tenant_id=tenant_id, account_id=acc.id)
db_session_with_containers.add(join)
db_session_with_containers.commit()
from extensions.ext_database import db
handler = EmailDeliveryTestHandler(session_factory=db.engine)
method = EmailDeliveryMethod(
config=EmailDeliveryConfig(
recipients=EmailRecipients(items=[], include_bound_group=True),
@@ -284,36 +304,13 @@ class TestEmailDeliveryTestHandler:
body="",
)
)
handler._query_workspace_member_emails = MagicMock(
return_value={"u1": "u1@example.com", "u2": "u2@example.com"}
)
recipients = handler._resolve_recipients(tenant_id="t1", method=method)
assert set(recipients) == {"u1@example.com", "u2@example.com"}
recipients = handler._resolve_recipients(tenant_id=tenant_id, method=method)
assert set(recipients) == {account1.email, account2.email}
def test_query_workspace_member_emails(self):
mock_session = MagicMock()
mock_session_factory = MagicMock(return_value=mock_session)
mock_session.__enter__.return_value = mock_session
handler = EmailDeliveryTestHandler(session_factory=mock_session_factory)
# Empty user_ids
def test_query_workspace_member_emails_empty_ids(self):
handler = EmailDeliveryTestHandler(session_factory=MagicMock())
assert handler._query_workspace_member_emails(tenant_id="t1", user_ids=[]) == {}
# user_ids is None (all)
mock_execute = MagicMock()
mock_tuples = MagicMock()
mock_session.execute.return_value = mock_execute
mock_execute.tuples.return_value = mock_tuples
mock_tuples.all.return_value = [("u1", "u1@example.com")]
result = handler._query_workspace_member_emails(tenant_id="t1", user_ids=None)
assert result == {"u1": "u1@example.com"}
# user_ids with values
result = handler._query_workspace_member_emails(tenant_id="t1", user_ids=["u1"])
assert result == {"u1": "u1@example.com"}
def test_build_substitutions(self):
context = DeliveryTestContext(
tenant_id="t1",
@@ -335,7 +332,6 @@ class TestEmailDeliveryTestHandler:
assert subs["form_token"] == "token123"
assert "form/token123" in subs["form_link"]
# Without matching recipient
subs_no_match = EmailDeliveryTestHandler._build_substitutions(
context=context, recipient_email="other@example.com"
)

View File

@@ -1,387 +0,0 @@
import json
from unittest.mock import Mock, patch
import pytest
from models.source import DataSourceApiKeyAuthBinding
from services.auth.api_key_auth_service import ApiKeyAuthService
class TestApiKeyAuthService:
    """API key authentication service security tests.

    Unit tests for ApiKeyAuthService covering listing, creation (with
    credential validation and encryption), retrieval, deletion, argument
    validation, and error propagation. All database/HTTP/encryption
    collaborators are mocked; no real I/O is performed.
    """

    def setup_method(self):
        """Setup test fixtures"""
        self.tenant_id = "test_tenant_123"
        self.category = "search"
        self.provider = "google"
        self.binding_id = "binding_123"
        # Recreated per test, so tests that mutate nested dicts stay isolated.
        self.mock_credentials = {"auth_type": "api_key", "config": {"api_key": "test_secret_key_123"}}
        self.mock_args = {"category": self.category, "provider": self.provider, "credentials": self.mock_credentials}

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_provider_auth_list_success(self, mock_session):
        """Test get provider auth list - success scenario"""
        # Mock database query result
        mock_binding = Mock()
        mock_binding.tenant_id = self.tenant_id
        mock_binding.provider = self.provider
        mock_binding.disabled = False
        mock_session.scalars.return_value.all.return_value = [mock_binding]
        result = ApiKeyAuthService.get_provider_auth_list(self.tenant_id)
        assert len(result) == 1
        assert result[0].tenant_id == self.tenant_id
        assert mock_session.scalars.call_count == 1
        # The select statement should target the binding table.
        select_arg = mock_session.scalars.call_args[0][0]
        assert "data_source_api_key_auth_binding" in str(select_arg).lower()

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_provider_auth_list_empty(self, mock_session):
        """Test get provider auth list - empty result"""
        mock_session.scalars.return_value.all.return_value = []
        result = ApiKeyAuthService.get_provider_auth_list(self.tenant_id)
        assert result == []

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_provider_auth_list_filters_disabled(self, mock_session):
        """Test get provider auth list - filters disabled items"""
        mock_session.scalars.return_value.all.return_value = []
        ApiKeyAuthService.get_provider_auth_list(self.tenant_id)
        select_stmt = mock_session.scalars.call_args[0][0]
        where_clauses = list(getattr(select_stmt, "_where_criteria", []) or [])
        # Ensure both tenant filter and disabled filter exist
        where_strs = [str(c).lower() for c in where_clauses]
        assert any("tenant_id" in s for s in where_strs)
        assert any("disabled" in s for s in where_strs)

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    @patch("services.auth.api_key_auth_service.encrypter")
    def test_create_provider_auth_success(self, mock_encrypter, mock_factory, mock_session):
        """Test create provider auth - success scenario"""
        # Mock successful auth validation
        mock_auth_instance = Mock()
        mock_auth_instance.validate_credentials.return_value = True
        mock_factory.return_value = mock_auth_instance
        # Mock encryption
        encrypted_key = "encrypted_test_key_123"
        mock_encrypter.encrypt_token.return_value = encrypted_key
        # Mock database operations
        mock_session.add = Mock()
        mock_session.commit = Mock()
        ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)
        # Verify factory class calls
        mock_factory.assert_called_once_with(self.provider, self.mock_credentials)
        mock_auth_instance.validate_credentials.assert_called_once()
        # Verify encryption calls
        mock_encrypter.encrypt_token.assert_called_once_with(self.tenant_id, "test_secret_key_123")
        # Verify database operations
        mock_session.add.assert_called_once()
        mock_session.commit.assert_called_once()

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    def test_create_provider_auth_validation_failed(self, mock_factory, mock_session):
        """Test create provider auth - validation failed"""
        # Mock failed auth validation
        mock_auth_instance = Mock()
        mock_auth_instance.validate_credentials.return_value = False
        mock_factory.return_value = mock_auth_instance
        ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)
        # Verify no database operations when validation fails
        mock_session.add.assert_not_called()
        mock_session.commit.assert_not_called()

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    @patch("services.auth.api_key_auth_service.encrypter")
    def test_create_provider_auth_encrypts_api_key(self, mock_encrypter, mock_factory, mock_session):
        """Test create provider auth - ensures API key is encrypted"""
        # Mock successful auth validation
        mock_auth_instance = Mock()
        mock_auth_instance.validate_credentials.return_value = True
        mock_factory.return_value = mock_auth_instance
        # Mock encryption
        encrypted_key = "encrypted_test_key_123"
        mock_encrypter.encrypt_token.return_value = encrypted_key
        # Mock database operations
        mock_session.add = Mock()
        mock_session.commit = Mock()
        args_copy = self.mock_args.copy()
        original_key = args_copy["credentials"]["config"]["api_key"]
        ApiKeyAuthService.create_provider_auth(self.tenant_id, args_copy)
        # Verify original key is replaced with encrypted key
        assert args_copy["credentials"]["config"]["api_key"] == encrypted_key
        assert args_copy["credentials"]["config"]["api_key"] != original_key
        # Verify encryption function is called correctly
        mock_encrypter.encrypt_token.assert_called_once_with(self.tenant_id, original_key)

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_auth_credentials_success(self, mock_session):
        """Test get auth credentials - success scenario"""
        # Mock database query result
        mock_binding = Mock()
        mock_binding.credentials = json.dumps(self.mock_credentials)
        mock_session.query.return_value.where.return_value.first.return_value = mock_binding
        result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)
        assert result == self.mock_credentials
        mock_session.query.assert_called_once_with(DataSourceApiKeyAuthBinding)

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_auth_credentials_not_found(self, mock_session):
        """Test get auth credentials - not found"""
        mock_session.query.return_value.where.return_value.first.return_value = None
        result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)
        assert result is None

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_auth_credentials_filters_correctly(self, mock_session):
        """Test get auth credentials - applies correct filters"""
        mock_session.query.return_value.where.return_value.first.return_value = None
        ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)
        # Verify where conditions are correct
        where_call = mock_session.query.return_value.where.call_args[0]
        assert len(where_call) == 4  # tenant_id, category, provider, disabled

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_auth_credentials_json_parsing(self, mock_session):
        """Test get auth credentials - JSON parsing"""
        # Mock credentials with special characters
        special_credentials = {"auth_type": "api_key", "config": {"api_key": "key_with_中文_and_special_chars_!@#$%"}}
        mock_binding = Mock()
        mock_binding.credentials = json.dumps(special_credentials, ensure_ascii=False)
        mock_session.query.return_value.where.return_value.first.return_value = mock_binding
        result = ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)
        assert result == special_credentials
        assert result["config"]["api_key"] == "key_with_中文_and_special_chars_!@#$%"

    @patch("services.auth.api_key_auth_service.db.session")
    def test_delete_provider_auth_success(self, mock_session):
        """Test delete provider auth - success scenario"""
        # Mock database query result
        mock_binding = Mock()
        mock_session.query.return_value.where.return_value.first.return_value = mock_binding
        ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id)
        # Verify delete operations
        mock_session.delete.assert_called_once_with(mock_binding)
        mock_session.commit.assert_called_once()

    @patch("services.auth.api_key_auth_service.db.session")
    def test_delete_provider_auth_not_found(self, mock_session):
        """Test delete provider auth - not found"""
        mock_session.query.return_value.where.return_value.first.return_value = None
        ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id)
        # Verify no delete operations when not found
        mock_session.delete.assert_not_called()
        mock_session.commit.assert_not_called()

    @patch("services.auth.api_key_auth_service.db.session")
    def test_delete_provider_auth_filters_by_tenant(self, mock_session):
        """Test delete provider auth - filters by tenant"""
        mock_session.query.return_value.where.return_value.first.return_value = None
        ApiKeyAuthService.delete_provider_auth(self.tenant_id, self.binding_id)
        # Verify where conditions include tenant_id and binding_id
        where_call = mock_session.query.return_value.where.call_args[0]
        assert len(where_call) == 2

    def test_validate_api_key_auth_args_success(self):
        """Test API key auth args validation - success scenario"""
        # Should not raise any exception
        ApiKeyAuthService.validate_api_key_auth_args(self.mock_args)

    def test_validate_api_key_auth_args_missing_category(self):
        """Test API key auth args validation - missing category"""
        args = self.mock_args.copy()
        del args["category"]
        with pytest.raises(ValueError, match="category is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_empty_category(self):
        """Test API key auth args validation - empty category"""
        args = self.mock_args.copy()
        args["category"] = ""
        with pytest.raises(ValueError, match="category is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_missing_provider(self):
        """Test API key auth args validation - missing provider"""
        args = self.mock_args.copy()
        del args["provider"]
        with pytest.raises(ValueError, match="provider is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_empty_provider(self):
        """Test API key auth args validation - empty provider"""
        args = self.mock_args.copy()
        args["provider"] = ""
        with pytest.raises(ValueError, match="provider is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_missing_credentials(self):
        """Test API key auth args validation - missing credentials"""
        args = self.mock_args.copy()
        del args["credentials"]
        with pytest.raises(ValueError, match="credentials is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_empty_credentials(self):
        """Test API key auth args validation - empty credentials"""
        args = self.mock_args.copy()
        args["credentials"] = None
        with pytest.raises(ValueError, match="credentials is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_invalid_credentials_type(self):
        """Test API key auth args validation - invalid credentials type"""
        args = self.mock_args.copy()
        args["credentials"] = "not_a_dict"
        with pytest.raises(ValueError, match="credentials must be a dictionary"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_missing_auth_type(self):
        """Test API key auth args validation - missing auth_type"""
        args = self.mock_args.copy()
        # Shallow copy: this mutates the nested dict, which is safe only
        # because setup_method rebuilds the fixtures for every test.
        del args["credentials"]["auth_type"]
        with pytest.raises(ValueError, match="auth_type is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    def test_validate_api_key_auth_args_empty_auth_type(self):
        """Test API key auth args validation - empty auth_type"""
        args = self.mock_args.copy()
        args["credentials"]["auth_type"] = ""
        with pytest.raises(ValueError, match="auth_type is required"):
            ApiKeyAuthService.validate_api_key_auth_args(args)

    @pytest.mark.parametrize(
        "malicious_input",
        [
            "<script>alert('xss')</script>",
            "'; DROP TABLE users; --",
            "../../../etc/passwd",
            "\\x00\\x00",  # null bytes
            "A" * 10000,  # very long input
        ],
    )
    def test_validate_api_key_auth_args_malicious_input(self, malicious_input):
        """Test API key auth args validation - malicious input"""
        args = self.mock_args.copy()
        args["category"] = malicious_input
        # Verify parameter validator doesn't crash on malicious input
        # Should validate normally rather than raising security-related exceptions
        ApiKeyAuthService.validate_api_key_auth_args(args)

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    @patch("services.auth.api_key_auth_service.encrypter")
    def test_create_provider_auth_database_error_handling(self, mock_encrypter, mock_factory, mock_session):
        """Test create provider auth - database error handling"""
        # Mock successful auth validation
        mock_auth_instance = Mock()
        mock_auth_instance.validate_credentials.return_value = True
        mock_factory.return_value = mock_auth_instance
        # Mock encryption
        mock_encrypter.encrypt_token.return_value = "encrypted_key"
        # Mock database error
        mock_session.commit.side_effect = Exception("Database error")
        with pytest.raises(Exception, match="Database error"):
            ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)

    @patch("services.auth.api_key_auth_service.db.session")
    def test_get_auth_credentials_invalid_json(self, mock_session):
        """Test get auth credentials - invalid JSON"""
        # Mock database returning invalid JSON
        mock_binding = Mock()
        mock_binding.credentials = "invalid json content"
        mock_session.query.return_value.where.return_value.first.return_value = mock_binding
        with pytest.raises(json.JSONDecodeError):
            ApiKeyAuthService.get_auth_credentials(self.tenant_id, self.category, self.provider)

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    def test_create_provider_auth_factory_exception(self, mock_factory, mock_session):
        """Test create provider auth - factory exception"""
        # Mock factory raising exception
        mock_factory.side_effect = Exception("Factory error")
        with pytest.raises(Exception, match="Factory error"):
            ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.api_key_auth_service.ApiKeyAuthFactory")
    @patch("services.auth.api_key_auth_service.encrypter")
    def test_create_provider_auth_encryption_exception(self, mock_encrypter, mock_factory, mock_session):
        """Test create provider auth - encryption exception"""
        # Mock successful auth validation
        mock_auth_instance = Mock()
        mock_auth_instance.validate_credentials.return_value = True
        mock_factory.return_value = mock_auth_instance
        # Mock encryption exception
        mock_encrypter.encrypt_token.side_effect = Exception("Encryption error")
        with pytest.raises(Exception, match="Encryption error"):
            ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)

    def test_validate_api_key_auth_args_none_input(self):
        """Test API key auth args validation - None input"""
        with pytest.raises(TypeError):
            ApiKeyAuthService.validate_api_key_auth_args(None)

    def test_validate_api_key_auth_args_dict_credentials_with_list_auth_type(self):
        """Test API key auth args validation - dict credentials with list auth_type"""
        args = self.mock_args.copy()
        args["credentials"]["auth_type"] = ["api_key"]
        # Current implementation checks if auth_type exists and is truthy, list ["api_key"] is truthy
        # So this should not raise exception, this test should pass
        ApiKeyAuthService.validate_api_key_auth_args(args)

View File

@@ -1,231 +0,0 @@
"""
API Key Authentication System Integration Tests
"""
import json
from concurrent.futures import ThreadPoolExecutor
from unittest.mock import Mock, patch
import httpx
import pytest
from services.auth.api_key_auth_factory import ApiKeyAuthFactory
from services.auth.api_key_auth_service import ApiKeyAuthService
from services.auth.auth_type import AuthType
class TestAuthIntegration:
    """Integration-style tests for the API key auth stack.

    Exercises ApiKeyAuthService together with ApiKeyAuthFactory and the
    provider HTTP layer, with the database session, HTTP client, and
    encrypter mocked. Covers the end-to-end create flow, tenant isolation,
    secret-leak protection, concurrency, and error/boundary handling.
    """

    def setup_method(self):
        # Fresh per-test fixtures; two tenants enable isolation checks.
        self.tenant_id_1 = "tenant_123"
        self.tenant_id_2 = "tenant_456"  # For multi-tenant isolation testing
        self.category = "search"
        # Realistic authentication configurations
        self.firecrawl_credentials = {"auth_type": "bearer", "config": {"api_key": "fc_test_key_123"}}
        self.jina_credentials = {"auth_type": "bearer", "config": {"api_key": "jina_test_key_456"}}
        self.watercrawl_credentials = {"auth_type": "x-api-key", "config": {"api_key": "wc_test_key_789"}}

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.firecrawl.firecrawl.httpx.post")
    @patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
    def test_end_to_end_auth_flow(self, mock_encrypt, mock_http, mock_session):
        """Test complete authentication flow: request → validation → encryption → storage"""
        mock_http.return_value = self._create_success_response()
        mock_encrypt.return_value = "encrypted_fc_test_key_123"
        mock_session.add = Mock()
        mock_session.commit = Mock()
        args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials}
        ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args)
        # Validation must hit the provider endpoint with a Bearer header
        # built from the plaintext key (call_args[0][0] is the URL).
        mock_http.assert_called_once()
        call_args = mock_http.call_args
        assert "https://api.firecrawl.dev/v1/crawl" in call_args[0][0]
        assert call_args[1]["headers"]["Authorization"] == "Bearer fc_test_key_123"
        # The plaintext key is encrypted before persistence.
        mock_encrypt.assert_called_once_with(self.tenant_id_1, "fc_test_key_123")
        mock_session.add.assert_called_once()
        mock_session.commit.assert_called_once()

    @patch("services.auth.firecrawl.firecrawl.httpx.post")
    def test_cross_component_integration(self, mock_http):
        """Test factory → provider → HTTP call integration"""
        mock_http.return_value = self._create_success_response()
        factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, self.firecrawl_credentials)
        result = factory.validate_credentials()
        assert result is True
        mock_http.assert_called_once()

    @patch("services.auth.api_key_auth_service.db.session")
    def test_multi_tenant_isolation(self, mock_session):
        """Ensure complete tenant data isolation"""
        tenant1_binding = self._create_mock_binding(self.tenant_id_1, AuthType.FIRECRAWL, self.firecrawl_credentials)
        tenant2_binding = self._create_mock_binding(self.tenant_id_2, AuthType.JINA, self.jina_credentials)
        # Each query returns only its own tenant's bindings.
        mock_session.scalars.return_value.all.return_value = [tenant1_binding]
        result1 = ApiKeyAuthService.get_provider_auth_list(self.tenant_id_1)
        mock_session.scalars.return_value.all.return_value = [tenant2_binding]
        result2 = ApiKeyAuthService.get_provider_auth_list(self.tenant_id_2)
        assert len(result1) == 1
        assert result1[0].tenant_id == self.tenant_id_1
        assert len(result2) == 1
        assert result2[0].tenant_id == self.tenant_id_2

    @patch("services.auth.api_key_auth_service.db.session")
    def test_cross_tenant_access_prevention(self, mock_session):
        """Test prevention of cross-tenant credential access"""
        mock_session.query.return_value.where.return_value.first.return_value = None
        result = ApiKeyAuthService.get_auth_credentials(self.tenant_id_2, self.category, AuthType.FIRECRAWL)
        assert result is None

    def test_sensitive_data_protection(self):
        """Ensure API keys don't leak to logs"""
        credentials_with_secrets = {
            "auth_type": "bearer",
            "config": {"api_key": "super_secret_key_do_not_log", "secret": "another_secret"},
        }
        factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, credentials_with_secrets)
        # str(factory) stands in for what a log formatter would emit.
        factory_str = str(factory)
        assert "super_secret_key_do_not_log" not in factory_str
        assert "another_secret" not in factory_str

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.firecrawl.firecrawl.httpx.post")
    @patch("services.auth.api_key_auth_service.encrypter.encrypt_token")
    def test_concurrent_creation_safety(self, mock_encrypt, mock_http, mock_session):
        """Test concurrent authentication creation safety"""
        mock_http.return_value = self._create_success_response()
        mock_encrypt.return_value = "encrypted_key"
        mock_session.add = Mock()
        mock_session.commit = Mock()
        args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials}
        results = []
        exceptions = []

        def create_auth():
            try:
                ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args)
                results.append("success")
            except Exception as e:
                exceptions.append(e)

        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(create_auth) for _ in range(5)]
            for future in futures:
                future.result()
        # All 5 threads must succeed and each must add+commit exactly once.
        assert len(results) == 5
        assert len(exceptions) == 0
        assert mock_session.add.call_count == 5
        assert mock_session.commit.call_count == 5

    @pytest.mark.parametrize(
        "invalid_input",
        [
            None,  # Null input
            {},  # Empty dictionary - missing required fields
            {"auth_type": "bearer"},  # Missing config section
            {"auth_type": "bearer", "config": {}},  # Missing api_key
        ],
    )
    def test_invalid_input_boundary(self, invalid_input):
        """Test boundary handling for invalid inputs"""
        with pytest.raises((ValueError, KeyError, TypeError, AttributeError)):
            ApiKeyAuthFactory(AuthType.FIRECRAWL, invalid_input)

    @patch("services.auth.firecrawl.firecrawl.httpx.post")
    def test_http_error_handling(self, mock_http):
        """Test proper HTTP error handling"""
        mock_response = Mock()
        mock_response.status_code = 401
        mock_response.text = '{"error": "Unauthorized"}'
        mock_response.raise_for_status.side_effect = httpx.HTTPError("Unauthorized")
        mock_http.return_value = mock_response
        # PT012: Split into single statement for pytest.raises
        factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, self.firecrawl_credentials)
        # NOTE(review): (httpx.HTTPError, Exception) is redundant —
        # Exception already covers HTTPError.
        with pytest.raises((httpx.HTTPError, Exception)):
            factory.validate_credentials()

    @patch("services.auth.api_key_auth_service.db.session")
    @patch("services.auth.firecrawl.firecrawl.httpx.post")
    def test_network_failure_recovery(self, mock_http, mock_session):
        """Test system recovery from network failures"""
        mock_http.side_effect = httpx.RequestError("Network timeout")
        mock_session.add = Mock()
        mock_session.commit = Mock()
        args = {"category": self.category, "provider": AuthType.FIRECRAWL, "credentials": self.firecrawl_credentials}
        with pytest.raises(httpx.RequestError):
            ApiKeyAuthService.create_provider_auth(self.tenant_id_1, args)
        # Nothing may be committed when validation never succeeded.
        mock_session.commit.assert_not_called()

    @pytest.mark.parametrize(
        ("provider", "credentials"),
        [
            (AuthType.FIRECRAWL, {"auth_type": "bearer", "config": {"api_key": "fc_key"}}),
            (AuthType.JINA, {"auth_type": "bearer", "config": {"api_key": "jina_key"}}),
            (AuthType.WATERCRAWL, {"auth_type": "x-api-key", "config": {"api_key": "wc_key"}}),
        ],
    )
    def test_all_providers_factory_creation(self, provider, credentials):
        """Test factory creation for all supported providers"""
        auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider)
        assert auth_class is not None
        factory = ApiKeyAuthFactory(provider, credentials)
        assert factory.auth is not None

    def _create_success_response(self, status_code=200):
        """Create successful HTTP response mock"""
        mock_response = Mock()
        mock_response.status_code = status_code
        mock_response.json.return_value = {"status": "success"}
        mock_response.raise_for_status.return_value = None
        return mock_response

    def _create_mock_binding(self, tenant_id: str, provider: str, credentials: dict) -> Mock:
        """Create realistic database binding mock"""
        mock_binding = Mock()
        mock_binding.id = f"binding_{provider}_{tenant_id}"
        mock_binding.tenant_id = tenant_id
        mock_binding.category = self.category
        mock_binding.provider = provider
        mock_binding.credentials = json.dumps(credentials, ensure_ascii=False)
        mock_binding.disabled = False
        mock_binding.created_at = Mock()
        mock_binding.created_at.timestamp.return_value = 1640995200
        mock_binding.updated_at = Mock()
        mock_binding.updated_at.timestamp.return_value = 1640995200
        return mock_binding

    def test_integration_coverage_validation(self):
        """Validate integration test coverage meets quality standards"""
        # NOTE(review): this only checks a hard-coded catalogue of scenario
        # names, not the actual test methods — it will not fail if a test
        # above is removed. Treat as documentation of intended coverage.
        core_scenarios = {
            "business_logic": ["end_to_end_auth_flow", "cross_component_integration"],
            "security": ["multi_tenant_isolation", "cross_tenant_access_prevention", "sensitive_data_protection"],
            "reliability": ["concurrent_creation_safety", "network_failure_recovery"],
            "compatibility": ["all_providers_factory_creation"],
            "boundaries": ["invalid_input_boundary", "http_error_handling"],
        }
        total_scenarios = sum(len(scenarios) for scenarios in core_scenarios.values())
        assert total_scenarios >= 10
        security_tests = core_scenarios["security"]
        assert "multi_tenant_isolation" in security_tests
        assert "sensitive_data_protection" in security_tests
        # NOTE(review): trailing `assert True` is a no-op.
        assert True

12
api/uv.lock generated
View File

@@ -3905,7 +3905,7 @@ wheels = [
[[package]]
name = "nltk"
version = "3.9.3"
version = "3.9.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -3913,9 +3913,9 @@ dependencies = [
{ name = "regex" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e1/8f/915e1c12df07c70ed779d18ab83d065718a926e70d3ea33eb0cd66ffb7c0/nltk-3.9.3.tar.gz", hash = "sha256:cb5945d6424a98d694c2b9a0264519fab4363711065a46aa0ae7a2195b92e71f", size = 2923673, upload-time = "2026-02-24T12:05:53.833Z" }
sdist = { url = "https://files.pythonhosted.org/packages/74/a1/b3b4adf15585a5bc4c357adde150c01ebeeb642173ded4d871e89468767c/nltk-3.9.4.tar.gz", hash = "sha256:ed03bc098a40481310320808b2db712d95d13ca65b27372f8a403949c8b523d0", size = 2946864, upload-time = "2026-03-24T06:13:40.641Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/7e/9af5a710a1236e4772de8dfcc6af942a561327bb9f42b5b4a24d0cf100fd/nltk-3.9.3-py3-none-any.whl", hash = "sha256:60b3db6e9995b3dd976b1f0fa7dec22069b2677e759c28eb69b62ddd44870522", size = 1525385, upload-time = "2026-02-24T12:05:46.54Z" },
{ url = "https://files.pythonhosted.org/packages/9d/91/04e965f8e717ba0ab4bdca5c112deeab11c9e750d94c4d4602f050295d39/nltk-3.9.4-py3-none-any.whl", hash = "sha256:f2fa301c3a12718ce4a0e9305c5675299da5ad9e26068218b69d692fda84828f", size = 1552087, upload-time = "2026-03-24T06:13:38.47Z" },
]
[[package]]
@@ -5852,7 +5852,7 @@ wheels = [
[[package]]
name = "requests"
version = "2.32.5"
version = "2.33.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -5860,9 +5860,9 @@ dependencies = [
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
{ url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" },
]
[[package]]

View File

@@ -326,79 +326,66 @@ packages:
resolution: {integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==}
cpu: [arm]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-arm-musleabihf@4.59.0':
resolution: {integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==}
cpu: [arm]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-arm64-gnu@4.59.0':
resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-arm64-musl@4.59.0':
resolution: {integrity: sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==}
cpu: [arm64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-loong64-gnu@4.59.0':
resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==}
cpu: [loong64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-loong64-musl@4.59.0':
resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==}
cpu: [loong64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-ppc64-gnu@4.59.0':
resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-ppc64-musl@4.59.0':
resolution: {integrity: sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==}
cpu: [ppc64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-riscv64-gnu@4.59.0':
resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==}
cpu: [riscv64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-riscv64-musl@4.59.0':
resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==}
cpu: [riscv64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-s390x-gnu@4.59.0':
resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-x64-gnu@4.59.0':
resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==}
cpu: [x64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-x64-musl@4.59.0':
resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==}
cpu: [x64]
os: [linux]
libc: [musl]
'@rollup/rollup-openbsd-x64@4.59.0':
resolution: {integrity: sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==}

View File

@@ -109,7 +109,7 @@ beforeAll(() => {
disconnect = vi.fn(() => undefined)
unobserve = vi.fn(() => undefined)
}
// @ts-expect-error jsdom does not implement IntersectionObserver
// @ts-expect-error test DOM typings do not guarantee IntersectionObserver here
globalThis.IntersectionObserver = MockIntersectionObserver
})

View File

@@ -556,8 +556,8 @@ describe('DebugWithMultipleModel', () => {
)
const twoItems = screen.getAllByTestId('debug-item')
expect(twoItems[0].style.width).toBe('calc(50% - 28px)')
expect(twoItems[1].style.width).toBe('calc(50% - 28px)')
expect(twoItems[0].style.width).toBe('calc(50% - 4px - 24px)')
expect(twoItems[1].style.width).toBe('calc(50% - 4px - 24px)')
})
})
@@ -596,13 +596,13 @@ describe('DebugWithMultipleModel', () => {
// Assert
expect(items).toHaveLength(2)
expectItemLayout(items[0], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: '100%',
transform: 'translateX(0) translateY(0)',
classes: ['mr-2'],
})
expectItemLayout(items[1], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: '100%',
transform: 'translateX(calc(100% + 8px)) translateY(0)',
classes: [],
@@ -620,19 +620,19 @@ describe('DebugWithMultipleModel', () => {
// Assert
expect(items).toHaveLength(3)
expectItemLayout(items[0], {
width: 'calc(33.3% - 21.33px)',
width: 'calc(33.3% - 5.33px - 16px)',
height: '100%',
transform: 'translateX(0) translateY(0)',
classes: ['mr-2'],
})
expectItemLayout(items[1], {
width: 'calc(33.3% - 21.33px)',
width: 'calc(33.3% - 5.33px - 16px)',
height: '100%',
transform: 'translateX(calc(100% + 8px)) translateY(0)',
classes: ['mr-2'],
})
expectItemLayout(items[2], {
width: 'calc(33.3% - 21.33px)',
width: 'calc(33.3% - 5.33px - 16px)',
height: '100%',
transform: 'translateX(calc(200% + 16px)) translateY(0)',
classes: [],
@@ -655,25 +655,25 @@ describe('DebugWithMultipleModel', () => {
// Assert
expect(items).toHaveLength(4)
expectItemLayout(items[0], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: 'calc(50% - 4px)',
transform: 'translateX(0) translateY(0)',
classes: ['mr-2', 'mb-2'],
})
expectItemLayout(items[1], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: 'calc(50% - 4px)',
transform: 'translateX(calc(100% + 8px)) translateY(0)',
classes: ['mb-2'],
})
expectItemLayout(items[2], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: 'calc(50% - 4px)',
transform: 'translateX(0) translateY(calc(100% + 8px))',
classes: ['mr-2'],
})
expectItemLayout(items[3], {
width: 'calc(50% - 28px)',
width: 'calc(50% - 4px - 24px)',
height: 'calc(50% - 4px)',
transform: 'translateX(calc(100% + 8px)) translateY(calc(100% + 8px))',
classes: [],

View File

@@ -1,6 +1,3 @@
/**
* @vitest-environment jsdom
*/
import type { ReactNode } from 'react'
import type { ModalContextState } from '@/context/modal-context'
import type { ProviderContextState } from '@/context/provider-context'

View File

@@ -62,8 +62,8 @@ describe('ActionButton', () => {
)
const button = screen.getByRole('button', { name: 'Custom Style' })
expect(button).toHaveStyle({
color: 'rgb(255, 0, 0)',
backgroundColor: 'rgb(0, 0, 255)',
color: 'red',
backgroundColor: 'blue',
})
})

View File

@@ -8,10 +8,10 @@ import Chat from '../index'
// ─── Why each mock exists ─────────────────────────────────────────────────────
//
// Answer transitively pulls Markdown (rehype/remark/katex), AgentContent,
// WorkflowProcessItem and Operation; none can resolve in jsdom.
// WorkflowProcessItem and Operation; none can resolve in the test DOM runtime.
// Question pulls Markdown, copy-to-clipboard, react-textarea-autosize.
// ChatInputArea pulls js-audio-recorder (requires Web Audio API unavailable in
// jsdom) and VoiceInput / FileContextProvider chains.
// the test DOM runtime) and VoiceInput / FileContextProvider chains.
// PromptLogModal pulls CopyFeedbackNew and deep modal dep chain.
// AgentLogModal pulls @remixicon/react (causes lint push error), useClickAway
// from ahooks, and AgentLogDetail (workflow graph renderer).

View File

@@ -3,7 +3,7 @@ import { fireEvent, render, screen } from '@testing-library/react'
import dayjs from '../../utils/dayjs'
import Calendar from '../index'
// Mock scrollIntoView since jsdom doesn't implement it
// Mock scrollIntoView since the test DOM runtime doesn't implement it
beforeAll(() => {
Element.prototype.scrollIntoView = vi.fn()
})

View File

@@ -3,7 +3,7 @@ import { fireEvent, render, screen, within } from '@testing-library/react'
import dayjs, { isDayjsObject } from '../../utils/dayjs'
import TimePicker from '../index'
// Mock scrollIntoView since jsdom doesn't implement it
// Mock scrollIntoView since the test DOM runtime doesn't implement it
beforeAll(() => {
Element.prototype.scrollIntoView = vi.fn()
})

View File

@@ -37,11 +37,11 @@ const FileFromLinkOrLocal = ({
const { handleLoadFileFromLink } = useFile(fileConfig)
const disabled = !!fileConfig.number_limits && files.length >= fileConfig.number_limits
const fileLinkPlaceholder = t('fileUploader.pasteFileLinkInputPlaceholder', { ns: 'common' })
/* v8 ignore next -- fallback for missing i18n key is not reliably testable under current global translation mocks in jsdom @preserve */
/* v8 ignore next -- fallback for a missing i18n key is not reliably testable under the current global translation mocks in the test DOM runtime. @preserve */
const fileLinkPlaceholderText = fileLinkPlaceholder || ''
const handleSaveUrl = () => {
/* v8 ignore next -- guarded by UI-level disabled state (`disabled={!url || disabled}`), not reachable in jsdom click flow @preserve */
/* v8 ignore next -- guarded by UI-level disabled state (`disabled={!url || disabled}`), not reachable in the current test click flow. @preserve */
if (!url)
return

View File

@@ -62,7 +62,7 @@ describe('generate icon base utils', () => {
const { container } = render(generate(node, 'key'))
// to svg element
expect(container.firstChild).toHaveClass('container')
expect(container.querySelector('span')).toHaveStyle({ color: 'rgb(0, 0, 255)' })
expect(container.querySelector('span')).toHaveStyle({ color: 'blue' })
})
// add not has children

View File

@@ -99,7 +99,7 @@ describe('Input component', () => {
render(<Input className={customClass} styleCss={customStyle} />)
const input = screen.getByPlaceholderText(/input/i)
expect(input).toHaveClass(customClass)
expect(input).toHaveStyle({ color: 'rgb(255, 0, 0)' })
expect(input).toHaveStyle({ color: 'red' })
})
it('applies large size variant correctly', () => {

View File

@@ -1,6 +1,6 @@
import { createRequire } from 'node:module'
import { act, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as echarts from 'echarts'
import { Theme } from '@/types/app'
import CodeBlock from '../code-block'
@@ -10,12 +10,21 @@ type UseThemeReturn = {
}
const mockUseTheme = vi.fn<() => UseThemeReturn>(() => ({ theme: Theme.light }))
const require = createRequire(import.meta.url)
const echartsCjs = require('echarts') as {
getInstanceByDom: (dom: HTMLDivElement | null) => {
resize: (opts?: { width?: string, height?: string }) => void
} | null
}
const mockEcharts = vi.hoisted(() => {
const state = {
finishedHandler: undefined as undefined | ((event?: unknown) => void),
echartsInstance: {
resize: vi.fn<(opts?: { width?: string, height?: string }) => void>(),
trigger: vi.fn((eventName: string, event?: unknown) => {
if (eventName === 'finished')
state.finishedHandler?.(event)
}),
},
getInstanceByDom: vi.fn(() => state.echartsInstance),
}
return state
})
let clientWidthSpy: { mockRestore: () => void } | null = null
let clientHeightSpy: { mockRestore: () => void } | null = null
@@ -61,6 +70,42 @@ vi.mock('@/hooks/use-theme', () => ({
default: () => mockUseTheme(),
}))
vi.mock('echarts', () => ({
getInstanceByDom: mockEcharts.getInstanceByDom,
}))
vi.mock('echarts-for-react', async () => {
const React = await vi.importActual<typeof import('react')>('react')
const MockReactEcharts = React.forwardRef(({
onChartReady,
onEvents,
}: {
onChartReady?: (instance: typeof mockEcharts.echartsInstance) => void
onEvents?: { finished?: (event?: unknown) => void }
}, ref: React.ForwardedRef<{ getEchartsInstance: () => typeof mockEcharts.echartsInstance }>) => {
React.useImperativeHandle(ref, () => ({
getEchartsInstance: () => mockEcharts.echartsInstance,
}))
React.useEffect(() => {
mockEcharts.finishedHandler = onEvents?.finished
onChartReady?.(mockEcharts.echartsInstance)
onEvents?.finished?.({})
return () => {
mockEcharts.finishedHandler = undefined
}
}, [onChartReady, onEvents])
return <div className="echarts-for-react" />
})
return {
__esModule: true,
default: MockReactEcharts,
}
})
vi.mock('@/app/components/base/mermaid', () => ({
__esModule: true,
default: ({ PrimitiveCode }: { PrimitiveCode: string }) => <div data-testid="mock-mermaid">{PrimitiveCode}</div>,
@@ -76,9 +121,9 @@ const findEchartsHost = async () => {
const findEchartsInstance = async () => {
const host = await findEchartsHost()
await waitFor(() => {
expect(echartsCjs.getInstanceByDom(host)).toBeTruthy()
expect(echarts.getInstanceByDom(host)).toBeTruthy()
})
return echartsCjs.getInstanceByDom(host)!
return echarts.getInstanceByDom(host)!
}
describe('CodeBlock', () => {

View File

@@ -41,7 +41,7 @@ describe('NodeStatus', () => {
it('applies styleCss correctly', () => {
const { container } = render(<NodeStatus styleCss={{ color: 'red' }} />)
expect(container.firstChild).toHaveStyle({ color: 'rgb(255, 0, 0)' })
expect(container.firstChild).toHaveStyle({ color: 'red' })
})
it('applies iconClassName to the icon', () => {

View File

@@ -131,7 +131,7 @@ describe('Pagination', () => {
setCurrentPage,
children: <Pagination.PrevButton>Prev</Pagination.PrevButton>,
})
fireEvent.keyPress(screen.getByText(/prev/i), { key: 'Enter', charCode: 13 })
fireEvent.keyDown(screen.getByText(/prev/i).closest('button')!, { key: 'Enter', code: 'Enter', keyCode: 13, which: 13 })
expect(setCurrentPage).toHaveBeenCalledWith(2)
})
@@ -142,7 +142,7 @@ describe('Pagination', () => {
setCurrentPage,
children: <Pagination.PrevButton>Prev</Pagination.PrevButton>,
})
fireEvent.keyPress(screen.getByText(/prev/i), { key: 'Enter', charCode: 13 })
fireEvent.keyDown(screen.getByText(/prev/i).closest('button')!, { key: 'Enter', code: 'Enter', keyCode: 13, which: 13 })
expect(setCurrentPage).not.toHaveBeenCalled()
})
@@ -213,7 +213,7 @@ describe('Pagination', () => {
setCurrentPage,
children: <Pagination.NextButton>Next</Pagination.NextButton>,
})
fireEvent.keyPress(screen.getByText(/next/i), { key: 'Enter', charCode: 13 })
fireEvent.keyDown(screen.getByText(/next/i).closest('button')!, { key: 'Enter', code: 'Enter', keyCode: 13, which: 13 })
expect(setCurrentPage).toHaveBeenCalledWith(1)
})
@@ -225,7 +225,7 @@ describe('Pagination', () => {
setCurrentPage,
children: <Pagination.NextButton>Next</Pagination.NextButton>,
})
fireEvent.keyPress(screen.getByText(/next/i), { key: 'Enter', charCode: 13 })
fireEvent.keyDown(screen.getByText(/next/i).closest('button')!, { key: 'Enter', code: 'Enter', keyCode: 13, which: 13 })
expect(setCurrentPage).not.toHaveBeenCalled()
})
@@ -318,7 +318,7 @@ describe('Pagination', () => {
/>
),
})
fireEvent.keyPress(screen.getByText('4'), { key: 'Enter', charCode: 13 })
fireEvent.keyDown(screen.getByText('4').closest('a')!, { key: 'Enter', code: 'Enter', keyCode: 13, which: 13 })
expect(setCurrentPage).toHaveBeenCalledWith(3) // 0-indexed
})

View File

@@ -50,7 +50,7 @@ export const PrevButton = ({
tabIndex={disabled ? '-1' : 0}
disabled={disabled}
data-testid={dataTestId}
onKeyPress={(event: React.KeyboardEvent) => {
onKeyDown={(event: React.KeyboardEvent) => {
event.preventDefault()
if (event.key === 'Enter' && !disabled)
previous()
@@ -85,7 +85,7 @@ export const NextButton = ({
tabIndex={disabled ? '-1' : 0}
disabled={disabled}
data-testid={dataTestId}
onKeyPress={(event: React.KeyboardEvent) => {
onKeyDown={(event: React.KeyboardEvent) => {
event.preventDefault()
if (event.key === 'Enter' && !disabled)
next()
@@ -140,7 +140,7 @@ export const PageButton = ({
}) || undefined
}
tabIndex={0}
onKeyPress={(event: React.KeyboardEvent) => {
onKeyDown={(event: React.KeyboardEvent) => {
if (event.key === 'Enter')
pagination.setCurrentPage(page - 1)
}}

View File

@@ -41,6 +41,6 @@ describe('PremiumBadge', () => {
)
const badge = screen.getByText('Premium')
expect(badge).toBeInTheDocument()
expect(badge).toHaveStyle('background-color: rgb(255, 0, 0)') // Note: React converts 'red' to 'rgb(255, 0, 0)'
expect(badge).toHaveStyle('background-color: red')
})
})

View File

@@ -141,7 +141,7 @@ export default function ShortcutsPopupPlugin({
const portalRef = useRef<HTMLDivElement | null>(null)
const lastSelectionRef = useRef<Range | null>(null)
/* v8 ignore next -- defensive non-browser fallback; this client-only plugin runs where document exists (browser/jsdom). @preserve */
/* v8 ignore next -- defensive non-browser fallback; this client-only plugin runs where document exists (browser/test DOM runtime). @preserve */
const containerEl = useMemo(() => container ?? (typeof document !== 'undefined' ? document.body : null), [container])
const useContainer = !!containerEl && containerEl !== document.body
@@ -210,7 +210,7 @@ export default function ShortcutsPopupPlugin({
if (rect.width === 0 && rect.height === 0) {
const root = editor.getRootElement()
/* v8 ignore next 10 -- zero-size rect recovery depends on browser layout/selection geometry; deterministic reproduction in jsdom is unreliable. @preserve */
/* v8 ignore next 10 -- zero-size rect recovery depends on browser layout/selection geometry; deterministic reproduction in the test DOM runtime is unreliable. @preserve */
if (root) {
const sc = range.startContainer
const node = sc.nodeType === Node.ELEMENT_NODE

View File

@@ -1612,9 +1612,7 @@ describe('Uploader', () => {
if (!dropArea)
return
fireEvent.drop(dropArea, {
dataTransfer: null,
})
fireEvent.drop(dropArea)
expect(updateFile).not.toHaveBeenCalled()
})

View File

@@ -1,6 +1,3 @@
/**
* @vitest-environment jsdom
*/
import type { Mock } from 'vitest'
import type { CrawlOptions, CrawlResultItem } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'

View File

@@ -11,7 +11,7 @@ describe('code.tsx components', () => {
vi.clearAllMocks()
vi.spyOn(console, 'error').mockImplementation(() => {})
vi.useFakeTimers({ shouldAdvanceTime: true })
// jsdom does not implement scrollBy; mock it to prevent stderr noise
// The test DOM runtime does not implement scrollBy; mock it to prevent stderr noise
window.scrollBy = vi.fn()
})

View File

@@ -307,7 +307,7 @@ describe('useDocToc', () => {
it('should update activeSection when scrolling past a section', async () => {
vi.useFakeTimers()
// innerHeight/2 = 384 in jsdom (default 768), so top <= 384 means "scrolled past"
// innerHeight/2 = 384 with the default test viewport height (768), so top <= 384 means "scrolled past"
const { scrollContainer, cleanup } = setupScrollDOM([
{ id: 'intro', text: 'Intro', top: 100 },
{ id: 'details', text: 'Details', top: 600 },

View File

@@ -43,7 +43,7 @@ vi.mock('@/app/components/base/tooltip', () => ({
),
}))
// Mock portal components to avoid async/jsdom issues (consistent with sibling tests)
// Mock portal components to avoid async test DOM issues (consistent with sibling tests)
vi.mock('@/app/components/base/portal-to-follow-elem', () => ({
PortalToFollowElem: ({ children, open }: { children: React.ReactNode, open: boolean, onOpenChange: (open: boolean) => void }) => (
<div data-testid="portal" data-open={open}>

View File

@@ -142,7 +142,7 @@ describe('EndpointCard', () => {
failureFlags.disable = false
failureFlags.delete = false
failureFlags.update = false
// Polyfill document.execCommand for copy-to-clipboard in jsdom
// Polyfill document.execCommand for copy-to-clipboard in the test DOM runtime
if (typeof document.execCommand !== 'function') {
document.execCommand = vi.fn().mockReturnValue(true)
}

View File

@@ -102,10 +102,12 @@ vi.mock('@/app/components/base/ui/toast', () => ({
}))
const mockClipboardWriteText = vi.fn()
Object.assign(navigator, {
clipboard: {
Object.defineProperty(navigator, 'clipboard', {
value: {
writeText: mockClipboardWriteText,
},
configurable: true,
writable: true,
})
vi.mock('@/app/components/base/modal/modal', () => ({
@@ -192,6 +194,13 @@ describe('OAuthClientSettingsModal', () => {
vi.clearAllMocks()
mockUsePluginStore.mockReturnValue(mockPluginDetail)
mockClipboardWriteText.mockResolvedValue(undefined)
Object.defineProperty(navigator, 'clipboard', {
value: {
writeText: mockClipboardWriteText,
},
configurable: true,
writable: true,
})
setMockFormValues({
values: { client_id: 'test-client-id', client_secret: 'test-client-secret' },
isCheckValidated: true,

View File

@@ -48,10 +48,10 @@ describe('CustomEdgeLinearGradientRender', () => {
const stops = container.querySelectorAll('stop')
expect(stops).toHaveLength(2)
expect(stops[0]).toHaveAttribute('offset', '0%')
expect(stops[0].getAttribute('style')).toContain('stop-color: rgb(17, 17, 17)')
expect(stops[0].getAttribute('style')).toContain('stop-color: #111111')
expect(stops[0].getAttribute('style')).toContain('stop-opacity: 1')
expect(stops[1]).toHaveAttribute('offset', '100%')
expect(stops[1].getAttribute('style')).toContain('stop-color: rgb(34, 34, 34)')
expect(stops[1].getAttribute('style')).toContain('stop-color: #222222')
expect(stops[1].getAttribute('style')).toContain('stop-opacity: 1')
})
})

View File

@@ -159,6 +159,40 @@ describe('SelectionContextmenu', () => {
})
})
it('should render and execute copy/duplicate/delete operations', async () => {
const nodes = [
createNode({ id: 'n1', selected: true, width: 80, height: 40 }),
createNode({ id: 'n2', selected: true, position: { x: 140, y: 0 }, width: 80, height: 40 }),
]
const { store } = renderSelectionMenu({ nodes })
act(() => {
store.setState({ selectionMenu: { clientX: 120, clientY: 120 } })
})
await waitFor(() => {
expect(screen.getByTestId('selection-contextmenu-item-copy')).toBeInTheDocument()
})
fireEvent.click(screen.getByTestId('selection-contextmenu-item-copy'))
expect(mockHandleNodesCopy).toHaveBeenCalledTimes(1)
expect(store.getState().selectionMenu).toBeUndefined()
act(() => {
store.setState({ selectionMenu: { clientX: 120, clientY: 120 } })
})
fireEvent.click(screen.getByTestId('selection-contextmenu-item-duplicate'))
expect(mockHandleNodesDuplicate).toHaveBeenCalledTimes(1)
expect(store.getState().selectionMenu).toBeUndefined()
act(() => {
store.setState({ selectionMenu: { clientX: 120, clientY: 120 } })
})
fireEvent.click(screen.getByTestId('selection-contextmenu-item-delete'))
expect(mockHandleNodesDelete).toHaveBeenCalledTimes(1)
expect(store.getState().selectionMenu).toBeUndefined()
})
it('should close itself when only one node is selected', async () => {
const nodes = [
createNode({ id: 'n1', selected: true, width: 80, height: 40 }),

View File

@@ -209,7 +209,7 @@ describe('UpdateDSLModal', () => {
})
await waitFor(() => {
expect(screen.getByRole('button', { name: 'app.newApp.Cancel' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'app.newApp.Confirm' })).toBeInTheDocument()
}, { timeout: 1000 })
fireEvent.click(screen.getByRole('button', { name: 'app.newApp.Cancel' }))

View File

@@ -1,6 +1,6 @@
/* eslint-disable ts/no-explicit-any */
import type { ScheduleTriggerNodeType } from '../../types'
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
import { render, screen, waitFor, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import FrequencySelector from '../frequency-selector'
import ModeSwitcher from '../mode-switcher'
@@ -44,14 +44,14 @@ describe('trigger-schedule components', () => {
)
const trigger = screen.getByRole('button', { name: 'workflow.nodes.triggerSchedule.frequency.daily' })
fireEvent.click(trigger)
await user.click(trigger)
await waitFor(() => {
expect(trigger).toHaveAttribute('aria-expanded', 'true')
})
const listbox = await screen.findByRole('listbox')
await user.click(within(listbox).getByText('workflow.nodes.triggerSchedule.frequency.weekly'))
await user.click(within(listbox).getByRole('option', { name: 'workflow.nodes.triggerSchedule.frequency.weekly' }))
await waitFor(() => {
expect(onChange).toHaveBeenCalledWith('weekly')

View File

@@ -150,7 +150,7 @@ describe('variable-modal', () => {
await user.click(screen.getByText('workflow.chatVariable.modal.editInJSON'))
await waitFor(() => {
expect(screen.getByText('Loading...')).toBeInTheDocument()
expect(screen.getByTestId('monaco-editor')).toBeInTheDocument()
})
await user.click(screen.getByText('workflow.chatVariable.modal.editInForm'))
expect(screen.getByDisplayValue('enabled')).toBeInTheDocument()

View File

@@ -8,7 +8,7 @@ When I ask you to write/refactor/fix tests, follow these rules by default.
- **Framework**: Next.js 15 + React 19 + TypeScript
- **Testing Tools**: Vitest 4.0.16 + React Testing Library 16.0
- **Test Environment**: jsdom
- **Test Environment**: happy-dom
- **File Naming**: `ComponentName.spec.tsx` inside a same-level `__tests__/` directory
- **Placement Rule**: Component, hook, and utility tests must live in a sibling `__tests__/` folder at the same level as the source under test. For example, `foo/index.tsx` maps to `foo/__tests__/index.spec.tsx`, and `foo/bar.ts` maps to `foo/__tests__/bar.spec.ts`.
@@ -30,7 +30,7 @@ pnpm test path/to/file.spec.tsx
## Project Test Setup
- **Configuration**: `vitest.config.ts` sets the `jsdom` environment, loads the Testing Library presets, and respects our path aliases (`@/...`). Check this file before adding new transformers or module name mappers.
- **Configuration**: `vite.config.ts` sets the `happy-dom` environment, loads the Testing Library presets, and respects our path aliases (`@/...`). Check this file before adding new transformers or module name mappers.
- **Global setup**: `vitest.setup.ts` already imports `@testing-library/jest-dom`, runs `cleanup()` after every test, and defines shared mocks (for example `react-i18next`). Add any environment-level mocks (for example `ResizeObserver`, `matchMedia`, `IntersectionObserver`, `TextEncoder`, `crypto`) here so they are shared consistently.
- **Reusable mocks**: Place shared mock factories inside `web/__mocks__/` and use `vi.mock('module-name')` to point to them rather than redefining mocks in every spec.
- **Mocking behavior**: Modules are not mocked automatically. Use `vi.mock(...)` in tests, or place global mocks in `vitest.setup.ts`.

View File

@@ -220,11 +220,10 @@
"eslint-plugin-react-refresh": "0.5.2",
"eslint-plugin-sonarjs": "4.0.2",
"eslint-plugin-storybook": "10.3.1",
"happy-dom": "20.8.8",
"hono": "4.12.8",
"husky": "9.1.7",
"iconify-import-svg": "0.1.2",
"jsdom": "29.0.1",
"jsdom-testing-mocks": "1.16.0",
"knip": "6.0.2",
"lint-staged": "16.4.0",
"postcss": "8.5.8",

View File

@@ -1,3 +1,6 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { buildUpstreamUrl, createDevProxyApp, isAllowedDevOrigin, resolveDevProxyTargets } from './server'

148
web/pnpm-lock.yaml generated
View File

@@ -371,7 +371,7 @@ importers:
devDependencies:
'@antfu/eslint-config':
specifier: 7.7.3
version: 7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.1(typescript@5.9.3))(@typescript-eslint/utils@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.30)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.56.0(oxlint-tsgolint@0.17.1))(typescript@5.9.3)
version: 7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.1(typescript@5.9.3))(@typescript-eslint/utils@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.30)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.56.0(oxlint-tsgolint@0.17.1))(typescript@5.9.3)
'@chromatic-com/storybook':
specifier: 5.0.2
version: 5.0.2(storybook@10.3.1(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))
@@ -506,7 +506,7 @@ importers:
version: 0.5.21(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
'@vitest/coverage-v8':
specifier: 4.1.0
version: 4.1.0(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
version: 4.1.0(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
agentation:
specifier: 2.3.3
version: 2.3.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -546,6 +546,9 @@ importers:
eslint-plugin-storybook:
specifier: 10.3.1
version: 10.3.1(eslint@10.1.0(jiti@1.21.7))(storybook@10.3.1(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
happy-dom:
specifier: 20.8.8
version: 20.8.8
hono:
specifier: 4.12.8
version: 4.12.8
@@ -555,12 +558,6 @@ importers:
iconify-import-svg:
specifier: 0.1.2
version: 0.1.2
jsdom:
specifier: 29.0.1
version: 29.0.1(canvas@3.2.2)
jsdom-testing-mocks:
specifier: 1.16.0
version: 1.16.0
knip:
specifier: 6.0.2
version: 6.0.2
@@ -608,13 +605,13 @@ importers:
version: 11.3.3(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
vite-plus:
specifier: 0.1.13
version: 0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
version: 0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
vitest:
specifier: npm:@voidzero-dev/vite-plus-test@0.1.13
version: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
version: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest-canvas-mock:
specifier: 1.1.3
version: 1.1.3(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
version: 1.1.3(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
packages:
@@ -3510,6 +3507,12 @@ packages:
'@types/unist@3.0.3':
resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==}
'@types/whatwg-mimetype@3.0.2':
resolution: {integrity: sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==}
'@types/ws@8.18.1':
resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}
'@types/yauzl@2.10.3':
resolution: {integrity: sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==}
@@ -4082,9 +4085,6 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
bezier-easing@2.1.0:
resolution: {integrity: sha512-gbIqZ/eslnUFC1tjEvtz0sgx+xTK20wDnYMIA27VA04R7w6xxXQPZDbibjA9DTWZRA2CXtwHykkVzlCaAJAZig==}
bidi-js@1.0.3:
resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==}
@@ -4385,9 +4385,6 @@ packages:
resolution: {integrity: sha512-3O5QdqgFRUbXvK1x5INf1YkBz1UKSWqrd63vWsum8MNHDBYD5urm3QtxZbKU259OrEXNM26lP/MPY3d1IGkBgA==}
engines: {node: '>=16'}
css-mediaquery@0.1.2:
resolution: {integrity: sha512-COtn4EROW5dBGlE/4PiKnh6rZpAPxDeFLaEEwt4i10jpDMFt2EhQGS79QmmrO+iKCHv0PU/HrOWEhijFd1x99Q==}
css-select@5.2.2:
resolution: {integrity: sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==}
@@ -5359,6 +5356,10 @@ packages:
hachure-fill@0.5.2:
resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==}
happy-dom@20.8.8:
resolution: {integrity: sha512-5/F8wxkNxYtsN0bXfMwIyNLZ9WYsoOYPbmoluqVJqv8KBUbcyKZawJ7uYK4WTX8IHBLYv+VXIwfeNDPy1oKMwQ==}
engines: {node: '>=20.0.0'}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
@@ -5690,10 +5691,6 @@ packages:
resolution: {integrity: sha512-/2uqY7x6bsrpi3i9LVU6J89352C0rpMk0as8trXxCtvd4kPk1ke/Eyif6wqfSLvoNJqcDG9Vk4UsXgygzCt2xA==}
engines: {node: '>=20.0.0'}
jsdom-testing-mocks@1.16.0:
resolution: {integrity: sha512-wLrulXiLpjmcUYOYGEvz4XARkrmdVpyxzdBl9IAMbQ+ib2/UhUTRCn49McdNfXLff2ysGBUms49ZKX0LR1Q0gg==}
engines: {node: '>=14'}
jsdom@29.0.1:
resolution: {integrity: sha512-z6JOK5gRO7aMybVq/y/MlIpKh8JIi68FBKMUtKkK2KH/wMSRlCxQ682d08LB9fYXplyY/UXG8P4XXTScmdjApg==}
engines: {node: ^20.19.0 || ^22.13.0 || >=24.0.0}
@@ -7841,6 +7838,10 @@ packages:
engines: {node: '>=18'}
deprecated: Use @exodus/bytes instead for a more spec-conformant and faster implementation
whatwg-mimetype@3.0.0:
resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==}
engines: {node: '>=12'}
whatwg-mimetype@4.0.0:
resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==}
engines: {node: '>=18'}
@@ -8140,7 +8141,7 @@ snapshots:
idb: 8.0.0
tslib: 2.8.1
'@antfu/eslint-config@7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.1(typescript@5.9.3))(@typescript-eslint/utils@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.30)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.56.0(oxlint-tsgolint@0.17.1))(typescript@5.9.3)':
'@antfu/eslint-config@7.7.3(@eslint-react/eslint-plugin@3.0.0(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@typescript-eslint/typescript-estree@8.57.1(typescript@5.9.3))(@typescript-eslint/utils@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(@vue/compiler-sfc@3.5.30)(eslint-plugin-react-hooks@7.0.1(eslint@10.1.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.5.2(eslint@10.1.0(jiti@1.21.7)))(eslint@10.1.0(jiti@1.21.7))(oxlint@1.56.0(oxlint-tsgolint@0.17.1))(typescript@5.9.3)':
dependencies:
'@antfu/install-pkg': 1.1.0
'@clack/prompts': 1.1.0
@@ -8150,7 +8151,7 @@ snapshots:
'@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@1.21.7))
'@typescript-eslint/eslint-plugin': 8.57.1(@typescript-eslint/parser@8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
'@typescript-eslint/parser': 8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
'@vitest/eslint-plugin': 1.6.12(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
'@vitest/eslint-plugin': 1.6.12(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
ansis: 4.2.0
cac: 7.0.0
eslint: 10.1.0(jiti@1.21.7)
@@ -8217,6 +8218,7 @@ snapshots:
'@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
'@csstools/css-tokenizer': 4.0.0
lru-cache: 11.2.7
optional: true
'@asamuzakjp/dom-selector@7.0.3':
dependencies:
@@ -8225,8 +8227,10 @@ snapshots:
css-tree: 3.2.1
is-potential-custom-element-name: 1.0.1
lru-cache: 11.2.7
optional: true
'@asamuzakjp/nwsapi@2.3.9': {}
'@asamuzakjp/nwsapi@2.3.9':
optional: true
'@babel/code-frame@7.29.0':
dependencies:
@@ -8361,6 +8365,7 @@ snapshots:
'@bramus/specificity@2.4.2':
dependencies:
css-tree: 3.2.1
optional: true
'@chevrotain/cst-dts-gen@11.1.2':
dependencies:
@@ -8453,12 +8458,14 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@csstools/color-helpers@6.0.2': {}
'@csstools/color-helpers@6.0.2':
optional: true
'@csstools/css-calc@3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)':
dependencies:
'@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
'@csstools/css-tokenizer': 4.0.0
optional: true
'@csstools/css-color-parser@4.0.2(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)':
dependencies:
@@ -8466,16 +8473,20 @@ snapshots:
'@csstools/css-calc': 3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)
'@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
'@csstools/css-tokenizer': 4.0.0
optional: true
'@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0)':
dependencies:
'@csstools/css-tokenizer': 4.0.0
optional: true
'@csstools/css-syntax-patches-for-csstree@1.1.1(css-tree@3.2.1)':
optionalDependencies:
css-tree: 3.2.1
optional: true
'@csstools/css-tokenizer@4.0.0': {}
'@csstools/css-tokenizer@4.0.0':
optional: true
'@e18e/eslint-plugin@0.2.0(eslint@10.1.0(jiti@1.21.7))(oxlint@1.56.0(oxlint-tsgolint@0.17.1))':
dependencies:
@@ -8777,7 +8788,8 @@ snapshots:
'@eslint/core': 1.1.1
levn: 0.4.1
'@exodus/bytes@1.15.0': {}
'@exodus/bytes@1.15.0':
optional: true
'@floating-ui/core@1.7.5':
dependencies:
@@ -10814,6 +10826,12 @@ snapshots:
'@types/unist@3.0.3': {}
'@types/whatwg-mimetype@3.0.2': {}
'@types/ws@8.18.1':
dependencies:
'@types/node': 25.5.0
'@types/yauzl@2.10.3':
dependencies:
'@types/node': 25.5.0
@@ -11019,7 +11037,7 @@ snapshots:
optionalDependencies:
react-server-dom-webpack: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@vitest/coverage-v8@4.1.0(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
'@vitest/coverage-v8@4.1.0(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
'@vitest/utils': 4.1.0
@@ -11031,16 +11049,16 @@ snapshots:
obug: 2.1.1
std-env: 4.0.0
tinyrainbow: 3.1.0
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
'@vitest/eslint-plugin@1.6.12(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
'@vitest/eslint-plugin@1.6.12(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)':
dependencies:
'@typescript-eslint/scope-manager': 8.57.1
'@typescript-eslint/utils': 8.57.1(eslint@10.1.0(jiti@1.21.7))(typescript@5.9.3)
eslint: 10.1.0(jiti@1.21.7)
optionalDependencies:
typescript: 5.9.3
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
transitivePeerDependencies:
- supports-color
@@ -11105,7 +11123,7 @@ snapshots:
'@voidzero-dev/vite-plus-linux-x64-gnu@0.1.13':
optional: true
'@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
'@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)':
dependencies:
'@standard-schema/spec': 1.1.0
'@types/chai': 5.2.3
@@ -11123,6 +11141,7 @@ snapshots:
ws: 8.19.0
optionalDependencies:
'@types/node': 25.5.0
happy-dom: 20.8.8
jsdom: 29.0.1(canvas@3.2.2)
transitivePeerDependencies:
- '@arethetypeswrong/core'
@@ -11419,11 +11438,10 @@ snapshots:
baseline-browser-mapping@2.10.8: {}
bezier-easing@2.1.0: {}
bidi-js@1.0.3:
dependencies:
require-from-string: 2.0.2
optional: true
binary-extensions@2.3.0: {}
@@ -11715,8 +11733,6 @@ snapshots:
css-gradient-parser@0.0.16: {}
css-mediaquery@0.1.2: {}
css-select@5.2.2:
dependencies:
boolbase: 1.0.0
@@ -11745,6 +11761,7 @@ snapshots:
dependencies:
mdn-data: 2.27.1
source-map-js: 1.2.1
optional: true
css-what@6.2.2: {}
@@ -11950,6 +11967,7 @@ snapshots:
whatwg-url: 16.0.1
transitivePeerDependencies:
- '@noble/hashes'
optional: true
dayjs@1.11.20: {}
@@ -12897,6 +12915,18 @@ snapshots:
hachure-fill@0.5.2: {}
happy-dom@20.8.8:
dependencies:
'@types/node': 25.5.0
'@types/whatwg-mimetype': 3.0.2
'@types/ws': 8.18.1
entities: 7.0.1
whatwg-mimetype: 3.0.0
ws: 8.19.0
transitivePeerDependencies:
- bufferutil
- utf-8-validate
has-flag@4.0.0: {}
hast-util-from-dom@5.0.1:
@@ -13061,6 +13091,7 @@ snapshots:
'@exodus/bytes': 1.15.0
transitivePeerDependencies:
- '@noble/hashes'
optional: true
html-entities@2.6.0: {}
@@ -13199,7 +13230,8 @@ snapshots:
is-plain-obj@4.1.0: {}
is-potential-custom-element-name@1.0.1: {}
is-potential-custom-element-name@1.0.1:
optional: true
is-reference@3.0.3:
dependencies:
@@ -13261,11 +13293,6 @@ snapshots:
jsdoc-type-pratt-parser@7.1.1: {}
jsdom-testing-mocks@1.16.0:
dependencies:
bezier-easing: 2.1.0
css-mediaquery: 0.1.2
jsdom@29.0.1(canvas@3.2.2):
dependencies:
'@asamuzakjp/css-color': 5.0.1
@@ -13293,6 +13320,7 @@ snapshots:
canvas: 3.2.2
transitivePeerDependencies:
- '@noble/hashes'
optional: true
jsesc@3.1.0: {}
@@ -13753,7 +13781,8 @@ snapshots:
mdn-data@2.23.0: {}
mdn-data@2.27.1: {}
mdn-data@2.27.1:
optional: true
memoize-one@5.2.1: {}
@@ -15121,6 +15150,7 @@ snapshots:
saxes@6.0.0:
dependencies:
xmlchars: 2.2.0
optional: true
scheduler@0.27.0: {}
@@ -15397,7 +15427,8 @@ snapshots:
picocolors: 1.1.1
sax: 1.6.0
symbol-tree@3.2.4: {}
symbol-tree@3.2.4:
optional: true
synckit@0.11.12:
dependencies:
@@ -15559,10 +15590,12 @@ snapshots:
tough-cookie@6.0.1:
dependencies:
tldts: 7.0.27
optional: true
tr46@6.0.0:
dependencies:
punycode: 2.3.1
optional: true
trim-lines@3.0.1: {}
@@ -15655,7 +15688,8 @@ snapshots:
undici@7.24.0: {}
undici@7.24.5: {}
undici@7.24.5:
optional: true
unicode-trie@2.0.0:
dependencies:
@@ -15879,11 +15913,11 @@ snapshots:
- supports-color
- typescript
vite-plus@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3):
vite-plus@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3):
dependencies:
'@oxc-project/types': 0.120.0
'@voidzero-dev/vite-plus-core': 0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
'@voidzero-dev/vite-plus-test': 0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
'@voidzero-dev/vite-plus-test': 0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)
cac: 7.0.0
cross-spawn: 7.0.6
oxfmt: 0.41.0
@@ -15950,11 +15984,11 @@ snapshots:
optionalDependencies:
vite: '@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest-canvas-mock@1.1.3(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
vitest-canvas-mock@1.1.3(@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)):
dependencies:
cssfontparser: 1.2.1
moo-color: 1.0.3
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
vitest: '@voidzero-dev/vite-plus-test@0.1.13(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.13(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.8)(jiti@1.21.7)(jsdom@29.0.1(canvas@3.2.2))(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
void-elements@3.1.0: {}
@@ -15990,6 +16024,7 @@ snapshots:
w3c-xmlserializer@5.0.0:
dependencies:
xml-name-validator: 5.0.0
optional: true
walk-up-path@4.0.0: {}
@@ -16002,7 +16037,8 @@ snapshots:
web-vitals@5.1.0: {}
webidl-conversions@8.0.1: {}
webidl-conversions@8.0.1:
optional: true
webpack-sources@3.3.4: {}
@@ -16044,9 +16080,12 @@ snapshots:
dependencies:
iconv-lite: 0.6.3
whatwg-mimetype@3.0.0: {}
whatwg-mimetype@4.0.0: {}
whatwg-mimetype@5.0.0: {}
whatwg-mimetype@5.0.0:
optional: true
whatwg-url@16.0.1:
dependencies:
@@ -16055,6 +16094,7 @@ snapshots:
webidl-conversions: 8.0.1
transitivePeerDependencies:
- '@noble/hashes'
optional: true
which@2.0.2:
dependencies:
@@ -16078,9 +16118,11 @@ snapshots:
xml-name-validator@4.0.0: {}
xml-name-validator@5.0.0: {}
xml-name-validator@5.0.0:
optional: true
xmlchars@2.2.0: {}
xmlchars@2.2.0:
optional: true
xtend@4.0.2: {}

View File

@@ -75,7 +75,8 @@ export default defineConfig(({ mode }) => {
// Vitest config
test: {
environment: 'jsdom',
pool: 'threads',
environment: 'happy-dom',
globals: true,
setupFiles: ['./vitest.setup.ts'],
coverage: {

View File

@@ -1,14 +1,8 @@
import { act, cleanup } from '@testing-library/react'
import { mockAnimationsApi, mockResizeObserver } from 'jsdom-testing-mocks'
import * as React from 'react'
import '@testing-library/jest-dom/vitest'
import 'vitest-canvas-mock'
mockResizeObserver()
// Mock Web Animations API for Headless UI
mockAnimationsApi()
// Suppress act() warnings from @headlessui/react internal Transition component
// These warnings are caused by Headless UI's internal async state updates, not our code
const originalConsoleError = console.error
@@ -77,24 +71,10 @@ if (typeof globalThis.IntersectionObserver === 'undefined') {
}
}
// Mock Element.scrollIntoView for tests (not available in happy-dom/jsdom)
if (typeof Element !== 'undefined' && !Element.prototype.scrollIntoView)
Element.prototype.scrollIntoView = function () { /* noop */ }
// Mock DOMRect.fromRect for tests (not available in jsdom)
if (typeof DOMRect !== 'undefined' && typeof (DOMRect as typeof DOMRect & { fromRect?: unknown }).fromRect !== 'function') {
(DOMRect as typeof DOMRect & { fromRect: (rect?: DOMRectInit) => DOMRect }).fromRect = (rect = {}) => new DOMRect(
rect.x ?? 0,
rect.y ?? 0,
rect.width ?? 0,
rect.height ?? 0,
)
}
afterEach(async () => {
// Wrap cleanup in act() to flush pending React scheduler work
// This prevents "window is not defined" errors from React 19's scheduler
// which uses setImmediate/MessageChannel that can fire after jsdom cleanup
// which uses setImmediate/MessageChannel that can fire after DOM cleanup
await act(async () => {
cleanup()
})
@@ -131,19 +111,97 @@ vi.mock('@floating-ui/react', async () => {
}
})
// mock window.matchMedia
Object.defineProperty(window, 'matchMedia', {
writable: true,
value: vi.fn().mockImplementation(query => ({
matches: false,
media: query,
onchange: null,
addListener: vi.fn(), // deprecated
removeListener: vi.fn(), // deprecated
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
dispatchEvent: vi.fn(),
})),
vi.mock('@monaco-editor/react', () => {
const createEditorMock = () => {
const focusListeners: Array<() => void> = []
const blurListeners: Array<() => void> = []
return {
getContentHeight: vi.fn(() => 56),
onDidFocusEditorText: vi.fn((listener: () => void) => {
focusListeners.push(listener)
return { dispose: vi.fn() }
}),
onDidBlurEditorText: vi.fn((listener: () => void) => {
blurListeners.push(listener)
return { dispose: vi.fn() }
}),
layout: vi.fn(),
getAction: vi.fn(() => ({ run: vi.fn() })),
getModel: vi.fn(() => ({
getLineContent: vi.fn(() => ''),
})),
getPosition: vi.fn(() => ({ lineNumber: 1, column: 1 })),
deltaDecorations: vi.fn(() => []),
focus: vi.fn(() => {
focusListeners.forEach(listener => listener())
}),
setPosition: vi.fn(),
revealLine: vi.fn(),
trigger: vi.fn(),
__blur: () => {
blurListeners.forEach(listener => listener())
},
}
}
const monacoMock = {
editor: {
setTheme: vi.fn(),
defineTheme: vi.fn(),
},
Range: class {
startLineNumber: number
startColumn: number
endLineNumber: number
endColumn: number
constructor(startLineNumber: number, startColumn: number, endLineNumber: number, endColumn: number) {
this.startLineNumber = startLineNumber
this.startColumn = startColumn
this.endLineNumber = endLineNumber
this.endColumn = endColumn
}
},
}
const MonacoEditor = ({
value = '',
onChange,
onMount,
options,
}: {
value?: string
onChange?: (value: string | undefined) => void
onMount?: (editor: ReturnType<typeof createEditorMock>, monaco: typeof monacoMock) => void
options?: { readOnly?: boolean }
}) => {
const editorRef = React.useRef<ReturnType<typeof createEditorMock> | null>(null)
if (!editorRef.current)
editorRef.current = createEditorMock()
React.useEffect(() => {
onMount?.(editorRef.current!, monacoMock)
}, [onMount])
return React.createElement('textarea', {
'data-testid': 'monaco-editor',
'readOnly': options?.readOnly,
value,
'onChange': (event: React.ChangeEvent<HTMLTextAreaElement>) => onChange?.(event.target.value),
'onFocus': () => editorRef.current?.focus(),
'onBlur': () => editorRef.current?.__blur(),
})
}
return {
__esModule: true,
default: MonacoEditor,
Editor: MonacoEditor,
loader: {
config: vi.fn(),
init: vi.fn().mockResolvedValue(monacoMock),
},
}
})
// Mock localStorage for testing