Merge branch 'main' into jzh

This commit is contained in:
JzoNg
2026-03-31 18:08:24 +08:00
64 changed files with 3614 additions and 2694 deletions

View File

@@ -67,6 +67,92 @@ jobs:
}
" web/i18n-config/languages.ts | sed 's/[[:space:]]*$//')
generate_changes_json() {
node <<'NODE'
const { execFileSync } = require('node:child_process')
const fs = require('node:fs')
const path = require('node:path')
const repoRoot = process.cwd()
const baseSha = process.env.BASE_SHA || ''
const headSha = process.env.HEAD_SHA || ''
const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
const readCurrentJson = (fileStem) => {
const filePath = englishPath(fileStem)
if (!fs.existsSync(filePath))
return null
return JSON.parse(fs.readFileSync(filePath, 'utf8'))
}
const readBaseJson = (fileStem) => {
if (!baseSha)
return null
try {
const relativePath = `web/i18n/en-US/${fileStem}.json`
const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
return JSON.parse(content)
}
catch (error) {
return null
}
}
const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
const changes = {}
for (const fileStem of files) {
const currentJson = readCurrentJson(fileStem)
const beforeJson = readBaseJson(fileStem) || {}
const afterJson = currentJson || {}
const added = {}
const updated = {}
const deleted = []
for (const [key, value] of Object.entries(afterJson)) {
if (!(key in beforeJson)) {
added[key] = value
continue
}
if (!compareJson(beforeJson[key], value)) {
updated[key] = {
before: beforeJson[key],
after: value,
}
}
}
for (const key of Object.keys(beforeJson)) {
if (!(key in afterJson))
deleted.push(key)
}
changes[fileStem] = {
fileDeleted: currentJson === null,
added,
updated,
deleted,
}
}
fs.writeFileSync(
'/tmp/i18n-changes.json',
JSON.stringify({
baseSha,
headSha,
files,
changes,
})
)
NODE
}
if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
BASE_SHA="${{ github.event.client_payload.base_sha }}"
HEAD_SHA="${{ github.event.client_payload.head_sha }}"
@@ -74,12 +160,19 @@ jobs:
TARGET_LANGS="$DEFAULT_TARGET_LANGS"
SYNC_MODE="${{ github.event.client_payload.sync_mode || 'incremental' }}"
if [ -n "${{ github.event.client_payload.diff_base64 }}" ]; then
printf '%s' '${{ github.event.client_payload.diff_base64 }}' | base64 -d > /tmp/i18n-diff.txt
DIFF_AVAILABLE="true"
if [ -n "${{ github.event.client_payload.changes_base64 }}" ]; then
printf '%s' '${{ github.event.client_payload.changes_base64 }}' | base64 -d > /tmp/i18n-changes.json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="embedded"
elif [ -n "$BASE_SHA" ] && [ -n "$CHANGED_FILES" ]; then
export BASE_SHA HEAD_SHA CHANGED_FILES
generate_changes_json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="recomputed"
else
: > /tmp/i18n-diff.txt
DIFF_AVAILABLE="false"
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
CHANGES_AVAILABLE="false"
CHANGES_SOURCE="unavailable"
fi
else
BASE_SHA=""
@@ -106,16 +199,15 @@ jobs:
CHANGED_FILES=""
fi
if [ "$SYNC_MODE" = "incremental" ] && [ -n "$BASE_SHA" ]; then
git diff "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || : > /tmp/i18n-diff.txt
if [ "$SYNC_MODE" = "incremental" ] && [ -n "$CHANGED_FILES" ]; then
export BASE_SHA HEAD_SHA CHANGED_FILES
generate_changes_json
CHANGES_AVAILABLE="true"
CHANGES_SOURCE="local"
else
: > /tmp/i18n-diff.txt
fi
if [ -s /tmp/i18n-diff.txt ]; then
DIFF_AVAILABLE="true"
else
DIFF_AVAILABLE="false"
printf '%s' '{"baseSha":"","headSha":"","files":[],"changes":{}}' > /tmp/i18n-changes.json
CHANGES_AVAILABLE="false"
CHANGES_SOURCE="unavailable"
fi
fi
@@ -136,7 +228,8 @@ jobs:
echo "CHANGED_FILES=$CHANGED_FILES"
echo "TARGET_LANGS=$TARGET_LANGS"
echo "SYNC_MODE=$SYNC_MODE"
echo "DIFF_AVAILABLE=$DIFF_AVAILABLE"
echo "CHANGES_AVAILABLE=$CHANGES_AVAILABLE"
echo "CHANGES_SOURCE=$CHANGES_SOURCE"
echo "FILE_ARGS=$FILE_ARGS"
echo "LANG_ARGS=$LANG_ARGS"
} >> "$GITHUB_OUTPUT"
@@ -155,7 +248,7 @@ jobs:
show_full_output: ${{ github.event_name == 'workflow_dispatch' }}
prompt: |
You are the i18n sync agent for the Dify repository.
Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`, then open a PR with the result.
Your job is to keep translations synchronized with the English source files under `${{ github.workspace }}/web/i18n/en-US/`.
Use absolute paths at all times:
- Repo root: `${{ github.workspace }}`
@@ -170,13 +263,15 @@ jobs:
- Head SHA: `${{ steps.context.outputs.HEAD_SHA }}`
- Scoped file args: `${{ steps.context.outputs.FILE_ARGS }}`
- Scoped language args: `${{ steps.context.outputs.LANG_ARGS }}`
- Full English diff available: `${{ steps.context.outputs.DIFF_AVAILABLE }}`
- Structured change set available: `${{ steps.context.outputs.CHANGES_AVAILABLE }}`
- Structured change set source: `${{ steps.context.outputs.CHANGES_SOURCE }}`
- Structured change set file: `/tmp/i18n-changes.json`
Tool rules:
- Use Read for repository files.
- Use Edit for JSON updates.
- Use Bash only for `git`, `gh`, `pnpm`, and `date`.
- Run Bash commands one by one. Do not combine commands with `&&`, `||`, pipes, or command substitution.
- Use Bash only for `pnpm`.
- Do not use Bash for `git`, `gh`, or branch management.
Required execution plan:
1. Resolve target languages.
@@ -187,30 +282,25 @@ jobs:
- Only process the resolved target languages, never `en-US`.
- Do not touch unrelated i18n files.
- Do not modify `${{ github.workspace }}/web/i18n/en-US/`.
3. Detect English changes per file.
- Treat the current English JSON files under `${{ github.workspace }}/web/i18n/en-US/` plus the scoped `i18n:check` result as the primary source of truth.
- Use `/tmp/i18n-diff.txt` only as supporting context to understand what changed between `Base SHA` and `Head SHA`.
- Never rely on diff alone when deciding final keys or values.
- Read the current English JSON file for each file in scope.
- If sync mode is `incremental` and `Base SHA` is not empty, run:
`git -C ${{ github.workspace }} show <Base SHA>:web/i18n/en-US/<file>.json`
- If sync mode is `full` or `Base SHA` is empty, skip historical comparison and treat the current English file as the only source of truth for structural sync.
- If the file did not exist at Base SHA, treat all current keys as ADD.
- Compare previous and current English JSON to identify:
- ADD: key only in current
- UPDATE: key exists in both and the English value changed
- DELETE: key only in previous
- If `/tmp/i18n-diff.txt` is available, read it before translating so wording changes are grounded in the full English patch, but resolve any ambiguity by trusting the actual English files and scoped checks.
3. Resolve source changes.
- If `Structured change set available` is `true`, read `/tmp/i18n-changes.json` and use it as the source of truth for file-level and key-level changes.
- For each file entry:
- `added` contains new English keys that need translations.
- `updated` contains stale keys whose English source changed; re-translate using the `after` value.
- `deleted` contains keys that should be removed from locale files.
- `fileDeleted: true` means the English file no longer exists; remove the matching locale file if present.
- Read the current English JSON file for any file that still exists so wording, placeholders, and surrounding terminology stay accurate.
- If `Structured change set available` is `false`, treat this as a scoped full sync and use the current English files plus scoped checks as the source of truth.
4. Run a scoped pre-check before editing:
- `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- Use this command as the source of truth for missing and extra keys inside the current scope.
5. Apply translations.
- For every target language and scoped file:
- If `fileDeleted` is `true`, remove the locale file if it exists and skip the rest of that file.
- If the locale file does not exist yet, create it with `Write` and then continue with `Edit` as needed.
- ADD missing keys.
- UPDATE stale translations when the English value changed.
- DELETE removed keys. Prefer `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
- For `zh-Hans` and `ja-JP`, if the locale file also changed between Base SHA and Head SHA, preserve manual translations unless they are clearly wrong for the new English value. If in doubt, keep the manual translation.
- Preserve placeholders exactly: `{{variable}}`, `${variable}`, HTML tags, component tags, and variable names.
- Match the existing terminology and register used by each locale.
- Prefer one Edit per file when stable, but prioritize correctness over batching.
@@ -218,14 +308,119 @@ jobs:
- Run `pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- <relative edited i18n file paths>`
- Run `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
- If verification fails, fix the remaining problems before continuing.
7. Create a PR only when there are changes in `web/i18n/`.
- Check `git -C ${{ github.workspace }} status --porcelain -- web/i18n/`
- Create branch `chore/i18n-sync-<timestamp>`
- Commit message: `chore(i18n): sync translations with en-US`
- Push the branch and open a PR against `main`
- PR title: `chore(i18n): sync translations with en-US`
- PR body: summarize files, languages, sync mode, and verification commands
8. If there are no translation changes after verification, do not create a branch, commit, or PR.
7. Stop after the scoped locale files are updated and verification passes.
- Do not create branches, commits, or pull requests.
claude_args: |
--max-turns 80
--allowedTools "Read,Write,Edit,Bash(git *),Bash(git:*),Bash(gh *),Bash(gh:*),Bash(pnpm *),Bash(pnpm:*),Bash(date *),Bash(date:*),Glob,Grep"
--max-turns 120
--allowedTools "Read,Write,Edit,Bash(pnpm *),Bash(pnpm:*),Glob,Grep"
- name: Prepare branch metadata
id: pr_meta
if: steps.context.outputs.CHANGED_FILES != ''
shell: bash
run: |
if [ -z "$(git -C "${{ github.workspace }}" status --porcelain -- web/i18n/)" ]; then
echo "has_changes=false" >> "$GITHUB_OUTPUT"
exit 0
fi
SCOPE_HASH=$(printf '%s|%s|%s' "${{ steps.context.outputs.CHANGED_FILES }}" "${{ steps.context.outputs.TARGET_LANGS }}" "${{ steps.context.outputs.SYNC_MODE }}" | sha256sum | cut -c1-8)
HEAD_SHORT=$(printf '%s' "${{ steps.context.outputs.HEAD_SHA }}" | cut -c1-12)
BRANCH_NAME="chore/i18n-sync-${HEAD_SHORT}-${SCOPE_HASH}"
{
echo "has_changes=true"
echo "branch_name=$BRANCH_NAME"
} >> "$GITHUB_OUTPUT"
- name: Commit translation changes
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
git -C "${{ github.workspace }}" checkout -B "${{ steps.pr_meta.outputs.branch_name }}"
git -C "${{ github.workspace }}" add web/i18n/
git -C "${{ github.workspace }}" commit -m "chore(i18n): sync translations with en-US"
- name: Push translation branch
if: steps.pr_meta.outputs.has_changes == 'true'
shell: bash
run: |
if git -C "${{ github.workspace }}" ls-remote --exit-code --heads origin "${{ steps.pr_meta.outputs.branch_name }}" >/dev/null 2>&1; then
git -C "${{ github.workspace }}" push --force-with-lease origin "${{ steps.pr_meta.outputs.branch_name }}"
else
git -C "${{ github.workspace }}" push --set-upstream origin "${{ steps.pr_meta.outputs.branch_name }}"
fi
- name: Create or update translation PR
if: steps.pr_meta.outputs.has_changes == 'true'
env:
BRANCH_NAME: ${{ steps.pr_meta.outputs.branch_name }}
FILES_IN_SCOPE: ${{ steps.context.outputs.CHANGED_FILES }}
TARGET_LANGS: ${{ steps.context.outputs.TARGET_LANGS }}
SYNC_MODE: ${{ steps.context.outputs.SYNC_MODE }}
CHANGES_SOURCE: ${{ steps.context.outputs.CHANGES_SOURCE }}
BASE_SHA: ${{ steps.context.outputs.BASE_SHA }}
HEAD_SHA: ${{ steps.context.outputs.HEAD_SHA }}
REPO_NAME: ${{ github.repository }}
shell: bash
run: |
PR_BODY_FILE=/tmp/i18n-pr-body.md
LANG_COUNT=$(printf '%s\n' "$TARGET_LANGS" | wc -w | tr -d ' ')
if [ "$LANG_COUNT" = "0" ]; then
LANG_COUNT="0"
fi
export LANG_COUNT
node <<'NODE' > "$PR_BODY_FILE"
const fs = require('node:fs')
const changesPath = '/tmp/i18n-changes.json'
const changes = fs.existsSync(changesPath)
? JSON.parse(fs.readFileSync(changesPath, 'utf8'))
: { changes: {} }
const filesInScope = (process.env.FILES_IN_SCOPE || '').split(/\s+/).filter(Boolean)
const lines = [
'## Summary',
'',
`- **Files synced**: \`${process.env.FILES_IN_SCOPE || '<none>'}\``,
`- **Languages updated**: ${process.env.TARGET_LANGS || '<none>'} (${process.env.LANG_COUNT} languages)`,
`- **Sync mode**: ${process.env.SYNC_MODE}${process.env.BASE_SHA ? ` (base: \`${process.env.BASE_SHA.slice(0, 10)}\`, head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)` : ` (head: \`${process.env.HEAD_SHA.slice(0, 10)}\`)`}`,
'',
'### Key changes',
]
for (const fileName of filesInScope) {
const fileChange = changes.changes?.[fileName] || { added: {}, updated: {}, deleted: [], fileDeleted: false }
const addedKeys = Object.keys(fileChange.added || {})
const updatedKeys = Object.keys(fileChange.updated || {})
const deletedKeys = fileChange.deleted || []
lines.push(`- \`${fileName}\`: +${addedKeys.length} / ~${updatedKeys.length} / -${deletedKeys.length}${fileChange.fileDeleted ? ' (file deleted in en-US)' : ''}`)
}
lines.push(
'',
'## Verification',
'',
`- \`pnpm --dir web run i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
`- \`pnpm --dir web lint:fix --quiet -- <edited i18n files>\``,
'',
'## Notes',
'',
'- This PR was generated from structured en-US key changes produced by `trigger-i18n-sync.yml`.',
`- Structured change source: ${process.env.CHANGES_SOURCE || 'unknown'}.`,
'- Branch name is deterministic for the head SHA and scope, so reruns update the same PR instead of opening duplicates.',
'',
'🤖 Generated with [Claude Code](https://claude.com/claude-code)'
)
process.stdout.write(lines.join('\n'))
NODE
EXISTING_PR_NUMBER=$(gh pr list --repo "$REPO_NAME" --head "$BRANCH_NAME" --state open --json number --jq '.[0].number')
if [ -n "$EXISTING_PR_NUMBER" ] && [ "$EXISTING_PR_NUMBER" != "null" ]; then
gh pr edit "$EXISTING_PR_NUMBER" --repo "$REPO_NAME" --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
else
gh pr create --repo "$REPO_NAME" --head "$BRANCH_NAME" --base main --title "chore(i18n): sync translations with en-US" --body-file "$PR_BODY_FILE"
fi

View File

@@ -25,7 +25,7 @@ jobs:
with:
fetch-depth: 0
- name: Detect changed files and generate full diff
- name: Detect changed files and build structured change set
id: detect
shell: bash
run: |
@@ -37,12 +37,94 @@ jobs:
if [ -n "$BASE_SHA" ]; then
CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' 2>/dev/null | sed -n 's@^.*/@@p' | sed 's/\.json$//' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
git diff "$BASE_SHA" "$HEAD_SHA" -- 'web/i18n/en-US/*.json' > /tmp/i18n-diff.txt 2>/dev/null || : > /tmp/i18n-diff.txt
else
CHANGED_FILES=$(find web/i18n/en-US -maxdepth 1 -type f -name '*.json' -print | sed -n 's@^.*/@@p' | sed 's/\.json$//' | sort | tr '\n' ' ' | sed 's/[[:space:]]*$//')
: > /tmp/i18n-diff.txt
fi
export BASE_SHA HEAD_SHA CHANGED_FILES
node <<'NODE'
const { execFileSync } = require('node:child_process')
const fs = require('node:fs')
const path = require('node:path')
const repoRoot = process.cwd()
const baseSha = process.env.BASE_SHA || ''
const headSha = process.env.HEAD_SHA || ''
const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
const readCurrentJson = (fileStem) => {
const filePath = englishPath(fileStem)
if (!fs.existsSync(filePath))
return null
return JSON.parse(fs.readFileSync(filePath, 'utf8'))
}
const readBaseJson = (fileStem) => {
if (!baseSha)
return null
try {
const relativePath = `web/i18n/en-US/${fileStem}.json`
const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
return JSON.parse(content)
}
catch (error) {
return null
}
}
const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
const changes = {}
for (const fileStem of files) {
const beforeJson = readBaseJson(fileStem) || {}
const afterJson = readCurrentJson(fileStem) || {}
const added = {}
const updated = {}
const deleted = []
for (const [key, value] of Object.entries(afterJson)) {
if (!(key in beforeJson)) {
added[key] = value
continue
}
if (!compareJson(beforeJson[key], value)) {
updated[key] = {
before: beforeJson[key],
after: value,
}
}
}
for (const key of Object.keys(beforeJson)) {
if (!(key in afterJson))
deleted.push(key)
}
changes[fileStem] = {
fileDeleted: readCurrentJson(fileStem) === null,
added,
updated,
deleted,
}
}
fs.writeFileSync(
'/tmp/i18n-changes.json',
JSON.stringify({
baseSha,
headSha,
files,
changes,
})
)
NODE
if [ -n "$CHANGED_FILES" ]; then
echo "has_changes=true" >> "$GITHUB_OUTPUT"
else
@@ -65,7 +147,14 @@ jobs:
script: |
const fs = require('fs')
const diffBase64 = fs.readFileSync('/tmp/i18n-diff.txt').toString('base64')
const changesJson = fs.readFileSync('/tmp/i18n-changes.json', 'utf8')
const changesBase64 = Buffer.from(changesJson).toString('base64')
const maxEmbeddedChangesChars = 48000
const changesEmbedded = changesBase64.length <= maxEmbeddedChangesChars
if (!changesEmbedded) {
console.log(`Structured change set too large to embed safely (${changesBase64.length} chars). Downstream workflow will regenerate it from git history.`)
}
await github.rest.repos.createDispatchEvent({
owner: context.repo.owner,
@@ -73,7 +162,8 @@ jobs:
event_type: 'i18n-sync',
client_payload: {
changed_files: process.env.CHANGED_FILES,
diff_base64: diffBase64,
changes_base64: changesEmbedded ? changesBase64 : '',
changes_embedded: changesEmbedded,
sync_mode: 'incremental',
base_sha: process.env.BASE_SHA,
head_sha: process.env.HEAD_SHA,

View File

@@ -7,15 +7,16 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
_IMAGE_EXTENSION_BASE: frozenset[str] = frozenset(("jpg", "jpeg", "png", "webp", "gif", "svg"))
_VIDEO_EXTENSION_BASE: frozenset[str] = frozenset(("mp4", "mov", "mpeg", "webm"))
_AUDIO_EXTENSION_BASE: frozenset[str] = frozenset(("mp3", "m4a", "wav", "amr", "mpga"))
VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
IMAGE_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_IMAGE_EXTENSION_BASE))
VIDEO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_VIDEO_EXTENSION_BASE))
AUDIO_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_AUDIO_EXTENSION_BASE))
AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = {
_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
(
"txt",
"markdown",
"md",
@@ -35,11 +36,10 @@ if dify_config.ETL_TYPE == "Unstructured":
"pptx",
"xml",
"epub",
}
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.add("ppt")
else:
_doc_extensions = {
)
)
_DEFAULT_DOCUMENT_EXTENSION_BASE: frozenset[str] = frozenset(
(
"txt",
"markdown",
"md",
@@ -53,8 +53,17 @@ else:
"csv",
"vtt",
"properties",
}
DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
)
)
_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
_doc_extensions = set(_UNSTRUCTURED_DOCUMENT_EXTENSION_BASE)
if dify_config.UNSTRUCTURED_API_URL:
_doc_extensions.add("ppt")
else:
_doc_extensions = set(_DEFAULT_DOCUMENT_EXTENSION_BASE)
DOCUMENT_EXTENSIONS: frozenset[str] = frozenset(convert_to_lower_and_upper_set(_doc_extensions))
# console
COOKIE_NAME_ACCESS_TOKEN = "access_token"

View File

@@ -4,8 +4,8 @@ from urllib.parse import quote
from flask import Response
HTML_MIME_TYPES = frozenset({"text/html", "application/xhtml+xml"})
HTML_EXTENSIONS = frozenset({"html", "htm"})
HTML_MIME_TYPES: frozenset[str] = frozenset(("text/html", "application/xhtml+xml"))
HTML_EXTENSIONS: frozenset[str] = frozenset(("html", "htm"))
def _normalize_mime_type(mime_type: str | None) -> str:

View File

@@ -17,7 +17,7 @@ class CSVSanitizer:
"""
# Characters that can start a formula in Excel/LibreOffice/Google Sheets
FORMULA_CHARS = frozenset({"=", "+", "-", "@", "\t", "\r"})
FORMULA_CHARS = frozenset(("=", "+", "-", "@", "\t", "\r"))
@classmethod
def sanitize_value(cls, value: Any) -> str:

View File

@@ -122,6 +122,6 @@ class JiebaKeywordTableHandler:
results.add(token)
sub_tokens = re.findall(r"\w+", token)
if len(sub_tokens) > 1:
results.update({w for w in sub_tokens if w not in list(STOPWORDS)})
results.update({w for w in sub_tokens if w not in STOPWORDS})
return results

File diff suppressed because it is too large Load Diff

View File

@@ -4,7 +4,7 @@ import uuid
from enum import StrEnum
from typing import Any
from clickhouse_connect import get_client
from clickhouse_connect import get_client # type: ignore[import-untyped]
from pydantic import BaseModel
from configs import dify_config

View File

@@ -35,7 +35,7 @@ class PdfExtractor(BaseExtractor):
"""
# Magic bytes for image format detection: (magic_bytes, extension, mime_type)
IMAGE_FORMATS = [
IMAGE_FORMATS: tuple[tuple[bytes, str, str], ...] = (
(b"\xff\xd8\xff", "jpg", "image/jpeg"),
(b"\x89PNG\r\n\x1a\n", "png", "image/png"),
(b"\x00\x00\x00\x0c\x6a\x50\x20\x20\x0d\x0a\x87\x0a", "jp2", "image/jp2"),
@@ -45,7 +45,7 @@ class PdfExtractor(BaseExtractor):
(b"MM\x00*", "tiff", "image/tiff"),
(b"II+\x00", "tiff", "image/tiff"),
(b"MM\x00+", "tiff", "image/tiff"),
]
)
MAX_MAGIC_LEN = max(len(m) for m, _, _ in IMAGE_FORMATS)
def __init__(self, file_path: str, tenant_id: str, user_id: str, file_cache_key: str | None = None):

View File

@@ -5,11 +5,11 @@ TRIGGER_SCHEDULE_NODE_TYPE: Final[str] = "trigger-schedule"
TRIGGER_PLUGIN_NODE_TYPE: Final[str] = "trigger-plugin"
TRIGGER_NODE_TYPES: Final[frozenset[str]] = frozenset(
{
(
TRIGGER_WEBHOOK_NODE_TYPE,
TRIGGER_SCHEDULE_NODE_TYPE,
TRIGGER_PLUGIN_NODE_TYPE,
}
)
)

View File

@@ -8,24 +8,20 @@ from pydantic import BaseModel, Field, field_validator
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
_WEBHOOK_HEADER_ALLOWED_TYPES = frozenset(
{
SegmentType.STRING,
}
)
_WEBHOOK_HEADER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset((SegmentType.STRING,))
_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES = frozenset(
{
_WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
(
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
}
)
)
_WEBHOOK_PARAMETER_ALLOWED_TYPES = _WEBHOOK_HEADER_ALLOWED_TYPES | _WEBHOOK_QUERY_PARAMETER_ALLOWED_TYPES
_WEBHOOK_BODY_ALLOWED_TYPES = frozenset(
{
_WEBHOOK_BODY_ALLOWED_TYPES: frozenset[SegmentType] = frozenset(
(
SegmentType.STRING,
SegmentType.NUMBER,
SegmentType.BOOLEAN,
@@ -35,7 +31,7 @@ _WEBHOOK_BODY_ALLOWED_TYPES = frozenset(
SegmentType.ARRAY_BOOLEAN,
SegmentType.ARRAY_OBJECT,
SegmentType.FILE,
}
)
)

View File

@@ -1,9 +1,12 @@
def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]:
from collections.abc import Collection
def convert_to_lower_and_upper_set(inputs: Collection[str]) -> set[str]:
"""
Convert a list or set of strings to a set containing both lower and upper case versions of each string.
Convert a collection of strings to a set containing both lower and upper case versions of each string.
Args:
inputs (list[str] | set[str]): A list or set of strings to be converted.
inputs (Collection[str]): A collection of strings to be converted.
Returns:
set[str]: A set containing both lower and upper case versions of each string.

View File

@@ -1386,7 +1386,7 @@ class ConversationVariable(TypeBase):
# Only `sys.query` and `sys.files` could be modified.
_EDITABLE_SYSTEM_VARIABLE = frozenset(["query", "files"])
_EDITABLE_SYSTEM_VARIABLE = frozenset(("query", "files"))
class WorkflowDraftVariable(Base):

View File

@@ -800,8 +800,8 @@ class DraftVariableSaver:
# technical variables from being exposed in the draft environment, particularly those
# that aren't meant to be directly edited or viewed by users.
_EXCLUDE_VARIABLE_NAMES_MAPPING: dict[NodeType, frozenset[str]] = {
BuiltinNodeTypes.LLM: frozenset(["finish_reason"]),
BuiltinNodeTypes.LOOP: frozenset(["loop_round"]),
BuiltinNodeTypes.LLM: frozenset(("finish_reason",)),
BuiltinNodeTypes.LOOP: frozenset(("loop_round",)),
}
# Database session used for persisting draft variables.

View File

@@ -1249,9 +1249,9 @@ class TestFileConstants:
"""
def test_image_extensions_set_properties(self):
"""Test that IMAGE_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(IMAGE_EXTENSIONS, set)
"""Test that IMAGE_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(IMAGE_EXTENSIONS, frozenset)
# Should not be empty
assert len(IMAGE_EXTENSIONS) > 0
# Should contain common image formats
@@ -1260,9 +1260,9 @@ class TestFileConstants:
assert ext in IMAGE_EXTENSIONS or ext.upper() in IMAGE_EXTENSIONS
def test_video_extensions_set_properties(self):
"""Test that VIDEO_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(VIDEO_EXTENSIONS, set)
"""Test that VIDEO_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(VIDEO_EXTENSIONS, frozenset)
# Should not be empty
assert len(VIDEO_EXTENSIONS) > 0
# Should contain common video formats
@@ -1271,9 +1271,9 @@ class TestFileConstants:
assert ext in VIDEO_EXTENSIONS or ext.upper() in VIDEO_EXTENSIONS
def test_audio_extensions_set_properties(self):
"""Test that AUDIO_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(AUDIO_EXTENSIONS, set)
"""Test that AUDIO_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(AUDIO_EXTENSIONS, frozenset)
# Should not be empty
assert len(AUDIO_EXTENSIONS) > 0
# Should contain common audio formats
@@ -1282,9 +1282,9 @@ class TestFileConstants:
assert ext in AUDIO_EXTENSIONS or ext.upper() in AUDIO_EXTENSIONS
def test_document_extensions_set_properties(self):
"""Test that DOCUMENT_EXTENSIONS set has expected properties."""
# Assert - Should be a set
assert isinstance(DOCUMENT_EXTENSIONS, set)
"""Test that DOCUMENT_EXTENSIONS frozenset has expected properties."""
# Assert - Should be immutable
assert isinstance(DOCUMENT_EXTENSIONS, frozenset)
# Should not be empty
assert len(DOCUMENT_EXTENSIONS) > 0
# Should contain common document formats

View File

@@ -2,5 +2,6 @@ from core.rag.datasource.keyword.jieba.stopwords import STOPWORDS
def test_stopwords_loaded():
assert isinstance(STOPWORDS, frozenset)
assert "during" in STOPWORDS
assert "the" in STOPWORDS

View File

@@ -1,4 +1,5 @@
import base64
import logging
import uuid
from collections.abc import Sequence
from unittest import mock
@@ -1261,6 +1262,10 @@ def test_llm_node_image_file_to_markdown(llm_node: LLMNode):
class TestSaveMultimodalOutputAndConvertResultToMarkdown:
class _UnknownItem:
def __str__(self) -> str:
return "<unknown-item>"
def test_str_content(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
@@ -1330,18 +1335,23 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown:
def test_unknown_content_type(self, llm_node_for_multimodal):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=frozenset(["hello world"]), file_saver=mock_file_saver, file_outputs=[]
contents=frozenset(("hello world",)), file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == ["hello world"]
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()
def test_unknown_item_type(self, llm_node_for_multimodal):
def test_unknown_item_type(self, llm_node_for_multimodal, caplog):
llm_node, mock_file_saver = llm_node_for_multimodal
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=[frozenset(["hello world"])], file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == ["frozenset({'hello world'})"]
unknown_item = self._UnknownItem()
with caplog.at_level(logging.WARNING, logger="graphon.nodes.llm.node"):
gen = llm_node._save_multimodal_output_and_convert_result_to_markdown(
contents=[unknown_item], file_saver=mock_file_saver, file_outputs=[]
)
assert list(gen) == [str(unknown_item)]
assert "unknown item type encountered" in caplog.text
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()

View File

@@ -837,7 +837,7 @@ class TestBuildSegmentValueErrors:
self.ValueErrorTestCase(
name="frozenset_type",
description="frozenset (unsupported type)",
test_value=frozenset([1, 2, 3]),
test_value=frozenset((1, 2, 3)),
),
self.ValueErrorTestCase(
name="memoryview_type",

View File

@@ -3,89 +3,93 @@ from pathlib import Path
import yaml # type: ignore
from dotenv import dotenv_values
BASE_API_AND_DOCKER_CONFIG_SET_DIFF = {
"APP_MAX_EXECUTION_TIME",
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"CODE_EXECUTION_API_KEY",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OCI_ACCESS_KEY",
"OCI_BUCKET_NAME",
"OCI_ENDPOINT",
"OCI_REGION",
"OCI_SECRET_KEY",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"REDIS_DB",
"RESEND_API_URL",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SENTRY_DSN",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"UPSTASH_VECTOR_TOKEN",
"UPSTASH_VECTOR_URL",
"USING_UGC_INDEX",
"WEAVIATE_BATCH_SIZE",
}
BASE_API_AND_DOCKER_CONFIG_SET_DIFF: frozenset[str] = frozenset(
(
"APP_MAX_EXECUTION_TIME",
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"CODE_EXECUTION_API_KEY",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OCI_ACCESS_KEY",
"OCI_BUCKET_NAME",
"OCI_ENDPOINT",
"OCI_REGION",
"OCI_SECRET_KEY",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"REDIS_DB",
"RESEND_API_URL",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SENTRY_DSN",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"UPSTASH_VECTOR_TOKEN",
"UPSTASH_VECTOR_URL",
"USING_UGC_INDEX",
"WEAVIATE_BATCH_SIZE",
)
)
BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = {
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OPENDAL_FS_ROOT",
"OPENDAL_S3_ACCESS_KEY_ID",
"OPENDAL_S3_BUCKET",
"OPENDAL_S3_ENDPOINT",
"OPENDAL_S3_REGION",
"OPENDAL_S3_ROOT",
"OPENDAL_S3_SECRET_ACCESS_KEY",
"OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
"PGVECTOR_MAX_CONNECTION",
"PGVECTOR_MIN_CONNECTION",
"PGVECTO_RS_DATABASE",
"PGVECTO_RS_HOST",
"PGVECTO_RS_PASSWORD",
"PGVECTO_RS_PORT",
"PGVECTO_RS_USER",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SCARF_NO_ANALYTICS",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"STORAGE_OPENDAL_SCHEME",
"SUPABASE_API_KEY",
"SUPABASE_BUCKET_NAME",
"SUPABASE_URL",
"USING_UGC_INDEX",
"VIKINGDB_CONNECTION_TIMEOUT",
"VIKINGDB_SOCKET_TIMEOUT",
"WEAVIATE_BATCH_SIZE",
}
BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(
(
"BATCH_UPLOAD_LIMIT",
"CELERY_BEAT_SCHEDULER_TIME",
"HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
"HTTP_REQUEST_MAX_READ_TIMEOUT",
"HTTP_REQUEST_MAX_WRITE_TIMEOUT",
"INNER_API_KEY",
"INNER_API_KEY_FOR_PLUGIN",
"KEYWORD_DATA_SOURCE_TYPE",
"LOGIN_LOCKOUT_DURATION",
"LOG_FORMAT",
"OPENDAL_FS_ROOT",
"OPENDAL_S3_ACCESS_KEY_ID",
"OPENDAL_S3_BUCKET",
"OPENDAL_S3_ENDPOINT",
"OPENDAL_S3_REGION",
"OPENDAL_S3_ROOT",
"OPENDAL_S3_SECRET_ACCESS_KEY",
"OPENDAL_S3_SERVER_SIDE_ENCRYPTION",
"PGVECTOR_MAX_CONNECTION",
"PGVECTOR_MIN_CONNECTION",
"PGVECTO_RS_DATABASE",
"PGVECTO_RS_HOST",
"PGVECTO_RS_PASSWORD",
"PGVECTO_RS_PORT",
"PGVECTO_RS_USER",
"PLUGIN_DAEMON_KEY",
"PLUGIN_DAEMON_URL",
"PLUGIN_REMOTE_INSTALL_HOST",
"PLUGIN_REMOTE_INSTALL_PORT",
"RESPECT_XFORWARD_HEADERS_ENABLED",
"SCARF_NO_ANALYTICS",
"SSRF_DEFAULT_CONNECT_TIME_OUT",
"SSRF_DEFAULT_MAX_RETRIES",
"SSRF_DEFAULT_READ_TIME_OUT",
"SSRF_DEFAULT_TIME_OUT",
"SSRF_DEFAULT_WRITE_TIME_OUT",
"STORAGE_OPENDAL_SCHEME",
"SUPABASE_API_KEY",
"SUPABASE_BUCKET_NAME",
"SUPABASE_URL",
"USING_UGC_INDEX",
"VIKINGDB_CONNECTION_TIMEOUT",
"VIKINGDB_SOCKET_TIMEOUT",
"WEAVIATE_BATCH_SIZE",
)
)
API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())

215
pnpm-lock.yaml generated
View File

@@ -235,8 +235,8 @@ catalogs:
specifier: 0.5.21
version: 0.5.21
'@vitest/coverage-v8':
specifier: 4.1.2
version: 4.1.2
specifier: 4.1.1
version: 4.1.1
abcjs:
specifier: 6.6.2
version: 6.6.2
@@ -570,7 +570,6 @@ overrides:
array.prototype.flatmap: npm:@nolyfill/array.prototype.flatmap@^1.0.44
array.prototype.tosorted: npm:@nolyfill/array.prototype.tosorted@^1.0.44
assert: npm:@nolyfill/assert@^1.0.26
axios: 1.14.0
brace-expansion@<2.0.2: 2.0.2
canvas: ^3.2.2
devalue@<5.3.2: 5.3.2
@@ -648,10 +647,6 @@ importers:
version: 0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
sdks/nodejs-client:
dependencies:
axios:
specifier: 1.14.0
version: 1.14.0
devDependencies:
'@eslint/js':
specifier: 'catalog:'
@@ -667,7 +662,7 @@ importers:
version: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@5.9.3)
'@vitest/coverage-v8':
specifier: 'catalog:'
version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
eslint:
specifier: 'catalog:'
version: 10.1.0(jiti@2.6.1)
@@ -1124,7 +1119,7 @@ importers:
version: 0.5.21(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(react-dom@19.2.4(react@19.2.4))(react-server-dom-webpack@19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3)))(react@19.2.4)
'@vitest/coverage-v8':
specifier: 'catalog:'
version: 4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
version: 4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))
agentation:
specifier: 'catalog:'
version: 3.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -2405,10 +2400,6 @@ packages:
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@nolyfill/hasown@1.0.44':
resolution: {integrity: sha512-GA/21lkTr2PAQuT6jGnhLuBD5IFd/AEhBXJ/tf33+/bVxPxg+5ejKx9jGQGnyV/P0eSmdup5E+s8b2HL6lOrwQ==}
engines: {node: '>=12.4.0'}
'@nolyfill/is-core-module@1.0.39':
resolution: {integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==}
engines: {node: '>=12.4.0'}
@@ -4440,11 +4431,11 @@ packages:
react-server-dom-webpack:
optional: true
'@vitest/coverage-v8@4.1.2':
resolution: {integrity: sha512-sPK//PHO+kAkScb8XITeB1bf7fsk85Km7+rt4eeuRR3VS1/crD47cmV5wicisJmjNdfeokTZwjMk4Mj2d58Mgg==}
'@vitest/coverage-v8@4.1.1':
resolution: {integrity: sha512-nZ4RWwGCoGOQRMmU/Q9wlUY540RVRxJZ9lxFsFfy0QV7Zmo5VVBhB6Sl9Xa0KIp2iIs3zWfPlo9LcY1iqbpzCw==}
peerDependencies:
'@vitest/browser': 4.1.2
vitest: 4.1.2
'@vitest/browser': 4.1.1
vitest: 4.1.1
peerDependenciesMeta:
'@vitest/browser':
optional: true
@@ -4471,8 +4462,8 @@ packages:
'@vitest/pretty-format@3.2.4':
resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==}
'@vitest/pretty-format@4.1.2':
resolution: {integrity: sha512-dwQga8aejqeuB+TvXCMzSQemvV9hNEtDDpgUKDzOmNQayl2OG241PSWeJwKRH3CiC+sESrmoFd49rfnq7T4RnA==}
'@vitest/pretty-format@4.1.1':
resolution: {integrity: sha512-GM+TEQN5WhOygr1lp7skeVjdLPqqWMHsfzXrcHAqZJi/lIVh63H0kaRCY8MDhNWikx19zBUK8ceaLB7X5AH9NQ==}
'@vitest/spy@3.2.4':
resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==}
@@ -4480,8 +4471,8 @@ packages:
'@vitest/utils@3.2.4':
resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==}
'@vitest/utils@4.1.2':
resolution: {integrity: sha512-xw2/TiX82lQHA06cgbqRKFb5lCAy3axQ4H4SoUFhUsg+wztiet+co86IAMDtF6Vm1hc7J6j09oh/rgDn+JdKIQ==}
'@vitest/utils@4.1.1':
resolution: {integrity: sha512-cNxAlaB3sHoCdL6pj6yyUXv9Gry1NHNg0kFTXdvSIZXLHsqKH7chiWOkwJ5s5+d/oMwcoG9T0bKU38JZWKusrQ==}
'@voidzero-dev/vite-plus-core@0.1.14':
resolution: {integrity: sha512-CCWzdkfW0fo0cQNlIsYp5fOuH2IwKuPZEb2UY2Z8gXcp5pG74A82H2Pthj0heAuvYTAnfT7kEC6zM+RbiBgQbg==}
@@ -4841,9 +4832,6 @@ packages:
async@3.2.6:
resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==}
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
autoprefixer@10.4.27:
resolution: {integrity: sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==}
engines: {node: ^10 || ^12 || >=14}
@@ -4851,9 +4839,6 @@ packages:
peerDependencies:
postcss: ^8.1.0
axios@1.14.0:
resolution: {integrity: sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==}
bail@2.0.2:
resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==}
@@ -4951,10 +4936,6 @@ packages:
resolution: {integrity: sha512-tixWYgm5ZoOD+3g6UTea91eow5z6AAHaho3g0V9CNSNb45gM8SmflpAc+GRd1InC4AqN/07Unrgp56Y94N9hJQ==}
engines: {node: '>=20.19.0'}
call-bind-apply-helpers@1.0.2:
resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
engines: {node: '>= 0.4'}
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
@@ -5126,10 +5107,6 @@ packages:
colorette@2.0.20:
resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==}
combined-stream@1.0.8:
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
engines: {node: '>= 0.8'}
comma-separated-tokens@1.0.8:
resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
@@ -5464,10 +5441,6 @@ packages:
delaunator@5.1.0:
resolution: {integrity: sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ==}
delayed-stream@1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dequal@2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'}
@@ -5533,10 +5506,6 @@ packages:
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
engines: {node: '>=12'}
dunder-proto@1.0.1:
resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
engines: {node: '>= 0.4'}
echarts-for-react@3.0.6:
resolution: {integrity: sha512-4zqLgTGWS3JvkQDXjzkR1k1CHRdpd6by0988TWMJgnvDytegWLbeP/VNZmMa+0VJx2eD7Y632bi2JquXDgiGJg==}
peerDependencies:
@@ -5613,28 +5582,12 @@ packages:
error-stack-parser@2.1.4:
resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==}
es-define-property@1.0.1:
resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==}
engines: {node: '>= 0.4'}
es-errors@1.3.0:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
es-module-lexer@1.7.0:
resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
es-module-lexer@2.0.0:
resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==}
es-object-atoms@1.1.1:
resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
engines: {node: '>= 0.4'}
es-set-tostringtag@2.1.0:
resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==}
engines: {node: '>= 0.4'}
es-toolkit@1.45.1:
resolution: {integrity: sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==}
@@ -6115,19 +6068,6 @@ packages:
flatted@3.4.2:
resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==}
follow-redirects@1.15.11:
resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
form-data@4.0.5:
resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==}
engines: {node: '>= 6'}
format@0.2.2:
resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
engines: {node: '>=0.4.x'}
@@ -6164,9 +6104,6 @@ packages:
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
functional-red-black-tree@1.0.1:
resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==}
@@ -6181,18 +6118,10 @@ packages:
resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==}
engines: {node: '>=18'}
get-intrinsic@1.3.0:
resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
engines: {node: '>= 0.4'}
get-nonce@1.0.1:
resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==}
engines: {node: '>=6'}
get-proto@1.0.1:
resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
engines: {node: '>= 0.4'}
get-stream@5.2.0:
resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==}
engines: {node: '>=8'}
@@ -6249,10 +6178,6 @@ packages:
peerDependencies:
csstype: ^3.0.10
gopd@1.2.0:
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
engines: {node: '>= 0.4'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
@@ -6271,14 +6196,6 @@ packages:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
has-symbols@1.1.0:
resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
engines: {node: '>= 0.4'}
has-tostringtag@1.0.2:
resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
engines: {node: '>= 0.4'}
hast-util-from-dom@5.0.1:
resolution: {integrity: sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==}
@@ -6920,10 +6837,6 @@ packages:
engines: {node: '>= 20'}
hasBin: true
math-intrinsics@1.1.0:
resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
engines: {node: '>= 0.4'}
mdast-util-directive@3.1.0:
resolution: {integrity: sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==}
@@ -7651,10 +7564,6 @@ packages:
property-information@7.1.0:
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
proxy-from-env@2.1.0:
resolution: {integrity: sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==}
engines: {node: '>=10'}
pump@3.0.4:
resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==}
@@ -10497,8 +10406,6 @@ snapshots:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.20.1
'@nolyfill/hasown@1.0.44': {}
'@nolyfill/is-core-module@1.0.39': {}
'@nolyfill/safer-buffer@1.0.44': {}
@@ -12354,10 +12261,10 @@ snapshots:
optionalDependencies:
react-server-dom-webpack: 19.2.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(webpack@5.105.4(esbuild@0.27.2)(uglify-js@3.19.3))
'@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
'@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
'@vitest/utils': 4.1.2
'@vitest/utils': 4.1.1
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12368,10 +12275,10 @@ snapshots:
tinyrainbow: 3.1.0
vitest: '@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(@voidzero-dev/vite-plus-core@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@1.21.7)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3)'
'@vitest/coverage-v8@4.1.2(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
'@vitest/coverage-v8@4.1.1(@voidzero-dev/vite-plus-test@0.1.14(@types/node@25.5.0)(esbuild@0.27.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@5.9.3)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
dependencies:
'@bcoe/v8-coverage': 1.0.2
'@vitest/utils': 4.1.2
'@vitest/utils': 4.1.1
ast-v8-to-istanbul: 1.0.0
istanbul-lib-coverage: 3.2.2
istanbul-lib-report: 3.0.1
@@ -12406,7 +12313,7 @@ snapshots:
dependencies:
tinyrainbow: 2.0.0
'@vitest/pretty-format@4.1.2':
'@vitest/pretty-format@4.1.1':
dependencies:
tinyrainbow: 3.1.0
@@ -12420,9 +12327,9 @@ snapshots:
loupe: 3.2.1
tinyrainbow: 2.0.0
'@vitest/utils@4.1.2':
'@vitest/utils@4.1.1':
dependencies:
'@vitest/pretty-format': 4.1.2
'@vitest/pretty-format': 4.1.1
convert-source-map: 2.0.0
tinyrainbow: 3.1.0
@@ -12816,8 +12723,6 @@ snapshots:
async@3.2.6: {}
asynckit@0.4.0: {}
autoprefixer@10.4.27(postcss@8.5.8):
dependencies:
browserslist: 4.28.1
@@ -12827,14 +12732,6 @@ snapshots:
postcss: 8.5.8
postcss-value-parser: 4.2.0
axios@1.14.0:
dependencies:
follow-redirects: 1.15.11
form-data: 4.0.5
proxy-from-env: 2.1.0
transitivePeerDependencies:
- debug
bail@2.0.2: {}
balanced-match@1.0.2: {}
@@ -12914,11 +12811,6 @@ snapshots:
cac@7.0.0: {}
call-bind-apply-helpers@1.0.2:
dependencies:
es-errors: 1.3.0
function-bind: 1.1.2
callsites@3.1.0: {}
camelcase-css@2.0.1: {}
@@ -13108,10 +13000,6 @@ snapshots:
colorette@2.0.20: {}
combined-stream@1.0.8:
dependencies:
delayed-stream: 1.0.0
comma-separated-tokens@1.0.8: {}
comma-separated-tokens@2.0.3: {}
@@ -13441,8 +13329,6 @@ snapshots:
dependencies:
robust-predicates: 3.0.3
delayed-stream@1.0.0: {}
dequal@2.0.3: {}
destr@2.0.5: {}
@@ -13499,12 +13385,6 @@ snapshots:
dotenv@16.6.1: {}
dunder-proto@1.0.1:
dependencies:
call-bind-apply-helpers: 1.0.2
es-errors: 1.3.0
gopd: 1.2.0
echarts-for-react@3.0.6(echarts@6.0.0)(react@19.2.4):
dependencies:
echarts: 6.0.0
@@ -13571,25 +13451,10 @@ snapshots:
dependencies:
stackframe: 1.3.4
es-define-property@1.0.1: {}
es-errors@1.3.0: {}
es-module-lexer@1.7.0: {}
es-module-lexer@2.0.0: {}
es-object-atoms@1.1.1:
dependencies:
es-errors: 1.3.0
es-set-tostringtag@2.1.0:
dependencies:
es-errors: 1.3.0
get-intrinsic: 1.3.0
has-tostringtag: 1.0.2
hasown: '@nolyfill/hasown@1.0.44'
es-toolkit@1.45.1: {}
esast-util-from-estree@2.0.0:
@@ -14344,16 +14209,6 @@ snapshots:
flatted@3.4.2: {}
follow-redirects@1.15.11: {}
form-data@4.0.5:
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
es-set-tostringtag: 2.1.0
hasown: '@nolyfill/hasown@1.0.44'
mime-types: 2.1.35
format@0.2.2: {}
formatly@0.3.0:
@@ -14380,8 +14235,6 @@ snapshots:
fsevents@2.3.3:
optional: true
function-bind@1.1.2: {}
functional-red-black-tree@1.0.1: {}
fzf@0.5.2: {}
@@ -14390,26 +14243,8 @@ snapshots:
get-east-asian-width@1.5.0: {}
get-intrinsic@1.3.0:
dependencies:
call-bind-apply-helpers: 1.0.2
es-define-property: 1.0.1
es-errors: 1.3.0
es-object-atoms: 1.1.1
function-bind: 1.1.2
get-proto: 1.0.1
gopd: 1.2.0
has-symbols: 1.1.0
hasown: '@nolyfill/hasown@1.0.44'
math-intrinsics: 1.1.0
get-nonce@1.0.1: {}
get-proto@1.0.1:
dependencies:
dunder-proto: 1.0.1
es-object-atoms: 1.1.1
get-stream@5.2.0:
dependencies:
pump: 3.0.4
@@ -14457,8 +14292,6 @@ snapshots:
dependencies:
csstype: 3.2.3
gopd@1.2.0: {}
graceful-fs@4.2.11: {}
hachure-fill@0.5.2: {}
@@ -14481,12 +14314,6 @@ snapshots:
has-flag@4.0.0: {}
has-symbols@1.1.0: {}
has-tostringtag@1.0.2:
dependencies:
has-symbols: 1.1.0
hast-util-from-dom@5.0.1:
dependencies:
'@types/hast': 3.0.4
@@ -15127,8 +14954,6 @@ snapshots:
marked@17.0.5: {}
math-intrinsics@1.1.0: {}
mdast-util-directive@3.1.0:
dependencies:
'@types/mdast': 4.0.4
@@ -16267,8 +16092,6 @@ snapshots:
property-information@7.1.0: {}
proxy-from-env@2.1.0: {}
pump@3.0.4:
dependencies:
end-of-stream: 1.4.5

View File

@@ -22,7 +22,6 @@ overrides:
array.prototype.flatmap: npm:@nolyfill/array.prototype.flatmap@^1.0.44
array.prototype.tosorted: npm:@nolyfill/array.prototype.tosorted@^1.0.44
assert: npm:@nolyfill/assert@^1.0.26
axios: 1.14.0
brace-expansion@<2.0.2: 2.0.2
canvas: ^3.2.2
devalue@<5.3.2: 5.3.2
@@ -147,12 +146,11 @@ catalog:
"@typescript/native-preview": 7.0.0-dev.20260329.1
"@vitejs/plugin-react": 6.0.1
"@vitejs/plugin-rsc": 0.5.21
"@vitest/coverage-v8": 4.1.2
"@vitest/coverage-v8": 4.1.1
abcjs: 6.6.2
agentation: 3.0.2
ahooks: 3.9.7
autoprefixer: 10.4.27
axios: 1.14.0
class-variance-authority: 0.7.1
clsx: 2.1.1
cmdk: 1.1.1

View File

@@ -12,11 +12,11 @@ const typeCheckedRules =
export default [
{
ignores: ["dist", "node_modules", "scripts", "tests", "**/*.test.*", "**/*.spec.*"],
ignores: ["dist", "node_modules", "scripts"],
},
js.configs.recommended,
{
files: ["src/**/*.ts"],
files: ["src/**/*.ts", "tests/**/*.ts"],
languageOptions: {
parser: tsParser,
ecmaVersion: "latest",

View File

@@ -1,6 +1,6 @@
{
"name": "dify-client",
"version": "3.0.0",
"version": "3.1.0",
"description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.",
"type": "module",
"main": "./dist/index.js",
@@ -15,7 +15,8 @@
"node": ">=18.0.0"
},
"files": [
"dist",
"dist/index.js",
"dist/index.d.ts",
"README.md",
"LICENSE"
],
@@ -53,9 +54,6 @@
"publish:check": "./scripts/publish.sh --dry-run",
"publish:npm": "./scripts/publish.sh"
},
"dependencies": {
"axios": "catalog:"
},
"devDependencies": {
"@eslint/js": "catalog:",
"@types/node": "catalog:",

View File

@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { DifyClient } from "./base";
import { ValidationError } from "../errors/dify-error";
import { DifyClient } from "./base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("DifyClient base", () => {
@@ -103,7 +103,7 @@ describe("DifyClient base", () => {
});
});
it("filePreview uses arraybuffer response", async () => {
it("filePreview uses bytes response", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
@@ -113,7 +113,7 @@ describe("DifyClient base", () => {
method: "GET",
path: "/files/file/preview",
query: { user: "user", as_attachment: "true" },
responseType: "arraybuffer",
responseType: "bytes",
});
});
@@ -162,11 +162,11 @@ describe("DifyClient base", () => {
streaming: false,
voice: "voice",
},
responseType: "arraybuffer",
responseType: "bytes",
});
});
it("textToAudio requires text or message id", async () => {
it("textToAudio requires text or message id", () => {
const { client } = createHttpClientWithSpies();
const dify = new DifyClient(client);

View File

@@ -2,14 +2,18 @@ import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
JsonObject,
MessageFeedbackRequest,
QueryParams,
RequestMethod,
SuccessResponse,
TextToAudioRequest,
} from "../types/common";
import type { HttpRequestBody } from "../http/client";
import { HttpClient } from "../http/client";
import { ensureNonEmptyString, ensureRating } from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
const toConfig = (
@@ -25,13 +29,8 @@ const toConfig = (
return init;
};
const appendUserToFormData = (form: unknown, user: string): void => {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
if (typeof form.append === "function") {
form.append("user", user);
}
const appendUserToFormData = (form: SdkFormData, user: string): void => {
form.append("user", user);
};
export class DifyClient {
@@ -57,7 +56,7 @@ export class DifyClient {
sendRequest(
method: RequestMethod,
endpoint: string,
data: unknown = null,
data: HttpRequestBody = null,
params: QueryParams | null = null,
stream = false,
headerParams: Record<string, string> = {}
@@ -72,14 +71,14 @@ export class DifyClient {
});
}
getRoot(): Promise<DifyResponse<unknown>> {
getRoot(): Promise<DifyResponse<JsonObject>> {
return this.http.request({
method: "GET",
path: "/",
});
}
getApplicationParameters(user?: string): Promise<DifyResponse<unknown>> {
getApplicationParameters(user?: string): Promise<DifyResponse<JsonObject>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -90,11 +89,11 @@ export class DifyClient {
});
}
async getParameters(user?: string): Promise<DifyResponse<unknown>> {
async getParameters(user?: string): Promise<DifyResponse<JsonObject>> {
return this.getApplicationParameters(user);
}
getMeta(user?: string): Promise<DifyResponse<unknown>> {
getMeta(user?: string): Promise<DifyResponse<JsonObject>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -107,21 +106,21 @@ export class DifyClient {
messageFeedback(
request: MessageFeedbackRequest
): Promise<DifyResponse<Record<string, unknown>>>;
): Promise<DifyResponse<SuccessResponse>>;
messageFeedback(
messageId: string,
rating: "like" | "dislike" | null,
user: string,
content?: string
): Promise<DifyResponse<Record<string, unknown>>>;
): Promise<DifyResponse<SuccessResponse>>;
messageFeedback(
messageIdOrRequest: string | MessageFeedbackRequest,
rating?: "like" | "dislike" | null,
user?: string,
content?: string
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<SuccessResponse>> {
let messageId: string;
const payload: Record<string, unknown> = {};
const payload: JsonObject = {};
if (typeof messageIdOrRequest === "string") {
messageId = messageIdOrRequest;
@@ -157,7 +156,7 @@ export class DifyClient {
});
}
getInfo(user?: string): Promise<DifyResponse<unknown>> {
getInfo(user?: string): Promise<DifyResponse<JsonObject>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -168,7 +167,7 @@ export class DifyClient {
});
}
getSite(user?: string): Promise<DifyResponse<unknown>> {
getSite(user?: string): Promise<DifyResponse<JsonObject>> {
if (user) {
ensureNonEmptyString(user, "user");
}
@@ -179,7 +178,7 @@ export class DifyClient {
});
}
fileUpload(form: unknown, user: string): Promise<DifyResponse<unknown>> {
fileUpload(form: unknown, user: string): Promise<DifyResponse<JsonObject>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
@@ -199,18 +198,18 @@ export class DifyClient {
): Promise<DifyResponse<Buffer>> {
ensureNonEmptyString(fileId, "fileId");
ensureNonEmptyString(user, "user");
return this.http.request<Buffer>({
return this.http.request<Buffer, "bytes">({
method: "GET",
path: `/files/${fileId}/preview`,
query: {
user,
as_attachment: asAttachment ? "true" : undefined,
},
responseType: "arraybuffer",
responseType: "bytes",
});
}
audioToText(form: unknown, user: string): Promise<DifyResponse<unknown>> {
audioToText(form: unknown, user: string): Promise<DifyResponse<JsonObject>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for audio uploads");
}
@@ -274,11 +273,11 @@ export class DifyClient {
});
}
return this.http.request<Buffer>({
return this.http.request<Buffer, "bytes">({
method: "POST",
path: "/text-to-audio",
data: payload,
responseType: "arraybuffer",
responseType: "bytes",
});
}
}

View File

@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { ChatClient } from "./chat";
import { ValidationError } from "../errors/dify-error";
import { ChatClient } from "./chat";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("ChatClient", () => {
@@ -156,13 +156,13 @@ describe("ChatClient", () => {
});
});
it("requires name when autoGenerate is false", async () => {
it("requires name when autoGenerate is false", () => {
const { client } = createHttpClientWithSpies();
const chat = new ChatClient(client);
expect(() =>
chat.renameConversation("conv", "", "user", false)
).toThrow(ValidationError);
expect(() => chat.renameConversation("conv", "", "user", false)).toThrow(
ValidationError
);
});
it("deletes conversations", async () => {

View File

@@ -1,5 +1,9 @@
import { DifyClient } from "./base";
import type { ChatMessageRequest, ChatMessageResponse } from "../types/chat";
import type {
ChatMessageRequest,
ChatMessageResponse,
ConversationSortBy,
} from "../types/chat";
import type {
AnnotationCreateRequest,
AnnotationListOptions,
@@ -9,7 +13,11 @@ import type {
import type {
DifyResponse,
DifyStream,
JsonObject,
JsonValue,
QueryParams,
SuccessResponse,
SuggestedQuestionsResponse,
} from "../types/common";
import {
ensureNonEmptyString,
@@ -22,20 +30,20 @@ export class ChatClient extends DifyClient {
request: ChatMessageRequest
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputs: Record<string, unknown>,
inputs: JsonObject,
query: string,
user: string,
stream?: boolean,
conversationId?: string | null,
files?: Array<Record<string, unknown>> | null
files?: ChatMessageRequest["files"]
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputOrRequest: ChatMessageRequest | Record<string, unknown>,
inputOrRequest: ChatMessageRequest | JsonObject,
query?: string,
user?: string,
stream = false,
conversationId?: string | null,
files?: Array<Record<string, unknown>> | null
files?: ChatMessageRequest["files"]
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>> {
let payload: ChatMessageRequest;
let shouldStream = stream;
@@ -46,8 +54,8 @@ export class ChatClient extends DifyClient {
} else {
ensureNonEmptyString(query, "query");
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
payload = {
inputs: inputOrRequest,
query,
user,
response_mode: stream ? "streaming" : "blocking",
@@ -79,10 +87,10 @@ export class ChatClient extends DifyClient {
stopChatMessage(
taskId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
): Promise<DifyResponse<SuccessResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<ChatMessageResponse>({
return this.http.request<SuccessResponse>({
method: "POST",
path: `/chat-messages/${taskId}/stop`,
data: { user },
@@ -92,17 +100,17 @@ export class ChatClient extends DifyClient {
stopMessage(
taskId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
): Promise<DifyResponse<SuccessResponse>> {
return this.stopChatMessage(taskId, user);
}
getSuggested(
messageId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
): Promise<DifyResponse<SuggestedQuestionsResponse>> {
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(user, "user");
return this.http.request<ChatMessageResponse>({
return this.http.request<SuggestedQuestionsResponse>({
method: "GET",
path: `/messages/${messageId}/suggested`,
query: { user },
@@ -114,7 +122,7 @@ export class ChatClient extends DifyClient {
getAppFeedbacks(
page?: number,
limit?: number
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<JsonObject>> {
ensureOptionalInt(page, "page");
ensureOptionalInt(limit, "limit");
return this.http.request({
@@ -131,8 +139,8 @@ export class ChatClient extends DifyClient {
user: string,
lastId?: string | null,
limit?: number | null,
sortByOrPinned?: string | boolean | null
): Promise<DifyResponse<Record<string, unknown>>> {
sortBy?: ConversationSortBy | null
): Promise<DifyResponse<JsonObject>> {
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
ensureOptionalInt(limit, "limit");
@@ -144,10 +152,8 @@ export class ChatClient extends DifyClient {
if (limit) {
params.limit = limit;
}
if (typeof sortByOrPinned === "string") {
params.sort_by = sortByOrPinned;
} else if (typeof sortByOrPinned === "boolean") {
params.pinned = sortByOrPinned;
if (sortBy) {
params.sort_by = sortBy;
}
return this.http.request({
@@ -162,7 +168,7 @@ export class ChatClient extends DifyClient {
conversationId: string,
firstId?: string | null,
limit?: number | null
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<JsonObject>> {
ensureNonEmptyString(user, "user");
ensureNonEmptyString(conversationId, "conversationId");
ensureOptionalString(firstId, "firstId");
@@ -189,18 +195,18 @@ export class ChatClient extends DifyClient {
name: string,
user: string,
autoGenerate?: boolean
): Promise<DifyResponse<Record<string, unknown>>>;
): Promise<DifyResponse<JsonObject>>;
renameConversation(
conversationId: string,
user: string,
options?: { name?: string | null; autoGenerate?: boolean }
): Promise<DifyResponse<Record<string, unknown>>>;
): Promise<DifyResponse<JsonObject>>;
renameConversation(
conversationId: string,
nameOrUser: string,
userOrOptions?: string | { name?: string | null; autoGenerate?: boolean },
autoGenerate?: boolean
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<JsonObject>> {
ensureNonEmptyString(conversationId, "conversationId");
let name: string | null | undefined;
@@ -222,7 +228,7 @@ export class ChatClient extends DifyClient {
ensureNonEmptyString(name, "name");
}
const payload: Record<string, unknown> = {
const payload: JsonObject = {
user,
auto_generate: resolvedAutoGenerate,
};
@@ -240,7 +246,7 @@ export class ChatClient extends DifyClient {
deleteConversation(
conversationId: string,
user: string
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<SuccessResponse>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
return this.http.request({
@@ -256,7 +262,7 @@ export class ChatClient extends DifyClient {
lastId?: string | null,
limit?: number | null,
variableName?: string | null
): Promise<DifyResponse<Record<string, unknown>>> {
): Promise<DifyResponse<JsonObject>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
@@ -279,8 +285,8 @@ export class ChatClient extends DifyClient {
conversationId: string,
variableId: string,
user: string,
value: unknown
): Promise<DifyResponse<Record<string, unknown>>> {
value: JsonValue
): Promise<DifyResponse<JsonObject>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(variableId, "variableId");
ensureNonEmptyString(user, "user");

View File

@@ -1,6 +1,11 @@
import { DifyClient } from "./base";
import type { CompletionRequest, CompletionResponse } from "../types/completion";
import type { DifyResponse, DifyStream } from "../types/common";
import type {
DifyResponse,
DifyStream,
JsonObject,
SuccessResponse,
} from "../types/common";
import { ensureNonEmptyString } from "./validation";
const warned = new Set<string>();
@@ -17,16 +22,16 @@ export class CompletionClient extends DifyClient {
request: CompletionRequest
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputs: Record<string, unknown>,
inputs: JsonObject,
user: string,
stream?: boolean,
files?: Array<Record<string, unknown>> | null
files?: CompletionRequest["files"]
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputOrRequest: CompletionRequest | Record<string, unknown>,
inputOrRequest: CompletionRequest | JsonObject,
user?: string,
stream = false,
files?: Array<Record<string, unknown>> | null
files?: CompletionRequest["files"]
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>> {
let payload: CompletionRequest;
let shouldStream = stream;
@@ -37,7 +42,7 @@ export class CompletionClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
inputs: inputOrRequest,
user,
files,
response_mode: stream ? "streaming" : "blocking",
@@ -64,10 +69,10 @@ export class CompletionClient extends DifyClient {
stopCompletionMessage(
taskId: string,
user: string
): Promise<DifyResponse<CompletionResponse>> {
): Promise<DifyResponse<SuccessResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<CompletionResponse>({
return this.http.request<SuccessResponse>({
method: "POST",
path: `/completion-messages/${taskId}/stop`,
data: { user },
@@ -77,15 +82,15 @@ export class CompletionClient extends DifyClient {
stop(
taskId: string,
user: string
): Promise<DifyResponse<CompletionResponse>> {
): Promise<DifyResponse<SuccessResponse>> {
return this.stopCompletionMessage(taskId, user);
}
runWorkflow(
inputs: Record<string, unknown>,
inputs: JsonObject,
user: string,
stream = false
): Promise<DifyResponse<Record<string, unknown>> | DifyStream<Record<string, unknown>>> {
): Promise<DifyResponse<JsonObject> | DifyStream<JsonObject>> {
warnOnce(
"CompletionClient.runWorkflow is deprecated. Use WorkflowClient.run instead."
);
@@ -96,13 +101,13 @@ export class CompletionClient extends DifyClient {
response_mode: stream ? "streaming" : "blocking",
};
if (stream) {
return this.http.requestStream<Record<string, unknown>>({
return this.http.requestStream<JsonObject>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
return this.http.request<Record<string, unknown>>({
return this.http.request<JsonObject>({
method: "POST",
path: "/workflows/run",
data: payload,

View File

@@ -1,4 +1,5 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import { KnowledgeBaseClient } from "./knowledge-base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
@@ -174,7 +175,6 @@ describe("KnowledgeBaseClient", () => {
it("handles pipeline operations", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
const warn = vi.spyOn(console, "warn").mockImplementation(() => {});
const form = { append: vi.fn(), getHeaders: () => ({}) };
await kb.listDatasourcePlugins("ds", { isPublished: true });
@@ -201,7 +201,6 @@ describe("KnowledgeBaseClient", () => {
});
await kb.uploadPipelineFile(form);
expect(warn).toHaveBeenCalled();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/pipeline/datasource-plugins",
@@ -246,4 +245,22 @@ describe("KnowledgeBaseClient", () => {
data: form,
});
});
it("validates form-data and optional array filters", async () => {
const { client } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
await expect(kb.createDocumentByFile("ds", {})).rejects.toBeInstanceOf(
FileUploadError
);
await expect(
kb.listSegments("ds", "doc", { status: ["ok", 1] as unknown as string[] })
).rejects.toBeInstanceOf(ValidationError);
await expect(
kb.hitTesting("ds", {
query: "q",
attachment_ids: ["att-1", 2] as unknown as string[],
})
).rejects.toBeInstanceOf(ValidationError);
});
});

View File

@@ -38,22 +38,17 @@ import {
ensureStringArray,
} from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import type { SdkFormData } from "../http/form-data";
import { isFormData } from "../http/form-data";
const warned = new Set<string>();
const warnOnce = (message: string): void => {
if (warned.has(message)) {
return;
}
warned.add(message);
console.warn(message);
};
const ensureFormData = (form: unknown, context: string): void => {
function ensureFormData(
form: unknown,
context: string
): asserts form is SdkFormData {
if (!isFormData(form)) {
throw new FileUploadError(`${context} requires FormData`);
}
};
}
const ensureNonEmptyArray = (value: unknown, name: string): void => {
if (!Array.isArray(value) || value.length === 0) {
@@ -61,12 +56,6 @@ const ensureNonEmptyArray = (value: unknown, name: string): void => {
}
};
const warnPipelineRoutes = (): void => {
warnOnce(
"RAG pipeline endpoints may be unavailable unless the service API registers dataset/rag_pipeline routes."
);
};
export class KnowledgeBaseClient extends DifyClient {
async listDatasets(
options?: DatasetListOptions
@@ -641,7 +630,6 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
options?: DatasourcePluginListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureOptionalBoolean(options?.isPublished, "isPublished");
return this.http.request({
@@ -658,7 +646,6 @@ export class KnowledgeBaseClient extends DifyClient {
nodeId: string,
request: DatasourceNodeRunRequest
): Promise<DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(nodeId, "nodeId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
@@ -673,7 +660,6 @@ export class KnowledgeBaseClient extends DifyClient {
datasetId: string,
request: PipelineRunRequest
): Promise<DifyResponse<KnowledgeBaseResponse> | DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
ensureNonEmptyString(request.start_node_id, "start_node_id");
@@ -695,7 +681,6 @@ export class KnowledgeBaseClient extends DifyClient {
async uploadPipelineFile(
form: unknown
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureFormData(form, "uploadPipelineFile");
return this.http.request({
method: "POST",

View File

@@ -10,7 +10,7 @@ import {
validateParams,
} from "./validation";
const makeLongString = (length) => "a".repeat(length);
const makeLongString = (length: number) => "a".repeat(length);
describe("validation utilities", () => {
it("ensureNonEmptyString throws on empty or whitespace", () => {
@@ -19,9 +19,7 @@ describe("validation utilities", () => {
});
it("ensureNonEmptyString throws on overly long strings", () => {
expect(() =>
ensureNonEmptyString(makeLongString(10001), "name")
).toThrow();
expect(() => ensureNonEmptyString(makeLongString(10001), "name")).toThrow();
});
it("ensureOptionalString ignores undefined and validates when set", () => {
@@ -73,7 +71,6 @@ describe("validation utilities", () => {
expect(() => validateParams({ rating: "bad" })).toThrow();
expect(() => validateParams({ page: 1.1 })).toThrow();
expect(() => validateParams({ files: "bad" })).toThrow();
// Empty strings are allowed for optional params (e.g., keyword: "" means no filter)
expect(() => validateParams({ keyword: "" })).not.toThrow();
expect(() => validateParams({ name: makeLongString(10001) })).toThrow();
expect(() =>

View File

@@ -1,4 +1,5 @@
import { ValidationError } from "../errors/dify-error";
import { isRecord } from "../internal/type-guards";
const MAX_STRING_LENGTH = 10000;
const MAX_LIST_LENGTH = 1000;
@@ -109,8 +110,8 @@ export function validateParams(params: Record<string, unknown>): void {
`Parameter '${key}' exceeds maximum size of ${MAX_LIST_LENGTH} items`
);
}
} else if (typeof value === "object") {
if (Object.keys(value as Record<string, unknown>).length > MAX_DICT_LENGTH) {
} else if (isRecord(value)) {
if (Object.keys(value).length > MAX_DICT_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum size of ${MAX_DICT_LENGTH} items`
);

View File

@@ -90,7 +90,6 @@ describe("WorkflowClient", () => {
const { client, request } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
// Use createdByEndUserSessionId to filter by user session (backend API parameter)
await workflow.getLogs({
keyword: "k",
status: "succeeded",

View File

@@ -1,6 +1,12 @@
import { DifyClient } from "./base";
import type { WorkflowRunRequest, WorkflowRunResponse } from "../types/workflow";
import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
import type {
DifyResponse,
DifyStream,
JsonObject,
QueryParams,
SuccessResponse,
} from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalInt,
@@ -12,12 +18,12 @@ export class WorkflowClient extends DifyClient {
request: WorkflowRunRequest
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputs: Record<string, unknown>,
inputs: JsonObject,
user: string,
stream?: boolean
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputOrRequest: WorkflowRunRequest | Record<string, unknown>,
inputOrRequest: WorkflowRunRequest | JsonObject,
user?: string,
stream = false
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>> {
@@ -30,7 +36,7 @@ export class WorkflowClient extends DifyClient {
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
inputs: inputOrRequest,
user,
response_mode: stream ? "streaming" : "blocking",
};
@@ -84,10 +90,10 @@ export class WorkflowClient extends DifyClient {
stop(
taskId: string,
user: string
): Promise<DifyResponse<WorkflowRunResponse>> {
): Promise<DifyResponse<SuccessResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<WorkflowRunResponse>({
return this.http.request<SuccessResponse>({
method: "POST",
path: `/workflows/tasks/${taskId}/stop`,
data: { user },
@@ -111,7 +117,7 @@ export class WorkflowClient extends DifyClient {
limit?: number;
startTime?: string;
endTime?: string;
}): Promise<DifyResponse<Record<string, unknown>>> {
}): Promise<DifyResponse<JsonObject>> {
if (options?.keyword) {
ensureOptionalString(options.keyword, "keyword");
}

View File

@@ -1,304 +0,0 @@
import axios from "axios";
import { Readable } from "node:stream";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { HttpClient } from "./client";
describe("HttpClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("builds requests with auth headers and JSON content type", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: { ok: true },
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const response = await client.request({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(response.requestId).toBe("req");
const config = mockRequest.mock.calls[0][0];
expect(config.headers.Authorization).toBe("Bearer test");
expect(config.headers["Content-Type"]).toBe("application/json");
expect(config.responseType).toBe("json");
});
it("serializes array query params", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: "ok",
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
await client.requestRaw({
method: "GET",
path: "/datasets",
query: { tag_ids: ["a", "b"], limit: 2 },
});
const config = mockRequest.mock.calls[0][0];
const queryString = config.paramsSerializer.serialize({
tag_ids: ["a", "b"],
limit: 2,
});
expect(queryString).toBe("tag_ids=a&tag_ids=b&limit=2");
});
it("returns SSE stream helpers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: Readable.from(["data: {\"text\":\"hi\"}\n\n"]),
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestStream({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
await expect(stream.toText()).resolves.toBe("hi");
});
it("returns binary stream helpers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: Readable.from(["chunk"]),
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestBinaryStream({
method: "POST",
path: "/text-to-audio",
data: { user: "u", text: "hi" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
});
it("respects form-data headers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: "ok",
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const form = {
append: () => {},
getHeaders: () => ({ "content-type": "multipart/form-data; boundary=abc" }),
};
await client.requestRaw({
method: "POST",
path: "/files/upload",
data: form,
});
const config = mockRequest.mock.calls[0][0];
expect(config.headers["content-type"]).toBe(
"multipart/form-data; boundary=abc"
);
expect(config.headers["Content-Type"]).toBeUndefined();
});
it("maps 401 and 429 errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 401,
data: { message: "unauthorized" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(AuthenticationError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 429,
data: { message: "rate" },
headers: { "retry-after": "2" },
},
});
const error = await client
.requestRaw({ method: "GET", path: "/meta" })
.catch((err) => err);
expect(error).toBeInstanceOf(RateLimitError);
expect(error.retryAfter).toBe(2);
});
it("maps validation and upload errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 422,
data: { message: "invalid" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
).rejects.toBeInstanceOf(ValidationError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
config: { url: "/files/upload" },
response: {
status: 400,
data: { message: "bad upload" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
).rejects.toBeInstanceOf(FileUploadError);
});
it("maps timeout and network errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(TimeoutError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
message: "network",
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(NetworkError);
});
it("retries on timeout errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
mockRequest
.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
})
.mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(mockRequest).toHaveBeenCalledTimes(2);
});
it("validates query parameters before request", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
await expect(
client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
).rejects.toBeInstanceOf(ValidationError);
expect(mockRequest).not.toHaveBeenCalled();
});
it("returns APIError for other http failures", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: { status: 500, data: { message: "server" }, headers: {} },
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(APIError);
});
it("logs requests and responses when enableLogging is true", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: { ok: true },
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({ apiKey: "test", enableLogging: true });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node response 200 GET")
);
consoleInfo.mockRestore();
});
it("logs retry attempts when enableLogging is true", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({
apiKey: "test",
maxRetries: 1,
retryDelay: 0,
enableLogging: true,
});
mockRequest
.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
})
.mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node retry")
);
consoleInfo.mockRestore();
});
});

View File

@@ -0,0 +1,527 @@
import { Readable, Stream } from "node:stream";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { HttpClient } from "./client";
const stubFetch = (): ReturnType<typeof vi.fn> => {
const fetchMock = vi.fn();
vi.stubGlobal("fetch", fetchMock);
return fetchMock;
};
const getFetchCall = (
fetchMock: ReturnType<typeof vi.fn>,
index = 0
): [string, RequestInit | undefined] => {
const call = fetchMock.mock.calls[index];
if (!call) {
throw new Error(`Missing fetch call at index ${index}`);
}
return call as [string, RequestInit | undefined];
};
const toHeaderRecord = (headers: HeadersInit | undefined): Record<string, string> =>
Object.fromEntries(new Headers(headers).entries());
const jsonResponse = (
body: unknown,
init: ResponseInit = {}
): Response =>
new Response(JSON.stringify(body), {
...init,
headers: {
"content-type": "application/json",
...(init.headers ?? {}),
},
});
const textResponse = (body: string, init: ResponseInit = {}): Response =>
new Response(body, {
...init,
headers: {
...(init.headers ?? {}),
},
});
describe("HttpClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("builds requests with auth headers and JSON content type", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
jsonResponse({ ok: true }, { status: 200, headers: { "x-request-id": "req" } })
);
const client = new HttpClient({ apiKey: "test" });
const response = await client.request({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(response.requestId).toBe("req");
expect(fetchMock).toHaveBeenCalledTimes(1);
const [url, init] = getFetchCall(fetchMock);
expect(url).toBe("https://api.dify.ai/v1/chat-messages");
expect(toHeaderRecord(init?.headers)).toMatchObject({
authorization: "Bearer test",
"content-type": "application/json",
"user-agent": "dify-client-node",
});
expect(init?.body).toBe(JSON.stringify({ user: "u" }));
});
it("serializes array query params", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
await client.requestRaw({
method: "GET",
path: "/datasets",
query: { tag_ids: ["a", "b"], limit: 2 },
});
const [url] = getFetchCall(fetchMock);
expect(new URL(url).searchParams.toString()).toBe(
"tag_ids=a&tag_ids=b&limit=2"
);
});
it("returns SSE stream helpers", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
new Response('data: {"text":"hi"}\n\n', {
status: 200,
headers: { "x-request-id": "req" },
})
);
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestStream({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
await expect(stream.toText()).resolves.toBe("hi");
});
it("returns binary stream helpers", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
new Response("chunk", {
status: 200,
headers: { "x-request-id": "req" },
})
);
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestBinaryStream({
method: "POST",
path: "/text-to-audio",
data: { user: "u", text: "hi" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
expect(stream.data).toBeInstanceOf(Readable);
});
it("respects form-data headers", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
const form = new FormData();
form.append("file", new Blob(["abc"]), "file.txt");
await client.requestRaw({
method: "POST",
path: "/files/upload",
data: form,
});
const [, init] = getFetchCall(fetchMock);
expect(toHeaderRecord(init?.headers)).toMatchObject({
authorization: "Bearer test",
});
expect(toHeaderRecord(init?.headers)["content-type"]).toBeUndefined();
});
it("sends legacy form-data as a readable request body", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
const legacyForm = Object.assign(Readable.from(["chunk"]), {
append: vi.fn(),
getHeaders: () => ({
"content-type": "multipart/form-data; boundary=test",
}),
});
await client.requestRaw({
method: "POST",
path: "/files/upload",
data: legacyForm,
});
const [, init] = getFetchCall(fetchMock);
expect(toHeaderRecord(init?.headers)).toMatchObject({
authorization: "Bearer test",
"content-type": "multipart/form-data; boundary=test",
});
expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
"half"
);
expect(init?.body).not.toBe(legacyForm);
});
it("rejects legacy form-data objects that are not readable streams", async () => {
const fetchMock = stubFetch();
const client = new HttpClient({ apiKey: "test" });
const legacyForm = {
append: vi.fn(),
getHeaders: () => ({
"content-type": "multipart/form-data; boundary=test",
}),
};
await expect(
client.requestRaw({
method: "POST",
path: "/files/upload",
data: legacyForm,
})
).rejects.toBeInstanceOf(FileUploadError);
expect(fetchMock).not.toHaveBeenCalled();
});
it("accepts legacy pipeable streams that are not Readable instances", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
const legacyStream = new Stream() as Stream &
NodeJS.ReadableStream & {
append: ReturnType<typeof vi.fn>;
getHeaders: () => Record<string, string>;
};
legacyStream.readable = true;
legacyStream.pause = () => legacyStream;
legacyStream.resume = () => legacyStream;
legacyStream.append = vi.fn();
legacyStream.getHeaders = () => ({
"content-type": "multipart/form-data; boundary=test",
});
queueMicrotask(() => {
legacyStream.emit("data", Buffer.from("chunk"));
legacyStream.emit("end");
});
await client.requestRaw({
method: "POST",
path: "/files/upload",
data: legacyStream as unknown as FormData,
});
const [, init] = getFetchCall(fetchMock);
expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
"half"
);
});
it("returns buffers for byte responses", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
new Response(Uint8Array.from([1, 2, 3]), {
status: 200,
headers: { "content-type": "application/octet-stream" },
})
);
const client = new HttpClient({ apiKey: "test" });
const response = await client.request<Buffer, "bytes">({
method: "GET",
path: "/files/file-1/preview",
responseType: "bytes",
});
expect(Buffer.isBuffer(response.data)).toBe(true);
expect(Array.from(response.data.values())).toEqual([1, 2, 3]);
});
it("keeps arraybuffer as a backward-compatible binary alias", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
new Response(Uint8Array.from([4, 5, 6]), {
status: 200,
headers: { "content-type": "application/octet-stream" },
})
);
const client = new HttpClient({ apiKey: "test" });
const response = await client.request<Buffer, "arraybuffer">({
method: "GET",
path: "/files/file-1/preview",
responseType: "arraybuffer",
});
expect(Buffer.isBuffer(response.data)).toBe(true);
expect(Array.from(response.data.values())).toEqual([4, 5, 6]);
});
it("returns null for empty no-content responses", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(new Response(null, { status: 204 }));
const client = new HttpClient({ apiKey: "test" });
const response = await client.requestRaw({
method: "GET",
path: "/meta",
});
expect(response.data).toBeNull();
});
it("maps 401 and 429 errors", async () => {
const fetchMock = stubFetch();
fetchMock
.mockResolvedValueOnce(
jsonResponse({ message: "unauthorized" }, { status: 401 })
)
.mockResolvedValueOnce(
jsonResponse({ message: "rate" }, { status: 429, headers: { "retry-after": "2" } })
);
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(AuthenticationError);
const error = await client
.requestRaw({ method: "GET", path: "/meta" })
.catch((err: unknown) => err);
expect(error).toBeInstanceOf(RateLimitError);
expect((error as RateLimitError).retryAfter).toBe(2);
});
it("maps validation and upload errors", async () => {
const fetchMock = stubFetch();
fetchMock
.mockResolvedValueOnce(jsonResponse({ message: "invalid" }, { status: 422 }))
.mockResolvedValueOnce(jsonResponse({ message: "bad upload" }, { status: 400 }));
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
).rejects.toBeInstanceOf(ValidationError);
await expect(
client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
).rejects.toBeInstanceOf(FileUploadError);
});
it("maps timeout and network errors", async () => {
const fetchMock = stubFetch();
fetchMock
.mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
.mockRejectedValueOnce(new Error("network"));
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(TimeoutError);
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(NetworkError);
});
it("maps unknown transport failures to NetworkError", async () => {
const fetchMock = stubFetch();
fetchMock.mockRejectedValueOnce("boom");
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toMatchObject({
name: "NetworkError",
message: "Unexpected network error",
});
});
it("retries on timeout errors", async () => {
const fetchMock = stubFetch();
fetchMock
.mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(fetchMock).toHaveBeenCalledTimes(2);
});
it("does not retry non-replayable readable request bodies", async () => {
const fetchMock = stubFetch();
fetchMock.mockRejectedValueOnce(new Error("network"));
const client = new HttpClient({ apiKey: "test", maxRetries: 2, retryDelay: 0 });
await expect(
client.requestRaw({
method: "POST",
path: "/chat-messages",
data: Readable.from(["chunk"]),
})
).rejects.toBeInstanceOf(NetworkError);
expect(fetchMock).toHaveBeenCalledTimes(1);
const [, init] = getFetchCall(fetchMock);
expect((init as RequestInit & { duplex?: string } | undefined)?.duplex).toBe(
"half"
);
});
it("validates query parameters before request", async () => {
const fetchMock = stubFetch();
const client = new HttpClient({ apiKey: "test" });
await expect(
client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
).rejects.toBeInstanceOf(ValidationError);
expect(fetchMock).not.toHaveBeenCalled();
});
// Statuses without a dedicated error class (here 500) fall through to APIError.
it("returns APIError for other http failures", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse({ message: "server" }, { status: 500 }));
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(APIError);
});
// Non-JSON content types that also fail a lenient JSON.parse are surfaced
// verbatim as strings on response.data.
it("uses plain text bodies when json parsing is not possible", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
textResponse("plain text", {
status: 200,
headers: { "content-type": "text/plain" },
})
);
const client = new HttpClient({ apiKey: "test" });
const response = await client.requestRaw({
method: "GET",
path: "/info",
});
expect(response.data).toBe("plain text");
});
// A failing response that claims application/json but is malformed keeps the
// raw text as responseBody on the APIError instead of throwing a SyntaxError,
// and the x-request-id header is propagated for diagnostics.
it("keeps invalid json error bodies as API errors", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(
textResponse("{invalid", {
status: 500,
headers: { "content-type": "application/json", "x-request-id": "req-500" },
})
);
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toMatchObject({
name: "APIError",
statusCode: 500,
requestId: "req-500",
responseBody: "{invalid",
});
});
// Pre-serialized string bodies must pass through untouched (no double JSON
// encoding) and a caller-supplied Content-Type must win over the default.
it("sends raw string bodies without additional json encoding", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
await client.requestRaw({
method: "POST",
path: "/meta",
data: '{"pre":"serialized"}',
headers: { "Content-Type": "application/custom+json" },
});
const [, init] = getFetchCall(fetchMock);
expect(init?.body).toBe('{"pre":"serialized"}');
expect(toHeaderRecord(init?.headers)).toMatchObject({
"content-type": "application/custom+json",
});
});
// An explicit User-Agent from the caller must not be overwritten by the
// SDK's default agent string.
it("preserves explicit user-agent headers", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
const client = new HttpClient({ apiKey: "test" });
await client.requestRaw({
method: "GET",
path: "/meta",
headers: { "User-Agent": "custom-agent" },
});
const [, init] = getFetchCall(fetchMock);
expect(toHeaderRecord(init?.headers)).toMatchObject({
"user-agent": "custom-agent",
});
});
// enableLogging routes request/response summaries through console.info;
// the spy silences output while capturing the logged line.
it("logs requests and responses when enableLogging is true", async () => {
const fetchMock = stubFetch();
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }, { status: 200 }));
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({ apiKey: "test", enableLogging: true });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node response 200 GET")
);
});
// Retry attempts are also logged, using a distinct "retry" log prefix.
it("logs retry attempts when enableLogging is true", async () => {
const fetchMock = stubFetch();
fetchMock
.mockRejectedValueOnce(Object.assign(new Error("timeout"), { name: "AbortError" }))
.mockResolvedValueOnce(jsonResponse("ok", { status: 200 }));
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({
apiKey: "test",
maxRetries: 1,
retryDelay: 0,
enableLogging: true,
});
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node retry")
);
});
});

View File

@@ -1,11 +1,4 @@
import axios from "axios";
import type {
AxiosError,
AxiosInstance,
AxiosRequestConfig,
AxiosResponse,
} from "axios";
import type { Readable } from "node:stream";
import { Readable } from "node:stream";
import {
DEFAULT_BASE_URL,
DEFAULT_MAX_RETRIES,
@@ -13,36 +6,69 @@ import {
DEFAULT_TIMEOUT_SECONDS,
} from "../types/common";
import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
DifyStream,
Headers,
JsonValue,
QueryParams,
RequestMethod,
} from "../types/common";
import type { DifyError } from "../errors/dify-error";
import {
APIError,
AuthenticationError,
DifyError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import type { SdkFormData } from "./form-data";
import { getFormDataHeaders, isFormData } from "./form-data";
import { createBinaryStream, createSseStream } from "./sse";
import { getRetryDelayMs, shouldRetry, sleep } from "./retry";
import { validateParams } from "../client/validation";
import { hasStringProperty, isRecord } from "../internal/type-guards";
const DEFAULT_USER_AGENT = "dify-client-node";
export type RequestOptions = {
// How the response body should be decoded by requestRaw.
export type HttpResponseType = "json" | "bytes" | "stream" | "arraybuffer";
// Every body shape requestRaw accepts before preparation for fetch.
export type HttpRequestBody =
| JsonValue
| Readable
| SdkFormData
| URLSearchParams
| ArrayBuffer
| ArrayBufferView
| Blob
| string
| null;
// Maps a requested decoding to the concrete data type callers receive:
// "stream" -> Node Readable, "bytes"/"arraybuffer" -> Buffer, "json" -> parsed
// JSON, raw text, or null (empty/no-content bodies).
export type ResponseDataFor<TResponseType extends HttpResponseType> =
TResponseType extends "stream"
? Readable
: TResponseType extends "bytes" | "arraybuffer"
? Buffer
: JsonValue | string | null;
// Normalized view of a fetch Response: lower-cased headers, extracted
// request id, and the fully-resolved request URL.
export type RawHttpResponse<TData = unknown> = {
data: TData;
status: number;
headers: Headers;
requestId?: string;
url: string;
};
export type RequestOptions<TResponseType extends HttpResponseType = "json"> = {
method: RequestMethod;
path: string;
query?: QueryParams;
data?: unknown;
data?: HttpRequestBody;
headers?: Headers;
responseType?: AxiosRequestConfig["responseType"];
responseType?: TResponseType;
};
export type HttpClientSettings = Required<
@@ -51,6 +77,23 @@ export type HttpClientSettings = Required<
apiKey: string;
};
// RequestInit plus the (not yet universally typed) half-duplex flag that
// fetch implementations require when sending a streamed request body.
type FetchRequestInit = RequestInit & {
duplex?: "half";
};
// Result of prepareRequestBody: the fetch body, any content-type headers it
// implies, the duplex flag for streams, and whether the body can be re-sent
// on retry (streams cannot).
type PreparedRequestBody = {
body?: BodyInit | null;
headers: Headers;
duplex?: "half";
replayable: boolean;
};
// Per-attempt timeout plumbing: the abort signal handed to fetch, the exact
// reason object used to abort (so timeouts can be told apart from other
// aborts), and a cleanup to cancel the timer.
type TimeoutContext = {
cleanup: () => void;
reason: Error;
signal: AbortSignal;
};
const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
apiKey: config.apiKey,
baseUrl: config.baseUrl ?? DEFAULT_BASE_URL,
@@ -60,19 +103,10 @@ const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
enableLogging: config.enableLogging ?? false,
});
const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
const normalizeHeaders = (headers: globalThis.Headers): Headers => {
const result: Headers = {};
if (!headers) {
return result;
}
Object.entries(headers).forEach(([key, value]) => {
if (Array.isArray(value)) {
result[key.toLowerCase()] = value.join(", ");
} else if (typeof value === "string") {
result[key.toLowerCase()] = value;
} else if (typeof value === "number") {
result[key.toLowerCase()] = value.toString();
}
headers.forEach((value, key) => {
result[key.toLowerCase()] = value;
});
return result;
};
@@ -80,9 +114,18 @@ const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
const resolveRequestId = (headers: Headers): string | undefined =>
headers["x-request-id"] ?? headers["x-requestid"];
const buildRequestUrl = (baseUrl: string, path: string): string => {
const buildRequestUrl = (
baseUrl: string,
path: string,
query?: QueryParams
): string => {
const trimmed = baseUrl.replace(/\/+$/, "");
return `${trimmed}${path}`;
const url = new URL(`${trimmed}${path}`);
const queryString = buildQueryString(query);
if (queryString) {
url.search = queryString;
}
return url.toString();
};
const buildQueryString = (params?: QueryParams): string => {
@@ -121,24 +164,53 @@ const parseRetryAfterSeconds = (headerValue?: string): number | undefined => {
return undefined;
};
const isReadableStream = (value: unknown): value is Readable => {
// Duck-types legacy pipe-based streams: any non-null object exposing a
// callable `pipe` method qualifies.
const isPipeableStream = (value: unknown): value is { pipe: (destination: unknown) => unknown } => {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const candidate = value as { pipe?: unknown };
  return typeof candidate.pipe === "function";
};
const isUploadLikeRequest = (config?: AxiosRequestConfig): boolean => {
const url = (config?.url ?? "").toLowerCase();
if (!url) {
return false;
// Coerces a value to a Node Readable: already-Readable values pass through,
// legacy pipe-style streams are wrapped, anything else yields null.
const toNodeReadable = (value: unknown): Readable | null => {
  if (value instanceof Readable) {
    return value;
  }
  if (isPipeableStream(value)) {
    // wrap() adapts old-style streams onto a fresh no-op Readable shell.
    const wrapper = new Readable({ read() {} });
    return wrapper.wrap(value as NodeJS.ReadableStream);
  }
  return null;
};
// True for the binary body shapes fetch accepts directly:
// Blob, ArrayBuffer, or any typed-array/DataView over one.
const isBinaryBody = (
  value: unknown
): value is ArrayBuffer | ArrayBufferView | Blob => {
  return (
    value instanceof Blob ||
    value instanceof ArrayBuffer ||
    ArrayBuffer.isView(value)
  );
};
// True for JSON-serializable bodies other than strings (strings are sent
// verbatim elsewhere): null, booleans, numbers, arrays, and plain objects.
const isJsonBody = (value: unknown): value is Exclude<JsonValue, string> => {
  if (value === null || Array.isArray(value) || isRecord(value)) {
    return true;
  }
  const kind = typeof value;
  return kind === "boolean" || kind === "number";
};
const isUploadLikeRequest = (path: string): boolean => {
const normalizedPath = path.toLowerCase();
return (
url.includes("upload") ||
url.includes("/files/") ||
url.includes("audio-to-text") ||
url.includes("create_by_file") ||
url.includes("update_by_file")
normalizedPath.includes("upload") ||
normalizedPath.includes("/files/") ||
normalizedPath.includes("audio-to-text") ||
normalizedPath.includes("create_by_file") ||
normalizedPath.includes("update_by_file")
);
};
@@ -146,88 +218,242 @@ const resolveErrorMessage = (status: number, responseBody: unknown): string => {
if (typeof responseBody === "string" && responseBody.trim().length > 0) {
return responseBody;
}
if (
responseBody &&
typeof responseBody === "object" &&
"message" in responseBody
) {
const message = (responseBody as Record<string, unknown>).message;
if (typeof message === "string" && message.trim().length > 0) {
if (hasStringProperty(responseBody, "message")) {
const message = responseBody.message.trim();
if (message.length > 0) {
return message;
}
}
return `Request failed with status code ${status}`;
};
const mapAxiosError = (error: unknown): DifyError => {
if (axios.isAxiosError(error)) {
const axiosError = error as AxiosError;
if (axiosError.response) {
const status = axiosError.response.status;
const headers = normalizeHeaders(axiosError.response.headers);
const requestId = resolveRequestId(headers);
const responseBody = axiosError.response.data;
const message = resolveErrorMessage(status, responseBody);
if (status === 401) {
return new AuthenticationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (status === 429) {
const retryAfter = parseRetryAfterSeconds(headers["retry-after"]);
return new RateLimitError(message, {
statusCode: status,
responseBody,
requestId,
retryAfter,
});
}
if (status === 422) {
return new ValidationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (status === 400) {
if (isUploadLikeRequest(axiosError.config)) {
return new FileUploadError(message, {
statusCode: status,
responseBody,
requestId,
});
}
}
return new APIError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (axiosError.code === "ECONNABORTED") {
return new TimeoutError("Request timed out", { cause: axiosError });
}
return new NetworkError(axiosError.message, { cause: axiosError });
// Decodes a textual response body.
// - Empty text -> null.
// - JSON content types (application/json or any +json suffix) are parsed
//   strictly: a malformed body throws SyntaxError, which the caller handles.
// - All other content types are parsed leniently: try JSON first, fall back
//   to returning the raw text (e.g. text/plain bodies).
const parseJsonLikeText = (
value: string,
contentType?: string | null
): JsonValue | string | null => {
if (value.length === 0) {
return null;
}
// Optional chaining keeps missing content-type headers falsy.
const shouldParseJson =
contentType?.includes("application/json") === true ||
contentType?.includes("+json") === true;
if (!shouldParseJson) {
try {
return JSON.parse(value) as JsonValue;
} catch {
return value;
}
}
// Declared-JSON bodies deliberately throw on invalid syntax.
return JSON.parse(value) as JsonValue;
};
// Converts the SDK's accepted body shapes into a fetch-ready body plus the
// headers it implies. `replayable` tells the retry loop whether the body can
// be sent again (streams are single-use); `duplex: "half"` is required by
// fetch whenever the body is streamed.
const prepareRequestBody = (
method: RequestMethod,
data: HttpRequestBody | undefined
): PreparedRequestBody => {
// GET requests and absent bodies send nothing and are trivially replayable.
if (method === "GET" || data === undefined) {
return {
body: undefined,
headers: {},
replayable: true,
};
}
if (isFormData(data)) {
// Legacy node form-data exposes getHeaders() and streams via pipe();
// it must be convertible to a web stream to work with fetch.
if ("getHeaders" in data && typeof data.getHeaders === "function") {
const readable = toNodeReadable(data);
if (!readable) {
throw new FileUploadError(
"Legacy FormData must be a readable stream when used with fetch"
);
}
return {
body: Readable.toWeb(readable) as BodyInit,
headers: getFormDataHeaders(data),
duplex: "half",
replayable: false,
};
}
// Native (WHATWG) FormData: fetch sets the multipart boundary itself.
return {
body: data as BodyInit,
headers: getFormDataHeaders(data),
replayable: true,
};
}
// Strings pass through untouched so pre-serialized payloads are not
// double-encoded; the caller supplies any Content-Type.
if (typeof data === "string") {
return {
body: data,
headers: {},
replayable: true,
};
}
// Node Readable (or pipe-style) bodies stream through a web stream.
const readable = toNodeReadable(data);
if (readable) {
return {
body: Readable.toWeb(readable) as BodyInit,
headers: {},
duplex: "half",
replayable: false,
};
}
if (data instanceof URLSearchParams || isBinaryBody(data)) {
// Non-Uint8Array views (e.g. DataView) are re-wrapped over the same
// backing buffer so fetch receives a plain Uint8Array.
const body =
ArrayBuffer.isView(data) && !(data instanceof Uint8Array)
? new Uint8Array(data.buffer, data.byteOffset, data.byteLength)
: data;
return {
body: body as BodyInit,
headers: {},
replayable: true,
};
}
// Plain JSON values are serialized and tagged as application/json.
if (isJsonBody(data)) {
return {
body: JSON.stringify(data),
headers: {
"Content-Type": "application/json",
},
replayable: true,
};
}
throw new ValidationError("Unsupported request body type");
};
// Builds the per-attempt timeout machinery: an AbortController whose signal
// is handed to fetch, the sentinel Error used as the abort reason (so a
// timeout abort can be distinguished from other aborts), and a cleanup
// callback that cancels the pending timer.
const createTimeoutContext = (timeoutMs: number): TimeoutContext => {
  const abortReason = new Error("Request timed out");
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(abortReason), timeoutMs);
  return {
    signal: controller.signal,
    reason: abortReason,
    cleanup: () => clearTimeout(timer),
  };
};
// Decodes a fetch Response according to the requested response type:
// "stream" -> Node Readable over the web body stream,
// "bytes"/"arraybuffer" -> Buffer, otherwise JSON/text via parseJsonLikeText.
const parseResponseBody = async <TResponseType extends HttpResponseType>(
response: Response,
responseType: TResponseType
): Promise<ResponseDataFor<TResponseType>> => {
if (responseType === "stream") {
if (!response.body) {
throw new NetworkError("Response body is empty");
}
// Cast needed because lib.dom's ReadableStream and Node's web-stream
// types do not unify under the current compiler settings.
return Readable.fromWeb(
response.body as unknown as Parameters<typeof Readable.fromWeb>[0]
) as ResponseDataFor<TResponseType>;
}
if (responseType === "bytes" || responseType === "arraybuffer") {
const bytes = Buffer.from(await response.arrayBuffer());
return bytes as ResponseDataFor<TResponseType>;
}
// 204/205/304 have no body by definition; report null without reading.
if (response.status === 204 || response.status === 205 || response.status === 304) {
return null as ResponseDataFor<TResponseType>;
}
const text = await response.text();
try {
return parseJsonLikeText(
text,
response.headers.get("content-type")
) as ResponseDataFor<TResponseType>;
} catch (error) {
// A failing response with a malformed JSON body keeps the raw text so
// error mapping can attach it as responseBody; other errors propagate.
if (!response.ok && error instanceof SyntaxError) {
return text as ResponseDataFor<TResponseType>;
}
throw error;
}
};
// Maps a non-2xx HTTP response to the SDK's error hierarchy:
// 401 -> AuthenticationError, 429 -> RateLimitError (with parsed Retry-After),
// 422 -> ValidationError, 400 on upload-like paths -> FileUploadError,
// everything else -> APIError. The request path is only used for the
// upload-endpoint heuristic.
const mapHttpError = (
response: RawHttpResponse,
path: string
): DifyError => {
const status = response.status;
const responseBody = response.data;
const message = resolveErrorMessage(status, responseBody);
if (status === 401) {
return new AuthenticationError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
if (status === 429) {
// Retry-After is advisory; undefined when the header is absent/unparsable.
const retryAfter = parseRetryAfterSeconds(response.headers["retry-after"]);
return new RateLimitError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
retryAfter,
});
}
if (status === 422) {
return new ValidationError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
if (status === 400 && isUploadLikeRequest(path)) {
return new FileUploadError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
}
return new APIError(message, {
statusCode: status,
responseBody,
requestId: response.requestId,
});
};
// Classifies a transport-level failure. Already-mapped DifyErrors pass
// through; an abort caused by our own timeout sentinel (or an AbortError/
// TimeoutError-named Error) becomes TimeoutError; other Errors become
// NetworkError; non-Error throwables get a generic NetworkError wrapper.
const mapTransportError = (
  error: unknown,
  timeoutContext: TimeoutContext
): DifyError => {
  if (error instanceof DifyError) {
    return error;
  }
  const abortedByTimeout =
    timeoutContext.signal.aborted &&
    timeoutContext.signal.reason === timeoutContext.reason;
  if (abortedByTimeout) {
    return new TimeoutError("Request timed out", { cause: error });
  }
  if (!(error instanceof Error)) {
    return new NetworkError("Unexpected network error", { cause: error });
  }
  const looksLikeTimeout =
    error.name === "AbortError" || error.name === "TimeoutError";
  return looksLikeTimeout
    ? new TimeoutError("Request timed out", { cause: error })
    : new NetworkError(error.message, { cause: error });
};
export class HttpClient {
private axios: AxiosInstance;
private settings: HttpClientSettings;
constructor(config: DifyClientConfig) {
this.settings = normalizeSettings(config);
this.axios = axios.create({
baseURL: this.settings.baseUrl,
timeout: this.settings.timeout * 1000,
});
}
updateApiKey(apiKey: string): void {
@@ -238,118 +464,123 @@ export class HttpClient {
return { ...this.settings };
}
async request<T>(options: RequestOptions): Promise<DifyResponse<T>> {
async request<
T,
TResponseType extends HttpResponseType = "json",
>(options: RequestOptions<TResponseType>): Promise<DifyResponse<T>> {
const response = await this.requestRaw(options);
const headers = normalizeHeaders(response.headers);
return {
data: response.data as T,
status: response.status,
headers,
requestId: resolveRequestId(headers),
headers: response.headers,
requestId: response.requestId,
};
}
async requestStream<T>(options: RequestOptions) {
async requestStream<T>(options: RequestOptions): Promise<DifyStream<T>> {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
const headers = normalizeHeaders(response.headers);
return createSseStream<T>(response.data as Readable, {
return createSseStream<T>(response.data, {
status: response.status,
headers,
requestId: resolveRequestId(headers),
headers: response.headers,
requestId: response.requestId,
});
}
async requestBinaryStream(options: RequestOptions) {
async requestBinaryStream(options: RequestOptions): Promise<BinaryStream> {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
const headers = normalizeHeaders(response.headers);
return createBinaryStream(response.data as Readable, {
return createBinaryStream(response.data, {
status: response.status,
headers,
requestId: resolveRequestId(headers),
headers: response.headers,
requestId: response.requestId,
});
}
async requestRaw(options: RequestOptions): Promise<AxiosResponse> {
const { method, path, query, data, headers, responseType } = options;
const { apiKey, enableLogging, maxRetries, retryDelay, timeout } =
this.settings;
async requestRaw<TResponseType extends HttpResponseType = "json">(
options: RequestOptions<TResponseType>
): Promise<RawHttpResponse<ResponseDataFor<TResponseType>>> {
const responseType = options.responseType ?? "json";
const { method, path, query, data, headers } = options;
const { apiKey, enableLogging, maxRetries, retryDelay, timeout } = this.settings;
if (query) {
validateParams(query as Record<string, unknown>);
}
if (
data &&
typeof data === "object" &&
!Array.isArray(data) &&
!isFormData(data) &&
!isReadableStream(data)
) {
validateParams(data as Record<string, unknown>);
if (isRecord(data) && !Array.isArray(data) && !isFormData(data) && !isPipeableStream(data)) {
validateParams(data);
}
const requestHeaders: Headers = {
Authorization: `Bearer ${apiKey}`,
...headers,
};
if (
typeof process !== "undefined" &&
!!process.versions?.node &&
!requestHeaders["User-Agent"] &&
!requestHeaders["user-agent"]
) {
requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
}
if (isFormData(data)) {
Object.assign(requestHeaders, getFormDataHeaders(data));
} else if (data && method !== "GET") {
requestHeaders["Content-Type"] = "application/json";
}
const url = buildRequestUrl(this.settings.baseUrl, path);
const url = buildRequestUrl(this.settings.baseUrl, path, query);
if (enableLogging) {
console.info(`dify-client-node request ${method} ${url}`);
}
const axiosConfig: AxiosRequestConfig = {
method,
url: path,
params: query,
paramsSerializer: {
serialize: (params) => buildQueryString(params as QueryParams),
},
headers: requestHeaders,
responseType: responseType ?? "json",
timeout: timeout * 1000,
};
if (method !== "GET" && data !== undefined) {
axiosConfig.data = data;
}
let attempt = 0;
// `attempt` is a zero-based retry counter
// Total attempts = 1 (initial) + maxRetries
// e.g., maxRetries=3 means: attempt 0 (initial), then retries at 1, 2, 3
while (true) {
const preparedBody = prepareRequestBody(method, data);
const requestHeaders: Headers = {
Authorization: `Bearer ${apiKey}`,
...preparedBody.headers,
...headers,
};
if (
typeof process !== "undefined" &&
!!process.versions?.node &&
!requestHeaders["User-Agent"] &&
!requestHeaders["user-agent"]
) {
requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
}
const timeoutContext = createTimeoutContext(timeout * 1000);
const requestInit: FetchRequestInit = {
method,
headers: requestHeaders,
body: preparedBody.body,
signal: timeoutContext.signal,
};
if (preparedBody.duplex) {
requestInit.duplex = preparedBody.duplex;
}
try {
const response = await this.axios.request(axiosConfig);
const fetchResponse = await fetch(url, requestInit);
const responseHeaders = normalizeHeaders(fetchResponse.headers);
const parsedBody =
(await parseResponseBody(fetchResponse, responseType)) as ResponseDataFor<TResponseType>;
const response: RawHttpResponse<ResponseDataFor<TResponseType>> = {
data: parsedBody,
status: fetchResponse.status,
headers: responseHeaders,
requestId: resolveRequestId(responseHeaders),
url,
};
if (!fetchResponse.ok) {
throw mapHttpError(response, path);
}
if (enableLogging) {
console.info(
`dify-client-node response ${response.status} ${method} ${url}`
);
}
return response;
} catch (error) {
const mapped = mapAxiosError(error);
if (!shouldRetry(mapped, attempt, maxRetries)) {
const mapped = mapTransportError(error, timeoutContext);
const shouldRetryRequest =
preparedBody.replayable && shouldRetry(mapped, attempt, maxRetries);
if (!shouldRetryRequest) {
throw mapped;
}
const retryAfterSeconds =
@@ -362,6 +593,8 @@ export class HttpClient {
}
attempt += 1;
await sleep(delay);
} finally {
timeoutContext.cleanup();
}
}
}

View File

@@ -1,4 +1,4 @@
import { describe, expect, it } from "vitest";
import { describe, expect, it, vi } from "vitest";
import { getFormDataHeaders, isFormData } from "./form-data";
describe("form-data helpers", () => {
@@ -11,9 +11,15 @@ describe("form-data helpers", () => {
expect(isFormData({})).toBe(false);
});
it("detects native FormData", () => {
const form = new FormData();
form.append("field", "value");
expect(isFormData(form)).toBe(true);
});
it("returns headers from form-data", () => {
const formLike = {
append: () => {},
append: vi.fn(),
getHeaders: () => ({ "content-type": "multipart/form-data" }),
};
expect(getFormDataHeaders(formLike)).toEqual({

View File

@@ -1,19 +1,25 @@
import type { Headers } from "../types/common";
export type FormDataLike = {
append: (...args: unknown[]) => void;
getHeaders?: () => Headers;
type FormDataAppendValue = Blob | string;
export type WebFormData = FormData;
export type LegacyNodeFormData = {
append: (name: string, value: FormDataAppendValue, fileName?: string) => void;
getHeaders: () => Headers;
constructor?: { name?: string };
};
export const isFormData = (value: unknown): value is FormDataLike => {
export type SdkFormData = WebFormData | LegacyNodeFormData;
export const isFormData = (value: unknown): value is SdkFormData => {
if (!value || typeof value !== "object") {
return false;
}
if (typeof FormData !== "undefined" && value instanceof FormData) {
return true;
}
const candidate = value as FormDataLike;
const candidate = value as Partial<LegacyNodeFormData>;
if (typeof candidate.append !== "function") {
return false;
}
@@ -23,8 +29,8 @@ export const isFormData = (value: unknown): value is FormDataLike => {
return candidate.constructor?.name === "FormData";
};
export const getFormDataHeaders = (form: FormDataLike): Headers => {
if (typeof form.getHeaders === "function") {
export const getFormDataHeaders = (form: SdkFormData): Headers => {
if ("getHeaders" in form && typeof form.getHeaders === "function") {
return form.getHeaders();
}
return {};

View File

@@ -2,7 +2,7 @@ import { describe, expect, it } from "vitest";
import { getRetryDelayMs, shouldRetry } from "./retry";
import { NetworkError, RateLimitError, TimeoutError } from "../errors/dify-error";
const withMockedRandom = (value, fn) => {
const withMockedRandom = (value: number, fn: () => void): void => {
const original = Math.random;
Math.random = () => value;
try {

View File

@@ -6,10 +6,10 @@ describe("sse parsing", () => {
it("parses event and data lines", async () => {
const stream = Readable.from([
"event: message\n",
"data: {\"answer\":\"hi\"}\n",
'data: {"answer":"hi"}\n',
"\n",
]);
const events = [];
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
@@ -20,7 +20,7 @@ describe("sse parsing", () => {
it("handles multi-line data payloads", async () => {
const stream = Readable.from(["data: line1\n", "data: line2\n", "\n"]);
const events = [];
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
@@ -28,10 +28,28 @@ describe("sse parsing", () => {
expect(events[0].data).toBe("line1\nline2");
});
// SSE comment lines (leading ':') must be skipped, mixed Buffer/Uint8Array
// chunks must decode identically, and a final event without a trailing blank
// line must still be flushed when the stream ends.
it("ignores comments and flushes the last event without a trailing separator", async () => {
const stream = Readable.from([
Buffer.from(": keep-alive\n"),
Uint8Array.from(Buffer.from('event: message\ndata: {"delta":"hi"}\n')),
]);
const events: Array<{ event?: string; data: unknown; raw: string }> = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
expect(events).toEqual([
{
event: "message",
data: { delta: "hi" },
raw: '{"delta":"hi"}',
},
]);
});
it("createSseStream exposes toText", async () => {
const stream = Readable.from([
"data: {\"answer\":\"hello\"}\n\n",
"data: {\"delta\":\" world\"}\n\n",
'data: {"answer":"hello"}\n\n',
'data: {"delta":" world"}\n\n',
]);
const sseStream = createSseStream(stream, {
status: 200,
@@ -72,5 +90,6 @@ describe("sse parsing", () => {
});
expect(binary.status).toBe(200);
expect(binary.headers["content-type"]).toBe("audio/mpeg");
expect(binary.toReadable()).toBe(stream);
});
});

View File

@@ -1,12 +1,29 @@
import type { Readable } from "node:stream";
import { StringDecoder } from "node:string_decoder";
import type { BinaryStream, DifyStream, Headers, StreamEvent } from "../types/common";
import type {
BinaryStream,
DifyStream,
Headers,
JsonValue,
StreamEvent,
} from "../types/common";
import { isRecord } from "../internal/type-guards";
// Normalizes a stream chunk to a Buffer: Buffers pass through as-is,
// Uint8Arrays are copied into a new Buffer, anything else is stringified
// and encoded.
const toBufferChunk = (chunk: unknown): Buffer => {
  if (Buffer.isBuffer(chunk)) {
    return chunk;
  }
  return chunk instanceof Uint8Array
    ? Buffer.from(chunk)
    : Buffer.from(String(chunk));
};
const readLines = async function* (stream: Readable): AsyncIterable<string> {
const decoder = new StringDecoder("utf8");
let buffered = "";
for await (const chunk of stream) {
buffered += decoder.write(chunk as Buffer);
buffered += decoder.write(toBufferChunk(chunk));
let index = buffered.indexOf("\n");
while (index >= 0) {
let line = buffered.slice(0, index);
@@ -24,12 +41,12 @@ const readLines = async function* (stream: Readable): AsyncIterable<string> {
}
};
const parseMaybeJson = (value: string): unknown => {
const parseMaybeJson = (value: string): JsonValue | string | null => {
if (!value) {
return null;
}
try {
return JSON.parse(value);
return JSON.parse(value) as JsonValue;
} catch {
return value;
}
@@ -81,18 +98,17 @@ const extractTextFromEvent = (data: unknown): string => {
if (typeof data === "string") {
return data;
}
if (!data || typeof data !== "object") {
if (!isRecord(data)) {
return "";
}
const record = data as Record<string, unknown>;
if (typeof record.answer === "string") {
return record.answer;
if (typeof data.answer === "string") {
return data.answer;
}
if (typeof record.text === "string") {
return record.text;
if (typeof data.text === "string") {
return data.text;
}
if (typeof record.delta === "string") {
return record.delta;
if (typeof data.delta === "string") {
return data.delta;
}
return "";
};

View File

@@ -1,227 +0,0 @@
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { ChatClient, DifyClient, WorkflowClient, BASE_URL, routes } from "./index";
import axios from "axios";
// (Deleted legacy axios-era test setup.) A single shared request mock is
// injected by stubbing axios.create; beforeEach resets both mocks and
// re-installs the stub so each test starts clean.
const mockRequest = vi.fn();
const setupAxiosMock = () => {
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
};
beforeEach(() => {
vi.restoreAllMocks();
mockRequest.mockReset();
setupAxiosMock();
});
// Legacy construction tests: the client must configure axios with the
// default base URL and a 60s timeout, and updateApiKey must be reflected
// by the underlying HttpClient settings.
describe("Client", () => {
it("should create a client", () => {
new DifyClient("test");
expect(axios.create).toHaveBeenCalledWith({
baseURL: BASE_URL,
timeout: 60000,
});
});
it("should update the api key", () => {
const difyClient = new DifyClient("test");
difyClient.updateApiKey("test2");
expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2");
});
});
// Legacy request-shape tests: verifies the axios config built for a plain
// sendRequest and for the getMeta route (method, url, params, bearer auth).
describe("Send Requests", () => {
it("should make a successful request to the application parameter", async () => {
const difyClient = new DifyClient("test");
const method = "GET";
const endpoint = routes.application.url();
mockRequest.mockResolvedValue({
status: 200,
data: "response",
headers: {},
});
await difyClient.sendRequest(method, endpoint);
const requestConfig = mockRequest.mock.calls[0][0];
expect(requestConfig).toMatchObject({
method,
url: endpoint,
params: undefined,
responseType: "json",
timeout: 60000,
});
expect(requestConfig.headers.Authorization).toBe("Bearer test");
});
it("uses the getMeta route configuration", async () => {
const difyClient = new DifyClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await difyClient.getMeta("end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getMeta.method,
url: routes.getMeta.url(),
params: { user: "end-user" },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});
// Legacy upload tests: a FormData stub with getHeaders() proves the client
// forwards the multipart boundary header untouched. The real global FormData
// is saved and restored around the suite.
describe("File uploads", () => {
const OriginalFormData = globalThis.FormData;
beforeAll(() => {
globalThis.FormData = class FormDataMock {
append() {}
getHeaders() {
return {
"content-type": "multipart/form-data; boundary=test",
};
}
};
});
afterAll(() => {
globalThis.FormData = OriginalFormData;
});
it("does not override multipart boundary headers for FormData", async () => {
const difyClient = new DifyClient("test");
const form = new globalThis.FormData();
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await difyClient.fileUpload(form, "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.fileUpload.method,
url: routes.fileUpload.url(),
params: undefined,
headers: expect.objectContaining({
Authorization: "Bearer test",
"content-type": "multipart/form-data; boundary=test",
}),
responseType: "json",
timeout: 60000,
data: form,
}));
});
});
// Legacy workflow tests: stop() must hit the tasks-stop route with the user
// in the JSON body; getLogs() must translate camelCase filters into the
// service API's snake_case / dunder query params.
describe("Workflow client", () => {
it("uses tasks stop path for workflow stop", async () => {
const workflowClient = new WorkflowClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "stopped", headers: {} });
await workflowClient.stop("task-1", "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.stopWorkflow.method,
url: routes.stopWorkflow.url("task-1"),
params: undefined,
headers: expect.objectContaining({
Authorization: "Bearer test",
"Content-Type": "application/json",
}),
responseType: "json",
timeout: 60000,
data: { user: "end-user" },
}));
});
it("maps workflow log filters to service api params", async () => {
const workflowClient = new WorkflowClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await workflowClient.getLogs({
createdAtAfter: "2024-01-01T00:00:00Z",
createdAtBefore: "2024-01-02T00:00:00Z",
createdByEndUserSessionId: "sess-1",
createdByAccount: "acc-1",
page: 2,
limit: 10,
});
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: "GET",
url: "/workflows/logs",
params: {
created_at__after: "2024-01-01T00:00:00Z",
created_at__before: "2024-01-02T00:00:00Z",
created_by_end_user_session_id: "sess-1",
created_by_account: "acc-1",
page: 2,
limit: 10,
},
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});
// Legacy chat tests: each endpoint's query-parameter mapping is checked
// (user for suggestions, user/last_id/limit for conversations, and plain
// page/limit — no user — for app feedbacks).
describe("Chat client", () => {
it("places user in query for suggested messages", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getSuggested("msg-1", "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getSuggested.method,
url: routes.getSuggested.url("msg-1"),
params: { user: "end-user" },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
it("uses last_id when listing conversations", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getConversations("end-user", "last-1", 10);
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getConversations.method,
url: routes.getConversations.url(),
params: { user: "end-user", last_id: "last-1", limit: 10 },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
it("lists app feedbacks without user params", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getAppFeedbacks(1, 20);
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: "GET",
url: "/app/feedbacks",
params: { page: 1, limit: 20 },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});

View File

@@ -0,0 +1,240 @@
import { Readable } from "node:stream";
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { BASE_URL, ChatClient, DifyClient, WorkflowClient, routes } from "./index";
// Swap the global `fetch` for a vitest mock and hand the mock back so a
// test can queue responses and inspect calls. Undone via vi.unstubAllGlobals().
const stubFetch = (): ReturnType<typeof vi.fn> => {
  const mock = vi.fn();
  vi.stubGlobal("fetch", mock);
  return mock;
};
/**
 * Build a JSON `Response` for fetch mocks.
 *
 * Defaults to status 200 and `content-type: application/json`; any field in
 * `init` (including a caller-supplied content-type) takes precedence.
 *
 * Fix: the previous version merged headers with an object spread
 * (`...(init.headers ?? {})`), which silently drops every entry when callers
 * pass a `Headers` instance or a tuple array — `Headers` entries are not
 * own-enumerable properties. Using the `Headers` constructor handles all
 * three `HeadersInit` shapes.
 */
const jsonResponse = (body: unknown, init: ResponseInit = {}): Response => {
  const headers = new Headers(init.headers);
  if (!headers.has("content-type"))
    headers.set("content-type", "application/json");
  return new Response(JSON.stringify(body), {
    status: 200,
    ...init,
    headers,
  });
};
describe("Client", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("creates a client with default settings", () => {
const difyClient = new DifyClient("test");
expect(difyClient.getHttpClient().getSettings()).toMatchObject({
apiKey: "test",
baseUrl: BASE_URL,
timeout: 60,
});
});
it("updates the api key", () => {
const difyClient = new DifyClient("test");
difyClient.updateApiKey("test2");
expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2");
});
});
describe("Send Requests", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("makes a successful request to the application parameter route", async () => {
const fetchMock = stubFetch();
const difyClient = new DifyClient("test");
const method = "GET";
const endpoint = routes.application.url();
fetchMock.mockResolvedValueOnce(jsonResponse("response"));
const response = await difyClient.sendRequest(method, endpoint);
expect(response).toMatchObject({
status: 200,
data: "response",
headers: {
"content-type": "application/json",
},
});
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${endpoint}`);
expect(init.method).toBe(method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
"User-Agent": "dify-client-node",
});
});
it("uses the getMeta route configuration", async () => {
const fetchMock = stubFetch();
const difyClient = new DifyClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await difyClient.getMeta("end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.getMeta.url()}?user=end-user`);
expect(init.method).toBe(routes.getMeta.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
});
});
});
describe("File uploads", () => {
const OriginalFormData = globalThis.FormData;
beforeAll(() => {
globalThis.FormData = class FormDataMock extends Readable {
constructor() {
super();
}
_read() {}
append() {}
getHeaders() {
return {
"content-type": "multipart/form-data; boundary=test",
};
}
} as unknown as typeof FormData;
});
afterAll(() => {
globalThis.FormData = OriginalFormData;
});
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("does not override multipart boundary headers for legacy FormData", async () => {
const fetchMock = stubFetch();
const difyClient = new DifyClient("test");
const form = new globalThis.FormData();
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await difyClient.fileUpload(form, "end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.fileUpload.url()}`);
expect(init.method).toBe(routes.fileUpload.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
"content-type": "multipart/form-data; boundary=test",
});
expect(init.body).not.toBe(form);
expect((init as RequestInit & { duplex?: string }).duplex).toBe("half");
});
});
describe("Workflow client", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("uses tasks stop path for workflow stop", async () => {
const fetchMock = stubFetch();
const workflowClient = new WorkflowClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ result: "success" }));
await workflowClient.stop("task-1", "end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.stopWorkflow.url("task-1")}`);
expect(init.method).toBe(routes.stopWorkflow.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
"Content-Type": "application/json",
});
expect(init.body).toBe(JSON.stringify({ user: "end-user" }));
});
it("maps workflow log filters to service api params", async () => {
const fetchMock = stubFetch();
const workflowClient = new WorkflowClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await workflowClient.getLogs({
createdAtAfter: "2024-01-01T00:00:00Z",
createdAtBefore: "2024-01-02T00:00:00Z",
createdByEndUserSessionId: "sess-1",
createdByAccount: "acc-1",
page: 2,
limit: 10,
});
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedUrl = new URL(url);
expect(parsedUrl.origin + parsedUrl.pathname).toBe(`${BASE_URL}/workflows/logs`);
expect(parsedUrl.searchParams.get("created_at__before")).toBe(
"2024-01-02T00:00:00Z"
);
expect(parsedUrl.searchParams.get("created_at__after")).toBe(
"2024-01-01T00:00:00Z"
);
expect(parsedUrl.searchParams.get("created_by_end_user_session_id")).toBe(
"sess-1"
);
expect(parsedUrl.searchParams.get("created_by_account")).toBe("acc-1");
expect(parsedUrl.searchParams.get("page")).toBe("2");
expect(parsedUrl.searchParams.get("limit")).toBe("10");
});
});
describe("Chat client", () => {
beforeEach(() => {
vi.restoreAllMocks();
vi.unstubAllGlobals();
});
it("places user in query for suggested messages", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ result: "success", data: [] }));
await chatClient.getSuggested("msg-1", "end-user");
const [url, init] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.getSuggested.url("msg-1")}?user=end-user`);
expect(init.method).toBe(routes.getSuggested.method);
expect(init.headers).toMatchObject({
Authorization: "Bearer test",
});
});
it("uses last_id when listing conversations", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ ok: true }));
await chatClient.getConversations("end-user", "last-1", 10);
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}${routes.getConversations.url()}?user=end-user&last_id=last-1&limit=10`);
});
it("lists app feedbacks without user params", async () => {
const fetchMock = stubFetch();
const chatClient = new ChatClient("test");
fetchMock.mockResolvedValueOnce(jsonResponse({ data: [] }));
await chatClient.getAppFeedbacks(1, 20);
const [url] = fetchMock.mock.calls[0] as [string, RequestInit];
expect(url).toBe(`${BASE_URL}/app/feedbacks?page=1&limit=20`);
});
});

View File

@@ -0,0 +1,9 @@
/**
 * Narrow an unknown value to an object-like record.
 * Note: arrays also pass this check (`typeof [] === "object"`), matching
 * the original behavior.
 */
export const isRecord = (value: unknown): value is Record<string, unknown> => {
  if (value === null)
    return false;
  return typeof value === "object";
};

/**
 * Narrow an unknown value to an object whose property `key` holds a string.
 */
export const hasStringProperty = <TKey extends string>(
  value: unknown,
  key: TKey
): value is Record<TKey, string> => {
  if (!isRecord(value))
    return false;
  return typeof value[key] === "string";
};

View File

@@ -15,4 +15,5 @@ export type AnnotationListOptions = {
keyword?: string;
};
export type AnnotationResponse = Record<string, unknown>;
export type AnnotationResponse = JsonObject;
import type { JsonObject } from "./common";

View File

@@ -1,17 +1,28 @@
import type { StreamEvent } from "./common";
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
export type ChatMessageRequest = {
inputs?: Record<string, unknown>;
inputs?: JsonObject;
query: string;
user: string;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
response_mode?: ResponseMode;
files?: DifyRequestFile[] | null;
conversation_id?: string;
auto_generate_name?: boolean;
workflow_id?: string;
retriever_from?: "app" | "dataset";
};
export type ChatMessageResponse = Record<string, unknown>;
export type ChatMessageResponse = JsonObject;
export type ChatStreamEvent = StreamEvent<Record<string, unknown>>;
export type ChatStreamEvent = StreamEvent<JsonObject>;
export type ConversationSortBy =
| "created_at"
| "-created_at"
| "updated_at"
| "-updated_at";

View File

@@ -1,9 +1,18 @@
import type { Readable } from "node:stream";
export const DEFAULT_BASE_URL = "https://api.dify.ai/v1";
export const DEFAULT_TIMEOUT_SECONDS = 60;
export const DEFAULT_MAX_RETRIES = 3;
export const DEFAULT_RETRY_DELAY_SECONDS = 1;
export type RequestMethod = "GET" | "POST" | "PATCH" | "PUT" | "DELETE";
export type ResponseMode = "blocking" | "streaming";
export type JsonPrimitive = string | number | boolean | null;
export type JsonValue = JsonPrimitive | JsonObject | JsonArray;
export type JsonObject = {
[key: string]: JsonValue;
};
export type JsonArray = JsonValue[];
export type QueryParamValue =
| string
@@ -15,6 +24,13 @@ export type QueryParamValue =
export type QueryParams = Record<string, QueryParamValue>;
export type Headers = Record<string, string>;
export type DifyRequestFile = JsonObject;
export type SuccessResponse = {
result: "success";
};
export type SuggestedQuestionsResponse = SuccessResponse & {
data: string[];
};
export type DifyClientConfig = {
apiKey: string;
@@ -54,18 +70,18 @@ export type StreamEvent<T = unknown> = {
};
export type DifyStream<T = unknown> = AsyncIterable<StreamEvent<T>> & {
data: NodeJS.ReadableStream;
data: Readable;
status: number;
headers: Headers;
requestId?: string;
toText(): Promise<string>;
toReadable(): NodeJS.ReadableStream;
toReadable(): Readable;
};
export type BinaryStream = {
data: NodeJS.ReadableStream;
data: Readable;
status: number;
headers: Headers;
requestId?: string;
toReadable(): NodeJS.ReadableStream;
toReadable(): Readable;
};

View File

@@ -1,13 +1,18 @@
import type { StreamEvent } from "./common";
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
export type CompletionRequest = {
inputs?: Record<string, unknown>;
response_mode?: "blocking" | "streaming";
inputs?: JsonObject;
response_mode?: ResponseMode;
user: string;
files?: Array<Record<string, unknown>> | null;
files?: DifyRequestFile[] | null;
retriever_from?: "app" | "dataset";
};
export type CompletionResponse = Record<string, unknown>;
export type CompletionResponse = JsonObject;
export type CompletionStreamEvent = StreamEvent<Record<string, unknown>>;
export type CompletionStreamEvent = StreamEvent<JsonObject>;

View File

@@ -14,7 +14,7 @@ export type DatasetCreateRequest = {
external_knowledge_api_id?: string | null;
provider?: string;
external_knowledge_id?: string | null;
retrieval_model?: Record<string, unknown> | null;
retrieval_model?: JsonObject | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
@@ -26,9 +26,9 @@ export type DatasetUpdateRequest = {
permission?: string | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
retrieval_model?: Record<string, unknown> | null;
retrieval_model?: JsonObject | null;
partial_member_list?: Array<Record<string, string>> | null;
external_retrieval_model?: Record<string, unknown> | null;
external_retrieval_model?: JsonObject | null;
external_knowledge_id?: string | null;
external_knowledge_api_id?: string | null;
};
@@ -61,12 +61,12 @@ export type DatasetTagUnbindingRequest = {
export type DocumentTextCreateRequest = {
name: string;
text: string;
process_rule?: Record<string, unknown> | null;
process_rule?: JsonObject | null;
original_document_id?: string | null;
doc_form?: string;
doc_language?: string;
indexing_technique?: string | null;
retrieval_model?: Record<string, unknown> | null;
retrieval_model?: JsonObject | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
@@ -74,10 +74,10 @@ export type DocumentTextCreateRequest = {
export type DocumentTextUpdateRequest = {
name?: string | null;
text?: string | null;
process_rule?: Record<string, unknown> | null;
process_rule?: JsonObject | null;
doc_form?: string;
doc_language?: string;
retrieval_model?: Record<string, unknown> | null;
retrieval_model?: JsonObject | null;
};
export type DocumentListOptions = {
@@ -92,7 +92,7 @@ export type DocumentGetOptions = {
};
export type SegmentCreateRequest = {
segments: Array<Record<string, unknown>>;
segments: JsonObject[];
};
export type SegmentUpdateRequest = {
@@ -155,8 +155,8 @@ export type MetadataOperationRequest = {
export type HitTestingRequest = {
query?: string | null;
retrieval_model?: Record<string, unknown> | null;
external_retrieval_model?: Record<string, unknown> | null;
retrieval_model?: JsonObject | null;
external_retrieval_model?: JsonObject | null;
attachment_ids?: string[] | null;
};
@@ -165,20 +165,21 @@ export type DatasourcePluginListOptions = {
};
export type DatasourceNodeRunRequest = {
inputs: Record<string, unknown>;
inputs: JsonObject;
datasource_type: string;
credential_id?: string | null;
is_published: boolean;
};
export type PipelineRunRequest = {
inputs: Record<string, unknown>;
inputs: JsonObject;
datasource_type: string;
datasource_info_list: Array<Record<string, unknown>>;
datasource_info_list: JsonObject[];
start_node_id: string;
is_published: boolean;
response_mode: "streaming" | "blocking";
response_mode: ResponseMode;
};
export type KnowledgeBaseResponse = Record<string, unknown>;
export type PipelineStreamEvent = Record<string, unknown>;
export type KnowledgeBaseResponse = JsonObject;
export type PipelineStreamEvent = JsonObject;
import type { JsonObject, ResponseMode } from "./common";

View File

@@ -1,12 +1,17 @@
import type { StreamEvent } from "./common";
import type {
DifyRequestFile,
JsonObject,
ResponseMode,
StreamEvent,
} from "./common";
export type WorkflowRunRequest = {
inputs?: Record<string, unknown>;
inputs?: JsonObject;
user: string;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
response_mode?: ResponseMode;
files?: DifyRequestFile[] | null;
};
export type WorkflowRunResponse = Record<string, unknown>;
export type WorkflowRunResponse = JsonObject;
export type WorkflowStreamEvent = StreamEvent<Record<string, unknown>>;
export type WorkflowStreamEvent = StreamEvent<JsonObject>;

View File

@@ -1,2 +1,4 @@
import type { JsonObject } from "./common";
export type WorkspaceModelType = string;
export type WorkspaceModelsResponse = Record<string, unknown>;
export type WorkspaceModelsResponse = JsonObject;

View File

@@ -0,0 +1,137 @@
import { createServer } from "node:http";
import { Readable } from "node:stream";
import type { AddressInfo } from "node:net";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import { HttpClient } from "../src/http/client";
// Drain a readable stream into a single Buffer, normalising any
// string chunks to Buffers before the final concatenation.
const readBody = async (stream: NodeJS.ReadableStream): Promise<Buffer> => {
  const collected: Buffer[] = [];
  for await (const part of stream) {
    const asBuffer = Buffer.isBuffer(part) ? part : Buffer.from(part);
    collected.push(asBuffer);
  }
  return Buffer.concat(collected);
};
describe("HttpClient integration", () => {
const requests: Array<{
url: string;
method: string;
headers: Record<string, string | string[] | undefined>;
body: Buffer;
}> = [];
const server = createServer((req, res) => {
void (async () => {
const body = await readBody(req);
requests.push({
url: req.url ?? "",
method: req.method ?? "",
headers: req.headers,
body,
});
if (req.url?.startsWith("/json")) {
res.writeHead(200, { "content-type": "application/json", "x-request-id": "req-json" });
res.end(JSON.stringify({ ok: true }));
return;
}
if (req.url === "/stream") {
res.writeHead(200, { "content-type": "text/event-stream" });
res.end('data: {"answer":"hello"}\n\ndata: {"delta":" world"}\n\n');
return;
}
if (req.url === "/bytes") {
res.writeHead(200, { "content-type": "application/octet-stream" });
res.end(Buffer.from([1, 2, 3, 4]));
return;
}
if (req.url === "/upload-stream") {
res.writeHead(200, { "content-type": "application/json" });
res.end(JSON.stringify({ received: body.toString("utf8") }));
return;
}
res.writeHead(404, { "content-type": "application/json" });
res.end(JSON.stringify({ message: "not found" }));
})();
});
let client: HttpClient;
beforeAll(async () => {
await new Promise<void>((resolve) => {
server.listen(0, "127.0.0.1", () => resolve());
});
const address = server.address() as AddressInfo;
client = new HttpClient({
apiKey: "test-key",
baseUrl: `http://127.0.0.1:${address.port}`,
maxRetries: 0,
retryDelay: 0,
});
});
afterAll(async () => {
await new Promise<void>((resolve, reject) => {
server.close((error) => {
if (error) {
reject(error);
return;
}
resolve();
});
});
});
it("uses real fetch for query serialization and json bodies", async () => {
const response = await client.request({
method: "POST",
path: "/json",
query: { tag_ids: ["a", "b"], limit: 2 },
data: { user: "u" },
});
expect(response.requestId).toBe("req-json");
expect(response.data).toEqual({ ok: true });
expect(requests.at(-1)).toMatchObject({
url: "/json?tag_ids=a&tag_ids=b&limit=2",
method: "POST",
});
expect(requests.at(-1)?.headers.authorization).toBe("Bearer test-key");
expect(requests.at(-1)?.headers["content-type"]).toBe("application/json");
expect(requests.at(-1)?.body.toString("utf8")).toBe(JSON.stringify({ user: "u" }));
});
it("supports streaming request bodies with duplex fetch", async () => {
const response = await client.request<{ received: string }>({
method: "POST",
path: "/upload-stream",
data: Readable.from(["hello ", "world"]),
});
expect(response.data).toEqual({ received: "hello world" });
expect(requests.at(-1)?.body.toString("utf8")).toBe("hello world");
});
it("parses real sse responses into text", async () => {
const stream = await client.requestStream({
method: "GET",
path: "/stream",
});
await expect(stream.toText()).resolves.toBe("hello world");
});
it("parses real byte responses into buffers", async () => {
const response = await client.request<Buffer, "bytes">({
method: "GET",
path: "/bytes",
responseType: "bytes",
});
expect(Array.from(response.data.values())).toEqual([1, 2, 3, 4]);
});
});

View File

@@ -1,30 +0,0 @@
import axios from "axios";
import { vi } from "vitest";
import { HttpClient } from "../src/http/client";
export const createHttpClient = (configOverrides = {}) => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", ...configOverrides });
return { client, mockRequest };
};
export const createHttpClientWithSpies = (configOverrides = {}) => {
const { client, mockRequest } = createHttpClient(configOverrides);
const request = vi
.spyOn(client, "request")
.mockResolvedValue({ data: "ok", status: 200, headers: {} });
const requestStream = vi
.spyOn(client, "requestStream")
.mockResolvedValue({ data: null });
const requestBinaryStream = vi
.spyOn(client, "requestBinaryStream")
.mockResolvedValue({ data: null });
return {
client,
mockRequest,
request,
requestStream,
requestBinaryStream,
};
};

View File

@@ -0,0 +1,48 @@
import { vi } from "vitest";
import { HttpClient } from "../src/http/client";
import type { DifyClientConfig, DifyResponse } from "../src/types/common";
type FetchMock = ReturnType<typeof vi.fn>;
type RequestSpy = ReturnType<typeof vi.fn>;
type HttpClientWithFetchMock = {
client: HttpClient;
fetchMock: FetchMock;
};
type HttpClientWithSpies = HttpClientWithFetchMock & {
request: RequestSpy;
requestStream: RequestSpy;
requestBinaryStream: RequestSpy;
};
// Stub the global fetch with a vitest mock, then build an HttpClient on top
// of it so every request the client issues goes through the mock.
export const createHttpClient = (
  configOverrides: Partial<DifyClientConfig> = {}
): HttpClientWithFetchMock => {
  const fetchMock = vi.fn();
  vi.stubGlobal("fetch", fetchMock);
  return {
    client: new HttpClient({ apiKey: "test", ...configOverrides }),
    fetchMock,
  };
};
// Like createHttpClient, but additionally replaces the three request entry
// points with spies that resolve to canned responses, so callers can assert
// on how client methods were invoked without touching the network.
export const createHttpClientWithSpies = (
  configOverrides: Partial<DifyClientConfig> = {}
): HttpClientWithSpies => {
  const base = createHttpClient(configOverrides);
  const cannedJson = { data: "ok", status: 200, headers: {} } as DifyResponse<string>;
  const cannedStream = { data: null, status: 200, headers: {} } as never;
  const request = vi
    .spyOn(base.client, "request")
    .mockResolvedValue(cannedJson);
  const requestStream = vi
    .spyOn(base.client, "requestStream")
    .mockResolvedValue(cannedStream);
  const requestBinaryStream = vi
    .spyOn(base.client, "requestBinaryStream")
    .mockResolvedValue(cannedStream);
  return {
    ...base,
    request,
    requestStream,
    requestBinaryStream,
  };
};

View File

@@ -3,7 +3,7 @@
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "Bundler",
"rootDir": "src",
"rootDir": ".",
"outDir": "dist",
"declaration": true,
"declarationMap": true,
@@ -13,5 +13,5 @@
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts"]
"include": ["src/**/*.ts", "tests/**/*.ts"]
}

View File

@@ -3,7 +3,7 @@ import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
environment: "node",
include: ["**/*.test.js"],
include: ["**/*.test.ts"],
coverage: {
provider: "v8",
reporter: ["text", "text-summary"],

View File

@@ -1,6 +1,7 @@
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createReactI18nextMock } from '@/test/i18n-mock'
import ErrorBoundary, { ErrorFallback, useAsyncError, useErrorHandler, withErrorBoundary } from '../index'
const mockConfig = vi.hoisted(() => ({
@@ -13,6 +14,19 @@ vi.mock('@/config', () => ({
},
}))
vi.mock('react-i18next', () => createReactI18nextMock({
'error': 'Error',
'errorBoundary.componentStack': 'Component Stack:',
'errorBoundary.details': 'Error Details (Development Only)',
'errorBoundary.errorCount': 'This error has occurred {{count}} times',
'errorBoundary.fallbackTitle': 'Oops! Something went wrong',
'errorBoundary.message': 'An unexpected error occurred while rendering this component.',
'errorBoundary.reloadPage': 'Reload Page',
'errorBoundary.title': 'Something went wrong',
'errorBoundary.tryAgain': 'Try Again',
'errorBoundary.tryAgainCompact': 'Try again',
}))
type ThrowOnRenderProps = {
message?: string
shouldThrow: boolean

View File

@@ -3,6 +3,7 @@ import type { ErrorInfo, ReactNode } from 'react'
import { RiAlertLine, RiBugLine } from '@remixicon/react'
import * as React from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { IS_DEV } from '@/config'
import { cn } from '@/utils/classnames'
@@ -29,9 +30,21 @@ type ErrorBoundaryProps = {
customMessage?: string
}
type ErrorBoundaryCopy = {
componentStack: string
details: string
error: string
formatErrorCount: (count: number) => string
message: string
reload: string
title: string
tryAgain: string
}
// Internal class component for error catching
class ErrorBoundaryInner extends React.Component<
ErrorBoundaryProps & {
copy: ErrorBoundaryCopy
resetErrorBoundary: () => void
onResetKeysChange: (prevResetKeys?: Array<string | number>) => void
},
@@ -96,6 +109,7 @@ class ErrorBoundaryInner extends React.Component<
enableRecovery = true,
customTitle,
customMessage,
copy,
resetErrorBoundary,
} = this.props
@@ -118,12 +132,12 @@ class ErrorBoundaryInner extends React.Component<
<div className="mb-4 flex items-center gap-2">
<RiAlertLine className="text-state-critical-solid h-8 w-8" />
<h2 className="text-xl font-semibold text-text-primary">
{customTitle || 'Something went wrong'}
{customTitle || copy.title}
</h2>
</div>
<p className="mb-6 text-center text-text-secondary">
{customMessage || 'An unexpected error occurred while rendering this component.'}
{customMessage || copy.message}
</p>
{showDetails && errorInfo && (
@@ -131,19 +145,19 @@ class ErrorBoundaryInner extends React.Component<
<summary className="mb-2 cursor-pointer text-sm font-medium text-text-tertiary hover:text-text-secondary">
<span className="inline-flex items-center gap-1">
<RiBugLine className="h-4 w-4" />
Error Details (Development Only)
{copy.details}
</span>
</summary>
<div className="rounded-lg bg-gray-100 p-4">
<div className="mb-2">
<span className="font-mono text-xs font-semibold text-gray-600">Error:</span>
<span className="font-mono text-xs font-semibold text-gray-600">{copy.error}</span>
<pre className="mt-1 overflow-auto whitespace-pre-wrap font-mono text-xs text-gray-800">
{error.toString()}
</pre>
</div>
{errorInfo && (
<div>
<span className="font-mono text-xs font-semibold text-gray-600">Component Stack:</span>
<span className="font-mono text-xs font-semibold text-gray-600">{copy.componentStack}</span>
<pre className="mt-1 max-h-40 overflow-auto whitespace-pre-wrap font-mono text-xs text-gray-700">
{errorInfo.componentStack}
</pre>
@@ -151,11 +165,7 @@ class ErrorBoundaryInner extends React.Component<
)}
{errorCount > 1 && (
<div className="mt-2 text-xs text-gray-600">
This error has occurred
{' '}
{errorCount}
{' '}
times
{copy.formatErrorCount(errorCount)}
</div>
)}
</div>
@@ -169,14 +179,14 @@ class ErrorBoundaryInner extends React.Component<
size="small"
onClick={resetErrorBoundary}
>
Try Again
{copy.tryAgain}
</Button>
<Button
variant="secondary"
size="small"
onClick={() => window.location.reload()}
>
Reload Page
{copy.reload}
</Button>
</div>
)}
@@ -190,9 +200,20 @@ class ErrorBoundaryInner extends React.Component<
// Main functional component wrapper
const ErrorBoundary: React.FC<ErrorBoundaryProps> = (props) => {
const { t } = useTranslation()
const [errorBoundaryKey, setErrorBoundaryKey] = useState(0)
const resetKeysRef = useRef(props.resetKeys)
const prevResetKeysRef = useRef<Array<string | number> | undefined>(undefined)
const copy = {
componentStack: t('errorBoundary.componentStack', { ns: 'common' }),
details: t('errorBoundary.details', { ns: 'common' }),
error: `${t('error', { ns: 'common' })}:`,
formatErrorCount: (count: number) => t('errorBoundary.errorCount', { ns: 'common', count }),
message: t('errorBoundary.message', { ns: 'common' }),
reload: t('errorBoundary.reloadPage', { ns: 'common' }),
title: t('errorBoundary.title', { ns: 'common' }),
tryAgain: t('errorBoundary.tryAgain', { ns: 'common' }),
}
const resetErrorBoundary = useCallback(() => {
setErrorBoundaryKey(prev => prev + 1)
@@ -211,6 +232,7 @@ const ErrorBoundary: React.FC<ErrorBoundaryProps> = (props) => {
return (
<ErrorBoundaryInner
{...props}
copy={copy}
key={errorBoundaryKey}
resetErrorBoundary={resetErrorBoundary}
onResetKeysChange={onResetKeysChange}
@@ -265,12 +287,14 @@ export const ErrorFallback: React.FC<{
error: Error
resetErrorBoundaryAction: () => void
}> = ({ error, resetErrorBoundaryAction }) => {
const { t } = useTranslation()
return (
<div className="flex min-h-[200px] flex-col items-center justify-center rounded-lg border border-red-200 bg-red-50 p-8">
<h2 className="mb-2 text-lg font-semibold text-red-800">Oops! Something went wrong</h2>
<h2 className="mb-2 text-lg font-semibold text-red-800">{t('errorBoundary.fallbackTitle', { ns: 'common' })}</h2>
<p className="mb-4 text-center text-red-600">{error.message}</p>
<Button onClick={resetErrorBoundaryAction} size="small">
Try again
{t('errorBoundary.tryAgainCompact', { ns: 'common' })}
</Button>
</div>
)

View File

@@ -3,9 +3,17 @@ import type { TriggerSubscription } from '@/app/components/workflow/block-select
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { TriggerCredentialTypeEnum } from '@/app/components/workflow/block-selector/types'
import { createReactI18nextMock } from '@/test/i18n-mock'
import { SubscriptionList } from '../index'
import { SubscriptionListMode } from '../types'
vi.mock('react-i18next', () => createReactI18nextMock({
'errorBoundary.title': 'Something went wrong',
'errorBoundary.message': 'An unexpected error occurred while rendering this component.',
'errorBoundary.tryAgain': 'Try Again',
'errorBoundary.reloadPage': 'Reload Page',
}))
const mockRefetch = vi.fn()
let mockSubscriptionListError: Error | null = null
let mockSubscriptionListState: {
@@ -209,12 +217,12 @@ describe('SubscriptionList', () => {
})
describe('Edge Cases', () => {
it('should render error boundary fallback when an error occurs', () => {
it('should render error boundary fallback when an error occurs', async () => {
mockSubscriptionListError = new Error('boom')
render(<SubscriptionList />)
expect(screen.getByText('Something went wrong')).toBeInTheDocument()
expect(await screen.findByText('Something went wrong')).toBeInTheDocument()
})
})
})

View File

@@ -164,6 +164,15 @@
"environment.development": "DEVELOPMENT",
"environment.testing": "TESTING",
"error": "Error",
"errorBoundary.componentStack": "Component Stack:",
"errorBoundary.details": "Error Details (Development Only)",
"errorBoundary.errorCount": "This error has occurred {{count}} times",
"errorBoundary.fallbackTitle": "Oops! Something went wrong",
"errorBoundary.message": "An unexpected error occurred while rendering this component.",
"errorBoundary.reloadPage": "Reload Page",
"errorBoundary.title": "Something went wrong",
"errorBoundary.tryAgain": "Try Again",
"errorBoundary.tryAgainCompact": "Try again",
"errorMsg.fieldRequired": "{{field}} is required",
"errorMsg.urlError": "url should start with http:// or https://",
"feedback.content": "Feedback Content",