fix(app-assets): restore atomic batch upload for nested folder targets

The previous nested folder upload flow bypassed the backend batch-upload
contract when parentId was set. Instead of creating the whole metadata
tree in one backend operation, the frontend recursively called
createFolder/getFileUploadUrl for each node.

That introduced two regressions for uploads into subfolders:

- consistency regression: mid-sequence failures could leave partially
  created folder trees under the destination folder
- performance regression: metadata creation degraded from a single
  batch request to O(files + folders) round-trips before file bytes
  were uploaded

This change moves nested uploads back to the original batch semantics:

- add optional parent_id support to app asset batch-upload payload
- create the whole nested tree under the target parent in
  AppAssetService.batch_create_from_tree
- pass parentId through useBatchUpload instead of using per-node
  createFolder/getFileUploadUrl calls
- remove the now-unnecessary useBatchUploadOperation wrapper
- add a backend unit test covering batch tree creation under an
  existing parent folder

After this change, both root uploads and subfolder uploads use the same
single-request metadata creation path, preserving atomic tree creation
semantics and avoiding unnecessary metadata round-trips.
This commit is contained in:
yyh
2026-03-26 15:16:55 +08:00
parent 4d95cee44e
commit 12ca422c8a
7 changed files with 123 additions and 158 deletions

View File

@@ -65,6 +65,12 @@ class GetUploadUrlPayload(BaseModel):
class BatchUploadPayload(BaseModel):
    """Request payload for batch upload: a non-empty node tree plus an optional target folder."""

    # Tree of folders/files to create; at least one root node is required.
    children: list[BatchUploadNode] = Field(..., min_length=1)
    # Id of the folder to create the tree under; None targets the asset-tree root.
    parent_id: str | None = None

    @field_validator("parent_id", mode="before")
    @classmethod
    def empty_to_none(cls, v: str | None) -> str | None:
        """Normalize falsy inputs (e.g. the empty string) to None before validation."""
        if v:
            return v
        return None
class UpdateFileContentPayload(BaseModel):
@@ -291,6 +297,7 @@ class AppAssetBatchUploadResource(Resource):
Input:
{
"parent_id": "optional-target-folder-id",
"children": [
{"name": "folder1", "node_type": "folder", "children": [
{"name": "file1.txt", "node_type": "file", "size": 1024}
@@ -313,7 +320,12 @@ class AppAssetBatchUploadResource(Resource):
payload = BatchUploadPayload.model_validate(console_ns.payload or {})
try:
result_children = AppAssetService.batch_create_from_tree(app_model, current_user.id, payload.children)
result_children = AppAssetService.batch_create_from_tree(
app_model,
current_user.id,
payload.children,
parent_id=payload.parent_id,
)
return {"children": [child.model_dump() for child in result_children]}, 201
except AppAssetParentNotFoundError:
raise AppAssetNodeNotFoundError()

View File

@@ -505,8 +505,16 @@ class AppAssetService:
app_model: App,
account_id: str,
input_children: list[BatchUploadNode],
parent_id: str | None = None,
expires_in: int = 3600,
) -> list[BatchUploadNode]:
"""
Create a nested batch-upload tree under one parent in a single tree mutation.
The full metadata tree is added to the draft asset tree before the method
returns any upload URLs. That preserves sibling name de-duplication and
keeps nested uploads atomic for both root and subfolder targets.
"""
if not input_children:
return []
@@ -516,16 +524,20 @@ class AppAssetService:
tree = assets.asset_tree
taken_by_parent: dict[str | None, set[str]] = {}
stack: list[tuple[BatchUploadNode, str | None]] = [(child, None) for child in reversed(input_children)]
stack: list[tuple[BatchUploadNode, str | None]] = [
(child, parent_id) for child in reversed(input_children)
]
while stack:
node, parent_id = stack.pop()
node, current_parent_id = stack.pop()
if node.id is None:
node.id = str(uuid4())
if parent_id not in taken_by_parent:
taken_by_parent[parent_id] = {child.name for child in tree.get_children(parent_id)}
taken = taken_by_parent[parent_id]
if current_parent_id not in taken_by_parent:
taken_by_parent[current_parent_id] = {
child.name for child in tree.get_children(current_parent_id)
}
taken = taken_by_parent[current_parent_id]
unique_name = tree.ensure_unique_name(
parent_id,
current_parent_id,
node.name,
is_file=node.node_type == AssetNodeType.FILE,
extra_taken=taken,
@@ -538,7 +550,7 @@ class AppAssetService:
new_nodes: list[AppAssetNode] = []
for child in input_children:
new_nodes.extend(child.to_app_asset_nodes(None))
new_nodes.extend(child.to_app_asset_nodes(parent_id))
try:
for node in new_nodes:

View File

@@ -0,0 +1,86 @@
from types import SimpleNamespace
from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode, AssetNodeType, BatchUploadNode
from core.app_assets.storage import AssetPaths
from services import app_asset_service
from services.app_asset_service import AppAssetService
class DummyLock:
    """No-op context manager standing in for the service's per-app lock."""

    def __enter__(self) -> "DummyLock":
        # Nothing to acquire; hand back the lock object itself.
        return self

    def __exit__(self, exc_type, exc, tb) -> bool:
        # Never suppress exceptions raised inside the with-block.
        return False
class DummySession:
    """Minimal session double that only records whether commit() was called."""

    # Flipped to True by commit(); asserted on by the tests.
    committed: bool

    def __init__(self) -> None:
        self.committed = False

    def __enter__(self) -> "DummySession":
        return self

    def __exit__(self, exc_type, exc, tb) -> bool:
        # Do not swallow exceptions from the with-block.
        return False

    def commit(self) -> None:
        # Mark that the code under test reached its commit point.
        self.committed = True
class DummyStorage:
    """Fake asset storage that records requested keys and returns deterministic URLs."""

    # Every key passed to get_upload_url, in call order.
    keys: list[str]

    def __init__(self) -> None:
        self.keys = []

    def get_upload_url(self, key: str, expires_in: int) -> str:
        # Remember the key so tests can assert which objects got upload URLs.
        self.keys.append(key)
        return "https://upload.local/{}?expires_in={}".format(key, expires_in)
def test_batch_create_from_tree_creates_nodes_under_parent(monkeypatch):
    """Batch tree creation should attach new nodes under an existing parent folder."""
    dummy_session = DummySession()
    dummy_storage = DummyStorage()
    tenant_id = "11111111-1111-4111-8111-111111111111"
    app_id = "22222222-2222-4222-8222-222222222222"

    # Seed the draft asset tree with one pre-existing folder to upload into.
    parent_folder = AppAssetNode.create_folder("33333333-3333-4333-8333-333333333333", "existing-parent")
    assets = SimpleNamespace(asset_tree=AppAssetFileTree(nodes=[parent_folder]), updated_by=None)
    app_model = SimpleNamespace(id=app_id, tenant_id=tenant_id)

    # Stub out infrastructure: locking, DB engine/session, asset loading, storage.
    monkeypatch.setattr(AppAssetService, "_lock", staticmethod(lambda _app_id: DummyLock()))
    monkeypatch.setattr(app_asset_service, "db", SimpleNamespace(engine=object()))
    monkeypatch.setattr(app_asset_service, "Session", lambda *args, **kwargs: dummy_session)
    monkeypatch.setattr(
        AppAssetService,
        "get_or_create_assets",
        staticmethod(lambda _session, _app_model, _account_id: assets),
    )
    monkeypatch.setattr(AppAssetService, "get_storage", staticmethod(lambda: dummy_storage))

    result = AppAssetService.batch_create_from_tree(
        app_model,
        "account-1",
        [
            BatchUploadNode(
                name="docs",
                node_type=AssetNodeType.FOLDER,
                children=[
                    BatchUploadNode(name="guide.md", node_type=AssetNodeType.FILE, size=12),
                ],
            ),
        ],
        parent_id=parent_folder.id,
    )

    created_folder = next(node for node in assets.asset_tree.nodes if node.name == "docs")
    created_file = next(node for node in assets.asset_tree.nodes if node.name == "guide.md")
    # The new folder hangs off the supplied parent; the file hangs off the new folder.
    assert created_folder.parent_id == parent_folder.id
    assert created_file.parent_id == created_folder.id
    assert created_file.id == result[0].children[0].id
    assert assets.updated_by == "account-1"
    assert dummy_session.committed is True
    # Exactly one presigned URL was requested, keyed by the new file's draft path.
    assert dummy_storage.keys == [AssetPaths.draft(tenant_id, app_id, created_file.id)]

View File

@@ -1,147 +0,0 @@
'use client'
import type { BatchUploadNodeInput, BatchUploadNodeOutput } from '@/types/app-asset'
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { consoleClient, consoleQuery } from '@/service/client'
import { uploadToPresignedUrl } from '@/service/upload-to-presigned-url'
import { useBatchUpload } from '@/service/use-app-asset'
// Inputs for one nested batch-upload operation (tree metadata plus file bytes).
type BatchUploadOperationVariables = {
  appId: string
  tree: BatchUploadNodeInput[]
  // Maps each file's slash-joined source path within `tree` to its File object.
  files: Map<string, File>
  // Target folder id; absent/null means upload at the asset-tree root.
  parentId?: string | null
  onProgress?: (uploaded: number, total: number) => void
}
// One pending byte upload: the file and its presigned destination URL.
type BatchUploadTask = {
  file: File
  url: string
}
/**
 * Upload all file bytes to their presigned URLs concurrently, reporting
 * progress after each individual upload finishes.
 */
const uploadBatchUploadTasks = async ({
  tasks,
  onProgress,
}: {
  tasks: BatchUploadTask[]
  onProgress?: (uploaded: number, total: number) => void
}) => {
  const total = tasks.length
  let finishedCount = 0
  const runTask = async ({ file, url }: BatchUploadTask) => {
    await uploadToPresignedUrl({
      file,
      uploadUrl: url,
    })
    finishedCount += 1
    onProgress?.(finishedCount, total)
  }
  await Promise.all(tasks.map(runTask))
}
/**
 * Recursively create folder/file metadata under `parentId` — one backend call
 * per node — and collect a presigned upload task for every file encountered.
 *
 * Nodes are created sequentially so server-side sibling name handling happens
 * in a stable order; file bytes are NOT uploaded here, only tasks collected.
 */
const createBatchUploadTreeInParent = async ({
  appId,
  tree,
  files,
  parentId,
  pathPrefix = '',
}: {
  appId: string
  tree: BatchUploadNodeInput[]
  files: Map<string, File>
  parentId: string
  pathPrefix?: string
}): Promise<{ nodes: BatchUploadNodeOutput[], tasks: BatchUploadTask[] }> => {
  const nodes: BatchUploadNodeOutput[] = []
  const tasks: BatchUploadTask[] = []
  for (const entry of tree) {
    // Slash-joined path identifying this node inside the `files` map.
    const sourcePath = pathPrefix === '' ? entry.name : `${pathPrefix}/${entry.name}`
    if (entry.node_type !== 'folder') {
      // File node: resolve its File object and request a presigned upload URL.
      const file = files.get(sourcePath)
      if (!file)
        throw new Error(`Missing file for batch upload path: ${sourcePath}`)
      const { node, upload_url } = await consoleClient.appAsset.getFileUploadUrl({
        params: { appId },
        body: {
          name: entry.name,
          size: entry.size ?? file.size,
          parent_id: parentId,
        },
      })
      nodes.push({
        id: node.id,
        name: node.name,
        node_type: node.node_type,
        size: node.size,
        children: [],
        upload_url,
      })
      tasks.push({ file, url: upload_url })
      continue
    }
    // Folder node: create it on the backend, then descend into its children.
    const folder = await consoleClient.appAsset.createFolder({
      params: { appId },
      body: { name: entry.name, parent_id: parentId },
    })
    const subtree = await createBatchUploadTreeInParent({
      appId,
      tree: entry.children ?? [],
      files,
      parentId: folder.id,
      pathPrefix: sourcePath,
    })
    nodes.push({
      id: folder.id,
      name: folder.name,
      node_type: folder.node_type,
      size: folder.size,
      children: subtree.nodes,
    })
    tasks.push(...subtree.tasks)
  }
  return { nodes, tasks }
}
/**
 * Mutation hook for uploading a nested tree of files.
 *
 * Root-level uploads (no parentId) delegate to the batch endpoint; uploads
 * into an existing folder create metadata per node, then push the file bytes.
 * The asset-tree query is invalidated afterwards in the subfolder path.
 */
export function useBatchUploadOperation() {
  const queryClient = useQueryClient()
  const batchUpload = useBatchUpload()
  return useMutation({
    mutationKey: consoleQuery.appAsset.batchUpload.mutationKey(),
    mutationFn: async (variables: BatchUploadOperationVariables): Promise<BatchUploadNodeOutput[]> => {
      const { appId, tree, files, parentId, onProgress } = variables
      if (!parentId)
        return batchUpload.mutateAsync(variables)
      try {
        const { nodes, tasks } = await createBatchUploadTreeInParent({
          appId,
          tree,
          files,
          parentId,
        })
        await uploadBatchUploadTasks({
          tasks,
          onProgress,
        })
        return nodes
      }
      finally {
        // Refresh the cached asset tree whether the upload succeeded or failed.
        await queryClient.invalidateQueries({
          queryKey: consoleQuery.appAsset.tree.key({ type: 'query', input: { params: { appId } } }),
        })
      }
    },
  })
}

View File

@@ -5,12 +5,12 @@ import type { SkillEditorSliceShape } from '@/app/components/workflow/store/work
import type { BatchUploadNodeInput } from '@/types/app-asset'
import { useCallback, useRef } from 'react'
import {
useBatchUpload,
useCreateAppAssetFolder,
useUploadFileWithPresignedUrl,
} from '@/service/use-app-asset'
import { prepareSkillUploadFile } from '../../../utils/skill-upload-utils'
import { useSkillTreeUpdateEmitter } from '../data/use-skill-tree-collaboration'
import { useBatchUploadOperation } from './use-batch-upload-operation'
type UseCreateOperationsOptions = {
parentId: string | null
@@ -34,7 +34,7 @@ export function useCreateOperations({
const { isPending: isCreateFolderPending } = useCreateAppAssetFolder()
const { mutateAsync: uploadFileAsync, isPending: isUploadFilePending } = useUploadFileWithPresignedUrl()
const { mutateAsync: batchUploadAsync, isPending: isBatchUploadPending } = useBatchUploadOperation()
const { mutateAsync: batchUploadAsync, isPending: isBatchUploadPending } = useBatchUpload()
const emitTreeUpdate = useSkillTreeUpdateEmitter()
const handleNewFile = useCallback(() => {

View File

@@ -271,6 +271,7 @@ export const useBatchUpload = () => {
appId,
tree,
files,
parentId,
onProgress,
}: {
appId: string
@@ -281,7 +282,7 @@ export const useBatchUpload = () => {
}): Promise<BatchUploadNodeOutput[]> => {
const response = await consoleClient.appAsset.batchUpload({
params: { appId },
body: { children: tree },
body: { children: tree, parent_id: parentId },
})
const uploadTasks: Array<{ path: string, file: File, url: string }> = []

View File

@@ -179,6 +179,7 @@ export type BatchUploadNodeOutput = {
*/
export type BatchUploadPayload = {
  children: BatchUploadNodeInput[]
  // Target folder id; omitted/null creates the tree at the asset-tree root.
  parent_id?: string | null
}
/**