refactor: use EnumText for model_type and WorkflowNodeExecution.status (#34093)

Co-authored-by: Krishna Chaitanya <krishnabkc15@gmail.com>
This commit is contained in:
tmimmanuel
2026-03-26 21:34:44 +01:00
committed by GitHub
parent 1f11300175
commit 2ea85d3ba2
4 changed files with 14 additions and 7 deletions

View File

@@ -39,6 +39,7 @@ from core.ops.entities.trace_entity import (
 )
 from core.repositories import DifyCoreRepositoryFactory
 from extensions.ext_database import db
+from graphon.enums import WorkflowNodeExecutionStatus
 from models.model import EndUser, MessageFile
 from models.workflow import WorkflowNodeExecutionTriggeredFrom
@@ -300,7 +301,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 "app_name": node_execution.title,
                 "status": node_execution.status,
                 "status_message": node_execution.error or "",
-                "level": "ERROR" if node_execution.status == "failed" else "DEFAULT",
+                "level": "ERROR" if node_execution.status == WorkflowNodeExecutionStatus.FAILED else "DEFAULT",
             }
         )
@@ -361,7 +362,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 llm_attributes.update(self._construct_llm_attributes(process_data.get("prompts", [])))
                 node_span.set_attributes(llm_attributes)
             finally:
-                if node_execution.status == "failed":
+                if node_execution.status == WorkflowNodeExecutionStatus.FAILED:
                     set_span_status(node_span, node_execution.error)
                 else:
                     set_span_status(node_span)

View File

@@ -60,7 +60,7 @@ def _dict_to_workflow_node_execution_model(data: dict[str, Any]) -> WorkflowNode
     model.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN
     model.node_id = data.get("node_id") or ""
     model.node_type = data.get("node_type") or ""
-    model.status = data.get("status") or "running"  # Default status if missing
+    model.status = WorkflowNodeExecutionStatus(data.get("status") or "running")
     model.title = data.get("title") or ""
     created_by_role_val = data.get("created_by_role")
     try:

View File

@@ -33,7 +33,13 @@ from extensions.ext_storage import Storage
 from factories.variable_factory import TypeMismatchError, build_segment_with_type
 from graphon.entities.graph_config import NodeConfigDict, NodeConfigDictAdapter
 from graphon.entities.pause_reason import HumanInputRequired, PauseReason, PauseReasonType, SchedulingPause
-from graphon.enums import BuiltinNodeTypes, NodeType, WorkflowExecutionStatus, WorkflowNodeExecutionMetadataKey
+from graphon.enums import (
+    BuiltinNodeTypes,
+    NodeType,
+    WorkflowExecutionStatus,
+    WorkflowNodeExecutionMetadataKey,
+    WorkflowNodeExecutionStatus,
+)
 from graphon.file.constants import maybe_file_object
 from graphon.file.models import File
 from graphon.variables import utils as variable_utils
@@ -941,7 +947,7 @@ class WorkflowNodeExecutionModel(Base):  # This model is expected to have `offlo
     inputs: Mapped[str | None] = mapped_column(LongText)
     process_data: Mapped[str | None] = mapped_column(LongText)
     outputs: Mapped[str | None] = mapped_column(LongText)
-    status: Mapped[str] = mapped_column(String(255))
+    status: Mapped[WorkflowNodeExecutionStatus] = mapped_column(EnumText(WorkflowNodeExecutionStatus, length=255))
     error: Mapped[str | None] = mapped_column(LongText)
     elapsed_time: Mapped[float] = mapped_column(sa.Float, server_default=sa.text("0"))
     execution_metadata: Mapped[str | None] = mapped_column(LongText)

View File

@@ -125,7 +125,7 @@ def _create_node_execution_from_domain(
     else:
         node_execution.execution_metadata = "{}"
-    node_execution.status = execution.status.value
+    node_execution.status = execution.status
     node_execution.error = execution.error
    node_execution.elapsed_time = execution.elapsed_time
     node_execution.created_by_role = creator_user_role
@@ -159,7 +159,7 @@ def _update_node_execution_from_domain(node_execution: WorkflowNodeExecutionMode
         node_execution.execution_metadata = "{}"
     # Update other fields
-    node_execution.status = execution.status.value
+    node_execution.status = execution.status
     node_execution.error = execution.error
     node_execution.elapsed_time = execution.elapsed_time
     node_execution.finished_at = execution.finished_at