mirror of
https://github.com/langgenius/dify.git
synced 2026-01-31 08:01:46 +08:00
- Added DraftWorkflowTriggerNodeApi and DraftWorkflowTriggerRunApi for debugging trigger nodes and workflows. - Enhanced TriggerDebugService to manage trigger debugging sessions and event listening. - Introduced structured event responses for trigger debugging, including listening started, received, node finished, and workflow started events. - Updated Queue and Stream entities to support new trigger debug events. - Refactored trigger input handling to streamline the process of creating inputs from trigger data. This implementation improves the debugging capabilities for trigger nodes and workflows, providing clearer event handling and structured responses.
906 lines
22 KiB
Python
906 lines
22 KiB
Python
from collections.abc import Mapping, Sequence
|
|
from enum import Enum
|
|
from typing import Any, Optional
|
|
|
|
from pydantic import BaseModel, ConfigDict, Field
|
|
|
|
from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
|
|
from core.model_runtime.utils.encoders import jsonable_encoder
|
|
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
|
|
from core.workflow.entities.node_entities import AgentNodeStrategyInit
|
|
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
|
|
|
|
|
|
class AnnotationReplyAccount(BaseModel):
    """Account that authored an annotation reply."""

    id: str
    name: str
|
|
|
|
|
|
class AnnotationReply(BaseModel):
    """Annotation reply attached to a message, including its authoring account."""

    id: str
    account: AnnotationReplyAccount
|
|
|
|
|
|
class TaskStateMetadata(BaseModel):
    """Metadata accumulated while a task runs: annotation reply, citations, usage."""

    annotation_reply: AnnotationReply | None = None
    retriever_resources: Sequence[RetrievalSourceMetadata] = Field(default_factory=list)
    usage: LLMUsage | None = None
|
|
|
|
|
|
class TaskState(BaseModel):
    """Base state shared by every running task; holds accumulated metadata."""

    metadata: TaskStateMetadata = Field(default_factory=TaskStateMetadata)
|
|
|
|
|
|
class EasyUITaskState(TaskState):
    """Task state for easy-UI (chat/completion) apps; carries the LLM result."""

    llm_result: LLMResult
|
|
|
|
|
|
class WorkflowTaskState(TaskState):
    """Task state for workflow apps; accumulates the streamed answer text."""

    answer: str = ""
|
|
|
|
|
|
class StreamEvent(Enum):
    """
    Stream event

    Identifier carried in the `event` field of every stream response below.
    The string values are the wire-format event names sent to clients.
    """

    # Control / error events
    PING = "ping"
    ERROR = "error"
    # Message lifecycle (chat / completion / agent apps)
    MESSAGE = "message"
    MESSAGE_END = "message_end"
    TTS_MESSAGE = "tts_message"
    TTS_MESSAGE_END = "tts_message_end"
    MESSAGE_FILE = "message_file"
    MESSAGE_REPLACE = "message_replace"
    AGENT_THOUGHT = "agent_thought"
    AGENT_MESSAGE = "agent_message"
    # Workflow / node lifecycle
    WORKFLOW_STARTED = "workflow_started"
    WORKFLOW_FINISHED = "workflow_finished"
    NODE_STARTED = "node_started"
    NODE_FINISHED = "node_finished"
    NODE_RETRY = "node_retry"
    PARALLEL_BRANCH_STARTED = "parallel_branch_started"
    PARALLEL_BRANCH_FINISHED = "parallel_branch_finished"
    ITERATION_STARTED = "iteration_started"
    ITERATION_NEXT = "iteration_next"
    ITERATION_COMPLETED = "iteration_completed"
    LOOP_STARTED = "loop_started"
    LOOP_NEXT = "loop_next"
    LOOP_COMPLETED = "loop_completed"
    TEXT_CHUNK = "text_chunk"
    TEXT_REPLACE = "text_replace"
    AGENT_LOG = "agent_log"
    # Trigger debug events
    TRIGGER_DEBUG_LISTENING_STARTED = "trigger_debug_listening_started"
    TRIGGER_DEBUG_RECEIVED = "trigger_debug_received"
    TRIGGER_DEBUG_NODE_FINISHED = "trigger_debug_node_finished"
    TRIGGER_DEBUG_WORKFLOW_STARTED = "trigger_debug_workflow_started"
    TRIGGER_DEBUG_TIMEOUT = "trigger_debug_timeout"
|
|
|
|
|
|
class StreamResponse(BaseModel):
    """Base class for every server-streamed response; tags the event and task."""

    event: StreamEvent
    task_id: str

    def to_dict(self):
        """Serialize to a JSON-compatible dict."""
        return jsonable_encoder(self)
|
|
|
|
|
|
class ErrorStreamResponse(StreamResponse):
    """Stream response reporting an error; carries the raw exception object."""

    event: StreamEvent = StreamEvent.ERROR
    err: Exception
    # Exception is not a pydantic-native type, so arbitrary types must be allowed.
    model_config = ConfigDict(arbitrary_types_allowed=True)
|
|
|
|
|
|
class MessageStreamResponse(StreamResponse):
    """Incremental answer chunk of a message."""

    event: StreamEvent = StreamEvent.MESSAGE
    id: str
    answer: str
    from_variable_selector: list[str] | None = None
|
|
|
|
|
|
class MessageAudioStreamResponse(StreamResponse):
    """TTS audio chunk for a message."""

    event: StreamEvent = StreamEvent.TTS_MESSAGE
    audio: str
|
|
|
|
|
|
class MessageAudioEndStreamResponse(StreamResponse):
    """End-of-stream marker for TTS audio."""

    event: StreamEvent = StreamEvent.TTS_MESSAGE_END
    audio: str
|
|
|
|
|
|
class MessageEndStreamResponse(StreamResponse):
    """Marks the end of a message stream, with final metadata and attached files."""

    event: StreamEvent = StreamEvent.MESSAGE_END
    id: str
    metadata: dict = Field(default_factory=dict)
    files: Sequence[Mapping[str, Any]] | None = None
|
|
|
|
|
|
class MessageFileStreamResponse(StreamResponse):
    """File produced during a message stream."""

    event: StreamEvent = StreamEvent.MESSAGE_FILE
    id: str
    type: str
    belongs_to: str
    url: str
|
|
|
|
|
|
class MessageReplaceStreamResponse(StreamResponse):
    """Replaces the whole answer text, with the reason for the replacement."""

    event: StreamEvent = StreamEvent.MESSAGE_REPLACE
    answer: str
    reason: str
|
|
|
|
|
|
class AgentThoughtStreamResponse(StreamResponse):
    """Agent reasoning step: thought, tool invocation and observation."""

    event: StreamEvent = StreamEvent.AGENT_THOUGHT
    id: str
    position: int
    thought: str | None = None
    observation: str | None = None
    tool: str | None = None
    tool_labels: dict | None = None
    tool_input: str | None = None
    message_files: list[str] | None = None
|
|
|
|
|
|
class AgentMessageStreamResponse(StreamResponse):
    """Incremental answer chunk produced by an agent."""

    event: StreamEvent = StreamEvent.AGENT_MESSAGE
    id: str
    answer: str
|
|
|
|
|
|
class WorkflowStartStreamResponse(StreamResponse):
    """Emitted when a workflow run starts."""

    class Data(BaseModel):
        """Workflow-run details at start time."""

        id: str
        workflow_id: str
        inputs: Mapping[str, Any]
        created_at: int

    event: StreamEvent = StreamEvent.WORKFLOW_STARTED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class WorkflowFinishStreamResponse(StreamResponse):
    """Emitted when a workflow run finishes (success, failure or partial success)."""

    class Data(BaseModel):
        """Final workflow-run details.

        `outputs` is populated on success, `error` on failure; `exceptions_count`
        counts node-level exceptions in partially-succeeded runs.
        """

        id: str
        workflow_id: str
        status: str
        outputs: Optional[Mapping[str, Any]] = None
        error: Optional[str] = None
        elapsed_time: float
        total_tokens: int
        total_steps: int
        created_by: Optional[dict] = None
        created_at: int
        finished_at: int
        exceptions_count: Optional[int] = 0
        # default_factory instead of a literal `[]` default: consistent with the
        # Field(default_factory=...) convention used elsewhere in this module.
        files: Optional[Sequence[Mapping[str, Any]]] = Field(default_factory=list)

    event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class NodeStartStreamResponse(StreamResponse):
    """Emitted when a workflow node starts executing."""

    class Data(BaseModel):
        """Node execution details at start time."""

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: str | None = None
        inputs: Mapping[str, Any] | None = None
        created_at: int
        extras: dict = Field(default_factory=dict)
        parallel_id: str | None = None
        parallel_start_node_id: str | None = None
        parent_parallel_id: str | None = None
        parent_parallel_start_node_id: str | None = None
        iteration_id: str | None = None
        loop_id: str | None = None
        parallel_run_id: str | None = None
        agent_strategy: AgentNodeStrategyInit | None = None

    event: StreamEvent = StreamEvent.NODE_STARTED
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """Dict form with bulky detail fields (inputs, extras) blanked out."""
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "created_at": self.data.created_at,
                "extras": {},
                "parallel_id": self.data.parallel_id,
                "parallel_start_node_id": self.data.parallel_start_node_id,
                "parent_parallel_id": self.data.parent_parallel_id,
                "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id,
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
            },
        }
|
|
|
|
|
|
class NodeFinishStreamResponse(StreamResponse):
    """Emitted when a workflow node finishes executing."""

    class Data(BaseModel):
        """Node execution details at finish time.

        `outputs` is populated on success, `error` on failure.
        """

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: Optional[str] = None
        inputs: Optional[Mapping[str, Any]] = None
        process_data: Optional[Mapping[str, Any]] = None
        outputs: Optional[Mapping[str, Any]] = None
        status: str
        error: Optional[str] = None
        elapsed_time: float
        execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
        created_at: int
        finished_at: int
        # default_factory instead of a literal `[]` default: consistent with the
        # Field(default_factory=...) convention used elsewhere in this module.
        files: Optional[Sequence[Mapping[str, Any]]] = Field(default_factory=list)
        parallel_id: Optional[str] = None
        parallel_start_node_id: Optional[str] = None
        parent_parallel_id: Optional[str] = None
        parent_parallel_start_node_id: Optional[str] = None
        iteration_id: Optional[str] = None
        loop_id: Optional[str] = None

    event: StreamEvent = StreamEvent.NODE_FINISHED
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """Dict form with bulky detail fields (inputs, outputs, metadata, files) blanked out."""
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "process_data": None,
                "outputs": None,
                "status": self.data.status,
                "error": None,
                "elapsed_time": self.data.elapsed_time,
                "execution_metadata": None,
                "created_at": self.data.created_at,
                "finished_at": self.data.finished_at,
                "files": [],
                "parallel_id": self.data.parallel_id,
                "parallel_start_node_id": self.data.parallel_start_node_id,
                "parent_parallel_id": self.data.parent_parallel_id,
                "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id,
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
            },
        }
|
|
|
|
|
|
class NodeRetryStreamResponse(StreamResponse):
    """Emitted when a workflow node execution is retried.

    (Docstring previously read "NodeFinishStreamResponse entity" — copy-paste error.)
    """

    class Data(BaseModel):
        """Node execution details for the retried attempt; `retry_index` counts attempts."""

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        predecessor_node_id: Optional[str] = None
        inputs: Optional[Mapping[str, Any]] = None
        process_data: Optional[Mapping[str, Any]] = None
        outputs: Optional[Mapping[str, Any]] = None
        status: str
        error: Optional[str] = None
        elapsed_time: float
        execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
        created_at: int
        finished_at: int
        # default_factory instead of a literal `[]` default: consistent with the
        # Field(default_factory=...) convention used elsewhere in this module.
        files: Optional[Sequence[Mapping[str, Any]]] = Field(default_factory=list)
        parallel_id: Optional[str] = None
        parallel_start_node_id: Optional[str] = None
        parent_parallel_id: Optional[str] = None
        parent_parallel_start_node_id: Optional[str] = None
        iteration_id: Optional[str] = None
        loop_id: Optional[str] = None
        retry_index: int = 0

    event: StreamEvent = StreamEvent.NODE_RETRY
    workflow_run_id: str
    data: Data

    def to_ignore_detail_dict(self):
        """Dict form with bulky detail fields (inputs, outputs, metadata, files) blanked out."""
        return {
            "event": self.event.value,
            "task_id": self.task_id,
            "workflow_run_id": self.workflow_run_id,
            "data": {
                "id": self.data.id,
                "node_id": self.data.node_id,
                "node_type": self.data.node_type,
                "title": self.data.title,
                "index": self.data.index,
                "predecessor_node_id": self.data.predecessor_node_id,
                "inputs": None,
                "process_data": None,
                "outputs": None,
                "status": self.data.status,
                "error": None,
                "elapsed_time": self.data.elapsed_time,
                "execution_metadata": None,
                "created_at": self.data.created_at,
                "finished_at": self.data.finished_at,
                "files": [],
                "parallel_id": self.data.parallel_id,
                "parallel_start_node_id": self.data.parallel_start_node_id,
                "parent_parallel_id": self.data.parent_parallel_id,
                "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id,
                "iteration_id": self.data.iteration_id,
                "loop_id": self.data.loop_id,
                "retry_index": self.data.retry_index,
            },
        }
|
|
|
|
|
|
class ParallelBranchStartStreamResponse(StreamResponse):
    """Emitted when a parallel branch starts executing."""

    class Data(BaseModel):
        """Parallel-branch details at start time."""

        parallel_id: str
        parallel_branch_id: str
        parent_parallel_id: str | None = None
        parent_parallel_start_node_id: str | None = None
        iteration_id: str | None = None
        loop_id: str | None = None
        created_at: int

    event: StreamEvent = StreamEvent.PARALLEL_BRANCH_STARTED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class ParallelBranchFinishedStreamResponse(StreamResponse):
    """Emitted when a parallel branch finishes executing."""

    class Data(BaseModel):
        """Parallel-branch details at finish time; `error` is set on failure."""

        parallel_id: str
        parallel_branch_id: str
        parent_parallel_id: str | None = None
        parent_parallel_start_node_id: str | None = None
        iteration_id: str | None = None
        loop_id: str | None = None
        status: str
        error: str | None = None
        created_at: int

    event: StreamEvent = StreamEvent.PARALLEL_BRANCH_FINISHED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class IterationNodeStartStreamResponse(StreamResponse):
    """Emitted when an iteration node starts.

    (Docstring previously read "NodeStartStreamResponse entity" — copy-paste error.)
    """

    class Data(BaseModel):
        """Iteration-node details at start time."""

        id: str
        node_id: str
        node_type: str
        title: str
        created_at: int
        extras: dict = Field(default_factory=dict)
        # default_factory instead of literal `{}` defaults: consistent with the
        # Field(default_factory=...) convention used elsewhere in this module.
        metadata: Mapping = Field(default_factory=dict)
        inputs: Mapping = Field(default_factory=dict)
        parallel_id: Optional[str] = None
        parallel_start_node_id: Optional[str] = None

    event: StreamEvent = StreamEvent.ITERATION_STARTED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class IterationNodeNextStreamResponse(StreamResponse):
    """Emitted when an iteration node advances to its next round."""

    class Data(BaseModel):
        """Per-round iteration details; `index` is the round number."""

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        created_at: int
        pre_iteration_output: Any | None = None
        extras: dict = Field(default_factory=dict)
        parallel_id: str | None = None
        parallel_start_node_id: str | None = None
        parallel_mode_run_id: str | None = None
        duration: float | None = None

    event: StreamEvent = StreamEvent.ITERATION_NEXT
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class IterationNodeCompletedStreamResponse(StreamResponse):
    """Emitted when an iteration node completes all rounds."""

    class Data(BaseModel):
        """Aggregated iteration results; `error` is set on failure."""

        id: str
        node_id: str
        node_type: str
        title: str
        outputs: Mapping | None = None
        created_at: int
        extras: dict | None = None
        inputs: Mapping | None = None
        status: WorkflowNodeExecutionStatus
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        execution_metadata: Mapping | None = None
        finished_at: int
        steps: int
        parallel_id: str | None = None
        parallel_start_node_id: str | None = None

    event: StreamEvent = StreamEvent.ITERATION_COMPLETED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class LoopNodeStartStreamResponse(StreamResponse):
    """Emitted when a loop node starts.

    (Docstring previously read "NodeStartStreamResponse entity" — copy-paste error.)
    """

    class Data(BaseModel):
        """Loop-node details at start time."""

        id: str
        node_id: str
        node_type: str
        title: str
        created_at: int
        extras: dict = Field(default_factory=dict)
        # default_factory instead of literal `{}` defaults: consistent with the
        # Field(default_factory=...) convention used elsewhere in this module.
        metadata: Mapping = Field(default_factory=dict)
        inputs: Mapping = Field(default_factory=dict)
        parallel_id: Optional[str] = None
        parallel_start_node_id: Optional[str] = None

    event: StreamEvent = StreamEvent.LOOP_STARTED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class LoopNodeNextStreamResponse(StreamResponse):
    """Emitted when a loop node advances to its next round."""

    class Data(BaseModel):
        """Per-round loop details; `index` is the round number."""

        id: str
        node_id: str
        node_type: str
        title: str
        index: int
        created_at: int
        pre_loop_output: Any | None = None
        extras: dict = Field(default_factory=dict)
        parallel_id: str | None = None
        parallel_start_node_id: str | None = None
        parallel_mode_run_id: str | None = None
        duration: float | None = None

    event: StreamEvent = StreamEvent.LOOP_NEXT
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class LoopNodeCompletedStreamResponse(StreamResponse):
    """Emitted when a loop node completes all rounds."""

    class Data(BaseModel):
        """Aggregated loop results; `error` is set on failure."""

        id: str
        node_id: str
        node_type: str
        title: str
        outputs: Mapping | None = None
        created_at: int
        extras: dict | None = None
        inputs: Mapping | None = None
        status: WorkflowNodeExecutionStatus
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        execution_metadata: Mapping | None = None
        finished_at: int
        steps: int
        parallel_id: str | None = None
        parallel_start_node_id: str | None = None

    event: StreamEvent = StreamEvent.LOOP_COMPLETED
    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class TextChunkStreamResponse(StreamResponse):
    """Raw text chunk streamed from a workflow."""

    class Data(BaseModel):
        """Chunk payload and the variable selector it originated from."""

        text: str
        from_variable_selector: list[str] | None = None

    event: StreamEvent = StreamEvent.TEXT_CHUNK
    data: Data
|
|
|
|
|
|
class TextReplaceStreamResponse(StreamResponse):
    """Replaces previously streamed text with new content."""

    class Data(BaseModel):
        """Replacement text payload."""

        text: str

    event: StreamEvent = StreamEvent.TEXT_REPLACE
    data: Data
|
|
|
|
|
|
class PingStreamResponse(StreamResponse):
    """Keep-alive ping; carries no payload beyond the base fields."""

    event: StreamEvent = StreamEvent.PING
|
|
|
|
|
|
class AppStreamResponse(BaseModel):
    """Base envelope wrapping a stream response for app-level delivery."""

    stream_response: StreamResponse
|
|
|
|
|
|
class ChatbotAppStreamResponse(AppStreamResponse):
    """Stream envelope for chatbot apps, keyed by conversation and message."""

    conversation_id: str
    message_id: str
    created_at: int
|
|
|
|
|
|
class CompletionAppStreamResponse(AppStreamResponse):
    """Stream envelope for completion apps, keyed by message."""

    message_id: str
    created_at: int
|
|
|
|
|
|
class WorkflowAppStreamResponse(AppStreamResponse):
    """Stream envelope for workflow apps, keyed by workflow run."""

    workflow_run_id: str | None = None
|
|
|
|
|
|
class AppBlockingResponse(BaseModel):
    """Base class for non-streaming (blocking) app responses."""

    task_id: str

    def to_dict(self):
        """Serialize to a JSON-compatible dict."""
        return jsonable_encoder(self)
|
|
|
|
|
|
class ChatbotAppBlockingResponse(AppBlockingResponse):
    """Blocking response for chatbot apps."""

    class Data(BaseModel):
        """Complete chatbot answer with its conversation context."""

        id: str
        mode: str
        conversation_id: str
        message_id: str
        answer: str
        metadata: dict = Field(default_factory=dict)
        created_at: int

    data: Data
|
|
|
|
|
|
class CompletionAppBlockingResponse(AppBlockingResponse):
    """Blocking response for completion apps."""

    class Data(BaseModel):
        """Complete completion answer."""

        id: str
        mode: str
        message_id: str
        answer: str
        metadata: dict = Field(default_factory=dict)
        created_at: int

    data: Data
|
|
|
|
|
|
class WorkflowAppBlockingResponse(AppBlockingResponse):
    """Blocking response for workflow apps."""

    class Data(BaseModel):
        """Final workflow-run result; `outputs` on success, `error` on failure."""

        id: str
        workflow_id: str
        status: str
        outputs: Mapping[str, Any] | None = None
        error: str | None = None
        elapsed_time: float
        total_tokens: int
        total_steps: int
        created_at: int
        finished_at: int

    workflow_run_id: str
    data: Data
|
|
|
|
|
|
class AgentLogStreamResponse(StreamResponse):
    """Structured log entry emitted by an agent node."""

    class Data(BaseModel):
        """Log entry payload; `parent_id` links nested entries into a tree."""

        node_execution_id: str
        id: str
        label: str
        parent_id: str | None
        error: str | None
        status: str
        data: Mapping[str, Any]
        metadata: Mapping[str, Any] | None = None
        node_id: str

    event: StreamEvent = StreamEvent.AGENT_LOG
    data: Data
|
|
|
|
|
|
# Trigger Debug Stream Responses
|
|
class TriggerDebugListeningStartedResponse(StreamResponse):
    """Signals that a trigger-debug session has begun listening for events."""

    event: StreamEvent = StreamEvent.TRIGGER_DEBUG_LISTENING_STARTED
    session_id: str
    webhook_url: str
    timeout: int
|
|
|
|
|
|
class TriggerDebugReceivedResponse(StreamResponse):
    """Signals that a trigger event was received during a debug session."""

    event: StreamEvent = StreamEvent.TRIGGER_DEBUG_RECEIVED
    subscription_id: str
    triggers: list[str]
    request_id: str
    timestamp: float
|
|
|
|
|
|
class TriggerDebugNodeFinishedResponse(StreamResponse):
    """Result of a debugged trigger node run; `error` is set on failure."""

    event: StreamEvent = StreamEvent.TRIGGER_DEBUG_NODE_FINISHED
    id: str
    node_id: str
    node_type: str
    status: str
    outputs: Mapping[str, Any] | None = None
    error: str | None = None
    elapsed_time: float
    execution_metadata: Mapping[str, Any] | None = None
|
|
|
|
|
|
class TriggerDebugWorkflowStartedResponse(StreamResponse):
    """Signals that a debugged workflow run was started by a trigger."""

    event: StreamEvent = StreamEvent.TRIGGER_DEBUG_WORKFLOW_STARTED
    subscription_id: str
    triggers: list[str]
    request_id: str
|
|
|
|
|
|
class TriggerDebugTimeoutResponse(StreamResponse):
    """Signals that the debug session timed out before any trigger fired."""

    event: StreamEvent = StreamEvent.TRIGGER_DEBUG_TIMEOUT
    error: str = "Timeout waiting for trigger"
|