Mirror of https://github.com/langgenius/dify.git (synced 2026-02-09 23:20:12 -05:00)

Merge remote-tracking branch 'origin/main' into feat/support-agent-sandbox
@@ -5,5 +5,18 @@
    "typescript-lsp@claude-plugins-official": true,
    "pyright-lsp@claude-plugins-official": true,
    "ralph-loop@claude-plugins-official": true
  },
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "npx -y block-no-verify@1.1.1"
          }
        ]
      }
    ]
  }
}
6  .github/workflows/api-tests.yml  vendored
@@ -39,12 +39,6 @@ jobs:
      - name: Install dependencies
        run: uv sync --project api --dev

      - name: Run pyrefly check
        run: |
          cd api
          uv add --dev pyrefly
          uv run pyrefly check || true

      - name: Run dify config tests
        run: uv run --project api dev/pytest/pytest_config_tests.py
29  .github/workflows/deploy-hitl.yml  vendored  Normal file
@@ -0,0 +1,29 @@
name: Deploy HITL

on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "feat/hitl-frontend"
      - "feat/hitl-backend"
    types:
      - completed

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      (
        github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
        github.event.workflow_run.head_branch == 'feat/hitl-backend'
      )
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v0.1.8
        with:
          host: ${{ secrets.HITL_SSH_HOST }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}
@@ -589,6 +589,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
@@ -1,4 +1,5 @@
import base64
import datetime
import json
import logging
import secrets
@@ -45,6 +46,7 @@ from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpi
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from services.plugin.plugin_service import PluginService
from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch

logger = logging.getLogger(__name__)
@@ -852,6 +854,61 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[
    click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green"))


@click.command("clean-workflow-runs", help="Clean expired workflow runs and related data for free tenants.")
@click.option("--days", default=30, show_default=True, help="Delete workflow runs created before N days ago.")
@click.option("--batch-size", default=200, show_default=True, help="Batch size for selecting workflow runs.")
@click.option(
    "--start-from",
    type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
    default=None,
    help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.",
)
@click.option(
    "--end-before",
    type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
    default=None,
    help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.",
)
@click.option(
    "--dry-run",
    is_flag=True,
    help="Preview cleanup results without deleting any workflow run data.",
)
def clean_workflow_runs(
    days: int,
    batch_size: int,
    start_from: datetime.datetime | None,
    end_before: datetime.datetime | None,
    dry_run: bool,
):
    """
    Clean workflow runs and related workflow data for free tenants.
    """
    if (start_from is None) ^ (end_before is None):
        raise click.UsageError("--start-from and --end-before must be provided together.")

    start_time = datetime.datetime.now(datetime.UTC)
    click.echo(click.style(f"Starting workflow run cleanup at {start_time.isoformat()}.", fg="white"))

    WorkflowRunCleanup(
        days=days,
        batch_size=batch_size,
        start_from=start_from,
        end_before=end_before,
        dry_run=dry_run,
    ).run()

    end_time = datetime.datetime.now(datetime.UTC)
    elapsed = end_time - start_time
    click.echo(
        click.style(
            f"Workflow run cleanup completed. start={start_time.isoformat()} "
            f"end={end_time.isoformat()} duration={elapsed}",
            fg="green",
        )
    )

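As a rough usage illustration (not part of the changeset), the new command can be exercised through Click's test runner; the import path is an assumption, and a real invocation still needs an initialized Flask app context and database behind WorkflowRunCleanup:

from click.testing import CliRunner

from commands import clean_workflow_runs  # assumed import path for the command above

runner = CliRunner()
result = runner.invoke(clean_workflow_runs, ["--days", "30", "--batch-size", "200", "--dry-run"])
print(result.output)  # with --dry-run the command only previews what would be deleted
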
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
def clear_orphaned_file_records(force: bool):
@@ -1112,6 +1112,10 @@ class CeleryScheduleTasksConfig(BaseSettings):
        description="Enable clean messages task",
        default=False,
    )
    ENABLE_WORKFLOW_RUN_CLEANUP_TASK: bool = Field(
        description="Enable scheduled workflow run cleanup task",
        default=False,
    )
    ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field(
        description="Enable mail clean document notify task",
        default=False,
@@ -1,6 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from decimal import Decimal
|
||||
from typing import Union, cast
|
||||
|
||||
from sqlalchemy import select
|
||||
@@ -41,6 +42,7 @@ from core.tools.tool_manager import ToolManager
|
||||
from core.tools.utils.dataset_retriever_tool import DatasetRetrieverTool
|
||||
from extensions.ext_database import db
|
||||
from factories import file_factory
|
||||
from models.enums import CreatorUserRole
|
||||
from models.model import Conversation, Message, MessageAgentThought, MessageFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -300,6 +302,7 @@ class BaseAgentRunner(AppRunner):
|
||||
thought = MessageAgentThought(
|
||||
message_id=message_id,
|
||||
message_chain_id=None,
|
||||
tool_process_data=None,
|
||||
thought="",
|
||||
tool=tool_name,
|
||||
tool_labels_str="{}",
|
||||
@@ -307,20 +310,20 @@ class BaseAgentRunner(AppRunner):
|
||||
tool_input=tool_input,
|
||||
message=message,
|
||||
message_token=0,
|
||||
message_unit_price=0,
|
||||
message_price_unit=0,
|
||||
message_unit_price=Decimal(0),
|
||||
message_price_unit=Decimal("0.001"),
|
||||
message_files=json.dumps(messages_ids) if messages_ids else "",
|
||||
answer="",
|
||||
observation="",
|
||||
answer_token=0,
|
||||
answer_unit_price=0,
|
||||
answer_price_unit=0,
|
||||
answer_unit_price=Decimal(0),
|
||||
answer_price_unit=Decimal("0.001"),
|
||||
tokens=0,
|
||||
total_price=0,
|
||||
total_price=Decimal(0),
|
||||
position=self.agent_thought_count + 1,
|
||||
currency="USD",
|
||||
latency=0,
|
||||
created_by_role="account",
|
||||
created_by_role=CreatorUserRole.ACCOUNT,
|
||||
created_by=self.user_id,
|
||||
)
|
||||
|
||||
@@ -353,7 +356,8 @@ class BaseAgentRunner(AppRunner):
|
||||
raise ValueError("agent thought not found")
|
||||
|
||||
if thought:
|
||||
agent_thought.thought += thought
|
||||
existing_thought = agent_thought.thought or ""
|
||||
agent_thought.thought = f"{existing_thought}{thought}"
|
||||
|
||||
if tool_name:
|
||||
agent_thought.tool = tool_name
|
||||
@@ -451,21 +455,30 @@ class BaseAgentRunner(AppRunner):
|
||||
agent_thoughts: list[MessageAgentThought] = message.agent_thoughts
|
||||
if agent_thoughts:
|
||||
for agent_thought in agent_thoughts:
|
||||
tools = agent_thought.tool
|
||||
if tools:
|
||||
tools = tools.split(";")
|
||||
tool_names_raw = agent_thought.tool
|
||||
if tool_names_raw:
|
||||
tool_names = tool_names_raw.split(";")
|
||||
tool_calls: list[AssistantPromptMessage.ToolCall] = []
|
||||
tool_call_response: list[ToolPromptMessage] = []
|
||||
try:
|
||||
tool_inputs = json.loads(agent_thought.tool_input)
|
||||
except Exception:
|
||||
tool_inputs = {tool: {} for tool in tools}
|
||||
try:
|
||||
tool_responses = json.loads(agent_thought.observation)
|
||||
except Exception:
|
||||
tool_responses = dict.fromkeys(tools, agent_thought.observation)
|
||||
tool_input_payload = agent_thought.tool_input
|
||||
if tool_input_payload:
|
||||
try:
|
||||
tool_inputs = json.loads(tool_input_payload)
|
||||
except Exception:
|
||||
tool_inputs = {tool: {} for tool in tool_names}
|
||||
else:
|
||||
tool_inputs = {tool: {} for tool in tool_names}
|
||||
|
||||
for tool in tools:
|
||||
observation_payload = agent_thought.observation
|
||||
if observation_payload:
|
||||
try:
|
||||
tool_responses = json.loads(observation_payload)
|
||||
except Exception:
|
||||
tool_responses = dict.fromkeys(tool_names, observation_payload)
|
||||
else:
|
||||
tool_responses = dict.fromkeys(tool_names, observation_payload)
|
||||
|
||||
for tool in tool_names:
|
||||
# generate a uuid for tool call
|
||||
tool_call_id = str(uuid.uuid4())
|
||||
tool_calls.append(
|
||||
@@ -495,7 +508,7 @@ class BaseAgentRunner(AppRunner):
|
||||
*tool_call_response,
|
||||
]
|
||||
)
|
||||
if not tools:
|
||||
if not tool_names_raw:
|
||||
result.append(AssistantPromptMessage(content=agent_thought.thought))
|
||||
else:
|
||||
if message.answer:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import logging
|
||||
from collections.abc import Sequence
|
||||
|
||||
from opentelemetry.trace import SpanKind
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from core.ops.aliyun_trace.data_exporter.traceclient import (
|
||||
@@ -151,6 +152,7 @@ class AliyunDataTrace(BaseTraceInstance):
|
||||
),
|
||||
status=status,
|
||||
links=trace_metadata.links,
|
||||
span_kind=SpanKind.SERVER,
|
||||
)
|
||||
self.trace_client.add_span(message_span)
|
||||
|
||||
@@ -456,6 +458,7 @@ class AliyunDataTrace(BaseTraceInstance):
|
||||
),
|
||||
status=status,
|
||||
links=trace_metadata.links,
|
||||
span_kind=SpanKind.SERVER,
|
||||
)
|
||||
self.trace_client.add_span(message_span)
|
||||
|
||||
@@ -475,6 +478,7 @@ class AliyunDataTrace(BaseTraceInstance):
|
||||
),
|
||||
status=status,
|
||||
links=trace_metadata.links,
|
||||
span_kind=SpanKind.SERVER if message_span_id is None else SpanKind.INTERNAL,
|
||||
)
|
||||
self.trace_client.add_span(workflow_span)
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ class SpanBuilder:
|
||||
attributes=span_data.attributes,
|
||||
events=span_data.events,
|
||||
links=span_data.links,
|
||||
kind=trace_api.SpanKind.INTERNAL,
|
||||
kind=span_data.span_kind,
|
||||
status=span_data.status,
|
||||
start_time=span_data.start_time,
|
||||
end_time=span_data.end_time,
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any
|
||||
|
||||
from opentelemetry import trace as trace_api
|
||||
from opentelemetry.sdk.trace import Event
|
||||
from opentelemetry.trace import Status, StatusCode
|
||||
from opentelemetry.trace import SpanKind, Status, StatusCode
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
@@ -34,3 +34,4 @@ class SpanData(BaseModel):
|
||||
status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.")
|
||||
start_time: int | None = Field(..., description="The start time of the span in nanoseconds.")
|
||||
end_time: int | None = Field(..., description="The end time of the span in nanoseconds.")
|
||||
span_kind: SpanKind = Field(default=SpanKind.INTERNAL, description="The OpenTelemetry SpanKind for this span.")
|
||||
|
||||
@@ -212,6 +212,10 @@ class WorkflowExecutionStatus(StrEnum):
    def is_ended(self) -> bool:
        return self in _END_STATE

    @classmethod
    def ended_values(cls) -> list[str]:
        return [status.value for status in _END_STATE]


_END_STATE = frozenset(
    [
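As a small illustration, ended_values() is what the cleanup repository further down in this diff feeds into its status filter; a minimal sketch assuming the enum import path used there:

from core.workflow.enums import WorkflowExecutionStatus

terminal_statuses = WorkflowExecutionStatus.ended_values()  # list[str] of ended states
# e.g. used as WorkflowRun.status.in_(terminal_statuses) when selecting runs to clean
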
@@ -33,6 +33,15 @@ class VariableAssignerNode(Node[VariableAssignerData]):
            graph_runtime_state=graph_runtime_state,
        )

    def blocks_variable_output(self, variable_selectors: set[tuple[str, ...]]) -> bool:
        """
        Check if this Variable Assigner node blocks the output of specific variables.

        Returns True if this node updates any of the requested conversation variables.
        """
        assigned_selector = tuple(self.node_data.assigned_variable_selector)
        return assigned_selector in variable_selectors

    @classmethod
    def version(cls) -> str:
        return "1"
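A minimal, self-contained sketch of the selector check added above (function and variable names here are illustrative, not the real node API):

def blocks_variable_output(assigned_variable_selector: list[str],
                           variable_selectors: set[tuple[str, ...]]) -> bool:
    # The assigner blocks streaming only for the exact variable it overwrites.
    return tuple(assigned_variable_selector) in variable_selectors

assert blocks_variable_output(["conversation", "conv_var"], {("conversation", "conv_var")})
assert not blocks_variable_output(["conversation", "other"], {("conversation", "conv_var")})
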
@@ -19,6 +19,7 @@ from core.workflow.graph_engine.protocols.command_channel import CommandChannel
|
||||
from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase, GraphRunFailedEvent
|
||||
from core.workflow.nodes import NodeType
|
||||
from core.workflow.nodes.base.node import Node
|
||||
from core.workflow.nodes.node_factory import DifyNodeFactory
|
||||
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
|
||||
from core.workflow.runtime import GraphRuntimeState, VariablePool
|
||||
from core.workflow.system_variable import SystemVariable
|
||||
@@ -136,13 +137,11 @@ class WorkflowEntry:
|
||||
:param user_inputs: user inputs
|
||||
:return:
|
||||
"""
|
||||
node_config = workflow.get_node_config_by_id(node_id)
|
||||
node_config = dict(workflow.get_node_config_by_id(node_id))
|
||||
node_config_data = node_config.get("data", {})
|
||||
|
||||
# Get node class
|
||||
# Get node type
|
||||
node_type = NodeType(node_config_data.get("type"))
|
||||
node_version = node_config_data.get("version", "1")
|
||||
node_cls = NODE_TYPE_CLASSES_MAPPING[node_type][node_version]
|
||||
|
||||
# init graph init params and runtime state
|
||||
graph_init_params = GraphInitParams(
|
||||
@@ -158,12 +157,12 @@ class WorkflowEntry:
|
||||
graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
|
||||
|
||||
# init workflow run state
|
||||
node = node_cls(
|
||||
id=str(uuid.uuid4()),
|
||||
config=node_config,
|
||||
node_factory = DifyNodeFactory(
|
||||
graph_init_params=graph_init_params,
|
||||
graph_runtime_state=graph_runtime_state,
|
||||
)
|
||||
node = node_factory.create_node(node_config)
|
||||
node_cls = type(node)
|
||||
|
||||
try:
|
||||
# variable selector to variable mapping
|
||||
|
||||
@@ -163,6 +163,13 @@ def init_app(app: DifyApp) -> Celery:
            "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise",
            "schedule": crontab(minute="0", hour="2"),
        }
    if dify_config.ENABLE_WORKFLOW_RUN_CLEANUP_TASK:
        # for saas only
        imports.append("schedule.clean_workflow_runs_task")
        beat_schedule["clean_workflow_runs_task"] = {
            "task": "schedule.clean_workflow_runs_task.clean_workflow_runs_task",
            "schedule": crontab(minute="0", hour="0"),
        }
    if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK:
        imports.append("schedule.workflow_schedule_task")
        beat_schedule["workflow_schedule_task"] = {
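A brief aside on the schedule expression used above: crontab(minute="0", hour="0") fires once per day at midnight in the timezone the Celery app is configured with (UTC unless overridden); a standalone sketch:

from celery.schedules import crontab

daily_at_midnight = crontab(minute="0", hour="0")  # 00:00 every day
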
@@ -4,6 +4,7 @@ from dify_app import DifyApp
def init_app(app: DifyApp):
    from commands import (
        add_qdrant_index,
        clean_workflow_runs,
        cleanup_orphaned_draft_variables,
        clear_free_plan_tenant_expired_logs,
        clear_orphaned_file_records,
@@ -58,6 +59,7 @@ def init_app(app: DifyApp):
        setup_datasource_oauth_client,
        transform_datasource_credentials,
        install_rag_pipeline_plugins,
        clean_workflow_runs,
    ]
    for cmd in cmds_to_register:
        app.cli.add_command(cmd)
30  api/migrations/versions/2026_01_09_1630-905527cc8fd3_.py  Normal file
@@ -0,0 +1,30 @@
"""add workflow_run_created_at_id_idx

Revision ID: 905527cc8fd3
Revises: 7df29de0f6be
Create Date: 2025-01-09 16:30:02.462084

"""
from alembic import op
import models as models

# revision identifiers, used by Alembic.
revision = '905527cc8fd3'
down_revision = '7df29de0f6be'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.create_index('workflow_run_created_at_id_idx', ['created_at', 'id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.drop_index('workflow_run_created_at_id_idx')
    # ### end Alembic commands ###
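The new composite (created_at, id) index backs the keyset-paginated batch query added later in this diff (get_runs_batch_by_time_range); a condensed sketch of that access pattern, with the model import taken from the change and the function name illustrative:

from sqlalchemy import and_, or_, select

from models.workflow import WorkflowRun

def next_batch(last_created_at, last_id, end_before, batch_size=200):
    # Seek past the last seen (created_at, id) pair instead of using OFFSET;
    # this is the shape of query the composite index is meant to serve.
    return (
        select(WorkflowRun)
        .where(
            WorkflowRun.created_at < end_before,
            or_(
                WorkflowRun.created_at > last_created_at,
                and_(WorkflowRun.created_at == last_created_at, WorkflowRun.id > last_id),
            ),
        )
        .order_by(WorkflowRun.created_at.asc(), WorkflowRun.id.asc())
        .limit(batch_size)
    )
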
@@ -1857,7 +1857,7 @@ class MessageChain(TypeBase):
|
||||
)
|
||||
|
||||
|
||||
class MessageAgentThought(Base):
|
||||
class MessageAgentThought(TypeBase):
|
||||
__tablename__ = "message_agent_thoughts"
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="message_agent_thought_pkey"),
|
||||
@@ -1865,34 +1865,42 @@ class MessageAgentThought(Base):
|
||||
sa.Index("message_agent_thought_message_chain_id_idx", "message_chain_id"),
|
||||
)
|
||||
|
||||
id = mapped_column(StringUUID, default=lambda: str(uuid4()))
|
||||
message_id = mapped_column(StringUUID, nullable=False)
|
||||
message_chain_id = mapped_column(StringUUID, nullable=True)
|
||||
id: Mapped[str] = mapped_column(
|
||||
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
|
||||
)
|
||||
message_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
position: Mapped[int] = mapped_column(sa.Integer, nullable=False)
|
||||
thought = mapped_column(LongText, nullable=True)
|
||||
tool = mapped_column(LongText, nullable=True)
|
||||
tool_labels_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'"))
|
||||
tool_meta_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'"))
|
||||
tool_input = mapped_column(LongText, nullable=True)
|
||||
observation = mapped_column(LongText, nullable=True)
|
||||
created_by_role: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
message_chain_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
|
||||
thought: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
tool: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
tool_labels_str: Mapped[str] = mapped_column(LongText, nullable=False, default=sa.text("'{}'"))
|
||||
tool_meta_str: Mapped[str] = mapped_column(LongText, nullable=False, default=sa.text("'{}'"))
|
||||
tool_input: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
observation: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
# plugin_id = mapped_column(StringUUID, nullable=True) ## for future design
|
||||
tool_process_data = mapped_column(LongText, nullable=True)
|
||||
message = mapped_column(LongText, nullable=True)
|
||||
message_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True)
|
||||
message_unit_price = mapped_column(sa.Numeric, nullable=True)
|
||||
message_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001"))
|
||||
message_files = mapped_column(LongText, nullable=True)
|
||||
answer = mapped_column(LongText, nullable=True)
|
||||
answer_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True)
|
||||
answer_unit_price = mapped_column(sa.Numeric, nullable=True)
|
||||
answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001"))
|
||||
tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True)
|
||||
total_price = mapped_column(sa.Numeric, nullable=True)
|
||||
currency = mapped_column(String(255), nullable=True)
|
||||
latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True)
|
||||
created_by_role = mapped_column(String(255), nullable=False)
|
||||
created_by = mapped_column(StringUUID, nullable=False)
|
||||
created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp())
|
||||
tool_process_data: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
message: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
message_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
|
||||
message_unit_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None)
|
||||
message_price_unit: Mapped[Decimal] = mapped_column(
|
||||
sa.Numeric(10, 7), nullable=False, default=Decimal("0.001"), server_default=sa.text("0.001")
|
||||
)
|
||||
message_files: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
answer: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None)
|
||||
answer_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
|
||||
answer_unit_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None)
|
||||
answer_price_unit: Mapped[Decimal] = mapped_column(
|
||||
sa.Numeric(10, 7), nullable=False, default=Decimal("0.001"), server_default=sa.text("0.001")
|
||||
)
|
||||
tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None)
|
||||
total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric, nullable=True, default=None)
|
||||
currency: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
|
||||
latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True, default=None)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime, nullable=False, init=False, server_default=sa.func.current_timestamp()
|
||||
)
|
||||
|
||||
@property
|
||||
def files(self) -> list[Any]:
|
||||
|
||||
@@ -631,6 +631,7 @@ class WorkflowRun(Base):
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="workflow_run_pkey"),
        sa.Index("workflow_run_triggerd_from_idx", "tenant_id", "app_id", "triggered_from"),
        sa.Index("workflow_run_created_at_id_idx", "created_at", "id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
@@ -193,7 +193,7 @@ storage = [
    "opendal~=0.46.0",
    "oss2==2.18.5",
    "supabase~=2.18.1",
    "tos~=2.7.1",
    "tos~=2.9.0",
]

############################################################
@@ -34,11 +34,14 @@ Example:
|
||||
```
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
from collections.abc import Callable, Sequence
|
||||
from datetime import datetime
|
||||
from typing import Protocol
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.workflow.entities.pause_reason import PauseReason
|
||||
from core.workflow.enums import WorkflowType
|
||||
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
@@ -253,6 +256,44 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
|
||||
"""
|
||||
...
|
||||
|
||||
def get_runs_batch_by_time_range(
|
||||
self,
|
||||
start_from: datetime | None,
|
||||
end_before: datetime,
|
||||
last_seen: tuple[datetime, str] | None,
|
||||
batch_size: int,
|
||||
run_types: Sequence[WorkflowType] | None = None,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Fetch ended workflow runs in a time window for archival and clean batching.
|
||||
"""
|
||||
...
|
||||
|
||||
def delete_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
|
||||
delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
|
||||
) -> dict[str, int]:
|
||||
"""
|
||||
Delete workflow runs and their related records (node executions, offloads, app logs,
|
||||
trigger logs, pauses, pause reasons).
|
||||
"""
|
||||
...
|
||||
|
||||
def count_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
|
||||
count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
|
||||
) -> dict[str, int]:
|
||||
"""
|
||||
Count workflow runs and their related records (node executions, offloads, app logs,
|
||||
trigger logs, pauses, pause reasons) without deleting data.
|
||||
"""
|
||||
...
|
||||
|
||||
def create_workflow_pause(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
|
||||
@@ -7,13 +7,18 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations.
|
||||
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
from typing import TypedDict, cast
|
||||
|
||||
from sqlalchemy import asc, delete, desc, select
|
||||
from sqlalchemy import asc, delete, desc, func, select, tuple_
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from models.workflow import WorkflowNodeExecutionModel
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
from models.workflow import (
|
||||
WorkflowNodeExecutionModel,
|
||||
WorkflowNodeExecutionOffload,
|
||||
WorkflowNodeExecutionTriggeredFrom,
|
||||
)
|
||||
from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository
|
||||
|
||||
|
||||
@@ -44,6 +49,26 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
|
||||
"""
|
||||
self._session_maker = session_maker
|
||||
|
||||
@staticmethod
|
||||
def _map_run_triggered_from_to_node_triggered_from(triggered_from: str) -> str:
|
||||
"""
|
||||
Map workflow run triggered_from values to workflow node execution triggered_from values.
|
||||
"""
|
||||
if triggered_from in {
|
||||
WorkflowRunTriggeredFrom.APP_RUN.value,
|
||||
WorkflowRunTriggeredFrom.DEBUGGING.value,
|
||||
WorkflowRunTriggeredFrom.SCHEDULE.value,
|
||||
WorkflowRunTriggeredFrom.PLUGIN.value,
|
||||
WorkflowRunTriggeredFrom.WEBHOOK.value,
|
||||
}:
|
||||
return WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
|
||||
if triggered_from in {
|
||||
WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value,
|
||||
WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value,
|
||||
}:
|
||||
return WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN.value
|
||||
return ""
|
||||
|
||||
def get_node_last_execution(
|
||||
self,
|
||||
tenant_id: str,
|
||||
@@ -290,3 +315,119 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
|
||||
result = cast(CursorResult, session.execute(stmt))
|
||||
session.commit()
|
||||
return result.rowcount
|
||||
|
||||
class RunContext(TypedDict):
|
||||
run_id: str
|
||||
tenant_id: str
|
||||
app_id: str
|
||||
workflow_id: str
|
||||
triggered_from: str
|
||||
|
||||
@staticmethod
|
||||
def delete_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]:
|
||||
"""
|
||||
Delete node executions (and offloads) for the given workflow runs using indexed columns.
|
||||
|
||||
Uses the composite index on (tenant_id, app_id, workflow_id, triggered_from, workflow_run_id)
|
||||
by filtering on those columns with tuple IN.
|
||||
"""
|
||||
if not runs:
|
||||
return 0, 0
|
||||
|
||||
tuple_values = [
|
||||
(
|
||||
run["tenant_id"],
|
||||
run["app_id"],
|
||||
run["workflow_id"],
|
||||
DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
|
||||
run["triggered_from"]
|
||||
),
|
||||
run["run_id"],
|
||||
)
|
||||
for run in runs
|
||||
]
|
||||
|
||||
node_execution_ids = session.scalars(
|
||||
select(WorkflowNodeExecutionModel.id).where(
|
||||
tuple_(
|
||||
WorkflowNodeExecutionModel.tenant_id,
|
||||
WorkflowNodeExecutionModel.app_id,
|
||||
WorkflowNodeExecutionModel.workflow_id,
|
||||
WorkflowNodeExecutionModel.triggered_from,
|
||||
WorkflowNodeExecutionModel.workflow_run_id,
|
||||
).in_(tuple_values)
|
||||
)
|
||||
).all()
|
||||
|
||||
if not node_execution_ids:
|
||||
return 0, 0
|
||||
|
||||
offloads_deleted = (
|
||||
cast(
|
||||
CursorResult,
|
||||
session.execute(
|
||||
delete(WorkflowNodeExecutionOffload).where(
|
||||
WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids)
|
||||
)
|
||||
),
|
||||
).rowcount
|
||||
or 0
|
||||
)
|
||||
|
||||
node_executions_deleted = (
|
||||
cast(
|
||||
CursorResult,
|
||||
session.execute(
|
||||
delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(node_execution_ids))
|
||||
),
|
||||
).rowcount
|
||||
or 0
|
||||
)
|
||||
|
||||
return node_executions_deleted, offloads_deleted
|
||||
|
||||
@staticmethod
|
||||
def count_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]:
|
||||
"""
|
||||
Count node executions (and offloads) for the given workflow runs using indexed columns.
|
||||
"""
|
||||
if not runs:
|
||||
return 0, 0
|
||||
|
||||
tuple_values = [
|
||||
(
|
||||
run["tenant_id"],
|
||||
run["app_id"],
|
||||
run["workflow_id"],
|
||||
DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
|
||||
run["triggered_from"]
|
||||
),
|
||||
run["run_id"],
|
||||
)
|
||||
for run in runs
|
||||
]
|
||||
tuple_filter = tuple_(
|
||||
WorkflowNodeExecutionModel.tenant_id,
|
||||
WorkflowNodeExecutionModel.app_id,
|
||||
WorkflowNodeExecutionModel.workflow_id,
|
||||
WorkflowNodeExecutionModel.triggered_from,
|
||||
WorkflowNodeExecutionModel.workflow_run_id,
|
||||
).in_(tuple_values)
|
||||
|
||||
node_executions_count = (
|
||||
session.scalar(select(func.count()).select_from(WorkflowNodeExecutionModel).where(tuple_filter)) or 0
|
||||
)
|
||||
offloads_count = (
|
||||
session.scalar(
|
||||
select(func.count())
|
||||
.select_from(WorkflowNodeExecutionOffload)
|
||||
.join(
|
||||
WorkflowNodeExecutionModel,
|
||||
WorkflowNodeExecutionOffload.node_execution_id == WorkflowNodeExecutionModel.id,
|
||||
)
|
||||
.where(tuple_filter)
|
||||
)
|
||||
or 0
|
||||
)
|
||||
|
||||
return int(node_executions_count), int(offloads_count)
|
||||
|
||||
@@ -21,7 +21,7 @@ Implementation Notes:
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from collections.abc import Sequence
|
||||
from collections.abc import Callable, Sequence
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any, cast
|
||||
@@ -32,7 +32,7 @@ from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session, selectinload, sessionmaker
|
||||
|
||||
from core.workflow.entities.pause_reason import HumanInputRequired, PauseReason, SchedulingPause
|
||||
from core.workflow.enums import WorkflowExecutionStatus
|
||||
from core.workflow.enums import WorkflowExecutionStatus, WorkflowType
|
||||
from extensions.ext_storage import storage
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import convert_datetime_to_date
|
||||
@@ -40,8 +40,14 @@ from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
from libs.time_parser import get_time_threshold
|
||||
from libs.uuid_utils import uuidv7
|
||||
from models.enums import WorkflowRunTriggeredFrom
|
||||
from models.workflow import WorkflowPause as WorkflowPauseModel
|
||||
from models.workflow import WorkflowPauseReason, WorkflowRun
|
||||
from models.workflow import (
|
||||
WorkflowAppLog,
|
||||
WorkflowPauseReason,
|
||||
WorkflowRun,
|
||||
)
|
||||
from models.workflow import (
|
||||
WorkflowPause as WorkflowPauseModel,
|
||||
)
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.entities.workflow_pause import WorkflowPauseEntity
|
||||
from repositories.types import (
|
||||
@@ -314,6 +320,171 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
logger.info("Total deleted %s workflow runs for app %s", total_deleted, app_id)
|
||||
return total_deleted
|
||||
|
||||
def get_runs_batch_by_time_range(
|
||||
self,
|
||||
start_from: datetime | None,
|
||||
end_before: datetime,
|
||||
last_seen: tuple[datetime, str] | None,
|
||||
batch_size: int,
|
||||
run_types: Sequence[WorkflowType] | None = None,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Fetch ended workflow runs in a time window for archival and clean batching.
|
||||
|
||||
Query scope:
|
||||
- created_at in [start_from, end_before)
|
||||
- type in run_types (when provided)
|
||||
- status is an ended state
|
||||
- optional tenant_id filter and cursor (last_seen) for pagination
|
||||
"""
|
||||
with self._session_maker() as session:
|
||||
stmt = (
|
||||
select(WorkflowRun)
|
||||
.where(
|
||||
WorkflowRun.created_at < end_before,
|
||||
WorkflowRun.status.in_(WorkflowExecutionStatus.ended_values()),
|
||||
)
|
||||
.order_by(WorkflowRun.created_at.asc(), WorkflowRun.id.asc())
|
||||
.limit(batch_size)
|
||||
)
|
||||
if run_types is not None:
|
||||
if not run_types:
|
||||
return []
|
||||
stmt = stmt.where(WorkflowRun.type.in_(run_types))
|
||||
|
||||
if start_from:
|
||||
stmt = stmt.where(WorkflowRun.created_at >= start_from)
|
||||
|
||||
if tenant_ids:
|
||||
stmt = stmt.where(WorkflowRun.tenant_id.in_(tenant_ids))
|
||||
|
||||
if last_seen:
|
||||
stmt = stmt.where(
|
||||
or_(
|
||||
WorkflowRun.created_at > last_seen[0],
|
||||
and_(WorkflowRun.created_at == last_seen[0], WorkflowRun.id > last_seen[1]),
|
||||
)
|
||||
)
|
||||
|
||||
return session.scalars(stmt).all()
|
||||
|
||||
def delete_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
|
||||
delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
|
||||
) -> dict[str, int]:
|
||||
if not runs:
|
||||
return {
|
||||
"runs": 0,
|
||||
"node_executions": 0,
|
||||
"offloads": 0,
|
||||
"app_logs": 0,
|
||||
"trigger_logs": 0,
|
||||
"pauses": 0,
|
||||
"pause_reasons": 0,
|
||||
}
|
||||
|
||||
with self._session_maker() as session:
|
||||
run_ids = [run.id for run in runs]
|
||||
if delete_node_executions:
|
||||
node_executions_deleted, offloads_deleted = delete_node_executions(session, runs)
|
||||
else:
|
||||
node_executions_deleted, offloads_deleted = 0, 0
|
||||
|
||||
app_logs_result = session.execute(delete(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids)))
|
||||
app_logs_deleted = cast(CursorResult, app_logs_result).rowcount or 0
|
||||
|
||||
pause_ids = session.scalars(
|
||||
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
|
||||
).all()
|
||||
pause_reasons_deleted = 0
|
||||
pauses_deleted = 0
|
||||
|
||||
if pause_ids:
|
||||
pause_reasons_result = session.execute(
|
||||
delete(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids))
|
||||
)
|
||||
pause_reasons_deleted = cast(CursorResult, pause_reasons_result).rowcount or 0
|
||||
pauses_result = session.execute(delete(WorkflowPauseModel).where(WorkflowPauseModel.id.in_(pause_ids)))
|
||||
pauses_deleted = cast(CursorResult, pauses_result).rowcount or 0
|
||||
|
||||
trigger_logs_deleted = delete_trigger_logs(session, run_ids) if delete_trigger_logs else 0
|
||||
|
||||
runs_result = session.execute(delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)))
|
||||
runs_deleted = cast(CursorResult, runs_result).rowcount or 0
|
||||
|
||||
session.commit()
|
||||
|
||||
return {
|
||||
"runs": runs_deleted,
|
||||
"node_executions": node_executions_deleted,
|
||||
"offloads": offloads_deleted,
|
||||
"app_logs": app_logs_deleted,
|
||||
"trigger_logs": trigger_logs_deleted,
|
||||
"pauses": pauses_deleted,
|
||||
"pause_reasons": pause_reasons_deleted,
|
||||
}
|
||||
|
||||
def count_runs_with_related(
|
||||
self,
|
||||
runs: Sequence[WorkflowRun],
|
||||
count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
|
||||
count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
|
||||
) -> dict[str, int]:
|
||||
if not runs:
|
||||
return {
|
||||
"runs": 0,
|
||||
"node_executions": 0,
|
||||
"offloads": 0,
|
||||
"app_logs": 0,
|
||||
"trigger_logs": 0,
|
||||
"pauses": 0,
|
||||
"pause_reasons": 0,
|
||||
}
|
||||
|
||||
with self._session_maker() as session:
|
||||
run_ids = [run.id for run in runs]
|
||||
if count_node_executions:
|
||||
node_executions_count, offloads_count = count_node_executions(session, runs)
|
||||
else:
|
||||
node_executions_count, offloads_count = 0, 0
|
||||
|
||||
app_logs_count = (
|
||||
session.scalar(
|
||||
select(func.count()).select_from(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids))
|
||||
)
|
||||
or 0
|
||||
)
|
||||
|
||||
pause_ids = session.scalars(
|
||||
select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids))
|
||||
).all()
|
||||
pauses_count = len(pause_ids)
|
||||
pause_reasons_count = 0
|
||||
if pause_ids:
|
||||
pause_reasons_count = (
|
||||
session.scalar(
|
||||
select(func.count())
|
||||
.select_from(WorkflowPauseReason)
|
||||
.where(WorkflowPauseReason.pause_id.in_(pause_ids))
|
||||
)
|
||||
or 0
|
||||
)
|
||||
|
||||
trigger_logs_count = count_trigger_logs(session, run_ids) if count_trigger_logs else 0
|
||||
|
||||
return {
|
||||
"runs": len(runs),
|
||||
"node_executions": node_executions_count,
|
||||
"offloads": offloads_count,
|
||||
"app_logs": int(app_logs_count),
|
||||
"trigger_logs": trigger_logs_count,
|
||||
"pauses": pauses_count,
|
||||
"pause_reasons": int(pause_reasons_count),
|
||||
}
|
||||
|
||||
def create_workflow_pause(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
|
||||
@@ -4,8 +4,10 @@ SQLAlchemy implementation of WorkflowTriggerLogRepository.
|
||||
|
||||
from collections.abc import Sequence
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import cast
|
||||
|
||||
from sqlalchemy import and_, select
|
||||
from sqlalchemy import and_, delete, func, select
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from models.enums import WorkflowTriggerStatus
|
||||
@@ -84,3 +86,37 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository):
|
||||
)
|
||||
|
||||
return list(self.session.scalars(query).all())
|
||||
|
||||
def delete_by_run_ids(self, run_ids: Sequence[str]) -> int:
|
||||
"""
|
||||
Delete trigger logs associated with the given workflow run ids.
|
||||
|
||||
Args:
|
||||
run_ids: Collection of workflow run identifiers.
|
||||
|
||||
Returns:
|
||||
Number of rows deleted.
|
||||
"""
|
||||
if not run_ids:
|
||||
return 0
|
||||
|
||||
result = self.session.execute(delete(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id.in_(run_ids)))
|
||||
return cast(CursorResult, result).rowcount or 0
|
||||
|
||||
def count_by_run_ids(self, run_ids: Sequence[str]) -> int:
|
||||
"""
|
||||
Count trigger logs associated with the given workflow run ids.
|
||||
|
||||
Args:
|
||||
run_ids: Collection of workflow run identifiers.
|
||||
|
||||
Returns:
|
||||
Number of rows matched.
|
||||
"""
|
||||
if not run_ids:
|
||||
return 0
|
||||
|
||||
count = self.session.scalar(
|
||||
select(func.count()).select_from(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id.in_(run_ids))
|
||||
)
|
||||
return int(count or 0)
|
||||
|
||||
@@ -109,3 +109,15 @@ class WorkflowTriggerLogRepository(Protocol):
|
||||
A sequence of recent WorkflowTriggerLog instances
|
||||
"""
|
||||
...
|
||||
|
||||
def delete_by_run_ids(self, run_ids: Sequence[str]) -> int:
|
||||
"""
|
||||
Delete trigger logs for workflow run IDs.
|
||||
|
||||
Args:
|
||||
run_ids: Workflow run IDs to delete
|
||||
|
||||
Returns:
|
||||
Number of rows deleted
|
||||
"""
|
||||
...
|
||||
|
||||
43  api/schedule/clean_workflow_runs_task.py  Normal file
@@ -0,0 +1,43 @@
from datetime import UTC, datetime

import click

import app
from configs import dify_config
from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup


@app.celery.task(queue="retention")
def clean_workflow_runs_task() -> None:
    """
    Scheduled cleanup for workflow runs and related records (sandbox tenants only).
    """
    click.echo(
        click.style(
            (
                "Scheduled workflow run cleanup starting: "
                f"cutoff={dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS} days, "
                f"batch={dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE}"
            ),
            fg="green",
        )
    )

    start_time = datetime.now(UTC)

    WorkflowRunCleanup(
        days=dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS,
        batch_size=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE,
        start_from=None,
        end_before=None,
    ).run()

    end_time = datetime.now(UTC)
    elapsed = end_time - start_time
    click.echo(
        click.style(
            f"Scheduled workflow run cleanup finished. start={start_time.isoformat()} "
            f"end={end_time.isoformat()} duration={elapsed}",
            fg="green",
        )
    )
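For debugging, the task can also be triggered outside the beat schedule; a hedged sketch (not part of the diff) that assumes a configured app, broker, and a worker consuming the retention queue:

from schedule.clean_workflow_runs_task import clean_workflow_runs_task

clean_workflow_runs_task.apply_async(queue="retention")  # enqueue onto the dedicated retention queue
# or run eagerly in-process (still requires the database/billing config to be set up):
clean_workflow_runs_task.apply().get()
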
0  api/services/retention/__init__.py  Normal file
0  api/services/retention/workflow_run/__init__.py  Normal file
@@ -0,0 +1,301 @@
|
||||
import datetime
|
||||
import logging
|
||||
from collections.abc import Iterable, Sequence
|
||||
|
||||
import click
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from extensions.ext_database import db
|
||||
from models.workflow import WorkflowRun
|
||||
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
|
||||
from repositories.sqlalchemy_api_workflow_node_execution_repository import (
|
||||
DifyAPISQLAlchemyWorkflowNodeExecutionRepository,
|
||||
)
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
from services.billing_service import BillingService, SubscriptionPlan
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowRunCleanup:
|
||||
def __init__(
|
||||
self,
|
||||
days: int,
|
||||
batch_size: int,
|
||||
start_from: datetime.datetime | None = None,
|
||||
end_before: datetime.datetime | None = None,
|
||||
workflow_run_repo: APIWorkflowRunRepository | None = None,
|
||||
dry_run: bool = False,
|
||||
):
|
||||
if (start_from is None) ^ (end_before is None):
|
||||
raise ValueError("start_from and end_before must be both set or both omitted.")
|
||||
|
||||
computed_cutoff = datetime.datetime.now() - datetime.timedelta(days=days)
|
||||
self.window_start = start_from
|
||||
self.window_end = end_before or computed_cutoff
|
||||
|
||||
if self.window_start and self.window_end <= self.window_start:
|
||||
raise ValueError("end_before must be greater than start_from.")
|
||||
|
||||
if batch_size <= 0:
|
||||
raise ValueError("batch_size must be greater than 0.")
|
||||
|
||||
self.batch_size = batch_size
|
||||
self._cleanup_whitelist: set[str] | None = None
|
||||
self.dry_run = dry_run
|
||||
self.free_plan_grace_period_days = dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD
|
||||
self.workflow_run_repo: APIWorkflowRunRepository
|
||||
if workflow_run_repo:
|
||||
self.workflow_run_repo = workflow_run_repo
|
||||
else:
|
||||
# Lazy import to avoid circular dependencies during module import
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
|
||||
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
|
||||
|
||||
def run(self) -> None:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"{'Inspecting' if self.dry_run else 'Cleaning'} workflow runs "
|
||||
f"{'between ' + self.window_start.isoformat() + ' and ' if self.window_start else 'before '}"
|
||||
f"{self.window_end.isoformat()} (batch={self.batch_size})",
|
||||
fg="white",
|
||||
)
|
||||
)
|
||||
if self.dry_run:
|
||||
click.echo(click.style("Dry run mode enabled. No data will be deleted.", fg="yellow"))
|
||||
|
||||
total_runs_deleted = 0
|
||||
total_runs_targeted = 0
|
||||
related_totals = self._empty_related_counts() if self.dry_run else None
|
||||
batch_index = 0
|
||||
last_seen: tuple[datetime.datetime, str] | None = None
|
||||
|
||||
while True:
|
||||
run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
|
||||
start_from=self.window_start,
|
||||
end_before=self.window_end,
|
||||
last_seen=last_seen,
|
||||
batch_size=self.batch_size,
|
||||
)
|
||||
if not run_rows:
|
||||
break
|
||||
|
||||
batch_index += 1
|
||||
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
|
||||
tenant_ids = {row.tenant_id for row in run_rows}
|
||||
free_tenants = self._filter_free_tenants(tenant_ids)
|
||||
free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
|
||||
paid_or_skipped = len(run_rows) - len(free_runs)
|
||||
|
||||
if not free_runs:
|
||||
click.echo(
|
||||
click.style(
|
||||
f"[batch #{batch_index}] skipped (no sandbox runs in batch, {paid_or_skipped} paid/unknown)",
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
total_runs_targeted += len(free_runs)
|
||||
|
||||
if self.dry_run:
|
||||
batch_counts = self.workflow_run_repo.count_runs_with_related(
|
||||
free_runs,
|
||||
count_node_executions=self._count_node_executions,
|
||||
count_trigger_logs=self._count_trigger_logs,
|
||||
)
|
||||
if related_totals is not None:
|
||||
for key in related_totals:
|
||||
related_totals[key] += batch_counts.get(key, 0)
|
||||
sample_ids = ", ".join(run.id for run in free_runs[:5])
|
||||
click.echo(
|
||||
click.style(
|
||||
f"[batch #{batch_index}] would delete {len(free_runs)} runs "
|
||||
f"(sample ids: {sample_ids}) and skip {paid_or_skipped} paid/unknown",
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
counts = self.workflow_run_repo.delete_runs_with_related(
|
||||
free_runs,
|
||||
delete_node_executions=self._delete_node_executions,
|
||||
delete_trigger_logs=self._delete_trigger_logs,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
|
||||
raise
|
||||
|
||||
total_runs_deleted += counts["runs"]
|
||||
click.echo(
|
||||
click.style(
|
||||
f"[batch #{batch_index}] deleted runs: {counts['runs']} "
|
||||
f"(nodes {counts['node_executions']}, offloads {counts['offloads']}, "
|
||||
f"app_logs {counts['app_logs']}, trigger_logs {counts['trigger_logs']}, "
|
||||
f"pauses {counts['pauses']}, pause_reasons {counts['pause_reasons']}); "
|
||||
f"skipped {paid_or_skipped} paid/unknown",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
|
||||
if self.dry_run:
|
||||
if self.window_start:
|
||||
summary_message = (
|
||||
f"Dry run complete. Would delete {total_runs_targeted} workflow runs "
|
||||
f"between {self.window_start.isoformat()} and {self.window_end.isoformat()}"
|
||||
)
|
||||
else:
|
||||
summary_message = (
|
||||
f"Dry run complete. Would delete {total_runs_targeted} workflow runs "
|
||||
f"before {self.window_end.isoformat()}"
|
||||
)
|
||||
if related_totals is not None:
|
||||
summary_message = f"{summary_message}; related records: {self._format_related_counts(related_totals)}"
|
||||
summary_color = "yellow"
|
||||
else:
|
||||
if self.window_start:
|
||||
summary_message = (
|
||||
f"Cleanup complete. Deleted {total_runs_deleted} workflow runs "
|
||||
f"between {self.window_start.isoformat()} and {self.window_end.isoformat()}"
|
||||
)
|
||||
else:
|
||||
summary_message = (
|
||||
f"Cleanup complete. Deleted {total_runs_deleted} workflow runs before {self.window_end.isoformat()}"
|
||||
)
|
||||
summary_color = "white"
|
||||
|
||||
click.echo(click.style(summary_message, fg=summary_color))
|
||||
|
||||
def _filter_free_tenants(self, tenant_ids: Iterable[str]) -> set[str]:
|
||||
tenant_id_list = list(tenant_ids)
|
||||
|
||||
if not dify_config.BILLING_ENABLED:
|
||||
return set(tenant_id_list)
|
||||
|
||||
if not tenant_id_list:
|
||||
return set()
|
||||
|
||||
cleanup_whitelist = self._get_cleanup_whitelist()
|
||||
|
||||
try:
|
||||
bulk_info = BillingService.get_plan_bulk_with_cache(tenant_id_list)
|
||||
except Exception:
|
||||
bulk_info = {}
|
||||
logger.exception("Failed to fetch billing plans in bulk for tenants: %s", tenant_id_list)
|
||||
|
||||
eligible_free_tenants: set[str] = set()
|
||||
for tenant_id in tenant_id_list:
|
||||
if tenant_id in cleanup_whitelist:
|
||||
continue
|
||||
|
||||
info = bulk_info.get(tenant_id)
|
||||
if info is None:
|
||||
logger.warning("Missing billing info for tenant %s in bulk resp; treating as non-free", tenant_id)
|
||||
continue
|
||||
|
||||
if info.get("plan") != CloudPlan.SANDBOX:
|
||||
continue
|
||||
|
||||
if self._is_within_grace_period(tenant_id, info):
|
||||
continue
|
||||
|
||||
eligible_free_tenants.add(tenant_id)
|
||||
|
||||
return eligible_free_tenants
|
||||
|
||||
def _expiration_datetime(self, tenant_id: str, expiration_value: int) -> datetime.datetime | None:
|
||||
if expiration_value < 0:
|
||||
return None
|
||||
|
||||
try:
|
||||
return datetime.datetime.fromtimestamp(expiration_value, datetime.UTC)
|
||||
except (OverflowError, OSError, ValueError):
|
||||
logger.exception("Failed to parse expiration timestamp for tenant %s", tenant_id)
|
||||
return None
|
||||
|
||||
def _is_within_grace_period(self, tenant_id: str, info: SubscriptionPlan) -> bool:
|
||||
if self.free_plan_grace_period_days <= 0:
|
||||
return False
|
||||
|
||||
expiration_value = info.get("expiration_date", -1)
|
||||
expiration_at = self._expiration_datetime(tenant_id, expiration_value)
|
||||
if expiration_at is None:
|
||||
return False
|
||||
|
||||
grace_deadline = expiration_at + datetime.timedelta(days=self.free_plan_grace_period_days)
|
||||
return datetime.datetime.now(datetime.UTC) < grace_deadline
|
||||
|
||||
def _get_cleanup_whitelist(self) -> set[str]:
|
||||
if self._cleanup_whitelist is not None:
|
||||
return self._cleanup_whitelist
|
||||
|
||||
if not dify_config.BILLING_ENABLED:
|
||||
self._cleanup_whitelist = set()
|
||||
return self._cleanup_whitelist
|
||||
|
||||
try:
|
||||
whitelist_ids = BillingService.get_expired_subscription_cleanup_whitelist()
|
||||
except Exception:
|
||||
logger.exception("Failed to fetch cleanup whitelist from billing service")
|
||||
whitelist_ids = []
|
||||
|
||||
self._cleanup_whitelist = set(whitelist_ids)
|
||||
return self._cleanup_whitelist
|
||||
|
||||
def _delete_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
return trigger_repo.delete_by_run_ids(run_ids)
|
||||
|
||||
def _count_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
return trigger_repo.count_by_run_ids(run_ids)
|
||||
|
||||
@staticmethod
|
||||
def _build_run_contexts(
|
||||
runs: Sequence[WorkflowRun],
|
||||
) -> list[DifyAPISQLAlchemyWorkflowNodeExecutionRepository.RunContext]:
|
||||
return [
|
||||
{
|
||||
"run_id": run.id,
|
||||
"tenant_id": run.tenant_id,
|
||||
"app_id": run.app_id,
|
||||
"workflow_id": run.workflow_id,
|
||||
"triggered_from": run.triggered_from,
|
||||
}
|
||||
for run in runs
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def _empty_related_counts() -> dict[str, int]:
|
||||
return {
|
||||
"node_executions": 0,
|
||||
"offloads": 0,
|
||||
"app_logs": 0,
|
||||
"trigger_logs": 0,
|
||||
"pauses": 0,
|
||||
"pause_reasons": 0,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _format_related_counts(counts: dict[str, int]) -> str:
|
||||
return (
|
||||
f"node_executions {counts['node_executions']}, "
|
||||
f"offloads {counts['offloads']}, "
|
||||
f"app_logs {counts['app_logs']}, "
|
||||
f"trigger_logs {counts['trigger_logs']}, "
|
||||
f"pauses {counts['pauses']}, "
|
||||
f"pause_reasons {counts['pause_reasons']}"
|
||||
)
|
||||
|
||||
def _count_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
|
||||
run_contexts = self._build_run_contexts(runs)
|
||||
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.count_by_runs(session, run_contexts)
|
||||
|
||||
def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
|
||||
run_contexts = self._build_run_contexts(runs)
|
||||
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_contexts)
|
||||
158  api/tests/fixtures/workflow/test_streaming_conversation_variables_v1_overwrite.yml  vendored  Normal file
@@ -0,0 +1,158 @@
|
||||
app:
|
||||
description: Validate v1 Variable Assigner blocks streaming until conversation variable is updated.
|
||||
icon: 🤖
|
||||
icon_background: '#FFEAD5'
|
||||
mode: advanced-chat
|
||||
name: test_streaming_conversation_variables_v1_overwrite
|
||||
use_icon_as_answer_icon: false
|
||||
dependencies: []
|
||||
kind: app
|
||||
version: 0.5.0
|
||||
workflow:
|
||||
conversation_variables:
|
||||
- description: ''
|
||||
id: 6ddf2d7f-3d1b-4bb0-9a5e-9b0c87c7b5e6
|
||||
name: conv_var
|
||||
selector:
|
||||
- conversation
|
||||
- conv_var
|
||||
value: default
|
||||
value_type: string
|
||||
environment_variables: []
|
||||
features:
|
||||
file_upload:
|
||||
allowed_file_extensions:
|
||||
- .JPG
|
||||
- .JPEG
|
||||
- .PNG
|
||||
- .GIF
|
||||
- .WEBP
|
||||
- .SVG
|
||||
allowed_file_types:
|
||||
- image
|
||||
allowed_file_upload_methods:
|
||||
- local_file
|
||||
- remote_url
|
||||
enabled: false
|
||||
fileUploadConfig:
|
||||
audio_file_size_limit: 50
|
||||
batch_count_limit: 5
|
||||
file_size_limit: 15
|
||||
image_file_size_limit: 10
|
||||
video_file_size_limit: 100
|
||||
workflow_file_upload_limit: 10
|
||||
image:
|
||||
enabled: false
|
||||
number_limits: 3
|
||||
transfer_methods:
|
||||
- local_file
|
||||
- remote_url
|
||||
number_limits: 3
|
||||
opening_statement: ''
|
||||
retriever_resource:
|
||||
enabled: true
|
||||
sensitive_word_avoidance:
|
||||
enabled: false
|
||||
speech_to_text:
|
||||
enabled: false
|
||||
suggested_questions: []
|
||||
suggested_questions_after_answer:
|
||||
enabled: false
|
||||
text_to_speech:
|
||||
enabled: false
|
||||
language: ''
|
||||
voice: ''
|
||||
graph:
|
||||
edges:
|
||||
- data:
|
||||
isInIteration: false
|
||||
isInLoop: false
|
||||
sourceType: start
|
||||
targetType: assigner
|
||||
id: start-source-assigner-target
|
||||
source: start
|
||||
sourceHandle: source
|
||||
target: assigner
|
||||
targetHandle: target
|
||||
type: custom
|
||||
zIndex: 0
|
||||
- data:
|
||||
isInLoop: false
|
||||
sourceType: assigner
|
||||
targetType: answer
|
||||
id: assigner-source-answer-target
|
||||
source: assigner
|
||||
sourceHandle: source
|
||||
target: answer
|
||||
targetHandle: target
|
||||
type: custom
|
||||
zIndex: 0
|
||||
nodes:
|
||||
- data:
|
||||
desc: ''
|
||||
selected: false
|
||||
title: Start
|
||||
type: start
|
||||
variables: []
|
||||
height: 54
|
||||
id: start
|
||||
position:
|
||||
x: 30
|
||||
y: 253
|
||||
positionAbsolute:
|
||||
x: 30
|
||||
y: 253
|
||||
selected: false
|
||||
sourcePosition: right
|
||||
targetPosition: left
|
||||
type: custom
|
||||
width: 244
|
||||
- data:
|
||||
answer: 'Current Value Of `conv_var` is:{{#conversation.conv_var#}}'
|
||||
desc: ''
|
||||
selected: false
|
||||
title: Answer
|
||||
type: answer
|
||||
variables: []
|
||||
height: 106
|
||||
id: answer
|
||||
position:
|
||||
x: 638
|
||||
y: 253
|
||||
positionAbsolute:
|
||||
x: 638
|
||||
y: 253
|
||||
selected: true
|
||||
sourcePosition: right
|
||||
targetPosition: left
|
||||
type: custom
|
||||
width: 244
|
||||
- data:
|
||||
assigned_variable_selector:
|
||||
- conversation
|
||||
- conv_var
|
||||
desc: ''
|
||||
input_variable_selector:
|
||||
- sys
|
||||
- query
|
||||
selected: false
|
||||
title: Variable Assigner
|
||||
type: assigner
|
||||
write_mode: over-write
|
||||
height: 84
|
||||
id: assigner
|
||||
position:
|
||||
x: 334
|
||||
y: 253
|
||||
positionAbsolute:
|
||||
x: 334
|
||||
y: 253
|
||||
selected: false
|
||||
sourcePosition: right
|
||||
targetPosition: left
|
||||
type: custom
|
||||
width: 244
|
||||
viewport:
|
||||
x: 0
|
||||
y: 0
|
||||
zoom: 0.7
|
||||
@@ -230,7 +230,6 @@ class TestAgentService:
|
||||
|
||||
# Create first agent thought
|
||||
thought1 = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to analyze the user's request",
|
||||
@@ -257,7 +256,6 @@ class TestAgentService:
|
||||
|
||||
# Create second agent thought
|
||||
thought2 = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=2,
|
||||
thought="Based on the analysis, I can provide a response",
|
||||
@@ -545,7 +543,6 @@ class TestAgentService:
|
||||
|
||||
# Create agent thought with tool error
|
||||
thought_with_error = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to analyze the user's request",
|
||||
@@ -759,7 +756,6 @@ class TestAgentService:
|
||||
|
||||
# Create agent thought with multiple tools
|
||||
complex_thought = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to use multiple tools to complete this task",
|
||||
@@ -877,7 +873,6 @@ class TestAgentService:
|
||||
|
||||
# Create agent thought with files
|
||||
thought_with_files = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to process some files",
|
||||
@@ -957,7 +952,6 @@ class TestAgentService:
|
||||
|
||||
# Create agent thought with empty tool data
|
||||
empty_thought = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to analyze the user's request",
|
||||
@@ -999,7 +993,6 @@ class TestAgentService:
|
||||
|
||||
# Create agent thought with malformed JSON
|
||||
malformed_thought = MessageAgentThought(
|
||||
id=fake.uuid4(),
|
||||
message_id=message.id,
|
||||
position=1,
|
||||
thought="I need to analyze the user's request",
|
||||
|
||||
@@ -45,3 +45,33 @@ def test_streaming_conversation_variables():
|
||||
runner = TableTestRunner()
|
||||
result = runner.run_test_case(case)
|
||||
assert result.success, f"Test failed: {result.error}"
|
||||
|
||||
|
||||
def test_streaming_conversation_variables_v1_overwrite_waits_for_assignment():
|
||||
fixture_name = "test_streaming_conversation_variables_v1_overwrite"
|
||||
input_query = "overwrite-value"
|
||||
|
||||
case = WorkflowTestCase(
|
||||
fixture_path=fixture_name,
|
||||
use_auto_mock=False,
|
||||
mock_config=MockConfigBuilder().build(),
|
||||
query=input_query,
|
||||
inputs={},
|
||||
expected_outputs={"answer": f"Current Value Of `conv_var` is:{input_query}"},
|
||||
)
|
||||
|
||||
runner = TableTestRunner()
|
||||
result = runner.run_test_case(case)
|
||||
assert result.success, f"Test failed: {result.error}"
|
||||
|
||||
events = result.events
|
||||
conv_var_chunk_events = [
|
||||
event
|
||||
for event in events
|
||||
if isinstance(event, NodeRunStreamChunkEvent) and tuple(event.selector) == ("conversation", "conv_var")
|
||||
]
|
||||
|
||||
assert conv_var_chunk_events, "Expected conversation variable chunk events to be emitted"
|
||||
assert all(event.chunk == input_query for event in conv_var_chunk_events), (
|
||||
"Expected streamed conversation variable value to match the input query"
|
||||
)
|
||||
|
||||
@@ -2,13 +2,17 @@ from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from configs import dify_config
|
||||
from core.file.enums import FileType
|
||||
from core.file.models import File, FileTransferMethod
|
||||
from core.helper.code_executor.code_executor import CodeLanguage
|
||||
from core.variables.variables import StringVariable
|
||||
from core.workflow.constants import (
|
||||
CONVERSATION_VARIABLE_NODE_ID,
|
||||
ENVIRONMENT_VARIABLE_NODE_ID,
|
||||
)
|
||||
from core.workflow.nodes.code.code_node import CodeNode
|
||||
from core.workflow.nodes.code.limits import CodeNodeLimits
|
||||
from core.workflow.runtime import VariablePool
|
||||
from core.workflow.system_variable import SystemVariable
|
||||
from core.workflow.workflow_entry import WorkflowEntry
|
||||
@@ -96,6 +100,58 @@ class TestWorkflowEntry:
|
||||
assert output_var is not None
|
||||
assert output_var.value == "system_user"
|
||||
|
||||
def test_single_step_run_injects_code_limits(self):
|
||||
"""Ensure single-step CodeNode execution configures limits."""
|
||||
# Arrange
|
||||
node_id = "code_node"
|
||||
node_data = {
|
||||
"type": "code",
|
||||
"title": "Code",
|
||||
"desc": None,
|
||||
"variables": [],
|
||||
"code_language": CodeLanguage.PYTHON3,
|
||||
"code": "def main():\n return {}",
|
||||
"outputs": {},
|
||||
}
|
||||
node_config = {"id": node_id, "data": node_data}
|
||||
|
||||
class StubWorkflow:
|
||||
def __init__(self):
|
||||
self.tenant_id = "tenant"
|
||||
self.app_id = "app"
|
||||
self.id = "workflow"
|
||||
self.graph_dict = {"nodes": [node_config], "edges": []}
|
||||
|
||||
def get_node_config_by_id(self, target_id: str):
|
||||
assert target_id == node_id
|
||||
return node_config
|
||||
|
||||
workflow = StubWorkflow()
|
||||
variable_pool = VariablePool(system_variables=SystemVariable.empty(), user_inputs={})
|
||||
expected_limits = CodeNodeLimits(
|
||||
max_string_length=dify_config.CODE_MAX_STRING_LENGTH,
|
||||
max_number=dify_config.CODE_MAX_NUMBER,
|
||||
min_number=dify_config.CODE_MIN_NUMBER,
|
||||
max_precision=dify_config.CODE_MAX_PRECISION,
|
||||
max_depth=dify_config.CODE_MAX_DEPTH,
|
||||
max_number_array_length=dify_config.CODE_MAX_NUMBER_ARRAY_LENGTH,
|
||||
max_string_array_length=dify_config.CODE_MAX_STRING_ARRAY_LENGTH,
|
||||
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
|
||||
)
|
||||
|
||||
# Act
|
||||
node, _ = WorkflowEntry.single_step_run(
|
||||
workflow=workflow,
|
||||
node_id=node_id,
|
||||
user_id="user",
|
||||
user_inputs={},
|
||||
variable_pool=variable_pool,
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert isinstance(node, CodeNode)
|
||||
assert node._limits == expected_limits
|
||||
|
||||
def test_mapping_user_inputs_to_variable_pool_with_env_variables(self):
|
||||
"""Test mapping environment variables from user inputs to variable pool."""
|
||||
# Initialize variable pool with environment variables
|
||||
|
||||
@@ -4,6 +4,7 @@ from datetime import UTC, datetime
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
from core.workflow.enums import WorkflowExecutionStatus
|
||||
@@ -104,6 +105,42 @@ class TestDifyAPISQLAlchemyWorkflowRunRepository:
|
||||
return pause
|
||||
|
||||
|
||||
class TestGetRunsBatchByTimeRange(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
def test_get_runs_batch_by_time_range_filters_terminal_statuses(
|
||||
self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock
|
||||
):
|
||||
scalar_result = Mock()
|
||||
scalar_result.all.return_value = []
|
||||
mock_session.scalars.return_value = scalar_result
|
||||
|
||||
repository.get_runs_batch_by_time_range(
|
||||
start_from=None,
|
||||
end_before=datetime(2024, 1, 1),
|
||||
last_seen=None,
|
||||
batch_size=50,
|
||||
)
|
||||
|
||||
stmt = mock_session.scalars.call_args[0][0]
|
||||
compiled_sql = str(
|
||||
stmt.compile(
|
||||
dialect=postgresql.dialect(),
|
||||
compile_kwargs={"literal_binds": True},
|
||||
)
|
||||
)
|
||||
|
||||
assert "workflow_runs.status" in compiled_sql
|
||||
for status in (
|
||||
WorkflowExecutionStatus.SUCCEEDED,
|
||||
WorkflowExecutionStatus.FAILED,
|
||||
WorkflowExecutionStatus.STOPPED,
|
||||
WorkflowExecutionStatus.PARTIAL_SUCCEEDED,
|
||||
):
|
||||
assert f"'{status.value}'" in compiled_sql
|
||||
|
||||
assert "'running'" not in compiled_sql
|
||||
assert "'paused'" not in compiled_sql
|
||||
|
||||
|
||||
class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
"""Test create_workflow_pause method."""
|
||||
|
||||
@@ -181,6 +218,61 @@ class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
)
|
||||
|
||||
|
||||
class TestDeleteRunsWithRelated(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
def test_uses_trigger_log_repository(self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock):
|
||||
node_ids_result = Mock()
|
||||
node_ids_result.all.return_value = []
|
||||
pause_ids_result = Mock()
|
||||
pause_ids_result.all.return_value = []
|
||||
mock_session.scalars.side_effect = [node_ids_result, pause_ids_result]
|
||||
|
||||
# app_logs delete, runs delete
|
||||
mock_session.execute.side_effect = [Mock(rowcount=0), Mock(rowcount=1)]
|
||||
|
||||
fake_trigger_repo = Mock()
|
||||
fake_trigger_repo.delete_by_run_ids.return_value = 3
|
||||
|
||||
run = Mock(id="run-1", tenant_id="t1", app_id="a1", workflow_id="w1", triggered_from="tf")
|
||||
counts = repository.delete_runs_with_related(
|
||||
[run],
|
||||
delete_node_executions=lambda session, runs: (2, 1),
|
||||
delete_trigger_logs=lambda session, run_ids: fake_trigger_repo.delete_by_run_ids(run_ids),
|
||||
)
|
||||
|
||||
fake_trigger_repo.delete_by_run_ids.assert_called_once_with(["run-1"])
|
||||
assert counts["node_executions"] == 2
|
||||
assert counts["offloads"] == 1
|
||||
assert counts["trigger_logs"] == 3
|
||||
assert counts["runs"] == 1
|
||||
|
||||
|
||||
class TestCountRunsWithRelated(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
def test_uses_trigger_log_repository(self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock):
|
||||
pause_ids_result = Mock()
|
||||
pause_ids_result.all.return_value = ["pause-1", "pause-2"]
|
||||
mock_session.scalars.return_value = pause_ids_result
|
||||
mock_session.scalar.side_effect = [5, 2]
|
||||
|
||||
fake_trigger_repo = Mock()
|
||||
fake_trigger_repo.count_by_run_ids.return_value = 3
|
||||
|
||||
run = Mock(id="run-1", tenant_id="t1", app_id="a1", workflow_id="w1", triggered_from="tf")
|
||||
counts = repository.count_runs_with_related(
|
||||
[run],
|
||||
count_node_executions=lambda session, runs: (2, 1),
|
||||
count_trigger_logs=lambda session, run_ids: fake_trigger_repo.count_by_run_ids(run_ids),
|
||||
)
|
||||
|
||||
fake_trigger_repo.count_by_run_ids.assert_called_once_with(["run-1"])
|
||||
assert counts["node_executions"] == 2
|
||||
assert counts["offloads"] == 1
|
||||
assert counts["trigger_logs"] == 3
|
||||
assert counts["app_logs"] == 5
|
||||
assert counts["pauses"] == 2
|
||||
assert counts["pause_reasons"] == 2
|
||||
assert counts["runs"] == 1
|
||||
|
||||
|
||||
class TestResumeWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository):
|
||||
"""Test resume_workflow_pause method."""
|
||||
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
from unittest.mock import Mock
|
||||
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
|
||||
|
||||
def test_delete_by_run_ids_executes_delete():
|
||||
session = Mock(spec=Session)
|
||||
session.execute.return_value = Mock(rowcount=2)
|
||||
repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
|
||||
deleted = repo.delete_by_run_ids(["run-1", "run-2"])
|
||||
|
||||
stmt = session.execute.call_args[0][0]
|
||||
compiled_sql = str(stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True}))
|
||||
assert "workflow_trigger_logs" in compiled_sql
|
||||
assert "'run-1'" in compiled_sql
|
||||
assert "'run-2'" in compiled_sql
|
||||
assert deleted == 2
|
||||
|
||||
|
||||
def test_delete_by_run_ids_empty_short_circuits():
|
||||
session = Mock(spec=Session)
|
||||
repo = SQLAlchemyWorkflowTriggerLogRepository(session)
|
||||
|
||||
deleted = repo.delete_by_run_ids([])
|
||||
|
||||
session.execute.assert_not_called()
|
||||
assert deleted == 0
|
||||
@@ -0,0 +1,327 @@
|
||||
import datetime
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from services.billing_service import SubscriptionPlan
|
||||
from services.retention.workflow_run import clear_free_plan_expired_workflow_run_logs as cleanup_module
|
||||
from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup
|
||||
|
||||
|
||||
class FakeRun:
|
||||
def __init__(
|
||||
self,
|
||||
run_id: str,
|
||||
tenant_id: str,
|
||||
created_at: datetime.datetime,
|
||||
app_id: str = "app-1",
|
||||
workflow_id: str = "wf-1",
|
||||
triggered_from: str = "workflow-run",
|
||||
) -> None:
|
||||
self.id = run_id
|
||||
self.tenant_id = tenant_id
|
||||
self.app_id = app_id
|
||||
self.workflow_id = workflow_id
|
||||
self.triggered_from = triggered_from
|
||||
self.created_at = created_at
|
||||
|
||||
|
||||
class FakeRepo:
|
||||
def __init__(
|
||||
self,
|
||||
batches: list[list[FakeRun]],
|
||||
delete_result: dict[str, int] | None = None,
|
||||
count_result: dict[str, int] | None = None,
|
||||
) -> None:
|
||||
self.batches = batches
|
||||
self.call_idx = 0
|
||||
self.deleted: list[list[str]] = []
|
||||
self.counted: list[list[str]] = []
|
||||
self.delete_result = delete_result or {
|
||||
"runs": 0,
|
||||
"node_executions": 0,
|
||||
"offloads": 0,
|
||||
"app_logs": 0,
|
||||
"trigger_logs": 0,
|
||||
"pauses": 0,
|
||||
"pause_reasons": 0,
|
||||
}
|
||||
self.count_result = count_result or {
|
||||
"runs": 0,
|
||||
"node_executions": 0,
|
||||
"offloads": 0,
|
||||
"app_logs": 0,
|
||||
"trigger_logs": 0,
|
||||
"pauses": 0,
|
||||
"pause_reasons": 0,
|
||||
}
|
||||
|
||||
def get_runs_batch_by_time_range(
|
||||
self,
|
||||
start_from: datetime.datetime | None,
|
||||
end_before: datetime.datetime,
|
||||
last_seen: tuple[datetime.datetime, str] | None,
|
||||
batch_size: int,
|
||||
) -> list[FakeRun]:
|
||||
if self.call_idx >= len(self.batches):
|
||||
return []
|
||||
batch = self.batches[self.call_idx]
|
||||
self.call_idx += 1
|
||||
return batch
|
||||
|
||||
def delete_runs_with_related(
|
||||
self, runs: list[FakeRun], delete_node_executions=None, delete_trigger_logs=None
|
||||
) -> dict[str, int]:
|
||||
self.deleted.append([run.id for run in runs])
|
||||
result = self.delete_result.copy()
|
||||
result["runs"] = len(runs)
|
||||
return result
|
||||
|
||||
def count_runs_with_related(
|
||||
self, runs: list[FakeRun], count_node_executions=None, count_trigger_logs=None
|
||||
) -> dict[str, int]:
|
||||
self.counted.append([run.id for run in runs])
|
||||
result = self.count_result.copy()
|
||||
result["runs"] = len(runs)
|
||||
return result
|
||||
|
||||
|
||||
def plan_info(plan: str, expiration: int) -> SubscriptionPlan:
|
||||
return SubscriptionPlan(plan=plan, expiration_date=expiration)
|
||||
|
||||
|
||||
def create_cleanup(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
repo: FakeRepo,
|
||||
*,
|
||||
grace_period_days: int = 0,
|
||||
whitelist: set[str] | None = None,
|
||||
**kwargs: Any,
|
||||
) -> WorkflowRunCleanup:
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.dify_config,
|
||||
"SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD",
|
||||
grace_period_days,
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.WorkflowRunCleanup,
|
||||
"_get_cleanup_whitelist",
|
||||
lambda self: whitelist or set(),
|
||||
)
|
||||
return WorkflowRunCleanup(workflow_run_repo=repo, **kwargs)
|
||||
|
||||
|
||||
def test_filter_free_tenants_billing_disabled(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", False)
|
||||
|
||||
def fail_bulk(_: list[str]) -> dict[str, SubscriptionPlan]:
|
||||
raise RuntimeError("should not call")
|
||||
|
||||
monkeypatch.setattr(cleanup_module.BillingService, "get_plan_bulk_with_cache", staticmethod(fail_bulk))
|
||||
|
||||
tenants = {"t1", "t2"}
|
||||
free = cleanup._filter_free_tenants(tenants)
|
||||
|
||||
assert free == tenants
|
||||
|
||||
|
||||
def test_filter_free_tenants_bulk_mixed(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.BillingService,
|
||||
"get_plan_bulk_with_cache",
|
||||
staticmethod(
|
||||
lambda tenant_ids: {
|
||||
tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1))
|
||||
for tenant_id in tenant_ids
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
free = cleanup._filter_free_tenants({"t_free", "t_paid", "t_missing"})
|
||||
|
||||
assert free == {"t_free", "t_missing"}
|
||||
|
||||
|
||||
def test_filter_free_tenants_respects_grace_period(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, grace_period_days=45)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
now = datetime.datetime.now(datetime.UTC)
|
||||
within_grace_ts = int((now - datetime.timedelta(days=10)).timestamp())
|
||||
outside_grace_ts = int((now - datetime.timedelta(days=90)).timestamp())
|
||||
|
||||
def fake_bulk(_: list[str]) -> dict[str, SubscriptionPlan]:
|
||||
return {
|
||||
"recently_downgraded": plan_info("sandbox", within_grace_ts),
|
||||
"long_sandbox": plan_info("sandbox", outside_grace_ts),
|
||||
}
|
||||
|
||||
monkeypatch.setattr(cleanup_module.BillingService, "get_plan_bulk_with_cache", staticmethod(fake_bulk))
|
||||
|
||||
free = cleanup._filter_free_tenants({"recently_downgraded", "long_sandbox"})
|
||||
|
||||
assert free == {"long_sandbox"}
|
||||
|
||||
|
||||
def test_filter_free_tenants_skips_cleanup_whitelist(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(
|
||||
monkeypatch,
|
||||
repo=FakeRepo([]),
|
||||
days=30,
|
||||
batch_size=10,
|
||||
whitelist={"tenant_whitelist"},
|
||||
)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.BillingService,
|
||||
"get_plan_bulk_with_cache",
|
||||
staticmethod(
|
||||
lambda tenant_ids: {
|
||||
tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1))
|
||||
for tenant_id in tenant_ids
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
tenants = {"tenant_whitelist", "tenant_regular"}
|
||||
free = cleanup._filter_free_tenants(tenants)
|
||||
|
||||
assert free == {"tenant_regular"}
|
||||
|
||||
|
||||
def test_filter_free_tenants_bulk_failure(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.BillingService,
|
||||
"get_plan_bulk_with_cache",
|
||||
staticmethod(lambda tenant_ids: (_ for _ in ()).throw(RuntimeError("boom"))),
|
||||
)
|
||||
|
||||
free = cleanup._filter_free_tenants({"t1", "t2"})
|
||||
|
||||
assert free == set()
|
||||
|
||||
|
||||
def test_run_deletes_only_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cutoff = datetime.datetime.now()
|
||||
repo = FakeRepo(
|
||||
batches=[
|
||||
[
|
||||
FakeRun("run-free", "t_free", cutoff),
|
||||
FakeRun("run-paid", "t_paid", cutoff),
|
||||
]
|
||||
]
|
||||
)
|
||||
cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.BillingService,
|
||||
"get_plan_bulk_with_cache",
|
||||
staticmethod(
|
||||
lambda tenant_ids: {
|
||||
tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1))
|
||||
for tenant_id in tenant_ids
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
cleanup.run()
|
||||
|
||||
assert repo.deleted == [["run-free"]]
|
||||
|
||||
|
||||
def test_run_skips_when_no_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cutoff = datetime.datetime.now()
|
||||
repo = FakeRepo(batches=[[FakeRun("run-paid", "t_paid", cutoff)]])
|
||||
cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True)
|
||||
monkeypatch.setattr(
|
||||
cleanup_module.BillingService,
|
||||
"get_plan_bulk_with_cache",
|
||||
staticmethod(lambda tenant_ids: {tenant_id: plan_info("team", 1893456000) for tenant_id in tenant_ids}),
|
||||
)
|
||||
|
||||
cleanup.run()
|
||||
|
||||
assert repo.deleted == []
|
||||
|
||||
|
||||
def test_run_exits_on_empty_batch(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10)
|
||||
|
||||
cleanup.run()
|
||||
|
||||
|
||||
def test_run_dry_run_skips_deletions(monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]) -> None:
|
||||
cutoff = datetime.datetime.now()
|
||||
repo = FakeRepo(
|
||||
batches=[[FakeRun("run-free", "t_free", cutoff)]],
|
||||
count_result={
|
||||
"runs": 0,
|
||||
"node_executions": 2,
|
||||
"offloads": 1,
|
||||
"app_logs": 3,
|
||||
"trigger_logs": 4,
|
||||
"pauses": 5,
|
||||
"pause_reasons": 6,
|
||||
},
|
||||
)
|
||||
cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10, dry_run=True)
|
||||
|
||||
monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", False)
|
||||
|
||||
cleanup.run()
|
||||
|
||||
assert repo.deleted == []
|
||||
assert repo.counted == [["run-free"]]
|
||||
captured = capsys.readouterr().out
|
||||
assert "Dry run mode enabled" in captured
|
||||
assert "would delete 1 runs" in captured
|
||||
assert "related records" in captured
|
||||
assert "node_executions 2" in captured
|
||||
assert "offloads 1" in captured
|
||||
assert "app_logs 3" in captured
|
||||
assert "trigger_logs 4" in captured
|
||||
assert "pauses 5" in captured
|
||||
assert "pause_reasons 6" in captured
|
||||
|
||||
|
||||
def test_between_sets_window_bounds(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
start_from = datetime.datetime(2024, 5, 1, 0, 0, 0)
|
||||
end_before = datetime.datetime(2024, 6, 1, 0, 0, 0)
|
||||
cleanup = create_cleanup(
|
||||
monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=start_from, end_before=end_before
|
||||
)
|
||||
|
||||
assert cleanup.window_start == start_from
|
||||
assert cleanup.window_end == end_before
|
||||
|
||||
|
||||
def test_between_requires_both_boundaries(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
with pytest.raises(ValueError):
|
||||
create_cleanup(
|
||||
monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=datetime.datetime.now(), end_before=None
|
||||
)
|
||||
with pytest.raises(ValueError):
|
||||
create_cleanup(
|
||||
monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=None, end_before=datetime.datetime.now()
|
||||
)
|
||||
|
||||
|
||||
def test_between_requires_end_after_start(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
start_from = datetime.datetime(2024, 6, 1, 0, 0, 0)
|
||||
end_before = datetime.datetime(2024, 5, 1, 0, 0, 0)
|
||||
with pytest.raises(ValueError):
|
||||
create_cleanup(
|
||||
monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=start_from, end_before=end_before
|
||||
)
|
||||
4845 api/uv.lock generated
File diff suppressed because it is too large
@@ -1484,6 +1484,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
|
||||
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
|
||||
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
|
||||
ENABLE_CLEAN_MESSAGES=false
|
||||
ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false
|
||||
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
|
||||
ENABLE_DATASETS_QUEUE_MONITOR=false
|
||||
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
|
||||
|
||||
@@ -662,6 +662,7 @@ x-shared-env: &shared-api-worker-env
|
||||
ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false}
|
||||
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false}
|
||||
ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false}
|
||||
ENABLE_WORKFLOW_RUN_CLEANUP_TASK: ${ENABLE_WORKFLOW_RUN_CLEANUP_TASK:-false}
|
||||
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false}
|
||||
ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false}
|
||||
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true}
|
||||
|
||||
@@ -31,6 +31,8 @@ NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON=false
|
||||
|
||||
# The timeout for the text generation in millisecond
|
||||
NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS=60000
|
||||
# Used by web/docker/entrypoint.sh to overwrite/export NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS at container startup (Docker only)
|
||||
TEXT_GENERATION_TIMEOUT_MS=60000
|
||||
|
||||
# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP
|
||||
NEXT_PUBLIC_CSP_WHITELIST=
|
||||
|
||||
1 web/.nvmrc Normal file
@@ -0,0 +1 @@
|
||||
22.21.1
|
||||
@@ -1,8 +1,10 @@
|
||||
import type { Preview } from '@storybook/react'
|
||||
import type { Resource } from 'i18next'
|
||||
import { withThemeByDataAttribute } from '@storybook/addon-themes'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { ToastProvider } from '../app/components/base/toast'
|
||||
import I18N from '../app/components/i18n'
|
||||
import { I18nClientProvider as I18N } from '../app/components/provider/i18n'
|
||||
import commonEnUS from '../i18n/en-US/common.json'
|
||||
|
||||
import '../app/styles/globals.css'
|
||||
import '../app/styles/markdown.scss'
|
||||
@@ -16,6 +18,14 @@ const queryClient = new QueryClient({
|
||||
},
|
||||
})
|
||||
|
||||
const storyResources: Resource = {
|
||||
'en-US': {
|
||||
// Preload the most common namespace to avoid missing keys during initial render;
|
||||
// other namespaces will be loaded on demand via resourcesToBackend.
|
||||
common: commonEnUS as unknown as Record<string, unknown>,
|
||||
},
|
||||
}
|
||||
|
||||
export const decorators = [
|
||||
withThemeByDataAttribute({
|
||||
themes: {
|
||||
@@ -28,7 +38,7 @@ export const decorators = [
|
||||
(Story) => {
|
||||
return (
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<I18N locale="en-US">
|
||||
<I18N locale="en-US" resource={storyResources}>
|
||||
<ToastProvider>
|
||||
<Story />
|
||||
</ToastProvider>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# base image
|
||||
FROM node:22-alpine3.21 AS base
|
||||
FROM node:22.21.1-alpine3.23 AS base
|
||||
LABEL maintainer="takatost@gmail.com"
|
||||
|
||||
# if you located in China, you can use aliyun mirror to speed up
|
||||
|
||||
@@ -11,6 +11,16 @@ Before starting the web frontend service, please make sure the following environ
|
||||
- [Node.js](https://nodejs.org) >= v22.11.x
|
||||
- [pnpm](https://pnpm.io) v10.x
|
||||
|
||||
> [!TIP]
|
||||
> It is recommended to install and enable Corepack to manage package manager versions automatically:
|
||||
>
|
||||
> ```bash
|
||||
> npm install -g corepack
|
||||
> corepack enable
|
||||
> ```
|
||||
>
|
||||
> Learn more: [Corepack](https://github.com/nodejs/corepack#readme)
|
||||
|
||||
First, install the dependencies:
|
||||
|
||||
```bash
|
||||
|
||||
@@ -29,6 +29,7 @@ import { CheckModal } from '@/hooks/use-pay'
|
||||
import { useInfiniteAppList } from '@/service/use-apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { isServer } from '@/utils/client'
|
||||
import AppCard from './app-card'
|
||||
import { AppCardSkeleton } from './app-card-skeleton'
|
||||
import Empty from './empty'
|
||||
@@ -71,7 +72,7 @@ const List = () => {
|
||||
// 1) Normalize legacy/incorrect query params like ?mode=discover -> ?category=all
|
||||
useEffect(() => {
|
||||
// avoid running on server
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
const mode = searchParams.get('mode')
|
||||
if (!mode)
|
||||
|
||||
@@ -11,6 +11,7 @@ import DifyLogo from '@/app/components/base/logo/dify-logo'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { isClient } from '@/utils/client'
|
||||
import {
|
||||
useEmbeddedChatbotContext,
|
||||
} from '../context'
|
||||
@@ -40,7 +41,6 @@ const Header: FC<IHeaderProps> = ({
|
||||
allInputsHidden,
|
||||
} = useEmbeddedChatbotContext()
|
||||
|
||||
const isClient = typeof window !== 'undefined'
|
||||
const isIframe = isClient ? window.self !== window.top : false
|
||||
const [parentOrigin, setParentOrigin] = useState('')
|
||||
const [showToggleExpandButton, setShowToggleExpandButton] = useState(false)
|
||||
|
||||
3 web/app/components/provider/serwist.tsx Normal file
@@ -0,0 +1,3 @@
|
||||
'use client'
|
||||
|
||||
export { SerwistProvider } from '@serwist/turbopack/react'
|
||||
@@ -13,6 +13,7 @@ import Tooltip from '@/app/components/base/tooltip'
|
||||
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
|
||||
import Action from '@/app/components/workflow/block-selector/market-place-plugin/action'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
import { isServer } from '@/utils/client'
|
||||
import { formatNumber } from '@/utils/format'
|
||||
import { getMarketplaceUrl } from '@/utils/var'
|
||||
import BlockIcon from '../block-icon'
|
||||
@@ -49,14 +50,14 @@ const FeaturedTools = ({
|
||||
const language = useGetLanguage()
|
||||
const [visibleCount, setVisibleCount] = useState(INITIAL_VISIBLE_COUNT)
|
||||
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return false
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
return stored === 'true'
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
if (stored !== null)
|
||||
@@ -64,7 +65,7 @@ const FeaturedTools = ({
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
|
||||
}, [isCollapsed])
|
||||
|
||||
@@ -12,6 +12,7 @@ import Tooltip from '@/app/components/base/tooltip'
|
||||
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
|
||||
import Action from '@/app/components/workflow/block-selector/market-place-plugin/action'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
import { isServer } from '@/utils/client'
|
||||
import { formatNumber } from '@/utils/format'
|
||||
import { getMarketplaceUrl } from '@/utils/var'
|
||||
import BlockIcon from '../block-icon'
|
||||
@@ -42,14 +43,14 @@ const FeaturedTriggers = ({
|
||||
const language = useGetLanguage()
|
||||
const [visibleCount, setVisibleCount] = useState(INITIAL_VISIBLE_COUNT)
|
||||
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return false
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
return stored === 'true'
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
if (stored !== null)
|
||||
@@ -57,7 +58,7 @@ const FeaturedTriggers = ({
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
|
||||
}, [isCollapsed])
|
||||
|
||||
@@ -11,6 +11,7 @@ import { ArrowDownRoundFill } from '@/app/components/base/icons/src/vender/solid
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import { getFormattedPlugin } from '@/app/components/plugins/marketplace/utils'
|
||||
import { useRAGRecommendedPlugins } from '@/service/use-tools'
|
||||
import { isServer } from '@/utils/client'
|
||||
import { getMarketplaceUrl } from '@/utils/var'
|
||||
import List from './list'
|
||||
|
||||
@@ -29,14 +30,14 @@ const RAGToolRecommendations = ({
|
||||
}: RAGToolRecommendationsProps) => {
|
||||
const { t } = useTranslation()
|
||||
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return false
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
return stored === 'true'
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
const stored = window.localStorage.getItem(STORAGE_KEY)
|
||||
if (stored !== null)
|
||||
@@ -44,7 +45,7 @@ const RAGToolRecommendations = ({
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
window.localStorage.setItem(STORAGE_KEY, String(isCollapsed))
|
||||
}, [isCollapsed])
|
||||
|
||||
@@ -12,6 +12,7 @@ import { ToastProvider } from './components/base/toast'
|
||||
import BrowserInitializer from './components/browser-initializer'
|
||||
import { ReactScanLoader } from './components/devtools/react-scan/loader'
|
||||
import { I18nServerProvider } from './components/provider/i18n-server'
|
||||
import { SerwistProvider } from './components/provider/serwist'
|
||||
import SentryInitializer from './components/sentry-initializer'
|
||||
import RoutePrefixHandle from './routePrefixHandle'
|
||||
import './styles/globals.css'
|
||||
@@ -39,6 +40,9 @@ const LocaleLayout = async ({
|
||||
}) => {
|
||||
const locale = await getLocaleOnServer()
|
||||
|
||||
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
|
||||
const swUrl = `${basePath}/serwist/sw.js`
|
||||
|
||||
const datasetMap: Record<DatasetAttr, string | undefined> = {
|
||||
[DatasetAttr.DATA_API_PREFIX]: process.env.NEXT_PUBLIC_API_PREFIX,
|
||||
[DatasetAttr.DATA_PUBLIC_API_PREFIX]: process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
|
||||
@@ -92,33 +96,35 @@ const LocaleLayout = async ({
|
||||
className="color-scheme h-full select-auto"
|
||||
{...datasetMap}
|
||||
>
|
||||
<ReactScanLoader />
|
||||
<JotaiProvider>
|
||||
<ThemeProvider
|
||||
attribute="data-theme"
|
||||
defaultTheme="system"
|
||||
enableSystem
|
||||
disableTransitionOnChange
|
||||
enableColorScheme={false}
|
||||
>
|
||||
<NuqsAdapter>
|
||||
<BrowserInitializer>
|
||||
<SentryInitializer>
|
||||
<TanstackQueryInitializer>
|
||||
<I18nServerProvider>
|
||||
<ToastProvider>
|
||||
<GlobalPublicStoreProvider>
|
||||
{children}
|
||||
</GlobalPublicStoreProvider>
|
||||
</ToastProvider>
|
||||
</I18nServerProvider>
|
||||
</TanstackQueryInitializer>
|
||||
</SentryInitializer>
|
||||
</BrowserInitializer>
|
||||
</NuqsAdapter>
|
||||
</ThemeProvider>
|
||||
</JotaiProvider>
|
||||
<RoutePrefixHandle />
|
||||
<SerwistProvider swUrl={swUrl}>
|
||||
<ReactScanLoader />
|
||||
<JotaiProvider>
|
||||
<ThemeProvider
|
||||
attribute="data-theme"
|
||||
defaultTheme="system"
|
||||
enableSystem
|
||||
disableTransitionOnChange
|
||||
enableColorScheme={false}
|
||||
>
|
||||
<NuqsAdapter>
|
||||
<BrowserInitializer>
|
||||
<SentryInitializer>
|
||||
<TanstackQueryInitializer>
|
||||
<I18nServerProvider>
|
||||
<ToastProvider>
|
||||
<GlobalPublicStoreProvider>
|
||||
{children}
|
||||
</GlobalPublicStoreProvider>
|
||||
</ToastProvider>
|
||||
</I18nServerProvider>
|
||||
</TanstackQueryInitializer>
|
||||
</SentryInitializer>
|
||||
</BrowserInitializer>
|
||||
</NuqsAdapter>
|
||||
</ThemeProvider>
|
||||
</JotaiProvider>
|
||||
<RoutePrefixHandle />
|
||||
</SerwistProvider>
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
|
||||
14 web/app/serwist/[path]/route.ts Normal file
@@ -0,0 +1,14 @@
|
||||
import { spawnSync } from 'node:child_process'
|
||||
import { randomUUID } from 'node:crypto'
|
||||
import { createSerwistRoute } from '@serwist/turbopack'
|
||||
|
||||
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
|
||||
const revision = spawnSync('git', ['rev-parse', 'HEAD'], { encoding: 'utf-8' }).stdout?.trim() || randomUUID()
|
||||
|
||||
export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
|
||||
additionalPrecacheEntries: [{ url: `${basePath}/_offline.html`, revision }],
|
||||
swSrc: 'app/sw.ts',
|
||||
nextConfig: {
|
||||
basePath,
|
||||
},
|
||||
})
|
||||
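The new `web/app/serwist/[path]/route.ts` compiles `app/sw.ts` on demand and serves it at `${basePath}/serwist/sw.js`, which is exactly the `swUrl` the updated `LocaleLayout` above passes to `SerwistProvider`. As a rough illustration only (the component name and logging are hypothetical, not part of this change), a client component could confirm that the worker actually registered:

```ts
'use client'

// Hypothetical helper, not part of this PR: logs every service worker
// registration so you can confirm the Serwist worker served from
// `${basePath}/serwist/sw.js` was picked up by SerwistProvider.
import { useEffect } from 'react'

export function ServiceWorkerStatus() {
  useEffect(() => {
    if (!('serviceWorker' in navigator))
      return
    navigator.serviceWorker.getRegistrations().then((registrations) => {
      registrations.forEach(registration => console.log('SW scope:', registration.scope))
    })
  }, [])
  return null
}
```

The scope logged here is also what `app/sw.ts` below uses to derive its `basePath`.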
104 web/app/sw.ts Normal file
@@ -0,0 +1,104 @@
|
||||
/// <reference no-default-lib="true" />
|
||||
/// <reference lib="esnext" />
|
||||
/// <reference lib="webworker" />
|
||||
|
||||
import type { PrecacheEntry, SerwistGlobalConfig } from 'serwist'
|
||||
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin, NetworkFirst, Serwist, StaleWhileRevalidate } from 'serwist'
|
||||
|
||||
declare global {
|
||||
// eslint-disable-next-line ts/consistent-type-definitions
|
||||
interface WorkerGlobalScope extends SerwistGlobalConfig {
|
||||
__SW_MANIFEST: (PrecacheEntry | string)[] | undefined
|
||||
}
|
||||
}
|
||||
|
||||
declare const self: ServiceWorkerGlobalScope
|
||||
|
||||
const scopePathname = new URL(self.registration.scope).pathname
|
||||
const basePath = scopePathname.replace(/\/serwist\/$/, '').replace(/\/$/, '')
|
||||
const offlineUrl = `${basePath}/_offline.html`
|
||||
|
||||
const serwist = new Serwist({
|
||||
precacheEntries: self.__SW_MANIFEST,
|
||||
skipWaiting: true,
|
||||
clientsClaim: true,
|
||||
navigationPreload: true,
|
||||
runtimeCaching: [
|
||||
{
|
||||
matcher: ({ url }) => url.origin === 'https://fonts.googleapis.com',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'google-fonts',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ url }) => url.origin === 'https://fonts.gstatic.com',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'google-fonts-webfonts',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ request }) => request.destination === 'image',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'images',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 64,
|
||||
maxAgeSeconds: 30 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ request }) => request.destination === 'script' || request.destination === 'style',
|
||||
handler: new StaleWhileRevalidate({
|
||||
cacheName: 'static-resources',
|
||||
plugins: [
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 32,
|
||||
maxAgeSeconds: 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ url, sameOrigin }) => sameOrigin && url.pathname.startsWith('/api/'),
|
||||
handler: new NetworkFirst({
|
||||
cacheName: 'api-cache',
|
||||
networkTimeoutSeconds: 10,
|
||||
plugins: [
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 16,
|
||||
maxAgeSeconds: 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
],
|
||||
fallbacks: {
|
||||
entries: [
|
||||
{
|
||||
url: offlineUrl,
|
||||
matcher({ request }) {
|
||||
return request.destination === 'document'
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
|
||||
serwist.addEventListeners()
|
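The worker above wires five runtime caches plus a document fallback to `_offline.html`. Should another cache ever be needed, it would follow the same matcher/handler shape; the entry below is a purely hypothetical sketch (the CDN origin and cache name are placeholders, not anything this PR configures) of what such an addition to the `runtimeCaching` array could look like:

```ts
// Hypothetical extra entry for the runtimeCaching array in app/sw.ts.
// 'https://cdn.example.com' and 'assets-cdn' are illustrative placeholders.
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin } from 'serwist'

export const assetsCdnCache = {
  matcher: ({ url }: { url: URL }) => url.origin === 'https://cdn.example.com',
  handler: new CacheFirst({
    cacheName: 'assets-cdn',
    plugins: [
      // Opaque (status 0) and OK responses are cacheable, mirroring the font caches above.
      new CacheableResponsePlugin({ statuses: [0, 200] }),
      // Cap the cache at 32 entries, each kept for at most 7 days.
      new ExpirationPlugin({ maxEntries: 32, maxAgeSeconds: 7 * 24 * 60 * 60 }),
    ],
  }),
}
```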
||||
@@ -5,6 +5,7 @@ import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { NUM_INFINITE } from '@/app/components/billing/config'
|
||||
import { Plan } from '@/app/components/billing/type'
|
||||
import { IS_CLOUD_EDITION } from '@/config'
|
||||
import { isServer } from '@/utils/client'
|
||||
|
||||
export type TriggerEventsLimitModalPayload = {
|
||||
usage: number
|
||||
@@ -46,7 +47,7 @@ export const useTriggerEventsLimitModal = ({
|
||||
useEffect(() => {
|
||||
if (!IS_CLOUD_EDITION)
|
||||
return
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
if (!currentWorkspaceId)
|
||||
return
|
||||
|
||||
@@ -5,12 +5,13 @@ import type { FC, PropsWithChildren } from 'react'
|
||||
import { QueryClientProvider } from '@tanstack/react-query'
|
||||
import { useState } from 'react'
|
||||
import { TanStackDevtoolsLoader } from '@/app/components/devtools/tanstack/loader'
|
||||
import { isServer } from '@/utils/client'
|
||||
import { makeQueryClient } from './query-client-server'
|
||||
|
||||
let browserQueryClient: QueryClient | undefined
|
||||
|
||||
function getQueryClient() {
|
||||
if (typeof window === 'undefined') {
|
||||
if (isServer) {
|
||||
return makeQueryClient()
|
||||
}
|
||||
if (!browserQueryClient)
|
||||
|
||||
@@ -12,6 +12,13 @@ import {
|
||||
usePricingModal,
|
||||
} from './use-query-params'
|
||||
|
||||
// Mock isServer to allow runtime control in tests
|
||||
const mockIsServer = vi.hoisted(() => ({ value: false }))
|
||||
vi.mock('@/utils/client', () => ({
|
||||
get isServer() { return mockIsServer.value },
|
||||
get isClient() { return !mockIsServer.value },
|
||||
}))
|
||||
|
||||
const renderWithAdapter = <T,>(hook: () => T, searchParams = '') => {
|
||||
const onUrlUpdate = vi.fn<(event: UrlUpdateEvent) => void>()
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
@@ -428,6 +435,7 @@ describe('clearQueryParams', () => {
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllGlobals()
|
||||
mockIsServer.value = false
|
||||
})
|
||||
|
||||
it('should remove a single key when provided one key', () => {
|
||||
@@ -463,13 +471,13 @@ describe('clearQueryParams', () => {
|
||||
replaceSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('should no-op when window is undefined', () => {
|
||||
it('should no-op when running on server', () => {
|
||||
// Arrange
|
||||
const replaceSpy = vi.spyOn(window.history, 'replaceState')
|
||||
vi.stubGlobal('window', undefined)
|
||||
mockIsServer.value = true
|
||||
|
||||
// Act
|
||||
expect(() => clearQueryParams('foo')).not.toThrow()
|
||||
clearQueryParams('foo')
|
||||
|
||||
// Assert
|
||||
expect(replaceSpy).not.toHaveBeenCalled()
|
||||
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
} from 'nuqs'
|
||||
import { useCallback } from 'react'
|
||||
import { ACCOUNT_SETTING_MODAL_ACTION } from '@/app/components/header/account-setting/constants'
|
||||
import { isServer } from '@/utils/client'
|
||||
|
||||
/**
|
||||
* Modal State Query Parameters
|
||||
@@ -176,7 +177,7 @@ export function usePluginInstallation() {
|
||||
* clearQueryParams(['param1', 'param2'])
|
||||
*/
|
||||
export function clearQueryParams(keys: string | string[]) {
|
||||
if (typeof window === 'undefined')
|
||||
if (isServer)
|
||||
return
|
||||
|
||||
const url = new URL(window.location.href)
|
||||
|
||||
@@ -15,10 +15,7 @@ const config: KnipConfig = {
|
||||
ignoreBinaries: [
|
||||
'only-allow',
|
||||
],
|
||||
ignoreDependencies: [
|
||||
// required by next-pwa
|
||||
'babel-loader',
|
||||
],
|
||||
ignoreDependencies: [],
|
||||
rules: {
|
||||
files: 'warn',
|
||||
dependencies: 'warn',
|
||||
|
||||
@@ -1,77 +1,8 @@
|
||||
import withBundleAnalyzerInit from '@next/bundle-analyzer'
|
||||
import createMDX from '@next/mdx'
|
||||
import { codeInspectorPlugin } from 'code-inspector-plugin'
|
||||
import withPWAInit from 'next-pwa'
|
||||
|
||||
const isDev = process.env.NODE_ENV === 'development'
|
||||
|
||||
const withPWA = withPWAInit({
|
||||
dest: 'public',
|
||||
register: true,
|
||||
skipWaiting: true,
|
||||
disable: process.env.NODE_ENV === 'development',
|
||||
fallbacks: {
|
||||
document: '/_offline.html',
|
||||
},
|
||||
runtimeCaching: [
|
||||
{
|
||||
urlPattern: /^https:\/\/fonts\.googleapis\.com\/.*/i,
|
||||
handler: 'CacheFirst',
|
||||
options: {
|
||||
cacheName: 'google-fonts',
|
||||
expiration: {
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60, // 1 year
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: /^https:\/\/fonts\.gstatic\.com\/.*/i,
|
||||
handler: 'CacheFirst',
|
||||
options: {
|
||||
cacheName: 'google-fonts-webfonts',
|
||||
expiration: {
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60, // 1 year
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: /\.(?:png|jpg|jpeg|svg|gif|webp|avif)$/i,
|
||||
handler: 'CacheFirst',
|
||||
options: {
|
||||
cacheName: 'images',
|
||||
expiration: {
|
||||
maxEntries: 64,
|
||||
maxAgeSeconds: 30 * 24 * 60 * 60, // 30 days
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: /\.(?:js|css)$/i,
|
||||
handler: 'StaleWhileRevalidate',
|
||||
options: {
|
||||
cacheName: 'static-resources',
|
||||
expiration: {
|
||||
maxEntries: 32,
|
||||
maxAgeSeconds: 24 * 60 * 60, // 1 day
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: /^\/api\/.*/i,
|
||||
handler: 'NetworkFirst',
|
||||
options: {
|
||||
cacheName: 'api-cache',
|
||||
networkTimeoutSeconds: 10,
|
||||
expiration: {
|
||||
maxEntries: 16,
|
||||
maxAgeSeconds: 60 * 60, // 1 hour
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
const withMDX = createMDX({
|
||||
extension: /\.mdx?$/,
|
||||
options: {
|
||||
@@ -97,6 +28,7 @@ const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WE
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
|
||||
serverExternalPackages: ['esbuild-wasm'],
|
||||
transpilePackages: ['echarts', 'zrender'],
|
||||
turbopack: {
|
||||
rules: codeInspectorPlugin({
|
||||
@@ -148,4 +80,4 @@ const nextConfig = {
|
||||
},
|
||||
}
|
||||
|
||||
export default withPWA(withBundleAnalyzer(withMDX(nextConfig)))
|
||||
export default withBundleAnalyzer(withMDX(nextConfig))
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
}
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=v22.11.0"
|
||||
"node": ">=22.12.0"
|
||||
},
|
||||
"browserslist": [
|
||||
"last 1 Chrome version",
|
||||
@@ -111,7 +111,6 @@
|
||||
"mitt": "^3.0.1",
|
||||
"negotiator": "^1.0.0",
|
||||
"next": "~15.5.9",
|
||||
"next-pwa": "^5.6.0",
|
||||
"next-themes": "^0.4.6",
|
||||
"nuqs": "^2.8.6",
|
||||
"pinyin-pro": "^3.27.0",
|
||||
@@ -153,7 +152,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@antfu/eslint-config": "^6.7.3",
|
||||
"@babel/core": "^7.28.4",
|
||||
"@chromatic-com/storybook": "^4.1.1",
|
||||
"@eslint-react/eslint-plugin": "^2.3.13",
|
||||
"@mdx-js/loader": "^3.1.1",
|
||||
@@ -162,12 +160,13 @@
|
||||
"@next/eslint-plugin-next": "15.5.9",
|
||||
"@next/mdx": "15.5.9",
|
||||
"@rgrove/parse-xml": "^4.2.0",
|
||||
"@serwist/turbopack": "^9.5.0",
|
||||
"@storybook/addon-docs": "9.1.13",
|
||||
"@storybook/addon-links": "9.1.13",
|
||||
"@storybook/addon-onboarding": "9.1.13",
|
||||
"@storybook/addon-themes": "9.1.13",
|
||||
"@storybook/nextjs": "9.1.13",
|
||||
"@storybook/react": "9.1.13",
|
||||
"@storybook/react": "9.1.17",
|
||||
"@tanstack/eslint-plugin-query": "^5.91.2",
|
||||
"@tanstack/react-devtools": "^0.9.0",
|
||||
"@tanstack/react-form-devtools": "^0.2.9",
|
||||
@@ -194,9 +193,9 @@
|
||||
"@vitejs/plugin-react": "^5.1.2",
|
||||
"@vitest/coverage-v8": "4.0.16",
|
||||
"autoprefixer": "^10.4.21",
|
||||
"babel-loader": "^10.0.0",
|
||||
"code-inspector-plugin": "1.2.9",
|
||||
"cross-env": "^10.1.0",
|
||||
"esbuild-wasm": "^0.27.2",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.4.26",
|
||||
@@ -212,6 +211,7 @@
|
||||
"postcss": "^8.5.6",
|
||||
"react-scan": "^0.4.3",
|
||||
"sass": "^1.93.2",
|
||||
"serwist": "^9.5.0",
|
||||
"storybook": "9.1.17",
|
||||
"tailwindcss": "^3.4.18",
|
||||
"tsx": "^4.21.0",
|
||||
@@ -236,7 +236,8 @@
|
||||
"brace-expansion@<2.0.2": "2.0.2",
|
||||
"devalue@<5.3.2": "5.3.2",
|
||||
"es-iterator-helpers": "npm:@nolyfill/es-iterator-helpers@^1",
|
||||
"esbuild@<0.25.0": "0.25.0",
|
||||
"esbuild@<0.27.2": "0.27.2",
|
||||
"glob@>=10.2.0,<10.5.0": "11.1.0",
|
||||
"hasown": "npm:@nolyfill/hasown@^1",
|
||||
"is-arguments": "npm:@nolyfill/is-arguments@^1",
|
||||
"is-core-module": "npm:@nolyfill/is-core-module@^1",
|
||||
@@ -278,7 +279,6 @@
|
||||
"@types/react-dom": "~19.2.3",
|
||||
"brace-expansion": "~2.0",
|
||||
"canvas": "^3.2.0",
|
||||
"esbuild": "~0.25.0",
|
||||
"pbkdf2": "~3.1.3",
|
||||
"prismjs": "~1.30",
|
||||
"string-width": "~4.2.3"
|
||||
|
||||
1789 web/pnpm-lock.yaml generated
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -72,12 +72,12 @@ async function getNewAccessToken(timeout: number): Promise<void> {
|
||||
}
|
||||
|
||||
function releaseRefreshLock() {
|
||||
if (isRefreshing) {
|
||||
isRefreshing = false
|
||||
globalThis.localStorage.removeItem(LOCAL_STORAGE_KEY)
|
||||
globalThis.localStorage.removeItem('last_refresh_time')
|
||||
globalThis.removeEventListener('beforeunload', releaseRefreshLock)
|
||||
}
|
||||
// Always clear the refresh lock to avoid cross-tab deadlocks.
|
||||
// This is safe to call multiple times and from tabs that were only waiting.
|
||||
isRefreshing = false
|
||||
globalThis.localStorage.removeItem(LOCAL_STORAGE_KEY)
|
||||
globalThis.localStorage.removeItem('last_refresh_time')
|
||||
globalThis.removeEventListener('beforeunload', releaseRefreshLock)
|
||||
}
|
||||
|
||||
export async function refreshAccessTokenOrRelogin(timeout: number) {
|
||||
|
||||
3 web/utils/client.ts Normal file
@@ -0,0 +1,3 @@
|
||||
export const isServer = typeof window === 'undefined'
|
||||
|
||||
export const isClient = typeof window !== 'undefined'
|
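The new `web/utils/client.ts` centralizes the SSR guard that the diffs above swap in for inline `typeof window === 'undefined'` checks. A minimal usage sketch, assuming an illustrative helper that is not part of this PR:

```ts
// Sketch only: a localStorage read guarded the same way the FeaturedTools /
// FeaturedTriggers components above now guard theirs.
import { isServer } from '@/utils/client'

export function readStoredFlag(key: string): boolean {
  // No window (and thus no localStorage) during SSR, so default to false.
  if (isServer)
    return false
  return window.localStorage.getItem(key) === 'true'
}
```

Because `isServer` is evaluated once at module load, tests that need to simulate the server at runtime mock the module itself rather than stubbing `window`, as the updated `use-query-params` spec does with `mockIsServer`.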
||||
@@ -1,3 +1,5 @@
|
||||
import { isServer } from '@/utils/client'
|
||||
|
||||
/**
|
||||
* Send Google Analytics event
|
||||
* @param eventName - event name
|
||||
@@ -7,7 +9,7 @@ export const sendGAEvent = (
|
||||
eventName: string,
|
||||
eventParams?: GtagEventParams,
|
||||
): void => {
|
||||
if (typeof window === 'undefined' || typeof (window as any).gtag !== 'function') {
|
||||
if (isServer || typeof (window as any).gtag !== 'function') {
|
||||
return
|
||||
}
|
||||
(window as any).gtag('event', eventName, eventParams)
|
||||
|
||||