Mirror of https://github.com/langgenius/dify.git (synced 2026-02-11 16:10:12 -05:00)

Compare commits: main...test/tool- (5 commits)

| SHA1 |
|---|
| 3849e444bf |
| 1afc354d97 |
| 0e36aa9c67 |
| c36de51771 |
| b0b4cac03f |
@@ -553,8 +553,6 @@ WORKFLOW_LOG_CLEANUP_ENABLED=false
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# Comma-separated list of workflow IDs to clean logs for
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=

# App configuration
APP_MAX_EXECUTION_TIME=1200
@@ -717,7 +715,6 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
api/.vscode/launch.json.example (vendored)
@@ -54,7 +54,7 @@
"--loglevel",
"DEBUG",
"-Q",
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,workflow_based_app_execution,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
]
}
]
@@ -1314,9 +1314,6 @@ class WorkflowLogConfig(BaseSettings):
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
default=100, description="Batch size for workflow run log cleanup operations"
)
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: str = Field(
default="", description="Comma-separated list of workflow IDs to clean logs for"
)

class SwaggerUIConfig(BaseSettings):
@@ -1347,10 +1344,6 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
description="Maximum number of records to process in each batch",
default=1000,
)
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
description="Maximum interval in milliseconds between batches",
default=200,
)
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
description="Retention days for sandbox expired workflow_run records and message records",
default=30,
@@ -259,20 +259,11 @@ class CeleryConfig(DatabaseConfig):
description="Password of the Redis Sentinel master.",
default=None,
)

CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)

CELERY_TASK_ANNOTATIONS: dict[str, Any] | None = Field(
description=(
"Annotations for Celery tasks as a JSON mapping of task name -> options "
"(for example, rate limits or other task-specific settings)."
),
default=None,
)
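
The new CELERY_TASK_ANNOTATIONS value is handed straight to Celery's standard task_annotations setting (see the task_annotations=dify_config.CELERY_TASK_ANNOTATIONS line in the ext_celery hunk further down). A minimal sketch of what such a mapping does when applied to a Celery app; the task name, broker URL, and rate limit are illustrative assumptions, not Dify values.

```python
# Minimal sketch (not Dify code): how Celery consumes a task_annotations mapping.
# Task name, broker URL, and rate limit below are illustrative assumptions.
from celery import Celery

app = Celery("example", broker="redis://localhost:6379/0")

# Equivalent in spirit to setting CELERY_TASK_ANNOTATIONS to
# '{"example.ping": {"rate_limit": "10/s"}}' in the environment:
app.conf.task_annotations = {"example.ping": {"rate_limit": "10/s"}}

@app.task(name="example.ping")
def ping() -> str:
    # The annotation above caps this task at 10 executions per second per worker.
    return "pong"
```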

@computed_field
def CELERY_RESULT_BACKEND(self) -> str | None:
if self.CELERY_BACKEND in ("database", "rabbitmq"):
@@ -21,7 +21,6 @@ language_timezone_mapping = {
"th-TH": "Asia/Bangkok",
"id-ID": "Asia/Jakarta",
"ar-TN": "Africa/Tunis",
"nl-NL": "Europe/Amsterdam",
}

languages = list(language_timezone_mapping.keys())
@@ -599,12 +599,7 @@ def _get_conversation(app_model, conversation_id):
db.session.execute(
sa.update(Conversation)
.where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
# Keep updated_at unchanged when only marking a conversation as read.
.values(
read_at=naive_utc_now(),
read_account_id=current_user.id,
updated_at=Conversation.updated_at,
)
.values(read_at=naive_utc_now(), read_account_id=current_user.id)
)
db.session.commit()
db.session.refresh(conversation)
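
The change above assigns updated_at to its own column so that marking a conversation as read does not trigger the column's onupdate timestamp. A self-contained sketch of the same trick on a toy model (the model and values are assumptions for illustration, not Dify's Conversation):

```python
# Sketch of the "assign the column to itself" trick used above, on a toy model.
import datetime

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Note(Base):  # hypothetical stand-in for Conversation
    __tablename__ = "notes"
    id: Mapped[int] = mapped_column(primary_key=True)
    read_at: Mapped[datetime.datetime | None] = mapped_column(default=None)
    updated_at: Mapped[datetime.datetime] = mapped_column(
        default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow
    )


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Note(id=1))
    session.commit()
    before = session.get(Note, 1).updated_at

    # Setting updated_at=Note.updated_at keeps the stored value, so the
    # onupdate default is not applied when we only mark the note as read.
    session.execute(
        sa.update(Note)
        .where(Note.id == 1, Note.read_at.is_(None))
        .values(read_at=datetime.datetime.utcnow(), updated_at=Note.updated_at)
    )
    session.commit()
    assert session.get(Note, 1).updated_at == before
```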
@@ -1,7 +1,6 @@
import urllib.parse

import httpx
from flask_restx import Resource
from pydantic import BaseModel, Field

import services
@@ -11,12 +10,12 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.console import console_ns
from controllers.fastopenapi import console_router
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
from libs.login import current_account_with_tenant, login_required
from libs.login import current_account_with_tenant
from services.file_service import FileService

@@ -24,73 +23,69 @@ class RemoteFileUploadPayload(BaseModel):
url: str = Field(..., description="URL to fetch")

@console_ns.route("/remote-files/<path:url>")
class GetRemoteFileInfo(Resource):
@login_required
def get(self, url: str):
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
@console_router.get(
"/remote-files/<path:url>",
response_model=RemoteFileInfo,
tags=["console"],
)
def get_remote_file_info(url: str) -> RemoteFileInfo:
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(decoded_url, timeout=3)
resp.raise_for_status()
return RemoteFileInfo(
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
file_length=int(resp.headers.get("Content-Length", 0)),
)
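
Both the removed Flask-RESTX resource and the new get_remote_file_info route probe the URL with HEAD and fall back to a short GET when the HEAD response is not OK. A standalone sketch of that probe, using plain httpx instead of Dify's ssrf_proxy wrapper (an assumption); the URL and timeout are illustrative.

```python
# Standalone sketch of the HEAD-then-GET metadata probe shown above,
# using plain httpx instead of Dify's ssrf_proxy wrapper (an assumption).
import httpx


def probe_remote_file(url: str, timeout: float = 3.0) -> tuple[str, int]:
    """Return (content type, content length) for a remote file."""
    resp = httpx.head(url, timeout=timeout)
    if resp.status_code != httpx.codes.OK:
        # Some servers reject HEAD; retry with a short GET before giving up.
        resp = httpx.get(url, timeout=timeout)
        resp.raise_for_status()
    file_type = resp.headers.get("Content-Type", "application/octet-stream")
    file_length = int(resp.headers.get("Content-Length", 0))
    return file_type, file_length


if __name__ == "__main__":
    print(probe_remote_file("https://example.com/"))
```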
@console_router.post(
|
||||
"/remote-files/upload",
|
||||
response_model=FileWithSignedUrl,
|
||||
tags=["console"],
|
||||
status_code=201,
|
||||
)
|
||||
def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
|
||||
url = payload.url
|
||||
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(decoded_url, timeout=3)
|
||||
resp.raise_for_status()
|
||||
return RemoteFileInfo(
|
||||
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
|
||||
file_length=int(resp.headers.get("Content-Length", 0)),
|
||||
).model_dump(mode="json")
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
|
||||
@console_ns.route("/remote-files/upload")
|
||||
class RemoteFileUpload(Resource):
|
||||
@login_required
|
||||
def post(self):
|
||||
payload = RemoteFileUploadPayload.model_validate(console_ns.payload)
|
||||
url = payload.url
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError
|
||||
|
||||
# Try to fetch remote file metadata/content first
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
# Normalize into a user-friendly error message expected by tests
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
|
||||
# Enforce file size limit with 400 (Bad Request) per tests' expectation
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError()
|
||||
|
||||
# Load content if needed
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
)
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
# Success: return created resource with 201 status
|
||||
return (
|
||||
FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
).model_dump(mode="json"),
|
||||
201,
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
)
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
return FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
)
|
||||
|
||||
@@ -42,15 +42,7 @@ class SetupResponse(BaseModel):
|
||||
tags=["console"],
|
||||
)
|
||||
def get_setup_status_api() -> SetupStatusResponse:
|
||||
"""Get system setup status.
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design.
|
||||
|
||||
During first-time bootstrap there is no admin account yet, so frontend initialization must be
|
||||
able to query setup progress before any login flow exists.
|
||||
|
||||
Only bootstrap-safe status information should be returned by this endpoint.
|
||||
"""
|
||||
"""Get system setup status."""
|
||||
if dify_config.EDITION == "SELF_HOSTED":
|
||||
setup_status = get_setup_status()
|
||||
if setup_status and not isinstance(setup_status, bool):
|
||||
@@ -69,12 +61,7 @@ def get_setup_status_api() -> SetupStatusResponse:
|
||||
)
|
||||
@only_edition_self_hosted
|
||||
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
|
||||
"""Initialize system setup with admin account.
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design for first-time bootstrap.
|
||||
Access is restricted by deployment mode (`SELF_HOSTED`), one-time setup guards,
|
||||
and init-password validation rather than user session authentication.
|
||||
"""
|
||||
"""Initialize system setup with admin account."""
|
||||
if get_setup_status():
|
||||
raise AlreadySetupError()
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ def stream_topic_events(
|
||||
on_subscribe()
|
||||
while True:
|
||||
try:
|
||||
msg = sub.receive(timeout=1)
|
||||
msg = sub.receive(timeout=0.1)
|
||||
except SubscriptionClosedError:
|
||||
return
|
||||
if msg is None:
|
||||
|
||||
@@ -45,8 +45,6 @@ from core.app.entities.task_entities import (
|
||||
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
|
||||
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
|
||||
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
|
||||
from core.file import helpers as file_helpers
|
||||
from core.file.enums import FileTransferMethod
|
||||
from core.model_manager import ModelInstance
|
||||
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
|
||||
from core.model_runtime.entities.message_entities import (
|
||||
@@ -58,11 +56,10 @@ from core.ops.entities.trace_entity import TraceTaskName
|
||||
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
|
||||
from core.prompt.utils.prompt_message_util import PromptMessageUtil
|
||||
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
|
||||
from core.tools.signature import sign_tool_file
|
||||
from events.message_event import message_was_created
|
||||
from extensions.ext_database import db
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile
|
||||
from models.model import AppMode, Conversation, Message, MessageAgentThought
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -466,85 +463,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
|
||||
metadata=metadata_dict,
|
||||
)
|
||||
|
||||
def _record_files(self):
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
|
||||
if not message_files:
|
||||
return None
|
||||
|
||||
files_list = []
|
||||
upload_file_ids = [
|
||||
mf.upload_file_id
|
||||
for mf in message_files
|
||||
if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
|
||||
]
|
||||
upload_files_map = {}
|
||||
if upload_file_ids:
|
||||
upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
|
||||
upload_files_map = {uf.id: uf for uf in upload_files}
|
||||
|
||||
for message_file in message_files:
|
||||
upload_file = None
|
||||
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
|
||||
upload_file = upload_files_map.get(message_file.upload_file_id)
|
||||
|
||||
url = None
|
||||
filename = "file"
|
||||
mime_type = "application/octet-stream"
|
||||
size = 0
|
||||
extension = ""
|
||||
|
||||
if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
|
||||
url = message_file.url
|
||||
if message_file.url:
|
||||
filename = message_file.url.split("/")[-1].split("?")[0] # Remove query params
|
||||
elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
|
||||
if upload_file:
|
||||
url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
|
||||
filename = upload_file.name
|
||||
mime_type = upload_file.mime_type or "application/octet-stream"
|
||||
size = upload_file.size or 0
|
||||
extension = f".{upload_file.extension}" if upload_file.extension else ""
|
||||
elif message_file.upload_file_id:
|
||||
# Fallback: generate URL even if upload_file not found
|
||||
url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
|
||||
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
|
||||
# For tool files, use URL directly if it's HTTP, otherwise sign it
|
||||
if message_file.url.startswith("http"):
|
||||
url = message_file.url
|
||||
filename = message_file.url.split("/")[-1].split("?")[0]
|
||||
else:
|
||||
# Extract tool file id and extension from URL
|
||||
url_parts = message_file.url.split("/")
|
||||
if url_parts:
|
||||
file_part = url_parts[-1].split("?")[0] # Remove query params first
|
||||
# Use rsplit to correctly handle filenames with multiple dots
|
||||
if "." in file_part:
|
||||
tool_file_id, ext = file_part.rsplit(".", 1)
|
||||
extension = f".{ext}"
|
||||
else:
|
||||
tool_file_id = file_part
|
||||
extension = ".bin"
|
||||
url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
|
||||
filename = file_part
|
||||
|
||||
transfer_method_value = message_file.transfer_method
|
||||
remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
|
||||
file_dict = {
|
||||
"related_id": message_file.id,
|
||||
"extension": extension,
|
||||
"filename": filename,
|
||||
"size": size,
|
||||
"mime_type": mime_type,
|
||||
"transfer_method": transfer_method_value,
|
||||
"type": message_file.type,
|
||||
"url": url or "",
|
||||
"upload_file_id": message_file.upload_file_id or message_file.id,
|
||||
"remote_url": remote_url,
|
||||
}
|
||||
files_list.append(file_dict)
|
||||
return files_list or None
|
||||
|
||||
def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
|
||||
"""
|
||||
Agent message to stream response.
|
||||
|
||||
@@ -64,13 +64,7 @@ class MessageCycleManager:
|
||||
|
||||
# Use SQLAlchemy 2.x style session.scalar(select(...))
|
||||
with session_factory.create_session() as session:
|
||||
message_file = session.scalar(
|
||||
select(MessageFile)
|
||||
.where(
|
||||
MessageFile.message_id == message_id,
|
||||
)
|
||||
.where(MessageFile.belongs_to == "assistant")
|
||||
)
|
||||
message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
|
||||
|
||||
if message_file:
|
||||
self._message_has_file.add(message_id)
|
||||
|
||||
@@ -80,14 +80,8 @@ def init_app(app: DifyApp) -> Celery:
|
||||
worker_hijack_root_logger=False,
|
||||
timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"),
|
||||
task_ignore_result=True,
|
||||
task_annotations=dify_config.CELERY_TASK_ANNOTATIONS,
|
||||
)
|
||||
|
||||
if dify_config.CELERY_BACKEND == "redis":
|
||||
celery_app.conf.update(
|
||||
result_backend_transport_options=broker_transport_options,
|
||||
)
|
||||
|
||||
# Apply SSL configuration if enabled
|
||||
ssl_options = _get_celery_ssl_options()
|
||||
if ssl_options:
|
||||
|
||||
@@ -119,7 +119,7 @@ class RedisClientWrapper:
|
||||
|
||||
|
||||
redis_client: RedisClientWrapper = RedisClientWrapper()
|
||||
_pubsub_redis_client: redis.Redis | RedisCluster | None = None
|
||||
pubsub_redis_client: RedisClientWrapper = RedisClientWrapper()
|
||||
|
||||
|
||||
def _get_ssl_configuration() -> tuple[type[Union[Connection, SSLConnection]], dict[str, Any]]:
|
||||
@@ -232,7 +232,7 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
|
||||
return client
|
||||
|
||||
|
||||
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
|
||||
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> Union[redis.Redis, RedisCluster]:
|
||||
if use_clusters:
|
||||
return RedisCluster.from_url(pubsub_url)
|
||||
return redis.Redis.from_url(pubsub_url)
|
||||
@@ -256,19 +256,23 @@ def init_app(app: DifyApp):
redis_client.initialize(client)
app.extensions["redis"] = redis_client

global _pubsub_redis_client
_pubsub_redis_client = client
pubsub_client = client
if dify_config.normalized_pubsub_redis_url:
_pubsub_redis_client = _create_pubsub_client(
pubsub_client = _create_pubsub_client(
dify_config.normalized_pubsub_redis_url, dify_config.PUBSUB_REDIS_USE_CLUSTERS
)
pubsub_redis_client.initialize(pubsub_client)

def get_pubsub_redis_client() -> RedisClientWrapper:
return pubsub_redis_client

def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
assert _pubsub_redis_client is not None, "PubSub redis Client should be initialized here."
redis_conn = get_pubsub_redis_client()
if dify_config.PUBSUB_REDIS_CHANNEL_TYPE == "sharded":
return ShardedRedisBroadcastChannel(_pubsub_redis_client)
return RedisBroadcastChannel(_pubsub_redis_client)
return ShardedRedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
return RedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
P = ParamSpec("P")
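
The hunk above replaces the bare module-level _pubsub_redis_client with a second RedisClientWrapper that is initialized during init_app and later handed to the broadcast channel. A toy sketch of that wrapper-plus-initialize pattern; the wrapper class body below is an assumption, only its initialize/delegation shape mirrors the diff.

```python
# Toy sketch of the "module-level wrapper, initialized later" pattern used for
# pubsub_redis_client above. The wrapper below is illustrative, not Dify's class.
from __future__ import annotations

import redis


class ClientWrapper:
    """Holds a Redis client that is attached after app startup."""

    def __init__(self) -> None:
        self._client: redis.Redis | None = None

    def initialize(self, client: redis.Redis) -> None:
        self._client = client

    def __getattr__(self, name: str):
        # Delegate attribute access to the real client once it exists.
        if self._client is None:
            raise RuntimeError("Redis client is not initialized yet.")
        return getattr(self._client, name)


pubsub_client_wrapper = ClientWrapper()


def init_app(pubsub_url: str | None) -> None:
    # Fall back to a default client when no dedicated pubsub URL is configured,
    # mirroring the `if dify_config.normalized_pubsub_redis_url` branch above.
    client = redis.Redis.from_url(pubsub_url or "redis://localhost:6379/0")
    pubsub_client_wrapper.initialize(client)
```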
@@ -152,7 +152,7 @@ class RedisSubscriptionBase(Subscription):
|
||||
"""Iterator for consuming messages from the subscription."""
|
||||
while not self._closed.is_set():
|
||||
try:
|
||||
item = self._queue.get(timeout=1)
|
||||
item = self._queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
continue
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from libs.broadcast_channel.channel import Producer, Subscriber, Subscription
|
||||
from redis import Redis, RedisCluster
|
||||
from redis import Redis
|
||||
|
||||
from ._subscription import RedisSubscriptionBase
|
||||
|
||||
@@ -18,7 +18,7 @@ class BroadcastChannel:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
redis_client: Redis | RedisCluster,
|
||||
redis_client: Redis,
|
||||
):
|
||||
self._client = redis_client
|
||||
|
||||
@@ -27,7 +27,7 @@ class BroadcastChannel:
|
||||
|
||||
|
||||
class Topic:
|
||||
def __init__(self, redis_client: Redis | RedisCluster, topic: str):
|
||||
def __init__(self, redis_client: Redis, topic: str):
|
||||
self._client = redis_client
|
||||
self._topic = topic
|
||||
|
||||
|
||||
@@ -70,9 +70,8 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
# Since we have already filtered at the caller's site, we can safely set
# `ignore_subscribe_messages=False`.
if isinstance(self._client, RedisCluster):
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message` without
# specifying the `target_node` argument would use busy-looping to wait
# for incoming message, consuming excessive CPU quota.
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message`
# would use busy-looping to wait for incoming message, consuming excessive CPU quota.
#
# Here we specify the `target_node` to mitigate this problem.
node = self._client.get_node_from_key(self._topic)
@@ -81,10 +80,8 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
timeout=1,
target_node=node,
)
elif isinstance(self._client, Redis):
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]
else:
raise AssertionError("client should be either Redis or RedisCluster.")
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]

def _get_message_type(self) -> str:
return "smessage"
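
The comments in this hunk explain why, on a cluster client, get_sharded_message is called with an explicit target_node resolved from the topic key instead of busy-looping over the cluster. A hedged sketch of that receive loop, assuming redis-py's sharded pub/sub API as used above (ssubscribe, get_node_from_key, get_sharded_message) and an illustrative cluster address.

```python
# Hedged sketch of the receive loop described above: on a cluster client,
# resolve the shard owner of the topic and pass it as target_node so that
# get_sharded_message() does not busy-loop. Host/port and topic are illustrative.
from redis.cluster import RedisCluster

client = RedisCluster(host="localhost", port=7000)
topic = "workflow-events"

pubsub = client.pubsub()
pubsub.ssubscribe(topic)

# The topic key determines which shard carries the channel, so messages are
# polled from that node instead of looping over the whole cluster.
node = client.get_node_from_key(topic)
while True:
    message = pubsub.get_sharded_message(
        ignore_subscribe_messages=False,
        timeout=1,
        target_node=node,
    )
    if message is None:
        continue
    if message.get("type") == "smessage":  # matches _get_message_type() above
        print(message["data"])
        break
```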
@@ -1,59 +0,0 @@
|
||||
"""add unique constraint to tenant_default_models
|
||||
|
||||
Revision ID: fix_tenant_default_model_unique
|
||||
Revises: 9d77545f524e
|
||||
Create Date: 2026-01-19 15:07:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f55813ffe2c8'
|
||||
down_revision = 'c3df22613c99'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# First, remove duplicate records keeping only the most recent one per (tenant_id, model_type)
|
||||
# This is necessary before adding the unique constraint
|
||||
conn = op.get_bind()
|
||||
|
||||
# Delete duplicates: keep the record with the latest updated_at for each (tenant_id, model_type)
|
||||
# If updated_at is the same, keep the one with the largest id as tiebreaker
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Use DISTINCT ON for efficient deduplication
|
||||
conn.execute(sa.text("""
|
||||
DELETE FROM tenant_default_models
|
||||
WHERE id NOT IN (
|
||||
SELECT DISTINCT ON (tenant_id, model_type) id
|
||||
FROM tenant_default_models
|
||||
ORDER BY tenant_id, model_type, updated_at DESC, id DESC
|
||||
)
|
||||
"""))
|
||||
else:
|
||||
# MySQL: Use self-join to find and delete duplicates
|
||||
# Keep the record with latest updated_at (or largest id if updated_at is equal)
|
||||
conn.execute(sa.text("""
|
||||
DELETE t1 FROM tenant_default_models t1
|
||||
INNER JOIN tenant_default_models t2
|
||||
ON t1.tenant_id = t2.tenant_id
|
||||
AND t1.model_type = t2.model_type
|
||||
AND (t1.updated_at < t2.updated_at
|
||||
OR (t1.updated_at = t2.updated_at AND t1.id < t2.id))
|
||||
"""))
|
||||
|
||||
# Now add the unique constraint
|
||||
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
|
||||
batch_op.create_unique_constraint('unique_tenant_default_model_type', ['tenant_id', 'model_type'])
|
||||
|
||||
|
||||
def downgrade():
|
||||
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
|
||||
batch_op.drop_constraint('unique_tenant_default_model_type', type_='unique')
|
||||
@@ -1,39 +0,0 @@
|
||||
"""fix index to optimize message clean job performance
|
||||
|
||||
Revision ID: fce013ca180e
|
||||
Revises: f55813ffe2c8
|
||||
Create Date: 2026-02-11 15:49:17.603638
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'fce013ca180e'
|
||||
down_revision = 'f55813ffe2c8'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('message_created_at_idx'))
|
||||
|
||||
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
|
||||
batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
|
||||
batch_op.drop_index('saved_message_message_id_idx')
|
||||
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@@ -227,7 +227,7 @@ class App(Base):
|
||||
with Session(db.engine) as session:
|
||||
if api_provider_ids:
|
||||
existing_api_providers = [
|
||||
str(api_provider.id)
|
||||
api_provider.id
|
||||
for api_provider in session.execute(
|
||||
text("SELECT id FROM tool_api_providers WHERE id IN :provider_ids"),
|
||||
{"provider_ids": tuple(api_provider_ids)},
|
||||
@@ -1040,6 +1040,7 @@ class Message(Base):
|
||||
Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
|
||||
Index("message_account_idx", "app_id", "from_source", "from_account_id"),
|
||||
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
|
||||
Index("message_created_at_idx", "created_at"),
|
||||
Index("message_app_mode_idx", "app_mode"),
|
||||
Index("message_created_at_id_idx", "created_at", "id"),
|
||||
)
|
||||
|
||||
@@ -181,7 +181,6 @@ class TenantDefaultModel(TypeBase):
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
|
||||
sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
|
||||
sa.UniqueConstraint("tenant_id", "model_type", name="unique_tenant_default_model_type"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
|
||||
@@ -16,7 +16,6 @@ class SavedMessage(TypeBase):
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
|
||||
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
|
||||
sa.Index("saved_message_message_id_idx", "message_id"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "dify-api"
|
||||
version = "1.13.0"
|
||||
version = "1.12.1"
|
||||
requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
@@ -23,7 +23,7 @@ dependencies = [
|
||||
"gevent~=25.9.1",
|
||||
"gmpy2~=2.2.1",
|
||||
"google-api-core==2.18.0",
|
||||
"google-api-python-client==2.189.0",
|
||||
"google-api-python-client==2.90.0",
|
||||
"google-auth==2.29.0",
|
||||
"google-auth-httplib2==0.2.0",
|
||||
"google-cloud-aiplatform==1.49.0",
|
||||
|
||||
@@ -264,15 +264,9 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
batch_size: int,
run_types: Sequence[WorkflowType] | None = None,
tenant_ids: Sequence[str] | None = None,
workflow_ids: Sequence[str] | None = None,
) -> Sequence[WorkflowRun]:
"""
Fetch ended workflow runs in a time window for archival and clean batching.

Optional filters:
- run_types
- tenant_ids
- workflow_ids
"""
...
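
Callers drive this method as a keyset-cursor loop: each batch's last (created_at, id) pair is fed back as last_seen and the loop stops on an empty batch, as in clean_workflow_runlogs_precise further down. A hedged sketch of such a caller; the repo argument is assumed to implement this protocol, and the cutoff and batch size are illustrative.

```python
# Hedged sketch of a caller of the batched fetch above, driving a
# (created_at, id) keyset cursor. `repo` is assumed to implement
# APIWorkflowRunRepository; cutoff and batch size are illustrative.
import datetime


def drain_expired_runs(repo, cutoff: datetime.datetime, batch_size: int = 100) -> int:
    """Walk all ended runs older than `cutoff` in batches; return how many were seen."""
    last_seen: tuple[datetime.datetime, str] | None = None
    seen = 0
    while True:
        runs = repo.get_runs_batch_by_time_range(
            start_from=None,        # no lower bound on created_at
            end_before=cutoff,      # only runs older than the retention cutoff
            last_seen=last_seen,    # cursor returned by the previous iteration
            batch_size=batch_size,
            workflow_ids=None,      # the optional filter added in this change
        )
        if not runs:
            return seen
        seen += len(runs)
        # ... archive or delete `runs` here ...
        last_seen = (runs[-1].created_at, runs[-1].id)
```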
@@ -386,7 +386,6 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
batch_size: int,
|
||||
run_types: Sequence[WorkflowType] | None = None,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
workflow_ids: Sequence[str] | None = None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Fetch ended workflow runs in a time window for archival and clean batching.
|
||||
@@ -395,7 +394,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
- created_at in [start_from, end_before)
|
||||
- type in run_types (when provided)
|
||||
- status is an ended state
|
||||
- optional tenant_id, workflow_id filters and cursor (last_seen) for pagination
|
||||
- optional tenant_id filter and cursor (last_seen) for pagination
|
||||
"""
|
||||
with self._session_maker() as session:
|
||||
stmt = (
|
||||
@@ -418,9 +417,6 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
if tenant_ids:
|
||||
stmt = stmt.where(WorkflowRun.tenant_id.in_(tenant_ids))
|
||||
|
||||
if workflow_ids:
|
||||
stmt = stmt.where(WorkflowRun.workflow_id.in_(workflow_ids))
|
||||
|
||||
if last_seen:
|
||||
stmt = stmt.where(
|
||||
or_(
|
||||
|
||||
@@ -4,6 +4,7 @@ import time
|
||||
from collections.abc import Sequence
|
||||
|
||||
import click
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
import app
|
||||
@@ -12,7 +13,6 @@ from extensions.ext_database import db
|
||||
from models.model import (
|
||||
AppAnnotationHitHistory,
|
||||
Conversation,
|
||||
DatasetRetrieverResource,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
@@ -20,10 +20,7 @@ from models.model import (
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.web import SavedMessage
|
||||
from models.workflow import ConversationVariable, WorkflowRun
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
from models.workflow import ConversationVariable, WorkflowAppLog, WorkflowNodeExecutionModel, WorkflowRun
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -32,15 +29,8 @@ MAX_RETRIES = 3
|
||||
BATCH_SIZE = dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE
|
||||
|
||||
|
||||
def _get_specific_workflow_ids() -> list[str]:
|
||||
workflow_ids_str = dify_config.WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS.strip()
|
||||
if not workflow_ids_str:
|
||||
return []
|
||||
return [wid.strip() for wid in workflow_ids_str.split(",") if wid.strip()]
|
||||
|
||||
|
||||
@app.celery.task(queue="retention")
|
||||
def clean_workflow_runlogs_precise() -> None:
|
||||
@app.celery.task(queue="dataset")
|
||||
def clean_workflow_runlogs_precise():
|
||||
"""Clean expired workflow run logs with retry mechanism and complete message cascade"""
|
||||
|
||||
click.echo(click.style("Start clean workflow run logs (precise mode with complete cascade).", fg="green"))
|
||||
@@ -49,48 +39,48 @@ def clean_workflow_runlogs_precise() -> None:
|
||||
retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS
|
||||
cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days)
|
||||
session_factory = sessionmaker(db.engine, expire_on_commit=False)
|
||||
workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_factory)
|
||||
workflow_ids = _get_specific_workflow_ids()
|
||||
workflow_ids_filter = workflow_ids or None
|
||||
|
||||
try:
|
||||
with session_factory.begin() as session:
|
||||
total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count()
|
||||
if total_workflow_runs == 0:
|
||||
logger.info("No expired workflow run logs found")
|
||||
return
|
||||
logger.info("Found %s expired workflow run logs to clean", total_workflow_runs)
|
||||
|
||||
total_deleted = 0
|
||||
failed_batches = 0
|
||||
batch_count = 0
|
||||
last_seen: tuple[datetime.datetime, str] | None = None
|
||||
while True:
|
||||
run_rows = workflow_run_repo.get_runs_batch_by_time_range(
|
||||
start_from=None,
|
||||
end_before=cutoff_date,
|
||||
last_seen=last_seen,
|
||||
batch_size=BATCH_SIZE,
|
||||
workflow_ids=workflow_ids_filter,
|
||||
)
|
||||
|
||||
if not run_rows:
|
||||
if batch_count == 0:
|
||||
logger.info("No expired workflow run logs found")
|
||||
break
|
||||
|
||||
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
|
||||
batch_count += 1
|
||||
with session_factory.begin() as session:
|
||||
success = _delete_batch(session, workflow_run_repo, run_rows, failed_batches)
|
||||
workflow_run_ids = session.scalars(
|
||||
select(WorkflowRun.id)
|
||||
.where(WorkflowRun.created_at < cutoff_date)
|
||||
.order_by(WorkflowRun.created_at, WorkflowRun.id)
|
||||
.limit(BATCH_SIZE)
|
||||
).all()
|
||||
|
||||
if success:
|
||||
total_deleted += len(run_rows)
|
||||
failed_batches = 0
|
||||
else:
|
||||
failed_batches += 1
|
||||
if failed_batches >= MAX_RETRIES:
|
||||
logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
|
||||
if not workflow_run_ids:
|
||||
break
|
||||
|
||||
batch_count += 1
|
||||
|
||||
success = _delete_batch(session, workflow_run_ids, failed_batches)
|
||||
|
||||
if success:
|
||||
total_deleted += len(workflow_run_ids)
|
||||
failed_batches = 0
|
||||
else:
|
||||
# Calculate incremental delay times: 5, 10, 15 minutes
|
||||
retry_delay_minutes = failed_batches * 5
|
||||
logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
|
||||
time.sleep(retry_delay_minutes * 60)
|
||||
continue
|
||||
failed_batches += 1
|
||||
if failed_batches >= MAX_RETRIES:
|
||||
logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
|
||||
break
|
||||
else:
|
||||
# Calculate incremental delay times: 5, 10, 15 minutes
|
||||
retry_delay_minutes = failed_batches * 5
|
||||
logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
|
||||
time.sleep(retry_delay_minutes * 60)
|
||||
continue
|
||||
|
||||
logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted)
|
||||
|
||||
@@ -103,16 +93,10 @@ def clean_workflow_runlogs_precise() -> None:
|
||||
click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green"))
|
||||
|
||||
|
||||
def _delete_batch(
|
||||
session: Session,
|
||||
workflow_run_repo,
|
||||
workflow_runs: Sequence[WorkflowRun],
|
||||
attempt_count: int,
|
||||
) -> bool:
|
||||
def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool:
|
||||
"""Delete a single batch of workflow runs and all related data within a nested transaction."""
|
||||
try:
|
||||
with session.begin_nested():
|
||||
workflow_run_ids = [run.id for run in workflow_runs]
|
||||
message_data = (
|
||||
session.query(Message.id, Message.conversation_id)
|
||||
.where(Message.workflow_run_id.in_(workflow_run_ids))
|
||||
@@ -123,13 +107,11 @@ def _delete_batch(
|
||||
if message_id_list:
|
||||
message_related_models = [
|
||||
AppAnnotationHitHistory,
|
||||
DatasetRetrieverResource,
|
||||
MessageAgentThought,
|
||||
MessageChain,
|
||||
MessageFile,
|
||||
MessageAnnotation,
|
||||
MessageFeedback,
|
||||
SavedMessage,
|
||||
]
|
||||
for model in message_related_models:
|
||||
session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore
|
||||
@@ -140,6 +122,14 @@ def _delete_batch(
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
session.query(WorkflowNodeExecutionModel).where(
|
||||
WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids)
|
||||
).delete(synchronize_session=False)
|
||||
|
||||
if conversation_id_list:
|
||||
session.query(ConversationVariable).where(
|
||||
ConversationVariable.conversation_id.in_(conversation_id_list)
|
||||
@@ -149,22 +139,7 @@ def _delete_batch(
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
def _delete_node_executions(active_session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
|
||||
run_ids = [run.id for run in runs]
|
||||
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
|
||||
session_maker=sessionmaker(bind=active_session.get_bind(), expire_on_commit=False)
|
||||
)
|
||||
return repo.delete_by_runs(active_session, run_ids)
|
||||
|
||||
def _delete_trigger_logs(active_session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(active_session)
|
||||
return trigger_repo.delete_by_run_ids(run_ids)
|
||||
|
||||
workflow_run_repo.delete_runs_with_related(
|
||||
workflow_runs,
|
||||
delete_node_executions=_delete_node_executions,
|
||||
delete_trigger_logs=_delete_trigger_logs,
|
||||
)
|
||||
session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from libs.exception import BaseHTTPException
|
||||
from models.human_input import RecipientType
|
||||
from models.model import App, AppMode
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
from tasks.app_generate.workflow_execute_task import resume_app_execution
|
||||
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE, resume_app_execution
|
||||
|
||||
|
||||
class Form:
|
||||
@@ -230,6 +230,7 @@ class HumanInputService:
|
||||
try:
|
||||
resume_app_execution.apply_async(
|
||||
kwargs={"payload": payload},
|
||||
queue=WORKFLOW_BASED_APP_EXECUTION_QUEUE,
|
||||
)
|
||||
except Exception: # pragma: no cover
|
||||
logger.exception("Failed to enqueue resume task for workflow run %s", workflow_run_id)
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from typing import cast
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import delete, select, tuple_
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
@@ -196,15 +193,11 @@ class MessagesCleanService:
|
||||
self._end_before,
|
||||
)
|
||||
|
||||
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
|
||||
|
||||
while True:
|
||||
stats["batches"] += 1
|
||||
batch_start = time.monotonic()
|
||||
|
||||
# Step 1: Fetch a batch of messages using cursor
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
fetch_messages_start = time.monotonic()
|
||||
msg_stmt = (
|
||||
select(Message.id, Message.app_id, Message.created_at)
|
||||
.where(Message.created_at < self._end_before)
|
||||
@@ -216,13 +209,13 @@
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)

# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
# This translates to:
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
if _cursor:
# Continuing from previous batch
msg_stmt = msg_stmt.where(
tuple_(Message.created_at, Message.id)
> tuple_(
sa.literal(_cursor[0], type_=sa.DateTime()),
sa.literal(_cursor[1], type_=Message.id.type),
)
(Message.created_at > _cursor[0])
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
)
raw_messages = list(session.execute(msg_stmt).all())
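
This hunk swaps the tuple_(...) > tuple_(...) row-value comparison for the expanded OR form of the same keyset condition, which stays portable across backends and pairs with the composite message_created_at_id_idx (created_at, id) index added in the models hunk earlier in this diff. A small sketch rendering both predicates against a toy table so the equivalence is visible; the table, columns, and cursor values are illustrative.

```python
# Sketch: the two cursor predicates from this hunk, rendered against a toy
# table so the equivalence is visible. Table name, columns, and cursor values
# are illustrative.
import datetime

import sqlalchemy as sa

metadata = sa.MetaData()
messages = sa.Table(
    "messages",
    metadata,
    sa.Column("id", sa.String, primary_key=True),
    sa.Column("created_at", sa.DateTime),
)

cursor = (datetime.datetime(2026, 1, 1), "0000-aaaa")  # last (created_at, id) seen

# Row-value form (requires tuple comparison support in the backend).
tuple_pred = sa.tuple_(messages.c.created_at, messages.c.id) > sa.tuple_(
    sa.literal(cursor[0], type_=sa.DateTime()),
    sa.literal(cursor[1], type_=messages.c.id.type),
)

# Expanded form used after this change: portable, and matches an index on
# (created_at, id) when combined with the ORDER BY below.
or_pred = (messages.c.created_at > cursor[0]) | (
    (messages.c.created_at == cursor[0]) & (messages.c.id > cursor[1])
)

for pred in (tuple_pred, or_pred):
    stmt = (
        sa.select(messages.c.id)
        .where(pred)
        .order_by(messages.c.created_at, messages.c.id)
        .limit(100)
    )
    print(stmt.compile())
```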
@@ -230,12 +223,6 @@ class MessagesCleanService:
|
||||
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
|
||||
for msg_id, app_id, msg_created_at in raw_messages
|
||||
]
|
||||
logger.info(
|
||||
"clean_messages (batch %s): fetched %s messages in %sms",
|
||||
stats["batches"],
|
||||
len(messages),
|
||||
int((time.monotonic() - fetch_messages_start) * 1000),
|
||||
)
|
||||
|
||||
# Track total messages fetched across all batches
|
||||
stats["total_messages"] += len(messages)
|
||||
@@ -254,16 +241,8 @@ class MessagesCleanService:
|
||||
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
fetch_apps_start = time.monotonic()
|
||||
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
|
||||
apps = list(session.execute(app_stmt).all())
|
||||
logger.info(
|
||||
"clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
|
||||
stats["batches"],
|
||||
len(apps),
|
||||
len(app_ids),
|
||||
int((time.monotonic() - fetch_apps_start) * 1000),
|
||||
)
|
||||
|
||||
if not apps:
|
||||
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
|
||||
@@ -273,15 +252,7 @@ class MessagesCleanService:
|
||||
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
|
||||
|
||||
# Step 3: Delegate to policy to determine which messages to delete
|
||||
policy_start = time.monotonic()
|
||||
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
|
||||
logger.info(
|
||||
"clean_messages (batch %s): policy selected %s/%s messages in %sms",
|
||||
stats["batches"],
|
||||
len(message_ids_to_delete),
|
||||
len(messages),
|
||||
int((time.monotonic() - policy_start) * 1000),
|
||||
)
|
||||
|
||||
if not message_ids_to_delete:
|
||||
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
|
||||
@@ -292,20 +263,14 @@ class MessagesCleanService:
|
||||
# Step 4: Batch delete messages and their relations
|
||||
if not self._dry_run:
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
delete_relations_start = time.monotonic()
|
||||
# Delete related records first
|
||||
self._batch_delete_message_relations(session, message_ids_to_delete)
|
||||
delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)
|
||||
|
||||
# Delete messages
|
||||
delete_messages_start = time.monotonic()
|
||||
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
|
||||
delete_result = cast(CursorResult, session.execute(delete_stmt))
|
||||
messages_deleted = delete_result.rowcount
|
||||
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
|
||||
commit_start = time.monotonic()
|
||||
session.commit()
|
||||
commit_ms = int((time.monotonic() - commit_start) * 1000)
|
||||
|
||||
stats["total_deleted"] += messages_deleted
|
||||
|
||||
@@ -315,19 +280,6 @@ class MessagesCleanService:
|
||||
len(messages),
|
||||
messages_deleted,
|
||||
)
|
||||
logger.info(
|
||||
"clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
|
||||
stats["batches"],
|
||||
delete_relations_ms,
|
||||
delete_messages_ms,
|
||||
commit_ms,
|
||||
int((time.monotonic() - batch_start) * 1000),
|
||||
)
|
||||
|
||||
# Random sleep between batches to avoid overwhelming the database
|
||||
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
|
||||
logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
|
||||
time.sleep(sleep_ms / 1000)
|
||||
else:
|
||||
# Log random sample of message IDs that would be deleted (up to 10)
|
||||
sample_size = min(10, len(message_ids_to_delete))
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import time
|
||||
from collections.abc import Iterable, Sequence
|
||||
|
||||
import click
|
||||
@@ -75,12 +72,7 @@ class WorkflowRunCleanup:
|
||||
batch_index = 0
|
||||
last_seen: tuple[datetime.datetime, str] | None = None
|
||||
|
||||
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
|
||||
|
||||
while True:
|
||||
batch_start = time.monotonic()
|
||||
|
||||
fetch_start = time.monotonic()
|
||||
run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
|
||||
start_from=self.window_start,
|
||||
end_before=self.window_end,
|
||||
@@ -88,30 +80,12 @@ class WorkflowRunCleanup:
|
||||
batch_size=self.batch_size,
|
||||
)
|
||||
if not run_rows:
|
||||
logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
|
||||
break
|
||||
|
||||
batch_index += 1
|
||||
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
|
||||
logger.info(
|
||||
"workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
|
||||
batch_index,
|
||||
len(run_rows),
|
||||
int((time.monotonic() - fetch_start) * 1000),
|
||||
)
|
||||
|
||||
tenant_ids = {row.tenant_id for row in run_rows}
|
||||
|
||||
filter_start = time.monotonic()
|
||||
free_tenants = self._filter_free_tenants(tenant_ids)
|
||||
logger.info(
|
||||
"workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
|
||||
batch_index,
|
||||
len(free_tenants),
|
||||
len(tenant_ids),
|
||||
int((time.monotonic() - filter_start) * 1000),
|
||||
)
|
||||
|
||||
free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
|
||||
paid_or_skipped = len(run_rows) - len(free_runs)
|
||||
|
||||
@@ -130,17 +104,11 @@ class WorkflowRunCleanup:
|
||||
total_runs_targeted += len(free_runs)
|
||||
|
||||
if self.dry_run:
|
||||
count_start = time.monotonic()
|
||||
batch_counts = self.workflow_run_repo.count_runs_with_related(
|
||||
free_runs,
|
||||
count_node_executions=self._count_node_executions,
|
||||
count_trigger_logs=self._count_trigger_logs,
|
||||
)
|
||||
logger.info(
|
||||
"workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
|
||||
batch_index,
|
||||
int((time.monotonic() - count_start) * 1000),
|
||||
)
|
||||
if related_totals is not None:
|
||||
for key in related_totals:
|
||||
related_totals[key] += batch_counts.get(key, 0)
|
||||
@@ -152,21 +120,14 @@ class WorkflowRunCleanup:
|
||||
fg="yellow",
|
||||
)
|
||||
)
|
||||
logger.info(
|
||||
"workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
|
||||
batch_index,
|
||||
int((time.monotonic() - batch_start) * 1000),
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
delete_start = time.monotonic()
|
||||
counts = self.workflow_run_repo.delete_runs_with_related(
|
||||
free_runs,
|
||||
delete_node_executions=self._delete_node_executions,
|
||||
delete_trigger_logs=self._delete_trigger_logs,
|
||||
)
|
||||
delete_ms = int((time.monotonic() - delete_start) * 1000)
|
||||
except Exception:
|
||||
logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
|
||||
raise
|
||||
@@ -182,17 +143,6 @@ class WorkflowRunCleanup:
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
logger.info(
|
||||
"workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
|
||||
batch_index,
|
||||
delete_ms,
|
||||
int((time.monotonic() - batch_start) * 1000),
|
||||
)
|
||||
|
||||
# Random sleep between batches to avoid overwhelming the database
|
||||
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
|
||||
logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
|
||||
time.sleep(sleep_ms / 1000)
|
||||
|
||||
if self.dry_run:
|
||||
if self.window_start:
|
||||
|
||||
@@ -129,15 +129,15 @@ def build_workflow_event_stream(
|
||||
return
|
||||
|
||||
try:
|
||||
event = buffer_state.queue.get(timeout=1)
|
||||
event = buffer_state.queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
current_time = time.time()
|
||||
if current_time - last_msg_time > idle_timeout:
|
||||
logger.debug(
|
||||
"Idle timeout of %s seconds reached, closing workflow event stream.",
|
||||
"No workflow events received for %s seconds, keeping stream open",
|
||||
idle_timeout,
|
||||
)
|
||||
return
|
||||
last_msg_time = current_time
|
||||
if current_time - last_ping_time >= ping_interval:
|
||||
yield StreamEvent.PING.value
|
||||
last_ping_time = current_time
|
||||
@@ -405,7 +405,7 @@ def _start_buffering(subscription) -> BufferState:
|
||||
dropped_count = 0
|
||||
try:
|
||||
while not buffer_state.stop_event.is_set():
|
||||
msg = subscription.receive(timeout=1)
|
||||
msg = subscription.receive(timeout=0.1)
|
||||
if msg is None:
|
||||
continue
|
||||
event = _parse_event_message(msg)
|
||||
|
||||
@@ -51,7 +51,7 @@ def _patch_redis_clients_on_loaded_modules():
|
||||
continue
|
||||
if hasattr(module, "redis_client"):
|
||||
module.redis_client = redis_mock
|
||||
if hasattr(module, "_pubsub_redis_client"):
|
||||
if hasattr(module, "pubsub_redis_client"):
|
||||
module.pubsub_redis_client = redis_mock
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ def _patch_redis_clients():
|
||||
|
||||
with (
|
||||
patch.object(ext_redis, "redis_client", redis_mock),
|
||||
patch.object(ext_redis, "_pubsub_redis_client", redis_mock),
|
||||
patch.object(ext_redis, "pubsub_redis_client", redis_mock),
|
||||
):
|
||||
_patch_redis_clients_on_loaded_modules()
|
||||
yield
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from controllers.console.app.conversation import _get_conversation
|
||||
|
||||
|
||||
def test_get_conversation_mark_read_keeps_updated_at_unchanged():
|
||||
app_model = SimpleNamespace(id="app-id")
|
||||
account = SimpleNamespace(id="account-id")
|
||||
conversation = MagicMock()
|
||||
conversation.id = "conversation-id"
|
||||
|
||||
with (
|
||||
patch("controllers.console.app.conversation.current_account_with_tenant", return_value=(account, None)),
|
||||
patch("controllers.console.app.conversation.naive_utc_now", return_value=datetime(2026, 2, 9, 0, 0, 0)),
|
||||
patch("controllers.console.app.conversation.db.session") as mock_session,
|
||||
):
|
||||
mock_session.query.return_value.where.return_value.first.return_value = conversation
|
||||
|
||||
_get_conversation(app_model, "conversation-id")
|
||||
|
||||
statement = mock_session.execute.call_args[0][0]
|
||||
compiled = statement.compile()
|
||||
sql_text = str(compiled).lower()
|
||||
compact_sql_text = sql_text.replace(" ", "")
|
||||
params = compiled.params
|
||||
|
||||
assert "updated_at=current_timestamp" not in compact_sql_text
|
||||
assert "updated_at=conversations.updated_at" in compact_sql_text
|
||||
assert "read_at=:read_at" in compact_sql_text
|
||||
assert "read_account_id=:read_account_id" in compact_sql_text
|
||||
assert params["read_at"] == datetime(2026, 2, 9, 0, 0, 0)
|
||||
assert params["read_account_id"] == "account-id"
|
||||
@@ -1,286 +1,92 @@
|
||||
"""Tests for remote file upload API endpoints using Flask-RESTX."""
|
||||
|
||||
import contextlib
|
||||
import builtins
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import Mock, patch
|
||||
from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from flask import Flask, g
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from extensions import ext_fastopenapi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app() -> Flask:
|
||||
"""Create Flask app for testing."""
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.config["SECRET_KEY"] = "test-secret-key"
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(app):
|
||||
"""Create test client with console blueprint registered."""
|
||||
from controllers.console import bp
|
||||
def test_console_remote_files_fastopenapi_get_info(app: Flask):
|
||||
ext_fastopenapi.init_app(app)
|
||||
|
||||
app.register_blueprint(bp)
|
||||
return app.test_client()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_account():
|
||||
"""Create a mock account for testing."""
|
||||
from models import Account
|
||||
|
||||
account = Mock(spec=Account)
|
||||
account.id = "test-account-id"
|
||||
account.current_tenant_id = "test-tenant-id"
|
||||
return account
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def auth_ctx(app, mock_account):
|
||||
"""Context manager to set auth/tenant context in flask.g for a request."""
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _ctx():
|
||||
with app.test_request_context():
|
||||
g._login_user = mock_account
|
||||
g._current_tenant = mock_account.current_tenant_id
|
||||
yield
|
||||
|
||||
return _ctx
|
||||
|
||||
|
||||
class TestGetRemoteFileInfo:
|
||||
"""Test GET /console/api/remote-files/<path:url> endpoint."""
|
||||
|
||||
def test_get_remote_file_info_success(self, app, client, mock_account):
|
||||
"""Test successful retrieval of remote file info."""
|
||||
response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("HEAD", "http://example.com/file.txt"),
|
||||
headers={"Content-Type": "text/plain", "Content-Length": "1024"},
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
with app.test_request_context():
|
||||
g._login_user = mock_account
|
||||
g._current_tenant = mock_account.current_tenant_id
|
||||
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
|
||||
resp = client.get(f"/console/api/remote-files/{encoded_url}")
|
||||
|
||||
assert resp.status_code == 200
|
||||
data = resp.get_json()
|
||||
assert data["file_type"] == "text/plain"
|
||||
assert data["file_length"] == 1024
|
||||
|
||||
    def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account):
        """Test fallback to GET when HEAD returns non-200 status."""
        head_response = httpx.Response(
            404,
            request=httpx.Request("HEAD", "http://example.com/file.pdf"),
        )
        get_response = httpx.Response(
            200,
            request=httpx.Request("GET", "http://example.com/file.pdf"),
            headers={"Content-Type": "application/pdf", "Content-Length": "2048"},
        )

        with (
            patch(
                "controllers.console.remote_files.current_account_with_tenant",
                return_value=(mock_account, "test-tenant-id"),
            ),
            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
            patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response),
            patch("libs.login.check_csrf_token", return_value=None),
        ):
            with app.test_request_context():
                g._login_user = mock_account
                g._current_tenant = mock_account.current_tenant_id
                encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf"
                resp = client.get(f"/console/api/remote-files/{encoded_url}")

        assert resp.status_code == 200
        data = resp.get_json()
        assert data["file_type"] == "application/pdf"
        assert data["file_length"] == 2048


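The two tests above pin down the probing behavior the mocked ssrf_proxy calls stand in for: try a cheap HEAD request first and fall back to GET when HEAD does not return 200. A minimal sketch of that logic, assuming an httpx-compatible ssrf_proxy module; the function name and exact signature are illustrative, since the controller body itself is not part of this diff:

import httpx


def probe_remote_file(ssrf_proxy, url: str) -> tuple[str, int]:
    """Return (mime type, content length) for a remote URL, preferring HEAD over GET."""
    resp = ssrf_proxy.head(url)
    if resp.status_code != httpx.codes.OK:
        # HEAD rejected or unsupported (e.g. 404/405): retry with a plain GET.
        resp = ssrf_proxy.get(url)
        resp.raise_for_status()
    file_type = resp.headers.get("Content-Type", "application/octet-stream")
    file_length = int(resp.headers.get("Content-Length", -1))
    return file_type, file_length
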
class TestRemoteFileUpload:
    """Test POST /console/api/remote-files/upload endpoint."""

    @pytest.mark.parametrize(
        ("head_status", "use_get"),
        [
            (200, False), # HEAD succeeds
            (405, True), # HEAD fails -> fallback GET
        ],
        response = httpx.Response(
            200,
            request=httpx.Request("HEAD", "http://example.com/file.txt"),
            headers={"Content-Type": "text/plain", "Content-Length": "10"},
        )
    def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get):
        url = "http://example.com/file.pdf"
        head_resp = httpx.Response(
            head_status,
            request=httpx.Request("HEAD", url),
            headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
        )
        get_resp = httpx.Response(
            200,
            request=httpx.Request("GET", url),
            headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
            content=b"file content",
        )

        file_info = SimpleNamespace(
            extension="pdf",
            size=1024,
            filename="file.pdf",
            mimetype="application/pdf",
        )
        uploaded_file = SimpleNamespace(
            id="uploaded-file-id",
            name="file.pdf",
            size=1024,
            extension="pdf",
            mime_type="application/pdf",
            created_by="test-account-id",
            created_at=datetime(2024, 1, 1, 12, 0, 0),
        )
        with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
            client = app.test_client()
            encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
            resp = client.get(f"/console/api/remote-files/{encoded_url}")

        with (
            patch(
                "controllers.console.remote_files.current_account_with_tenant",
                return_value=(mock_account, "test-tenant-id"),
            ),
            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head,
            patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get,
            patch(
                "controllers.console.remote_files.helpers.guess_file_info_from_response",
                return_value=file_info,
            ),
            patch(
                "controllers.console.remote_files.FileService.is_file_size_within_limit",
                return_value=True,
            ),
            patch("controllers.console.remote_files.db", spec=["engine"]),
            patch("controllers.console.remote_files.FileService") as mock_file_service,
            patch(
                "controllers.console.remote_files.file_helpers.get_signed_file_url",
                return_value="http://example.com/signed-url",
            ),
            patch("libs.login.check_csrf_token", return_value=None),
        ):
            mock_file_service.return_value.upload_file.return_value = uploaded_file
            assert resp.status_code == 200
            assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}

            with auth_ctx():
                resp = client.post(
                    "/console/api/remote-files/upload",
                    json={"url": url},
                )

            assert resp.status_code == 201
            p_head.assert_called_once()
            # GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds
            p_get.assert_called_once()
            mock_file_service.return_value.upload_file.assert_called_once()
def test_console_remote_files_fastopenapi_upload(app: Flask):
    ext_fastopenapi.init_app(app)

        data = resp.get_json()
        assert data["id"] == "uploaded-file-id"
        assert data["name"] == "file.pdf"
        assert data["size"] == 1024
        assert data["extension"] == "pdf"
        assert data["url"] == "http://example.com/signed-url"
        assert data["mime_type"] == "application/pdf"
        assert data["created_by"] == "test-account-id"

    @pytest.mark.parametrize(
        ("size_ok", "raises", "expected_status", "expected_msg"),
        [
            # When size check fails in controller, API returns 413 with message "File size exceeded..."
            (False, None, 413, "file size exceeded"),
            # When service raises unsupported type, controller maps to 415 with message "File type not allowed."
            (True, "unsupported", 415, "file type not allowed"),
        ],
    head_response = httpx.Response(
        200,
        request=httpx.Request("GET", "http://example.com/file.txt"),
        content=b"hello",
    )
    def test_upload_remote_file_errors(
        self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg
    file_info = SimpleNamespace(
        extension="txt",
        size=5,
        filename="file.txt",
        mimetype="text/plain",
    )
    uploaded = SimpleNamespace(
        id="file-id",
        name="file.txt",
        size=5,
        extension="txt",
        mime_type="text/plain",
        created_by="user-id",
        created_at=datetime(2024, 1, 1),
    )

    with (
        patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
        patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
        patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
        patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
        patch("controllers.console.remote_files.FileService.__init__", return_value=None),
        patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
        patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
        patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
    ):
        url = "http://example.com/x.pdf"
        head_resp = httpx.Response(
            200,
            request=httpx.Request("HEAD", url),
            headers={"Content-Type": "application/pdf", "Content-Length": "9"},
        client = app.test_client()
        resp = client.post(
            "/console/api/remote-files/upload",
            json={"url": "http://example.com/file.txt"},
        )
        file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf")

        with (
            patch(
                "controllers.console.remote_files.current_account_with_tenant",
                return_value=(mock_account, "test-tenant-id"),
            ),
            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp),
            patch(
                "controllers.console.remote_files.helpers.guess_file_info_from_response",
                return_value=file_info,
            ),
            patch(
                "controllers.console.remote_files.FileService.is_file_size_within_limit",
                return_value=size_ok,
            ),
            patch("controllers.console.remote_files.db", spec=["engine"]),
            patch("libs.login.check_csrf_token", return_value=None),
        ):
            if raises == "unsupported":
                from services.errors.file import UnsupportedFileTypeError

                with patch("controllers.console.remote_files.FileService") as mock_file_service:
                    mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad")
                    with auth_ctx():
                        resp = client.post(
                            "/console/api/remote-files/upload",
                            json={"url": url},
                        )
            else:
                with auth_ctx():
                    resp = client.post(
                        "/console/api/remote-files/upload",
                        json={"url": url},
                    )

            assert resp.status_code == expected_status
            data = resp.get_json()
            msg = (data.get("error") or {}).get("message") or data.get("message", "")
            assert expected_msg in msg.lower()

    def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx):
        """Test upload when fetching of remote file fails."""
        with (
            patch(
                "controllers.console.remote_files.current_account_with_tenant",
                return_value=(mock_account, "test-tenant-id"),
            ),
            patch(
                "controllers.console.remote_files.ssrf_proxy.head",
                side_effect=httpx.RequestError("Connection failed"),
            ),
            patch("libs.login.check_csrf_token", return_value=None),
        ):
            with auth_ctx():
                resp = client.post(
                    "/console/api/remote-files/upload",
                    json={"url": "http://unreachable.com/file.pdf"},
                )

            assert resp.status_code == 400
            data = resp.get_json()
            msg = (data.get("error") or {}).get("message") or data.get("message", "")
            assert "failed to fetch" in msg.lower()
    assert resp.status_code == 201
    assert resp.get_json() == {
        "id": "file-id",
        "name": "file.txt",
        "size": 5,
        "extension": "txt",
        "url": "signed-url",
        "mime_type": "text/plain",
        "created_by": "user-id",
        "created_at": int(uploaded.created_at.timestamp()),
    }

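The parametrized comments above spell out the error contract the endpoint is expected to keep: an oversized file maps to HTTP 413 and an unsupported type raised by the service layer maps to HTTP 415. A rough sketch of that mapping, with method signatures guessed from the patched paths in the tests rather than taken from the actual controller implementation:

from services.errors.file import UnsupportedFileTypeError


def map_upload_result(file_service, file_info, content):
    # Signatures are illustrative; only the status-code mapping is asserted by the tests.
    if not file_service.is_file_size_within_limit(file_info.size):
        return {"message": "File size exceeded."}, 413
    try:
        uploaded_file = file_service.upload_file(file_info.filename, content)
    except UnsupportedFileTypeError:
        return {"message": "File type not allowed."}, 415
    return uploaded_file, 201
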
@@ -25,19 +25,15 @@ class TestMessageCycleManagerOptimization:
        task_state = Mock()
        return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)

    def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
        """Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.

        This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
        allowing the frontend to properly display generated image files with url field.
        """
    def test_get_message_event_type_with_message_file(self, message_cycle_manager):
        """Test get_message_event_type returns MESSAGE_FILE when message has files."""
        with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
            # Setup mock session and message file
            mock_session = Mock()
            mock_session_factory.create_session.return_value.__enter__.return_value = mock_session

            mock_message_file = Mock()
            mock_message_file.belongs_to = "assistant"
            # Current implementation uses session.scalar(select(...))
            mock_session.scalar.return_value = mock_message_file

            # Execute
@@ -48,31 +44,6 @@ class TestMessageCycleManagerOptimization:
        assert result == StreamEvent.MESSAGE_FILE
        mock_session.scalar.assert_called_once()

    def test_get_message_event_type_with_user_file(self, message_cycle_manager):
        """Test get_message_event_type returns MESSAGE when message only has user-uploaded files.

        This is a regression test for the issue where user-uploaded images (belongs_to='user')
        caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
        resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
        so when only user files exist, the database query returns None, resulting in MESSAGE event type.
        """
        with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
            # Setup mock session and message file
            mock_session = Mock()
            mock_session_factory.create_session.return_value.__enter__.return_value = mock_session

            # When querying for assistant files with only user files present, return None
            # (simulates database query with belongs_to='assistant' filter returning no results)
            mock_session.scalar.return_value = None

            # Execute
            with current_app.app_context():
                result = message_cycle_manager.get_message_event_type("test-message-id")

            # Assert
            assert result == StreamEvent.MESSAGE
            mock_session.scalar.assert_called_once()

    def test_get_message_event_type_without_message_file(self, message_cycle_manager):
        """Test get_message_event_type returns MESSAGE when message has no files."""
        with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
@@ -98,7 +69,7 @@ class TestMessageCycleManagerOptimization:
            mock_session_factory.create_session.return_value.__enter__.return_value = mock_session

            mock_message_file = Mock()
            mock_message_file.belongs_to = "assistant"
            # Current implementation uses session.scalar(select(...))
            mock_session.scalar.return_value = mock_message_file

            # Execute: compute event type once, then pass to message_to_stream_response

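The docstrings above describe the rule these tests enforce: only an assistant-generated file row should flip the stream event to MESSAGE_FILE, while user uploads must keep the plain MESSAGE event. A minimal sketch of that lookup, assuming SQLAlchemy-style models; the import paths are guesses and the real method lives on MessageCycleManager, which this diff does not show:

from sqlalchemy import select

from core.app.entities.task_entities import StreamEvent  # assumed import path
from extensions.ext_database import session_factory  # assumed import path
from models.model import MessageFile  # assumed import path


def get_message_event_type(message_id: str) -> StreamEvent:
    # Filter on belongs_to='assistant' so user-uploaded files never re-tag the text response.
    with session_factory.create_session() as session:
        message_file = session.scalar(
            select(MessageFile).where(
                MessageFile.message_id == message_id,
                MessageFile.belongs_to == "assistant",
            )
        )
    return StreamEvent.MESSAGE_FILE if message_file else StreamEvent.MESSAGE
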
@@ -496,9 +496,6 @@ class TestSchemaResolverClass:
        avg_time_no_cache = sum(results1) / len(results1)

        # Second run (with cache) - run multiple times
        # Warm up cache first
        resolve_dify_schema_refs(schema)

        results2 = []
        for _ in range(3):
            start = time.perf_counter()

@@ -198,15 +198,6 @@ class SubscriptionTestCase:
    description: str = ""


class FakeRedisClient:
    """Minimal fake Redis client for unit tests."""

    def __init__(self) -> None:
        self.publish = MagicMock()
        self.spublish = MagicMock()
        self.pubsub = MagicMock(return_value=MagicMock())


class TestRedisSubscription:
    """Test cases for the _RedisSubscription class."""

@@ -628,13 +619,10 @@ class TestRedisSubscription:
class TestRedisShardedSubscription:
    """Test cases for the _RedisShardedSubscription class."""

    @pytest.fixture(autouse=True)
    def patch_sharded_redis_type(self, monkeypatch):
        monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)

    @pytest.fixture
    def mock_redis_client(self) -> FakeRedisClient:
        return FakeRedisClient()
    def mock_redis_client(self) -> MagicMock:
        client = MagicMock()
        return client

    @pytest.fixture
    def mock_pubsub(self) -> MagicMock:
@@ -648,7 +636,7 @@ class TestRedisShardedSubscription:

    @pytest.fixture
    def sharded_subscription(
        self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
        self, mock_pubsub: MagicMock, mock_redis_client: MagicMock
    ) -> Generator[_RedisShardedSubscription, None, None]:
        """Create a _RedisShardedSubscription instance for testing."""
        subscription = _RedisShardedSubscription(
@@ -669,7 +657,7 @@ class TestRedisShardedSubscription:

    # ==================== Lifecycle Tests ====================

    def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
    def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
        """Test that sharded subscription is properly initialized."""
        subscription = _RedisShardedSubscription(
            client=mock_redis_client,
@@ -982,7 +970,7 @@ class TestRedisShardedSubscription:
        ],
    )
    def test_sharded_subscription_scenarios(
        self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
        self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: MagicMock
    ):
        """Test various sharded subscription scenarios using table-driven approach."""
        subscription = _RedisShardedSubscription(
@@ -1070,7 +1058,7 @@ class TestRedisShardedSubscription:
        # Close should still work
        sharded_subscription.close() # Should not raise

    def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
    def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
        """Test various sharded channel name formats."""
        channel_names = [
            "simple",
@@ -1132,13 +1120,10 @@ class TestRedisSubscriptionCommon:
        """Parameterized fixture providing subscription type and class."""
        return request.param

    @pytest.fixture(autouse=True)
    def patch_sharded_redis_type(self, monkeypatch):
        monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)

    @pytest.fixture
    def mock_redis_client(self) -> FakeRedisClient:
        return FakeRedisClient()
    def mock_redis_client(self) -> MagicMock:
        client = MagicMock()
        return client

    @pytest.fixture
    def mock_pubsub(self) -> MagicMock:
@@ -1155,7 +1140,7 @@ class TestRedisSubscriptionCommon:
        return pubsub

    @pytest.fixture
    def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
    def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
        """Create a subscription instance based on parameterized type."""
        subscription_type, subscription_class = subscription_params
        topic_name = f"test-{subscription_type}-topic"

@@ -62,9 +62,6 @@ class FakeRepo:
        end_before: datetime.datetime,
        last_seen: tuple[datetime.datetime, str] | None,
        batch_size: int,
        run_types=None,
        tenant_ids=None,
        workflow_ids=None,
    ) -> list[FakeRun]:
        if self.call_idx >= len(self.batches):
            return []

@@ -17,6 +17,7 @@ from core.workflow.nodes.human_input.entities import (
from core.workflow.nodes.human_input.enums import FormInputType, HumanInputFormKind, HumanInputFormStatus
from models.human_input import RecipientType
from services.human_input_service import Form, FormExpiredError, HumanInputService, InvalidFormDataError
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE


@pytest.fixture
@@ -87,6 +88,7 @@ def test_enqueue_resume_dispatches_task_for_workflow(mocker, mock_session_factor

    resume_task.apply_async.assert_called_once()
    call_kwargs = resume_task.apply_async.call_args.kwargs
    assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
    assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"


@@ -128,6 +130,7 @@ def test_enqueue_resume_dispatches_task_for_advanced_chat(mocker, mock_session_f

    resume_task.apply_async.assert_called_once()
    call_kwargs = resume_task.apply_async.call_args.kwargs
    assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
    assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"


84
api/uv.lock
generated
@@ -1237,47 +1237,49 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.5"
|
||||
version = "46.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1366,7 +1368,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "dify-api"
|
||||
version = "1.13.0"
|
||||
version = "1.12.1"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aliyun-log-python-sdk" },
|
||||
@@ -1592,7 +1594,7 @@ requires-dist = [
|
||||
{ name = "gevent", specifier = "~=25.9.1" },
|
||||
{ name = "gmpy2", specifier = "~=2.2.1" },
|
||||
{ name = "google-api-core", specifier = "==2.18.0" },
|
||||
{ name = "google-api-python-client", specifier = "==2.189.0" },
|
||||
{ name = "google-api-python-client", specifier = "==2.90.0" },
|
||||
{ name = "google-auth", specifier = "==2.29.0" },
|
||||
{ name = "google-auth-httplib2", specifier = "==0.2.0" },
|
||||
{ name = "google-cloud-aiplatform", specifier = "==1.49.0" },
|
||||
@@ -2304,7 +2306,7 @@ grpc = [
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.189.0"
|
||||
version = "2.90.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "google-api-core" },
|
||||
@@ -2313,9 +2315,9 @@ dependencies = [
|
||||
{ name = "httplib2" },
|
||||
{ name = "uritemplate" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -106,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then
  # Configure queues based on edition
  if [[ "${EDITION}" == "CLOUD" ]]; then
    # Cloud edition: separate queues for dataset and trigger tasks
    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
  else
    # Community edition (SELF_HOSTED): dataset and workflow have separate queues
    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
  fi

  echo "No queues specified, using edition-based defaults: ${QUEUES}"

@@ -62,9 +62,6 @@ LANG=C.UTF-8
LC_ALL=C.UTF-8
PYTHONIOENCODING=utf-8

# Set UV cache directory to avoid permission issues with non-existent home directory
UV_CACHE_DIR=/tmp/.uv-cache

# ------------------------------
# Server Configuration
# ------------------------------
@@ -387,8 +384,6 @@ CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_PASSWORD=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
# e.g. {"tasks.add": {"rate_limit": "10/s"}}
CELERY_TASK_ANNOTATIONS=null

# ------------------------------
# CORS Configuration
@@ -1073,8 +1068,6 @@ WORKFLOW_LOG_CLEANUP_ENABLED=false
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# Comma-separated list of workflow IDs to clean logs for
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=

# Aliyun SLS Logstore Configuration
# Aliyun Access Key ID
@@ -1525,7 +1518,6 @@ AMPLITUDE_API_KEY=
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30


@@ -21,7 +21,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -63,7 +63,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -102,7 +102,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -132,7 +132,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.13.0
|
||||
image: langgenius/dify-web:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@@ -16,7 +16,6 @@ x-shared-env: &shared-api-worker-env
|
||||
LANG: ${LANG:-C.UTF-8}
|
||||
LC_ALL: ${LC_ALL:-C.UTF-8}
|
||||
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
|
||||
UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
|
||||
LOG_LEVEL: ${LOG_LEVEL:-INFO}
|
||||
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
|
||||
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
|
||||
@@ -106,7 +105,6 @@ x-shared-env: &shared-api-worker-env
|
||||
CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
|
||||
CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
|
||||
CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
|
||||
CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
|
||||
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
|
||||
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
|
||||
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
|
||||
@@ -470,7 +468,6 @@ x-shared-env: &shared-api-worker-env
|
||||
WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
|
||||
WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
|
||||
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: ${WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS:-}
|
||||
ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-}
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-}
|
||||
ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-}
|
||||
@@ -685,7 +682,6 @@ x-shared-env: &shared-api-worker-env
|
||||
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
|
||||
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
|
||||
PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-}
|
||||
PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub}
|
||||
@@ -716,7 +712,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -758,7 +754,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -797,7 +793,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.13.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -827,7 +823,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.13.0
|
||||
image: langgenius/dify-web:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
8
sdks/nodejs-client/pnpm-lock.yaml
generated
@@ -10,7 +10,7 @@ importers:
|
||||
dependencies:
|
||||
axios:
|
||||
specifier: ^1.13.2
|
||||
version: 1.13.5
|
||||
version: 1.13.2
|
||||
devDependencies:
|
||||
'@eslint/js':
|
||||
specifier: ^9.39.2
|
||||
@@ -544,8 +544,8 @@ packages:
|
||||
asynckit@0.4.0:
|
||||
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
|
||||
|
||||
axios@1.13.5:
|
||||
resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==}
|
||||
axios@1.13.2:
|
||||
resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==}
|
||||
|
||||
balanced-match@1.0.2:
|
||||
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
|
||||
@@ -1677,7 +1677,7 @@ snapshots:
|
||||
|
||||
asynckit@0.4.0: {}
|
||||
|
||||
axios@1.13.5:
|
||||
axios@1.13.2:
|
||||
dependencies:
|
||||
follow-redirects: 1.15.11
|
||||
form-data: 4.0.5
|
||||
|
||||
271
web/__tests__/plugins/plugin-auth-flow.test.tsx
Normal file
@@ -0,0 +1,271 @@
|
||||
/**
|
||||
* Integration Test: Plugin Authentication Flow
|
||||
*
|
||||
* Tests the integration between PluginAuth, usePluginAuth hook,
|
||||
* Authorize/Authorized components, and credential management.
|
||||
* Verifies the complete auth flow from checking authorization status
|
||||
* to rendering the correct UI state.
|
||||
*/
|
||||
import { cleanup, render, screen } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { AuthCategory, CredentialTypeEnum } from '@/app/components/plugins/plugin-auth/types'
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
const map: Record<string, string> = {
|
||||
'plugin.auth.setUpTip': 'Set up your credentials',
|
||||
'plugin.auth.authorized': 'Authorized',
|
||||
'plugin.auth.apiKey': 'API Key',
|
||||
'plugin.auth.oauth': 'OAuth',
|
||||
}
|
||||
return map[key] ?? key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useAppContext: () => ({
|
||||
isCurrentWorkspaceManager: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/classnames', () => ({
|
||||
cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
|
||||
}))
|
||||
|
||||
const mockUsePluginAuth = vi.fn()
|
||||
vi.mock('@/app/components/plugins/plugin-auth/hooks/use-plugin-auth', () => ({
|
||||
usePluginAuth: (...args: unknown[]) => mockUsePluginAuth(...args),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/plugin-auth/authorize', () => ({
|
||||
default: ({ pluginPayload, canOAuth, canApiKey }: {
|
||||
pluginPayload: { provider: string }
|
||||
canOAuth: boolean
|
||||
canApiKey: boolean
|
||||
}) => (
|
||||
<div data-testid="authorize-component">
|
||||
<span data-testid="auth-provider">{pluginPayload.provider}</span>
|
||||
{canOAuth && <span data-testid="auth-oauth">OAuth available</span>}
|
||||
{canApiKey && <span data-testid="auth-apikey">API Key available</span>}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/plugin-auth/authorized', () => ({
|
||||
default: ({ pluginPayload, credentials }: {
|
||||
pluginPayload: { provider: string }
|
||||
credentials: Array<{ id: string, name: string }>
|
||||
}) => (
|
||||
<div data-testid="authorized-component">
|
||||
<span data-testid="auth-provider">{pluginPayload.provider}</span>
|
||||
<span data-testid="auth-credential-count">
|
||||
{credentials.length}
|
||||
{' '}
|
||||
credentials
|
||||
</span>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
const { default: PluginAuth } = await import('@/app/components/plugins/plugin-auth/plugin-auth')
|
||||
|
||||
describe('Plugin Authentication Flow Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
cleanup()
|
||||
})
|
||||
|
||||
const basePayload = {
|
||||
category: AuthCategory.tool,
|
||||
provider: 'test-provider',
|
||||
}
|
||||
|
||||
describe('Unauthorized State', () => {
|
||||
it('renders Authorize component when not authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('authorize-component')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('authorized-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-apikey')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows OAuth option when plugin supports it', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: true,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('auth-oauth')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-apikey')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('applies className to wrapper when not authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const { container } = render(
|
||||
<PluginAuth pluginPayload={basePayload} className="custom-class" />,
|
||||
)
|
||||
|
||||
expect(container.firstChild).toHaveClass('custom-class')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Authorized State', () => {
|
||||
it('renders Authorized component when authorized and no children', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [
|
||||
{ id: 'cred-1', name: 'My API Key', is_default: true },
|
||||
],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.queryByTestId('authorize-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('authorized-component')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-credential-count')).toHaveTextContent('1 credentials')
|
||||
})
|
||||
|
||||
it('renders children instead of Authorized when authorized and children provided', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [{ id: 'cred-1', name: 'Key', is_default: true }],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(
|
||||
<PluginAuth pluginPayload={basePayload}>
|
||||
<div data-testid="custom-children">Custom authorized view</div>
|
||||
</PluginAuth>,
|
||||
)
|
||||
|
||||
expect(screen.queryByTestId('authorize-component')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('authorized-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('custom-children')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not apply className when authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [{ id: 'cred-1', name: 'Key', is_default: true }],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const { container } = render(
|
||||
<PluginAuth pluginPayload={basePayload} className="custom-class" />,
|
||||
)
|
||||
|
||||
expect(container.firstChild).not.toHaveClass('custom-class')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Auth Category Integration', () => {
|
||||
it('passes correct provider to usePluginAuth for tool category', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const toolPayload = {
|
||||
category: AuthCategory.tool,
|
||||
provider: 'google-search-provider',
|
||||
}
|
||||
|
||||
render(<PluginAuth pluginPayload={toolPayload} />)
|
||||
|
||||
expect(mockUsePluginAuth).toHaveBeenCalledWith(toolPayload, true)
|
||||
expect(screen.getByTestId('auth-provider')).toHaveTextContent('google-search-provider')
|
||||
})
|
||||
|
||||
it('passes correct provider to usePluginAuth for datasource category', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: true,
|
||||
canApiKey: false,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const dsPayload = {
|
||||
category: AuthCategory.datasource,
|
||||
provider: 'notion-datasource',
|
||||
}
|
||||
|
||||
render(<PluginAuth pluginPayload={dsPayload} />)
|
||||
|
||||
expect(mockUsePluginAuth).toHaveBeenCalledWith(dsPayload, true)
|
||||
expect(screen.getByTestId('auth-oauth')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('auth-apikey')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple Credentials', () => {
|
||||
it('shows credential count when multiple credentials exist', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: true,
|
||||
canApiKey: true,
|
||||
credentials: [
|
||||
{ id: 'cred-1', name: 'API Key 1', is_default: true },
|
||||
{ id: 'cred-2', name: 'API Key 2', is_default: false },
|
||||
{ id: 'cred-3', name: 'OAuth Token', is_default: false, credential_type: CredentialTypeEnum.OAUTH2 },
|
||||
],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('auth-credential-count')).toHaveTextContent('3 credentials')
|
||||
})
|
||||
})
|
||||
})
|
||||
224
web/__tests__/plugins/plugin-card-rendering.test.tsx
Normal file
@@ -0,0 +1,224 @@
/**
 * Integration Test: Plugin Card Rendering Pipeline
 *
 * Tests the integration between Card, Icon, Title, Description,
 * OrgInfo, CornerMark, and CardMoreInfo components. Verifies that
 * plugin data flows correctly through the card rendering pipeline.
 */
import { cleanup, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('#i18n', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))

vi.mock('@/context/i18n', () => ({
  useGetLanguage: () => 'en_US',
}))

vi.mock('@/hooks/use-theme', () => ({
  default: () => ({ theme: 'light' }),
}))

vi.mock('@/i18n-config', () => ({
  renderI18nObject: (obj: Record<string, string>, locale: string) => obj[locale] || obj.en_US || '',
}))

vi.mock('@/types/app', () => ({
  Theme: { dark: 'dark', light: 'light' },
}))

vi.mock('@/utils/classnames', () => ({
  cn: (...args: unknown[]) => args.filter(a => typeof a === 'string' && a).join(' '),
}))

vi.mock('@/app/components/plugins/hooks', () => ({
  useCategories: () => ({
    categoriesMap: {
      tool: { label: 'Tool' },
      model: { label: 'Model' },
      extension: { label: 'Extension' },
    },
  }),
}))

vi.mock('@/app/components/plugins/base/badges/partner', () => ({
  default: () => <span data-testid="partner-badge">Partner</span>,
}))

vi.mock('@/app/components/plugins/base/badges/verified', () => ({
  default: () => <span data-testid="verified-badge">Verified</span>,
}))

vi.mock('@/app/components/plugins/card/base/card-icon', () => ({
  default: ({ src, installed, installFailed }: { src: string | object, installed?: boolean, installFailed?: boolean }) => (
    <div data-testid="card-icon" data-installed={installed} data-install-failed={installFailed}>
      {typeof src === 'string' ? src : 'emoji-icon'}
    </div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/corner-mark', () => ({
  default: ({ text }: { text: string }) => (
    <div data-testid="corner-mark">{text}</div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/description', () => ({
  default: ({ text, descriptionLineRows }: { text: string, descriptionLineRows?: number }) => (
    <div data-testid="description" data-rows={descriptionLineRows}>{text}</div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/org-info', () => ({
  default: ({ orgName, packageName }: { orgName: string, packageName: string }) => (
    <div data-testid="org-info">
      {orgName}
      /
      {packageName}
    </div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/placeholder', () => ({
  default: ({ text }: { text: string }) => (
    <div data-testid="placeholder">{text}</div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/title', () => ({
  default: ({ title }: { title: string }) => (
    <div data-testid="title">{title}</div>
  ),
}))

const { default: Card } = await import('@/app/components/plugins/card/index')
type CardPayload = Parameters<typeof Card>[0]['payload']

describe('Plugin Card Rendering Integration', () => {
  beforeEach(() => {
    cleanup()
  })

  const makePayload = (overrides = {}) => ({
    category: 'tool',
    type: 'plugin',
    name: 'google-search',
    org: 'langgenius',
    label: { en_US: 'Google Search', zh_Hans: 'Google搜索' },
    brief: { en_US: 'Search the web using Google', zh_Hans: '使用Google搜索网页' },
    icon: 'https://example.com/icon.png',
    verified: true,
    badges: [] as string[],
    ...overrides,
  }) as CardPayload

  it('renders a complete plugin card with all subcomponents', () => {
    const payload = makePayload()
    render(<Card payload={payload} />)

    expect(screen.getByTestId('card-icon')).toBeInTheDocument()
    expect(screen.getByTestId('title')).toHaveTextContent('Google Search')
    expect(screen.getByTestId('org-info')).toHaveTextContent('langgenius/google-search')
    expect(screen.getByTestId('description')).toHaveTextContent('Search the web using Google')
  })

  it('shows corner mark with category label when not hidden', () => {
    const payload = makePayload()
    render(<Card payload={payload} />)

    expect(screen.getByTestId('corner-mark')).toBeInTheDocument()
  })

  it('hides corner mark when hideCornerMark is true', () => {
    const payload = makePayload()
    render(<Card payload={payload} hideCornerMark />)

    expect(screen.queryByTestId('corner-mark')).not.toBeInTheDocument()
  })

  it('shows installed status on icon', () => {
    const payload = makePayload()
    render(<Card payload={payload} installed />)

    const icon = screen.getByTestId('card-icon')
    expect(icon).toHaveAttribute('data-installed', 'true')
  })

  it('shows install failed status on icon', () => {
    const payload = makePayload()
    render(<Card payload={payload} installFailed />)

    const icon = screen.getByTestId('card-icon')
    expect(icon).toHaveAttribute('data-install-failed', 'true')
  })

  it('renders verified badge when plugin is verified', () => {
    const payload = makePayload({ verified: true })
    render(<Card payload={payload} />)

    expect(screen.getByTestId('verified-badge')).toBeInTheDocument()
  })

  it('renders partner badge when plugin has partner badge', () => {
    const payload = makePayload({ badges: ['partner'] })
    render(<Card payload={payload} />)

    expect(screen.getByTestId('partner-badge')).toBeInTheDocument()
  })

  it('renders footer content when provided', () => {
    const payload = makePayload()
    render(
      <Card
        payload={payload}
        footer={<div data-testid="custom-footer">Custom footer</div>}
      />,
    )

    expect(screen.getByTestId('custom-footer')).toBeInTheDocument()
  })

  it('renders titleLeft content when provided', () => {
    const payload = makePayload()
    render(
      <Card
        payload={payload}
        titleLeft={<span data-testid="title-left-content">New</span>}
      />,
    )

    expect(screen.getByTestId('title-left-content')).toBeInTheDocument()
  })

  it('uses dark icon when theme is dark and icon_dark is provided', () => {
    vi.doMock('@/hooks/use-theme', () => ({
      default: () => ({ theme: 'dark' }),
    }))

    const payload = makePayload({
      icon: 'https://example.com/icon-light.png',
      icon_dark: 'https://example.com/icon-dark.png',
    })

    render(<Card payload={payload} />)
    expect(screen.getByTestId('card-icon')).toBeInTheDocument()
  })

  it('shows loading placeholder when isLoading is true', () => {
    const payload = makePayload()
    render(<Card payload={payload} isLoading loadingFileName="uploading.difypkg" />)

    expect(screen.getByTestId('placeholder')).toBeInTheDocument()
  })

  it('renders description with custom line rows', () => {
    const payload = makePayload()
    render(<Card payload={payload} descriptionLineRows={3} />)

    const description = screen.getByTestId('description')
    expect(description).toHaveAttribute('data-rows', '3')
  })
})

159
web/__tests__/plugins/plugin-data-utilities.test.ts
Normal file
@@ -0,0 +1,159 @@
/**
 * Integration Test: Plugin Data Utilities
 *
 * Tests the integration between plugin utility functions, including
 * tag/category validation, form schema transformation, and
 * credential data processing. Verifies that these utilities work
 * correctly together in processing plugin metadata.
 */
import { describe, expect, it } from 'vitest'

import { transformFormSchemasSecretInput } from '@/app/components/plugins/plugin-auth/utils'
import { getValidCategoryKeys, getValidTagKeys } from '@/app/components/plugins/utils'

type TagInput = Parameters<typeof getValidTagKeys>[0]

describe('Plugin Data Utilities Integration', () => {
  describe('Tag and Category Validation Pipeline', () => {
    it('validates tags and categories in a metadata processing flow', () => {
      const pluginMetadata = {
        tags: ['search', 'productivity', 'invalid-tag', 'media-generate'],
        category: 'tool',
      }

      const validTags = getValidTagKeys(pluginMetadata.tags as TagInput)
      expect(validTags.length).toBeGreaterThan(0)
      expect(validTags.length).toBeLessThanOrEqual(pluginMetadata.tags.length)

      const validCategory = getValidCategoryKeys(pluginMetadata.category)
      expect(validCategory).toBeDefined()
    })

    it('handles completely invalid metadata gracefully', () => {
      const invalidMetadata = {
        tags: ['nonexistent-1', 'nonexistent-2'],
        category: 'nonexistent-category',
      }

      const validTags = getValidTagKeys(invalidMetadata.tags as TagInput)
      expect(validTags).toHaveLength(0)

      const validCategory = getValidCategoryKeys(invalidMetadata.category)
      expect(validCategory).toBeUndefined()
    })

    it('handles undefined and empty inputs', () => {
      expect(getValidTagKeys([] as TagInput)).toHaveLength(0)
      expect(getValidCategoryKeys(undefined)).toBeUndefined()
      expect(getValidCategoryKeys('')).toBeUndefined()
    })
  })

  describe('Credential Secret Masking Pipeline', () => {
    it('masks secrets when displaying credential form data', () => {
      const credentialValues = {
        api_key: 'sk-abc123456789',
        api_endpoint: 'https://api.example.com',
        secret_token: 'secret-token-value',
        description: 'My credential set',
      }

      const secretFields = ['api_key', 'secret_token']

      const displayValues = transformFormSchemasSecretInput(secretFields, credentialValues)

      expect(displayValues.api_key).toBe('[__HIDDEN__]')
      expect(displayValues.secret_token).toBe('[__HIDDEN__]')
      expect(displayValues.api_endpoint).toBe('https://api.example.com')
      expect(displayValues.description).toBe('My credential set')
    })

    it('preserves original values when no secret fields', () => {
      const values = {
        name: 'test',
        endpoint: 'https://api.example.com',
      }

      const result = transformFormSchemasSecretInput([], values)
      expect(result).toEqual(values)
    })

    it('handles falsy secret values without masking', () => {
      const values = {
        api_key: '',
        secret: null as unknown as string,
        other: 'visible',
      }

      const result = transformFormSchemasSecretInput(['api_key', 'secret'], values)
      expect(result.api_key).toBe('')
      expect(result.secret).toBeNull()
      expect(result.other).toBe('visible')
    })

    it('does not mutate the original values object', () => {
      const original = {
        api_key: 'my-secret-key',
        name: 'test',
      }
      const originalCopy = { ...original }

      transformFormSchemasSecretInput(['api_key'], original)

      expect(original).toEqual(originalCopy)
    })
  })

  describe('Combined Plugin Metadata Validation', () => {
    it('processes a complete plugin entry with tags and credentials', () => {
      const pluginEntry = {
        name: 'test-plugin',
        category: 'tool',
        tags: ['search', 'invalid-tag'],
        credentials: {
          api_key: 'sk-test-key-123',
          base_url: 'https://api.test.com',
        },
        secretFields: ['api_key'],
      }

      const validCategory = getValidCategoryKeys(pluginEntry.category)
      expect(validCategory).toBe('tool')

      const validTags = getValidTagKeys(pluginEntry.tags as TagInput)
      expect(validTags).toContain('search')

      const displayCredentials = transformFormSchemasSecretInput(
        pluginEntry.secretFields,
        pluginEntry.credentials,
      )
      expect(displayCredentials.api_key).toBe('[__HIDDEN__]')
      expect(displayCredentials.base_url).toBe('https://api.test.com')

      expect(pluginEntry.credentials.api_key).toBe('sk-test-key-123')
    })

    it('handles multiple plugins in batch processing', () => {
      const plugins = [
        { tags: ['search', 'productivity'], category: 'tool' },
        { tags: ['image', 'design'], category: 'model' },
        { tags: ['invalid'], category: 'extension' },
      ]

      const results = plugins.map(p => ({
        validTags: getValidTagKeys(p.tags as TagInput),
        validCategory: getValidCategoryKeys(p.category),
      }))

      expect(results[0].validTags.length).toBeGreaterThan(0)
      expect(results[0].validCategory).toBe('tool')

      expect(results[1].validTags).toContain('image')
      expect(results[1].validTags).toContain('design')
      expect(results[1].validCategory).toBe('model')

      expect(results[2].validTags).toHaveLength(0)
      expect(results[2].validCategory).toBe('extension')
    })
  })
})

269
web/__tests__/plugins/plugin-install-flow.test.ts
Normal file
@@ -0,0 +1,269 @@
/**
 * Integration Test: Plugin Installation Flow
 *
 * Tests the integration between GitHub release fetching, version comparison,
 * upload handling, and task status polling. Verifies the complete plugin
 * installation pipeline from source discovery to completion.
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/config', () => ({
  GITHUB_ACCESS_TOKEN: '',
}))

const mockToastNotify = vi.fn()
vi.mock('@/app/components/base/toast', () => ({
  default: { notify: (...args: unknown[]) => mockToastNotify(...args) },
}))

const mockUploadGitHub = vi.fn()
vi.mock('@/service/plugins', () => ({
  uploadGitHub: (...args: unknown[]) => mockUploadGitHub(...args),
  checkTaskStatus: vi.fn(),
}))

vi.mock('@/utils/semver', () => ({
  compareVersion: (a: string, b: string) => {
    const parse = (v: string) => v.replace(/^v/, '').split('.').map(Number)
    const [aMajor, aMinor = 0, aPatch = 0] = parse(a)
    const [bMajor, bMinor = 0, bPatch = 0] = parse(b)
    if (aMajor !== bMajor)
      return aMajor > bMajor ? 1 : -1
    if (aMinor !== bMinor)
      return aMinor > bMinor ? 1 : -1
    if (aPatch !== bPatch)
      return aPatch > bPatch ? 1 : -1
    return 0
  },
  getLatestVersion: (versions: string[]) => {
    return versions.sort((a, b) => {
      const parse = (v: string) => v.replace(/^v/, '').split('.').map(Number)
      const [aMaj, aMin = 0, aPat = 0] = parse(a)
      const [bMaj, bMin = 0, bPat = 0] = parse(b)
      if (aMaj !== bMaj)
        return bMaj - aMaj
      if (aMin !== bMin)
        return bMin - aMin
      return bPat - aPat
    })[0]
  },
}))

const { useGitHubReleases, useGitHubUpload } = await import(
  '@/app/components/plugins/install-plugin/hooks',
)

describe('Plugin Installation Flow Integration', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    globalThis.fetch = vi.fn()
  })

  describe('GitHub Release Discovery → Version Check → Upload Pipeline', () => {
    it('fetches releases, checks for updates, and uploads the new version', async () => {
      const mockReleases = [
        {
          tag_name: 'v2.0.0',
          assets: [{ browser_download_url: 'https://github.com/test/v2.difypkg', name: 'plugin-v2.difypkg' }],
        },
        {
          tag_name: 'v1.5.0',
          assets: [{ browser_download_url: 'https://github.com/test/v1.5.difypkg', name: 'plugin-v1.5.difypkg' }],
        },
        {
          tag_name: 'v1.0.0',
          assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
        },
      ]

      ;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
        ok: true,
        json: () => Promise.resolve(mockReleases),
      })

      mockUploadGitHub.mockResolvedValue({
        manifest: { name: 'test-plugin', version: '2.0.0' },
        unique_identifier: 'test-plugin:2.0.0',
      })

      const { fetchReleases, checkForUpdates } = useGitHubReleases()

      const releases = await fetchReleases('test-org', 'test-repo')
      expect(releases).toHaveLength(3)
      expect(releases[0].tag_name).toBe('v2.0.0')

      const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
      expect(needUpdate).toBe(true)
      expect(toastProps.message).toContain('v2.0.0')

      const { handleUpload } = useGitHubUpload()
      const onSuccess = vi.fn()
      const result = await handleUpload(
        'https://github.com/test-org/test-repo',
        'v2.0.0',
        'plugin-v2.difypkg',
        onSuccess,
      )

      expect(mockUploadGitHub).toHaveBeenCalledWith(
        'https://github.com/test-org/test-repo',
        'v2.0.0',
        'plugin-v2.difypkg',
      )
      expect(onSuccess).toHaveBeenCalledWith({
        manifest: { name: 'test-plugin', version: '2.0.0' },
        unique_identifier: 'test-plugin:2.0.0',
      })
      expect(result).toEqual({
        manifest: { name: 'test-plugin', version: '2.0.0' },
        unique_identifier: 'test-plugin:2.0.0',
      })
    })

    it('handles no new version available', async () => {
      const mockReleases = [
        {
          tag_name: 'v1.0.0',
          assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
        },
      ]

      ;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
        ok: true,
        json: () => Promise.resolve(mockReleases),
      })

      const { fetchReleases, checkForUpdates } = useGitHubReleases()

      const releases = await fetchReleases('test-org', 'test-repo')
      const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')

      expect(needUpdate).toBe(false)
      expect(toastProps.type).toBe('info')
      expect(toastProps.message).toBe('No new version available')
    })

    it('handles empty releases', async () => {
      ;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
        ok: true,
        json: () => Promise.resolve([]),
      })

      const { fetchReleases, checkForUpdates } = useGitHubReleases()

      const releases = await fetchReleases('test-org', 'test-repo')
      expect(releases).toHaveLength(0)

      const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
      expect(needUpdate).toBe(false)
      expect(toastProps.type).toBe('error')
      expect(toastProps.message).toBe('Input releases is empty')
    })

    it('handles fetch failure gracefully', async () => {
      ;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
        ok: false,
        status: 404,
      })

      const { fetchReleases } = useGitHubReleases()
      const releases = await fetchReleases('nonexistent-org', 'nonexistent-repo')

      expect(releases).toEqual([])
      expect(mockToastNotify).toHaveBeenCalledWith(
        expect.objectContaining({ type: 'error' }),
      )
    })

    it('handles upload failure gracefully', async () => {
      mockUploadGitHub.mockRejectedValue(new Error('Upload failed'))

      const { handleUpload } = useGitHubUpload()
      const onSuccess = vi.fn()

      await expect(
        handleUpload('https://github.com/test/repo', 'v1.0.0', 'plugin.difypkg', onSuccess),
      ).rejects.toThrow('Upload failed')

      expect(onSuccess).not.toHaveBeenCalled()
      expect(mockToastNotify).toHaveBeenCalledWith(
        expect.objectContaining({ type: 'error', message: 'Error uploading package' }),
      )
    })
  })

  describe('Task Status Polling Integration', () => {
    it('polls until plugin installation succeeds', async () => {
      const mockCheckTaskStatus = vi.fn()
        .mockResolvedValueOnce({
          task: {
            plugins: [{ plugin_unique_identifier: 'test:1.0.0', status: 'running' }],
          },
        })
        .mockResolvedValueOnce({
          task: {
            plugins: [{ plugin_unique_identifier: 'test:1.0.0', status: 'success' }],
          },
        })

      const { checkTaskStatus: fetchCheckTaskStatus } = await import('@/service/plugins')
      ;(fetchCheckTaskStatus as ReturnType<typeof vi.fn>).mockImplementation(mockCheckTaskStatus)

      await vi.doMock('@/utils', () => ({
        sleep: () => Promise.resolve(),
      }))

      const { default: checkTaskStatus } = await import(
        '@/app/components/plugins/install-plugin/base/check-task-status',
      )

      const checker = checkTaskStatus()
      const result = await checker.check({
        taskId: 'task-123',
        pluginUniqueIdentifier: 'test:1.0.0',
      })

      expect(result.status).toBe('success')
    })

    it('returns failure when plugin not found in task', async () => {
      const mockCheckTaskStatus = vi.fn().mockResolvedValue({
        task: {
          plugins: [{ plugin_unique_identifier: 'other:1.0.0', status: 'success' }],
        },
      })

      const { checkTaskStatus: fetchCheckTaskStatus } = await import('@/service/plugins')
      ;(fetchCheckTaskStatus as ReturnType<typeof vi.fn>).mockImplementation(mockCheckTaskStatus)

      const { default: checkTaskStatus } = await import(
        '@/app/components/plugins/install-plugin/base/check-task-status',
      )

      const checker = checkTaskStatus()
      const result = await checker.check({
        taskId: 'task-123',
        pluginUniqueIdentifier: 'test:1.0.0',
      })

      expect(result.status).toBe('failed')
      expect(result.error).toBe('Plugin package not found')
    })

    it('stops polling when stop() is called', async () => {
      const { default: checkTaskStatus } = await import(
        '@/app/components/plugins/install-plugin/base/check-task-status',
      )

      const checker = checkTaskStatus()
      checker.stop()

      const result = await checker.check({
        taskId: 'task-123',
        pluginUniqueIdentifier: 'test:1.0.0',
      })

      expect(result.status).toBe('success')
    })
  })
})

97
web/__tests__/plugins/plugin-marketplace-to-install.test.tsx
Normal file
@@ -0,0 +1,97 @@
import { describe, expect, it, vi } from 'vitest'
import { pluginInstallLimit } from '@/app/components/plugins/install-plugin/hooks/use-install-plugin-limit'
import { InstallationScope } from '@/types/feature'

vi.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: () => ({
    plugin_installation_permission: {
      restrict_to_marketplace_only: false,
      plugin_installation_scope: InstallationScope.ALL,
    },
  }),
}))

describe('Plugin Marketplace to Install Flow', () => {
  describe('install permission validation pipeline', () => {
    const systemFeaturesAll = {
      plugin_installation_permission: {
        restrict_to_marketplace_only: false,
        plugin_installation_scope: InstallationScope.ALL,
      },
    }

    const systemFeaturesMarketplaceOnly = {
      plugin_installation_permission: {
        restrict_to_marketplace_only: true,
        plugin_installation_scope: InstallationScope.ALL,
      },
    }

    const systemFeaturesOfficialOnly = {
      plugin_installation_permission: {
        restrict_to_marketplace_only: false,
        plugin_installation_scope: InstallationScope.OFFICIAL_ONLY,
      },
    }

    it('should allow marketplace plugin when all sources allowed', () => {
      const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'langgenius' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesAll as never)
      expect(result.canInstall).toBe(true)
    })

    it('should allow github plugin when all sources allowed', () => {
      const plugin = { from: 'github' as const, verification: { authorized_category: 'langgenius' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesAll as never)
      expect(result.canInstall).toBe(true)
    })

    it('should block github plugin when marketplace only', () => {
      const plugin = { from: 'github' as const, verification: { authorized_category: 'langgenius' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesMarketplaceOnly as never)
      expect(result.canInstall).toBe(false)
    })

    it('should allow marketplace plugin when marketplace only', () => {
      const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'partner' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesMarketplaceOnly as never)
      expect(result.canInstall).toBe(true)
    })

    it('should allow official plugin when official only', () => {
      const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'langgenius' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesOfficialOnly as never)
      expect(result.canInstall).toBe(true)
    })

    it('should block community plugin when official only', () => {
      const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'community' } }
      const result = pluginInstallLimit(plugin as never, systemFeaturesOfficialOnly as never)
      expect(result.canInstall).toBe(false)
    })
  })

  describe('plugin source classification', () => {
    it('should correctly classify plugin install sources', () => {
      const sources = ['marketplace', 'github', 'package'] as const
      const features = {
        plugin_installation_permission: {
          restrict_to_marketplace_only: true,
          plugin_installation_scope: InstallationScope.ALL,
        },
      }

      const results = sources.map(source => ({
        source,
        canInstall: pluginInstallLimit(
          { from: source, verification: { authorized_category: 'langgenius' } } as never,
          features as never,
        ).canInstall,
      }))

      expect(results.find(r => r.source === 'marketplace')?.canInstall).toBe(true)
      expect(results.find(r => r.source === 'github')?.canInstall).toBe(false)
      expect(results.find(r => r.source === 'package')?.canInstall).toBe(false)
    })
  })
})

120
web/__tests__/plugins/plugin-page-filter-management.test.tsx
Normal file
@@ -0,0 +1,120 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it } from 'vitest'
import { useStore } from '@/app/components/plugins/plugin-page/filter-management/store'

describe('Plugin Page Filter Management Integration', () => {
  beforeEach(() => {
    const { result } = renderHook(() => useStore())
    act(() => {
      result.current.setTagList([])
      result.current.setCategoryList([])
      result.current.setShowTagManagementModal(false)
      result.current.setShowCategoryManagementModal(false)
    })
  })

  describe('tag and category filter lifecycle', () => {
    it('should manage full tag lifecycle: add -> update -> clear', () => {
      const { result } = renderHook(() => useStore())

      const initialTags = [
        { name: 'search', label: { en_US: 'Search' } },
        { name: 'productivity', label: { en_US: 'Productivity' } },
      ]

      act(() => {
        result.current.setTagList(initialTags as never[])
      })
      expect(result.current.tagList).toHaveLength(2)

      const updatedTags = [
        ...initialTags,
        { name: 'image', label: { en_US: 'Image' } },
      ]

      act(() => {
        result.current.setTagList(updatedTags as never[])
      })
      expect(result.current.tagList).toHaveLength(3)

      act(() => {
        result.current.setTagList([])
      })
      expect(result.current.tagList).toHaveLength(0)
    })

    it('should manage full category lifecycle: add -> update -> clear', () => {
      const { result } = renderHook(() => useStore())

      const categories = [
        { name: 'tool', label: { en_US: 'Tool' } },
        { name: 'model', label: { en_US: 'Model' } },
      ]

      act(() => {
        result.current.setCategoryList(categories as never[])
      })
      expect(result.current.categoryList).toHaveLength(2)

      act(() => {
        result.current.setCategoryList([])
      })
      expect(result.current.categoryList).toHaveLength(0)
    })
  })

  describe('modal state management', () => {
    it('should manage tag management modal independently', () => {
      const { result } = renderHook(() => useStore())

      act(() => {
        result.current.setShowTagManagementModal(true)
      })
      expect(result.current.showTagManagementModal).toBe(true)
      expect(result.current.showCategoryManagementModal).toBe(false)

      act(() => {
        result.current.setShowTagManagementModal(false)
      })
      expect(result.current.showTagManagementModal).toBe(false)
    })

    it('should manage category management modal independently', () => {
      const { result } = renderHook(() => useStore())

      act(() => {
        result.current.setShowCategoryManagementModal(true)
      })
      expect(result.current.showCategoryManagementModal).toBe(true)
      expect(result.current.showTagManagementModal).toBe(false)
    })

    it('should support both modals open simultaneously', () => {
      const { result } = renderHook(() => useStore())

      act(() => {
        result.current.setShowTagManagementModal(true)
        result.current.setShowCategoryManagementModal(true)
      })

      expect(result.current.showTagManagementModal).toBe(true)
      expect(result.current.showCategoryManagementModal).toBe(true)
    })
  })

  describe('state persistence across renders', () => {
    it('should maintain filter state when re-rendered', () => {
      const { result, rerender } = renderHook(() => useStore())

      act(() => {
        result.current.setTagList([{ name: 'search' }] as never[])
        result.current.setCategoryList([{ name: 'tool' }] as never[])
      })

      rerender()

      expect(result.current.tagList).toHaveLength(1)
      expect(result.current.categoryList).toHaveLength(1)
    })
  })
})

369
web/__tests__/tools/tool-browsing-and-filtering.test.tsx
Normal file
@@ -0,0 +1,369 @@
|
||||
import type { Collection } from '@/app/components/tools/types'
|
||||
/**
|
||||
* Integration Test: Tool Browsing & Filtering Flow
|
||||
*
|
||||
* Tests the integration between ProviderList, TabSliderNew, LabelFilter,
|
||||
* Input (search), and card rendering. Verifies that tab switching, keyword
|
||||
* filtering, and label filtering work together correctly.
|
||||
*/
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { CollectionType } from '@/app/components/tools/types'
|
||||
|
||||
// ---- Mocks ----
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
const map: Record<string, string> = {
|
||||
'type.builtIn': 'Built-in',
|
||||
'type.custom': 'Custom',
|
||||
'type.workflow': 'Workflow',
|
||||
'noTools': 'No tools found',
|
||||
}
|
||||
return map[key] ?? key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('nuqs', () => ({
|
||||
useQueryState: () => ['builtin', vi.fn()],
|
||||
}))
|
||||
|
||||
vi.mock('@/context/global-public-context', () => ({
|
||||
useGlobalPublicStore: () => ({ enable_marketplace: false }),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/hooks', () => ({
|
||||
useTags: () => ({
|
||||
getTagLabel: (key: string) => key,
|
||||
tags: [],
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-plugins', () => ({
|
||||
useCheckInstalled: () => ({ data: null }),
|
||||
useInvalidateInstalledPluginList: () => vi.fn(),
|
||||
}))
|
||||
|
||||
const mockCollections: Collection[] = [
|
||||
{
|
||||
id: 'google-search',
|
||||
name: 'google_search',
|
||||
author: 'Dify',
|
||||
description: { en_US: 'Google Search Tool', zh_Hans: 'Google搜索工具' },
|
||||
icon: 'https://example.com/google.png',
|
||||
label: { en_US: 'Google Search', zh_Hans: 'Google搜索' },
|
||||
type: CollectionType.builtIn,
|
||||
team_credentials: {},
|
||||
is_team_authorization: true,
|
||||
allow_delete: false,
|
||||
labels: ['search'],
|
||||
},
|
||||
{
|
||||
id: 'weather-api',
|
||||
name: 'weather_api',
|
||||
author: 'Dify',
|
||||
description: { en_US: 'Weather API Tool', zh_Hans: '天气API工具' },
|
||||
icon: 'https://example.com/weather.png',
|
||||
label: { en_US: 'Weather API', zh_Hans: '天气API' },
|
||||
type: CollectionType.builtIn,
|
||||
team_credentials: {},
|
||||
is_team_authorization: false,
|
||||
allow_delete: false,
|
||||
labels: ['utility'],
|
||||
},
|
||||
{
|
||||
id: 'my-custom-tool',
|
||||
name: 'my_custom_tool',
|
||||
author: 'User',
|
||||
description: { en_US: 'My Custom Tool', zh_Hans: '我的自定义工具' },
|
||||
icon: 'https://example.com/custom.png',
|
||||
label: { en_US: 'My Custom Tool', zh_Hans: '我的自定义工具' },
|
||||
type: CollectionType.custom,
|
||||
team_credentials: {},
|
||||
is_team_authorization: false,
|
||||
allow_delete: true,
|
||||
labels: [],
|
||||
},
|
||||
{
|
||||
id: 'workflow-tool-1',
|
||||
name: 'workflow_tool_1',
|
||||
author: 'User',
|
||||
description: { en_US: 'Workflow Tool', zh_Hans: '工作流工具' },
|
||||
icon: 'https://example.com/workflow.png',
|
||||
label: { en_US: 'Workflow Tool', zh_Hans: '工作流工具' },
|
||||
type: CollectionType.workflow,
|
||||
team_credentials: {},
|
||||
is_team_authorization: false,
|
||||
allow_delete: true,
|
||||
labels: [],
|
||||
},
|
||||
]
|
||||
|
||||
const mockRefetch = vi.fn()
|
||||
vi.mock('@/service/use-tools', () => ({
|
||||
useAllToolProviders: () => ({
|
||||
data: mockCollections,
|
||||
refetch: mockRefetch,
|
||||
isSuccess: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/tab-slider-new', () => ({
|
||||
default: ({ value, onChange, options }: { value: string, onChange: (v: string) => void, options: Array<{ value: string, text: string }> }) => (
|
||||
<div data-testid="tab-slider">
|
||||
{options.map((opt: { value: string, text: string }) => (
|
||||
<button
|
||||
key={opt.value}
|
||||
data-testid={`tab-${opt.value}`}
|
||||
data-active={value === opt.value ? 'true' : 'false'}
|
||||
onClick={() => onChange(opt.value)}
|
||||
>
|
||||
{opt.text}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/input', () => ({
|
||||
default: ({ value, onChange, onClear, showLeftIcon, showClearIcon, wrapperClassName }: {
|
||||
value: string
|
||||
onChange: (e: { target: { value: string } }) => void
|
||||
onClear: () => void
|
||||
showLeftIcon?: boolean
|
||||
showClearIcon?: boolean
|
||||
wrapperClassName?: string
|
||||
}) => (
|
||||
<div data-testid="search-input-wrapper" className={wrapperClassName}>
|
||||
<input
|
||||
data-testid="search-input"
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
data-left-icon={showLeftIcon ? 'true' : 'false'}
|
||||
data-clear-icon={showClearIcon ? 'true' : 'false'}
|
||||
/>
|
||||
{showClearIcon && value && (
|
||||
<button data-testid="clear-search" onClick={onClear}>Clear</button>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card', () => ({
|
||||
default: ({ payload, className }: { payload: { brief: Record<string, string> | string, name: string }, className?: string }) => {
|
||||
const briefText = typeof payload.brief === 'object' ? payload.brief?.en_US || '' : payload.brief
|
||||
return (
|
||||
<div data-testid={`card-${payload.name}`} className={className}>
|
||||
<span>{payload.name}</span>
|
||||
<span>{briefText}</span>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/card-more-info', () => ({
|
||||
default: ({ tags }: { tags: string[] }) => (
|
||||
<div data-testid="card-more-info">{tags.join(', ')}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/labels/filter', () => ({
|
||||
default: ({ value: _value, onChange }: { value: string[], onChange: (v: string[]) => void }) => (
|
||||
<div data-testid="label-filter">
|
||||
<button data-testid="filter-search" onClick={() => onChange(['search'])}>Filter: search</button>
|
||||
<button data-testid="filter-utility" onClick={() => onChange(['utility'])}>Filter: utility</button>
|
||||
<button data-testid="filter-clear" onClick={() => onChange([])}>Clear filter</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/provider/custom-create-card', () => ({
|
||||
default: () => <div data-testid="custom-create-card">Create Custom Tool</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/provider/detail', () => ({
|
||||
default: ({ collection, onHide }: { collection: Collection, onHide: () => void }) => (
|
||||
<div data-testid="provider-detail">
|
||||
<span data-testid="detail-name">{collection.name}</span>
|
||||
<button data-testid="detail-close" onClick={onHide}>Close</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/provider/empty', () => ({
|
||||
default: () => <div data-testid="workflow-empty">No workflow tools</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/plugin-detail-panel', () => ({
|
||||
default: ({ detail, onHide }: { detail: unknown, onHide: () => void }) => (
|
||||
detail ? <div data-testid="plugin-detail-panel"><button onClick={onHide}>Close</button></div> : null
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/marketplace/empty', () => ({
|
||||
default: ({ text }: { text: string }) => <div data-testid="empty-state">{text}</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/marketplace', () => ({
|
||||
default: () => null,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/mcp', () => ({
|
||||
default: () => <div data-testid="mcp-list">MCP List</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/classnames', () => ({
|
||||
cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/block-selector/types', () => ({
|
||||
ToolTypeEnum: { BuiltIn: 'builtin', Custom: 'api', Workflow: 'workflow', MCP: 'mcp' },
|
||||
}))
|
||||
|
||||
const { default: ProviderList } = await import('@/app/components/tools/provider-list')
|
||||
|
||||
const createWrapper = () => {
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: { queries: { retry: false } },
|
||||
})
|
||||
return ({ children }: { children: React.ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
)
|
||||
}
|
||||
|
||||
describe('Tool Browsing & Filtering Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
cleanup()
|
||||
})
|
||||
|
||||
it('renders tab options and built-in tools by default', () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
expect(screen.getByTestId('tab-slider')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('tab-builtin')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('tab-api')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('tab-workflow')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('tab-mcp')).toBeInTheDocument()
|
||||
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('card-my_custom_tool')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('card-workflow_tool_1')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('filters tools by keyword search', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
const searchInput = screen.getByTestId('search-input')
|
||||
fireEvent.change(searchInput, { target: { value: 'Google' } })
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('clears search keyword and shows all tools again', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
const searchInput = screen.getByTestId('search-input')
|
||||
fireEvent.change(searchInput, { target: { value: 'Google' } })
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.change(searchInput, { target: { value: '' } })
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('filters tools by label tags', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
fireEvent.click(screen.getByTestId('filter-search'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('clears label filter and shows all tools', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
fireEvent.click(screen.getByTestId('filter-utility'))
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByTestId('card-google_search')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('filter-clear'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('combines keyword search and label filter', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
fireEvent.click(screen.getByTestId('filter-search'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
const searchInput = screen.getByTestId('search-input')
|
||||
fireEvent.change(searchInput, { target: { value: 'Weather' } })
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByTestId('card-google_search')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('opens provider detail when clicking a non-plugin collection card', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
const card = screen.getByTestId('card-google_search')
|
||||
fireEvent.click(card.parentElement!)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('provider-detail')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('detail-name')).toHaveTextContent('google_search')
|
||||
})
|
||||
})
|
||||
|
||||
it('closes provider detail and deselects current provider', async () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
const card = screen.getByTestId('card-google_search')
|
||||
fireEvent.click(card.parentElement!)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('provider-detail')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('detail-close'))
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByTestId('provider-detail')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('shows label filter for non-MCP tabs', () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
expect(screen.getByTestId('label-filter')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows search input on all tabs', () => {
|
||||
render(<ProviderList />, { wrapper: createWrapper() })
|
||||
|
||||
expect(screen.getByTestId('search-input')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
239
web/__tests__/tools/tool-data-processing.test.ts
Normal file
@@ -0,0 +1,239 @@
|
||||
/**
|
||||
* Integration Test: Tool Data Processing Pipeline
|
||||
*
|
||||
* Tests the integration between tool utility functions and type conversions.
|
||||
* Verifies that data flows correctly through the processing pipeline:
|
||||
* raw API data → form schemas → form values → configured values.
|
||||
*/
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils/index'
|
||||
import {
|
||||
addDefaultValue,
|
||||
generateFormValue,
|
||||
getConfiguredValue,
|
||||
getPlainValue,
|
||||
getStructureValue,
|
||||
toolCredentialToFormSchemas,
|
||||
toolParametersToFormSchemas,
|
||||
toType,
|
||||
triggerEventParametersToFormSchemas,
|
||||
} from '@/app/components/tools/utils/to-form-schema'
|
||||
|
||||
describe('Tool Data Processing Pipeline Integration', () => {
|
||||
describe('End-to-end: API schema → form schema → form value', () => {
|
||||
it('processes tool parameters through the full pipeline', () => {
|
||||
const rawParameters = [
|
||||
{
|
||||
name: 'query',
|
||||
label: { en_US: 'Search Query', zh_Hans: '搜索查询' },
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'hello',
|
||||
form: 'llm',
|
||||
human_description: { en_US: 'Enter your search query', zh_Hans: '输入搜索查询' },
|
||||
llm_description: 'The search query string',
|
||||
options: [],
|
||||
},
|
||||
{
|
||||
name: 'limit',
|
||||
label: { en_US: 'Result Limit', zh_Hans: '结果限制' },
|
||||
type: 'number',
|
||||
required: false,
|
||||
default: '10',
|
||||
form: 'form',
|
||||
human_description: { en_US: 'Maximum results', zh_Hans: '最大结果数' },
|
||||
llm_description: 'Limit for results',
|
||||
options: [],
|
||||
},
|
||||
]
|
||||
|
||||
const formSchemas = toolParametersToFormSchemas(rawParameters as unknown as Parameters<typeof toolParametersToFormSchemas>[0])
|
||||
expect(formSchemas).toHaveLength(2)
|
||||
expect(formSchemas[0].variable).toBe('query')
|
||||
expect(formSchemas[0].required).toBe(true)
|
||||
expect(formSchemas[0].type).toBe('text-input')
|
||||
expect(formSchemas[1].variable).toBe('limit')
|
||||
expect(formSchemas[1].type).toBe('number-input')
|
||||
|
||||
const withDefaults = addDefaultValue({}, formSchemas)
|
||||
expect(withDefaults.query).toBe('hello')
|
||||
expect(withDefaults.limit).toBe('10')
|
||||
|
||||
const formValues = generateFormValue({}, formSchemas, false)
|
||||
expect(formValues).toBeDefined()
|
||||
expect(formValues.query).toBeDefined()
|
||||
expect(formValues.limit).toBeDefined()
|
||||
})
|
||||
|
||||
it('processes tool credentials through the pipeline', () => {
|
||||
const rawCredentials = [
|
||||
{
|
||||
name: 'api_key',
|
||||
label: { en_US: 'API Key', zh_Hans: 'API 密钥' },
|
||||
type: 'secret-input',
|
||||
required: true,
|
||||
default: '',
|
||||
placeholder: { en_US: 'Enter API key', zh_Hans: '输入 API 密钥' },
|
||||
help: { en_US: 'Your API key', zh_Hans: '你的 API 密钥' },
|
||||
url: 'https://example.com/get-key',
|
||||
options: [],
|
||||
},
|
||||
]
|
||||
|
||||
const credentialSchemas = toolCredentialToFormSchemas(rawCredentials as Parameters<typeof toolCredentialToFormSchemas>[0])
|
||||
expect(credentialSchemas).toHaveLength(1)
|
||||
expect(credentialSchemas[0].variable).toBe('api_key')
|
||||
expect(credentialSchemas[0].required).toBe(true)
|
||||
expect(credentialSchemas[0].type).toBe('secret-input')
|
||||
})
|
||||
|
||||
it('processes trigger event parameters through the pipeline', () => {
|
||||
const rawParams = [
|
||||
{
|
||||
name: 'event_type',
|
||||
label: { en_US: 'Event Type', zh_Hans: '事件类型' },
|
||||
type: 'select',
|
||||
required: true,
|
||||
default: 'push',
|
||||
form: 'form',
|
||||
description: { en_US: 'Type of event', zh_Hans: '事件类型' },
|
||||
options: [
|
||||
{ value: 'push', label: { en_US: 'Push', zh_Hans: '推送' } },
|
||||
{ value: 'pull', label: { en_US: 'Pull', zh_Hans: '拉取' } },
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
const schemas = triggerEventParametersToFormSchemas(rawParams as unknown as Parameters<typeof triggerEventParametersToFormSchemas>[0])
|
||||
expect(schemas).toHaveLength(1)
|
||||
expect(schemas[0].name).toBe('event_type')
|
||||
expect(schemas[0].type).toBe('select')
|
||||
expect(schemas[0].options).toHaveLength(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Type conversion integration', () => {
|
||||
it('converts all supported types correctly', () => {
|
||||
const typeConversions = [
|
||||
{ input: 'string', expected: 'text-input' },
|
||||
{ input: 'number', expected: 'number-input' },
|
||||
{ input: 'boolean', expected: 'checkbox' },
|
||||
{ input: 'select', expected: 'select' },
|
||||
{ input: 'secret-input', expected: 'secret-input' },
|
||||
{ input: 'file', expected: 'file' },
|
||||
{ input: 'files', expected: 'files' },
|
||||
]
|
||||
|
||||
typeConversions.forEach(({ input, expected }) => {
|
||||
expect(toType(input)).toBe(expected)
|
||||
})
|
||||
})
|
||||
|
||||
it('returns the original type for unrecognized types', () => {
|
||||
expect(toType('unknown-type')).toBe('unknown-type')
|
||||
expect(toType('app-selector')).toBe('app-selector')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Value extraction integration', () => {
|
||||
it('wraps values with getStructureValue and extracts inner value with getPlainValue', () => {
|
||||
const plainInput = { query: 'test', limit: 10 }
|
||||
const structured = getStructureValue(plainInput)
|
||||
|
||||
expect(structured.query).toEqual({ value: 'test' })
|
||||
expect(structured.limit).toEqual({ value: 10 })
|
||||
|
||||
const objectStructured = {
|
||||
query: { value: { type: 'constant', content: 'test search' } },
|
||||
limit: { value: { type: 'constant', content: 10 } },
|
||||
}
|
||||
const extracted = getPlainValue(objectStructured)
|
||||
expect(extracted.query).toEqual({ type: 'constant', content: 'test search' })
|
||||
expect(extracted.limit).toEqual({ type: 'constant', content: 10 })
|
||||
})
|
||||
|
||||
it('handles getConfiguredValue for workflow tool configurations', () => {
|
||||
const formSchemas = [
|
||||
{ variable: 'query', type: 'text-input', default: 'default-query' },
|
||||
{ variable: 'format', type: 'select', default: 'json' },
|
||||
]
|
||||
|
||||
const configured = getConfiguredValue({}, formSchemas)
|
||||
expect(configured).toBeDefined()
|
||||
expect(configured.query).toBeDefined()
|
||||
expect(configured.format).toBeDefined()
|
||||
})
|
||||
|
||||
it('preserves existing values in getConfiguredValue', () => {
|
||||
const formSchemas = [
|
||||
{ variable: 'query', type: 'text-input', default: 'default-query' },
|
||||
]
|
||||
|
||||
const configured = getConfiguredValue({ query: 'my-existing-query' }, formSchemas)
|
||||
expect(configured.query).toBe('my-existing-query')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Agent utilities integration', () => {
|
||||
it('sorts agent thoughts and enriches with file infos end-to-end', () => {
|
||||
const thoughts = [
|
||||
{ id: 't3', position: 3, tool: 'search', files: ['f1'] },
|
||||
{ id: 't1', position: 1, tool: 'analyze', files: [] },
|
||||
{ id: 't2', position: 2, tool: 'summarize', files: ['f2'] },
|
||||
] as Parameters<typeof sortAgentSorts>[0]
|
||||
|
||||
const messageFiles = [
|
||||
{ id: 'f1', name: 'result.txt', type: 'document' },
|
||||
{ id: 'f2', name: 'summary.pdf', type: 'document' },
|
||||
] as Parameters<typeof addFileInfos>[1]
|
||||
|
||||
const sorted = sortAgentSorts(thoughts)
|
||||
expect(sorted[0].id).toBe('t1')
|
||||
expect(sorted[1].id).toBe('t2')
|
||||
expect(sorted[2].id).toBe('t3')
|
||||
|
||||
const enriched = addFileInfos(sorted, messageFiles)
|
||||
expect(enriched[0].message_files).toBeUndefined()
|
||||
expect(enriched[1].message_files).toHaveLength(1)
|
||||
expect(enriched[1].message_files![0].id).toBe('f2')
|
||||
expect(enriched[2].message_files).toHaveLength(1)
|
||||
expect(enriched[2].message_files![0].id).toBe('f1')
|
||||
})
|
||||
|
||||
it('handles null inputs gracefully in the pipeline', () => {
|
||||
const sortedNull = sortAgentSorts(null as never)
|
||||
expect(sortedNull).toBeNull()
|
||||
|
||||
const enrichedNull = addFileInfos(null as never, [])
|
||||
expect(enrichedNull).toBeNull()
|
||||
|
||||
// addFileInfos with empty list and null files returns the mapped (empty) list
|
||||
const enrichedEmptyList = addFileInfos([], null as never)
|
||||
expect(enrichedEmptyList).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('Default value application', () => {
|
||||
it('applies defaults only to empty fields, preserving user values', () => {
|
||||
const userValues = { api_key: 'user-provided-key' }
|
||||
const schemas = [
|
||||
{ variable: 'api_key', type: 'text-input', default: 'default-key', name: 'api_key' },
|
||||
{ variable: 'secret', type: 'secret-input', default: 'default-secret', name: 'secret' },
|
||||
]
|
||||
|
||||
const result = addDefaultValue(userValues, schemas)
|
||||
expect(result.api_key).toBe('user-provided-key')
|
||||
expect(result.secret).toBe('default-secret')
|
||||
})
|
||||
|
||||
it('handles boolean type conversion in defaults', () => {
|
||||
const schemas = [
|
||||
{ variable: 'enabled', type: 'boolean', default: 'true', name: 'enabled' },
|
||||
]
|
||||
|
||||
const result = addDefaultValue({ enabled: 'true' }, schemas)
|
||||
expect(result.enabled).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
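The default-value assertions above pin down a small contract: defaults fill only the fields the caller left empty, and boolean-typed fields coerce 'true'/'false' strings into real booleans. Below is a minimal sketch of that contract, assuming nothing about the real helper in to-form-schema beyond what these tests assert; the name applyDefaults is hypothetical.

// Illustrative sketch only; not the shipped addDefaultValue implementation.
type SchemaLike = { variable: string, type: string, default?: unknown }

const applyDefaults = (value: Record<string, unknown>, schemas: SchemaLike[]) => {
  const result: Record<string, unknown> = { ...value }
  for (const schema of schemas) {
    const current = result[schema.variable]
    // Fill defaults only where the caller provided nothing.
    if (current === undefined || current === null || current === '')
      result[schema.variable] = schema.default
    // Boolean-typed fields coerce 'true'/'false' strings to real booleans.
    if (schema.type === 'boolean' && typeof result[schema.variable] === 'string')
      result[schema.variable] = result[schema.variable] === 'true'
  }
  return result
}

With that sketch, applyDefaults({ api_key: 'user-provided-key' }, schemas) keeps the user-provided value and only backfills the untouched secret field, which is what the last two tests expect.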
web/__tests__/tools/tool-provider-detail-flow.test.tsx (new file, 548 lines)
@@ -0,0 +1,548 @@
|
||||
import type { Collection } from '@/app/components/tools/types'
|
||||
/**
|
||||
* Integration Test: Tool Provider Detail Flow
|
||||
*
|
||||
* Tests the integration between ProviderDetail, ConfigCredential,
|
||||
* EditCustomToolModal, WorkflowToolModal, and service APIs.
|
||||
* Verifies that different provider types render correctly and
|
||||
* handle auth/edit/delete flows.
|
||||
*/
|
||||
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { CollectionType } from '@/app/components/tools/types'
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string, opts?: Record<string, unknown>) => {
|
||||
const map: Record<string, string> = {
|
||||
'auth.authorized': 'Authorized',
|
||||
'auth.unauthorized': 'Set up credentials',
|
||||
'auth.setup': 'NEEDS SETUP',
|
||||
'createTool.editAction': 'Edit',
|
||||
'createTool.deleteToolConfirmTitle': 'Delete Tool',
|
||||
'createTool.deleteToolConfirmContent': 'Are you sure?',
|
||||
'createTool.toolInput.title': 'Tool Input',
|
||||
'createTool.toolInput.required': 'Required',
|
||||
'openInStudio': 'Open in Studio',
|
||||
'api.actionSuccess': 'Action succeeded',
|
||||
}
|
||||
if (key === 'detailPanel.actionNum')
|
||||
return `${opts?.num ?? 0} actions`
|
||||
if (key === 'includeToolNum')
|
||||
return `${opts?.num ?? 0} actions`
|
||||
return map[key] ?? key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useLocale: () => 'en',
|
||||
}))
|
||||
|
||||
vi.mock('@/i18n-config/language', () => ({
|
||||
getLanguage: () => 'en_US',
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useAppContext: () => ({
|
||||
isCurrentWorkspaceManager: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
const mockSetShowModelModal = vi.fn()
|
||||
vi.mock('@/context/modal-context', () => ({
|
||||
useModalContext: () => ({
|
||||
setShowModelModal: mockSetShowModelModal,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/provider-context', () => ({
|
||||
useProviderContext: () => ({
|
||||
modelProviders: [
|
||||
{ provider: 'model-provider-1', name: 'Model Provider 1' },
|
||||
],
|
||||
}),
|
||||
}))
|
||||
|
||||
const mockFetchBuiltInToolList = vi.fn().mockResolvedValue([
|
||||
{ name: 'tool-1', description: { en_US: 'Tool 1' }, parameters: [] },
|
||||
{ name: 'tool-2', description: { en_US: 'Tool 2' }, parameters: [] },
|
||||
])
|
||||
const mockFetchModelToolList = vi.fn().mockResolvedValue([])
|
||||
const mockFetchCustomToolList = vi.fn().mockResolvedValue([])
|
||||
const mockFetchCustomCollection = vi.fn().mockResolvedValue({
|
||||
credentials: { auth_type: 'none' },
|
||||
schema: '',
|
||||
schema_type: 'openapi',
|
||||
})
|
||||
const mockFetchWorkflowToolDetail = vi.fn().mockResolvedValue({
|
||||
workflow_app_id: 'app-123',
|
||||
tool: {
|
||||
parameters: [
|
||||
{ name: 'query', llm_description: 'Search query', form: 'text', required: true, type: 'string' },
|
||||
],
|
||||
labels: ['search'],
|
||||
},
|
||||
})
|
||||
const mockUpdateBuiltInToolCredential = vi.fn().mockResolvedValue({})
|
||||
const mockRemoveBuiltInToolCredential = vi.fn().mockResolvedValue({})
|
||||
const mockUpdateCustomCollection = vi.fn().mockResolvedValue({})
|
||||
const mockRemoveCustomCollection = vi.fn().mockResolvedValue({})
|
||||
const mockDeleteWorkflowTool = vi.fn().mockResolvedValue({})
|
||||
const mockSaveWorkflowToolProvider = vi.fn().mockResolvedValue({})
|
||||
|
||||
vi.mock('@/service/tools', () => ({
|
||||
fetchBuiltInToolList: (...args: unknown[]) => mockFetchBuiltInToolList(...args),
|
||||
fetchModelToolList: (...args: unknown[]) => mockFetchModelToolList(...args),
|
||||
fetchCustomToolList: (...args: unknown[]) => mockFetchCustomToolList(...args),
|
||||
fetchCustomCollection: (...args: unknown[]) => mockFetchCustomCollection(...args),
|
||||
fetchWorkflowToolDetail: (...args: unknown[]) => mockFetchWorkflowToolDetail(...args),
|
||||
updateBuiltInToolCredential: (...args: unknown[]) => mockUpdateBuiltInToolCredential(...args),
|
||||
removeBuiltInToolCredential: (...args: unknown[]) => mockRemoveBuiltInToolCredential(...args),
|
||||
updateCustomCollection: (...args: unknown[]) => mockUpdateCustomCollection(...args),
|
||||
removeCustomCollection: (...args: unknown[]) => mockRemoveCustomCollection(...args),
|
||||
deleteWorkflowTool: (...args: unknown[]) => mockDeleteWorkflowTool(...args),
|
||||
saveWorkflowToolProvider: (...args: unknown[]) => mockSaveWorkflowToolProvider(...args),
|
||||
fetchBuiltInToolCredential: vi.fn().mockResolvedValue({}),
|
||||
fetchBuiltInToolCredentialSchema: vi.fn().mockResolvedValue([]),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-tools', () => ({
|
||||
useInvalidateAllWorkflowTools: () => vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/classnames', () => ({
|
||||
cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/var', () => ({
|
||||
basePath: '',
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/drawer', () => ({
|
||||
default: ({ isOpen, children, onClose }: { isOpen: boolean, children: React.ReactNode, onClose: () => void }) => (
|
||||
isOpen
|
||||
? (
|
||||
<div data-testid="drawer">
|
||||
{children}
|
||||
<button data-testid="drawer-close" onClick={onClose}>Close Drawer</button>
|
||||
</div>
|
||||
)
|
||||
: null
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/confirm', () => ({
|
||||
default: ({ title, isShow, onConfirm, onCancel }: {
|
||||
title: string
|
||||
content: string
|
||||
isShow: boolean
|
||||
onConfirm: () => void
|
||||
onCancel: () => void
|
||||
}) => (
|
||||
isShow
|
||||
? (
|
||||
<div data-testid="confirm-dialog">
|
||||
<span>{title}</span>
|
||||
<button data-testid="confirm-ok" onClick={onConfirm}>Confirm</button>
|
||||
<button data-testid="confirm-cancel" onClick={onCancel}>Cancel</button>
|
||||
</div>
|
||||
)
|
||||
: null
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/toast', () => ({
|
||||
default: { notify: vi.fn() },
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/icons/src/vender/line/general', () => ({
|
||||
LinkExternal02: () => <span data-testid="link-icon" />,
|
||||
Settings01: () => <span data-testid="settings-icon" />,
|
||||
}))
|
||||
|
||||
vi.mock('@remixicon/react', () => ({
|
||||
RiCloseLine: () => <span data-testid="close-icon" />,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/header/account-setting/model-provider-page/declarations', () => ({
|
||||
ConfigurationMethodEnum: { predefinedModel: 'predefined-model' },
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/header/indicator', () => ({
|
||||
default: ({ color }: { color: string }) => <span data-testid={`indicator-${color}`} />,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/card-icon', () => ({
|
||||
default: ({ src }: { src: string }) => <div data-testid="card-icon" data-src={typeof src === 'string' ? src : 'emoji'} />,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/description', () => ({
|
||||
default: ({ text }: { text: string }) => <div data-testid="description">{text}</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/org-info', () => ({
|
||||
default: ({ orgName, packageName }: { orgName: string, packageName: string }) => (
|
||||
<div data-testid="org-info">
|
||||
{orgName}
|
||||
{' '}
|
||||
/
|
||||
{' '}
|
||||
{packageName}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/title', () => ({
|
||||
default: ({ title }: { title: string }) => <div data-testid="title">{title}</div>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/edit-custom-collection-modal', () => ({
|
||||
default: ({ onHide, onEdit, onRemove }: { onHide: () => void, onEdit: (data: unknown) => void, onRemove: () => void, payload: unknown }) => (
|
||||
<div data-testid="edit-custom-modal">
|
||||
<button data-testid="custom-modal-hide" onClick={onHide}>Hide</button>
|
||||
<button data-testid="custom-modal-save" onClick={() => onEdit({ name: 'updated', labels: [] })}>Save</button>
|
||||
<button data-testid="custom-modal-remove" onClick={onRemove}>Remove</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/setting/build-in/config-credentials', () => ({
|
||||
default: ({ onCancel, onSaved, onRemove }: { collection: Collection, onCancel: () => void, onSaved: (v: Record<string, unknown>) => void, onRemove: () => void }) => (
|
||||
<div data-testid="config-credential">
|
||||
<button data-testid="cred-cancel" onClick={onCancel}>Cancel</button>
|
||||
<button data-testid="cred-save" onClick={() => onSaved({ api_key: 'test-key' })}>Save</button>
|
||||
<button data-testid="cred-remove" onClick={onRemove}>Remove</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/workflow-tool', () => ({
|
||||
default: ({ onHide, onSave, onRemove }: { payload: unknown, onHide: () => void, onSave: (d: unknown) => void, onRemove: () => void }) => (
|
||||
<div data-testid="workflow-tool-modal">
|
||||
<button data-testid="wf-modal-hide" onClick={onHide}>Hide</button>
|
||||
<button data-testid="wf-modal-save" onClick={() => onSave({ name: 'updated-wf' })}>Save</button>
|
||||
<button data-testid="wf-modal-remove" onClick={onRemove}>Remove</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/provider/tool-item', () => ({
|
||||
default: ({ tool }: { tool: { name: string } }) => (
|
||||
<div data-testid={`tool-item-${tool.name}`}>{tool.name}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
const { default: ProviderDetail } = await import('@/app/components/tools/provider/detail')
|
||||
|
||||
const makeCollection = (overrides: Partial<Collection> = {}): Collection => ({
|
||||
id: 'test-collection',
|
||||
name: 'test_collection',
|
||||
author: 'Dify',
|
||||
description: { en_US: 'Test collection description', zh_Hans: '测试集合描述' },
|
||||
icon: 'https://example.com/icon.png',
|
||||
label: { en_US: 'Test Collection', zh_Hans: '测试集合' },
|
||||
type: CollectionType.builtIn,
|
||||
team_credentials: {},
|
||||
is_team_authorization: false,
|
||||
allow_delete: false,
|
||||
labels: [],
|
||||
...overrides,
|
||||
})
|
||||
|
||||
const mockOnHide = vi.fn()
|
||||
const mockOnRefreshData = vi.fn()
|
||||
|
||||
describe('Tool Provider Detail Flow Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
cleanup()
|
||||
})
|
||||
|
||||
describe('Built-in Provider', () => {
|
||||
it('renders provider detail with title, author, and description', async () => {
|
||||
const collection = makeCollection()
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('title')).toHaveTextContent('Test Collection')
|
||||
expect(screen.getByTestId('org-info')).toHaveTextContent('Dify')
|
||||
expect(screen.getByTestId('description')).toHaveTextContent('Test collection description')
|
||||
})
|
||||
})
|
||||
|
||||
it('loads tool list from API on mount', async () => {
|
||||
const collection = makeCollection()
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockFetchBuiltInToolList).toHaveBeenCalledWith('test_collection')
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('tool-item-tool-1')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('tool-item-tool-2')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('shows "Set up credentials" button when not authorized and needs auth', async () => {
|
||||
const collection = makeCollection({
|
||||
allow_delete: true,
|
||||
is_team_authorization: false,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Set up credentials')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('shows "Authorized" button when authorized', async () => {
|
||||
const collection = makeCollection({
|
||||
allow_delete: true,
|
||||
is_team_authorization: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Authorized')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('indicator-green')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('opens ConfigCredential when clicking auth button (built-in type)', async () => {
|
||||
const collection = makeCollection({
|
||||
allow_delete: true,
|
||||
is_team_authorization: false,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Set up credentials')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Set up credentials'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('config-credential')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('saves credential and refreshes data', async () => {
|
||||
const collection = makeCollection({
|
||||
allow_delete: true,
|
||||
is_team_authorization: false,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Set up credentials')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Set up credentials'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('config-credential')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('cred-save'))
|
||||
await waitFor(() => {
|
||||
expect(mockUpdateBuiltInToolCredential).toHaveBeenCalledWith('test_collection', { api_key: 'test-key' })
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('removes credential and refreshes data', async () => {
|
||||
const collection = makeCollection({
|
||||
allow_delete: true,
|
||||
is_team_authorization: false,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
fireEvent.click(screen.getByText('Set up credentials'))
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('config-credential')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('cred-remove'))
|
||||
await waitFor(() => {
|
||||
expect(mockRemoveBuiltInToolCredential).toHaveBeenCalledWith('test_collection')
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Model Provider', () => {
|
||||
it('opens model modal when clicking auth button for model type', async () => {
|
||||
const collection = makeCollection({
|
||||
id: 'model-provider-1',
|
||||
type: CollectionType.model,
|
||||
allow_delete: true,
|
||||
is_team_authorization: false,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Set up credentials')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Set up credentials'))
|
||||
await waitFor(() => {
|
||||
expect(mockSetShowModelModal).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
payload: expect.objectContaining({
|
||||
currentProvider: expect.objectContaining({ provider: 'model-provider-1' }),
|
||||
}),
|
||||
}),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Custom Provider', () => {
|
||||
it('fetches custom collection details and shows edit button', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.custom,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockFetchCustomCollection).toHaveBeenCalledWith('test_collection')
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('opens edit modal and saves changes', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.custom,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Edit'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('edit-custom-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('custom-modal-save'))
|
||||
await waitFor(() => {
|
||||
expect(mockUpdateCustomCollection).toHaveBeenCalled()
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
it('shows delete confirmation and removes collection', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.custom,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Edit'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('edit-custom-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('custom-modal-remove'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('confirm-dialog')).toBeInTheDocument()
|
||||
expect(screen.getByText('Delete Tool')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('confirm-ok'))
|
||||
await waitFor(() => {
|
||||
expect(mockRemoveCustomCollection).toHaveBeenCalledWith('test_collection')
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Workflow Provider', () => {
|
||||
it('fetches workflow tool detail and shows "Open in Studio" and "Edit" buttons', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.workflow,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockFetchWorkflowToolDetail).toHaveBeenCalledWith('test-collection')
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Open in Studio')).toBeInTheDocument()
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('shows workflow tool parameters', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.workflow,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('query')).toBeInTheDocument()
|
||||
expect(screen.getByText('string')).toBeInTheDocument()
|
||||
expect(screen.getByText('Search query')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
it('deletes workflow tool through confirmation dialog', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.workflow,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Edit'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('workflow-tool-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('wf-modal-remove'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('confirm-dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('confirm-ok'))
|
||||
await waitFor(() => {
|
||||
expect(mockDeleteWorkflowTool).toHaveBeenCalledWith('test-collection')
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Drawer Interaction', () => {
|
||||
it('calls onHide when closing the drawer', async () => {
|
||||
const collection = makeCollection()
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('drawer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('drawer-close'))
|
||||
expect(mockOnHide).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
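The file that follows re-exposes every '@/service/tools' export as (...args) => mockFn(...args) rather than handing out the spy itself. Because vi.mock factories are hoisted, deferring the dereference to call time lets each test reconfigure the spies and avoids reading them before their const declarations have run. A stripped-down sketch of the same pattern, with a hypothetical module path:

import { vi } from 'vitest'

const mockFetchThing = vi.fn().mockResolvedValue({ ok: true })

// The factory only reads mockFetchThing when the mocked export is actually called,
// so the hoisted vi.mock() never observes the spy before it is initialized.
vi.mock('@/service/example', () => ({
  fetchThing: (...args: unknown[]) => mockFetchThing(...args),
}))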
web/__tests__/workflow-parallel-limit.test.tsx (new file, 261 lines)
@@ -0,0 +1,261 @@
|
||||
/**
|
||||
* MAX_PARALLEL_LIMIT Configuration Bug Test
|
||||
*
|
||||
* This test reproduces and verifies the fix for issue #23083:
|
||||
* MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
|
||||
*/
|
||||
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
|
||||
// Mock environment variables before importing constants
|
||||
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
// Test with different environment values
|
||||
function setupEnvironment(value?: string) {
|
||||
if (value)
|
||||
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
|
||||
else
|
||||
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
// Clear module cache to force re-evaluation
|
||||
vi.resetModules()
|
||||
}
|
||||
|
||||
function restoreEnvironment() {
|
||||
if (originalEnv)
|
||||
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
|
||||
else
|
||||
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
vi.resetModules()
|
||||
}
|
||||
|
||||
// Mock i18next with proper implementation
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
if (key.includes('MaxParallelismTitle'))
|
||||
return 'Max Parallelism'
|
||||
if (key.includes('MaxParallelismDesc'))
|
||||
return 'Maximum number of parallel executions'
|
||||
if (key.includes('parallelMode'))
|
||||
return 'Parallel Mode'
|
||||
if (key.includes('parallelPanelDesc'))
|
||||
return 'Enable parallel execution'
|
||||
if (key.includes('errorResponseMethod'))
|
||||
return 'Error Response Method'
|
||||
return key
|
||||
},
|
||||
}),
|
||||
initReactI18next: {
|
||||
type: '3rdParty',
|
||||
init: vi.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock i18next module completely to prevent initialization issues
|
||||
vi.mock('i18next', () => ({
|
||||
use: vi.fn().mockReturnThis(),
|
||||
init: vi.fn().mockReturnThis(),
|
||||
t: vi.fn(key => key),
|
||||
isInitialized: true,
|
||||
}))
|
||||
|
||||
// Mock the useConfig hook
|
||||
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
|
||||
default: () => ({
|
||||
inputs: {
|
||||
is_parallel: true,
|
||||
parallel_nums: 5,
|
||||
error_handle_mode: 'terminated',
|
||||
},
|
||||
changeParallel: vi.fn(),
|
||||
changeParallelNums: vi.fn(),
|
||||
changeErrorHandleMode: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock other components
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
|
||||
default: function MockVarReferencePicker() {
|
||||
return <div data-testid="var-reference-picker">VarReferencePicker</div>
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
|
||||
default: function MockSplit() {
|
||||
return <div data-testid="split">Split</div>
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
|
||||
default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
|
||||
return (
|
||||
<div data-testid="field">
|
||||
<label>{title}</label>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
},
|
||||
}))
|
||||
|
||||
const getParallelControls = () => ({
|
||||
numberInput: screen.getByRole('spinbutton'),
|
||||
slider: screen.getByRole('slider'),
|
||||
})
|
||||
|
||||
describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
|
||||
const mockNodeData = {
|
||||
id: 'test-iteration-node',
|
||||
type: 'iteration' as const,
|
||||
data: {
|
||||
title: 'Test Iteration',
|
||||
desc: 'Test iteration node',
|
||||
iterator_selector: ['test'],
|
||||
output_selector: ['output'],
|
||||
is_parallel: true,
|
||||
parallel_nums: 5,
|
||||
error_handle_mode: 'terminated' as const,
|
||||
},
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
restoreEnvironment()
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
restoreEnvironment()
|
||||
})
|
||||
|
||||
describe('Environment Variable Parsing', () => {
|
||||
it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
|
||||
setupEnvironment('25')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(25)
|
||||
})
|
||||
|
||||
it('should fall back to the default when the environment variable is not set', async () => {
|
||||
setupEnvironment() // No environment variable
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
it('should handle invalid environment variable values', async () => {
|
||||
setupEnvironment('invalid')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
// Should fall back to default when parsing fails
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
it('should handle empty environment variable', async () => {
|
||||
setupEnvironment('')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
// Should fall back to default when empty
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
// Edge cases for boundary values
|
||||
it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', async () => {
|
||||
setupEnvironment('0')
|
||||
let { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
|
||||
|
||||
setupEnvironment('-5')
|
||||
;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
|
||||
})
|
||||
|
||||
it('should handle float numbers by parseInt behavior', async () => {
|
||||
setupEnvironment('12.7')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
// parseInt truncates to integer
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(12)
|
||||
})
|
||||
})
|
||||
|
||||
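The parsing tests above only constrain behaviour: default to 10 when the variable is missing, empty, unparsable, zero, or negative, and truncate floats via parseInt. One parser that satisfies exactly those assertions is sketched below; it is an assumption for illustration, not a copy of what '@/config' ships.

// Hypothetical helper mirroring the behaviour asserted above.
const DEFAULT_MAX_PARALLEL_LIMIT = 10

const parseMaxParallelLimit = (raw: string | undefined): number => {
  const parsed = Number.parseInt(raw ?? '', 10) // '12.7' -> 12, '' and 'invalid' -> NaN
  return Number.isNaN(parsed) || parsed <= 0 ? DEFAULT_MAX_PARALLEL_LIMIT : parsed
}

// parseMaxParallelLimit('25') === 25, parseMaxParallelLimit('0') === 10, parseMaxParallelLimit(undefined) === 10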
describe('UI Component Integration (Main Fix Verification)', () => {
|
||||
it('should render iteration panel with environment-configured max value', async () => {
|
||||
// Set environment variable to a different value
|
||||
setupEnvironment('30')
|
||||
|
||||
// Import Panel after setting environment
|
||||
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
render(
|
||||
<Panel
|
||||
id="test-node"
|
||||
// @ts-expect-error key type mismatch
|
||||
data={mockNodeData.data}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
|
||||
const { numberInput, slider } = getParallelControls()
|
||||
expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
|
||||
expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))
|
||||
|
||||
// Verify the actual values
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(30)
|
||||
expect(numberInput.getAttribute('max')).toBe('30')
|
||||
expect(slider.getAttribute('aria-valuemax')).toBe('30')
|
||||
})
|
||||
|
||||
it('should maintain UI consistency with different environment values', async () => {
|
||||
setupEnvironment('15')
|
||||
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
render(
|
||||
<Panel
|
||||
id="test-node"
|
||||
// @ts-expect-error key type mismatch
|
||||
data={mockNodeData.data}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
|
||||
const { numberInput, slider } = getParallelControls()
|
||||
|
||||
expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
|
||||
expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
|
||||
})
|
||||
})
|
||||
|
||||
describe('Legacy Constant Verification (For Transition Period)', () => {
|
||||
// Marked as transition/deprecation tests
|
||||
it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
|
||||
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
|
||||
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
|
||||
})
|
||||
|
||||
it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
|
||||
setupEnvironment('50')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
|
||||
// MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(50)
|
||||
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
|
||||
expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Constants Validation', () => {
|
||||
it('should validate that required constants exist and have correct types', async () => {
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
|
||||
expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
|
||||
expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
|
||||
})
|
||||
})
|
||||
})
|
||||
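Taken together, the UI assertions in this file reduce to one wiring rule: both the parallelism spinbutton and the slider must take their maximum from the configurable MAX_PARALLEL_LIMIT rather than the hardcoded MAX_ITERATION_PARALLEL_NUM. The sketch below shows where that constant has to flow; the element and prop names are illustrative, since the real panel renders Dify's own input and slider components.

import { MAX_PARALLEL_LIMIT } from '@/config'
import { MIN_ITERATION_PARALLEL_NUM } from '@/app/components/workflow/constants'

// Illustrative only: shows where the configurable limit must be applied, not the panel's actual markup.
const ParallelNumsControls = ({ value, onChange }: { value: number, onChange: (v: number) => void }) => (
  <div>
    <input
      type="number"
      min={MIN_ITERATION_PARALLEL_NUM}
      max={MAX_PARALLEL_LIMIT}
      value={value}
      onChange={e => onChange(Number(e.target.value))}
    />
    <input
      type="range"
      min={MIN_ITERATION_PARALLEL_NUM}
      max={MAX_PARALLEL_LIMIT}
      value={value}
      onChange={e => onChange(Number(e.target.value))}
    />
  </div>
)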
@@ -8,7 +8,6 @@ import { UserActionButtonType } from '@/app/components/workflow/nodes/human-inpu
|
||||
import 'dayjs/locale/en'
|
||||
import 'dayjs/locale/zh-cn'
|
||||
import 'dayjs/locale/ja'
|
||||
import 'dayjs/locale/nl'
|
||||
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(relativeTime)
|
||||
@@ -46,7 +45,6 @@ const localeMap: Record<string, string> = {
|
||||
'en-US': 'en',
|
||||
'zh-Hans': 'zh-cn',
|
||||
'ja-JP': 'ja',
|
||||
'nl-NL': 'nl',
|
||||
}
|
||||
|
||||
export const getRelativeTime = (
|
||||
|
||||
@@ -98,9 +98,7 @@ const VoiceParamConfig = ({
|
||||
className="h-full w-full cursor-pointer rounded-lg border-0 bg-components-input-bg-normal py-1.5 pl-3 pr-10 focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6"
|
||||
>
|
||||
<span className={cn('block truncate text-left text-text-secondary', !languageItem?.name && 'text-text-tertiary')}>
|
||||
{languageItem?.name
|
||||
? t(`voice.language.${replace(languageItem?.value ?? '', '-', '')}`, languageItem?.name, { ns: 'common' as const })
|
||||
: localLanguagePlaceholder}
|
||||
{languageItem?.name ? t(`voice.language.${replace(languageItem?.value, '-', '')}`, { ns: 'common' }) : localLanguagePlaceholder}
|
||||
</span>
|
||||
<span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
|
||||
<ChevronDownIcon
|
||||
@@ -131,7 +129,7 @@ const VoiceParamConfig = ({
|
||||
<span
|
||||
className={cn('block', selected && 'font-normal')}
|
||||
>
|
||||
{t(`voice.language.${replace((item.value), '-', '')}`, item.name, { ns: 'common' as const })}
|
||||
{t(`voice.language.${replace((item.value), '-', '')}`, { ns: 'common' })}
|
||||
</span>
|
||||
{(selected || item.value === text2speech?.language) && (
|
||||
<span
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { RemixiconComponentType } from '@remixicon/react'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
|
||||
export const InputTypeEnum = z.enum([
|
||||
'text-input',
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { ZodNumber, ZodSchema, ZodString } from 'zod'
|
||||
import type { BaseConfiguration } from './types'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
import { BaseFieldType } from './types'
|
||||
|
||||
export const generateZodSchema = (fields: BaseConfiguration[]) => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
|
||||
const ContactMethod = z.union([
|
||||
z.literal('email'),
|
||||
@@ -22,10 +22,10 @@ export const UserSchema = z.object({
|
||||
.min(3, 'Surname must be at least 3 characters long')
|
||||
.regex(/^[A-Z]/, 'Surname must start with a capital letter'),
|
||||
isAcceptingTerms: z.boolean().refine(val => val, {
|
||||
error: 'You must accept the terms and conditions',
|
||||
message: 'You must accept the terms and conditions',
|
||||
}),
|
||||
contact: z.object({
|
||||
email: z.email('Invalid email address'),
|
||||
email: z.string().email('Invalid email address'),
|
||||
phone: z.string().optional(),
|
||||
preferredContactMethod: ContactMethod,
|
||||
}),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { ZodSchema, ZodString } from 'zod'
|
||||
import type { InputFieldConfiguration } from './types'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
import { SupportedFileTypes, TransferMethod } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/schema'
|
||||
import { InputFieldType } from './types'
|
||||
|
||||
|
||||
@@ -204,10 +204,23 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// Avoid executing arbitrary code; require valid JSON for chart options.
|
||||
setChartState('error')
|
||||
processedRef.current = true
|
||||
return
|
||||
try {
|
||||
// eslint-disable-next-line no-new-func
|
||||
const result = new Function(`return ${trimmedContent}`)()
|
||||
if (typeof result === 'object' && result !== null) {
|
||||
setFinalChartOption(result)
|
||||
setChartState('success')
|
||||
processedRef.current = true
|
||||
return
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// If we have a complete JSON structure but it doesn't parse,
|
||||
// it's likely an error rather than incomplete data
|
||||
setChartState('error')
|
||||
processedRef.current = true
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -236,9 +249,19 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// Only accept JSON to avoid executing arbitrary code from the message.
|
||||
setChartState('error')
|
||||
processedRef.current = true
|
||||
try {
|
||||
// eslint-disable-next-line no-new-func
|
||||
const result = new Function(`return ${trimmedContent}`)()
|
||||
if (typeof result === 'object' && result !== null) {
|
||||
setFinalChartOption(result)
|
||||
isValidOption = true
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// Both parsing methods failed, but content looks complete
|
||||
setChartState('error')
|
||||
processedRef.current = true
|
||||
}
|
||||
}
|
||||
|
||||
if (isValidOption) {
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import type { FC } from 'react'
|
||||
import * as React from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { env } from '@/env'
|
||||
import ParamItem from '.'
|
||||
|
||||
type Props = {
|
||||
@@ -12,7 +11,12 @@ type Props = {
|
||||
enable: boolean
|
||||
}
|
||||
|
||||
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
|
||||
const maxTopK = (() => {
|
||||
const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
|
||||
if (configValue && !isNaN(configValue))
|
||||
return configValue
|
||||
return 10
|
||||
})()
|
||||
const VALUE_LIMIT = {
|
||||
default: 2,
|
||||
step: 1,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import { noop } from 'es-toolkit/function'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
import withValidation from '.'
|
||||
|
||||
describe('withValidation HOC', () => {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
import withValidation from '.'
|
||||
|
||||
// Sample components to wrap with validation
|
||||
@@ -65,7 +65,7 @@ const ProductCard = ({ name, price, category, inStock }: ProductCardProps) => {
|
||||
// Create validated versions
|
||||
const userSchema = z.object({
|
||||
name: z.string().min(1, 'Name is required'),
|
||||
email: z.email('Invalid email'),
|
||||
email: z.string().email('Invalid email'),
|
||||
age: z.number().min(0).max(150),
|
||||
})
|
||||
|
||||
@@ -371,7 +371,7 @@ export const ConfigurationValidation: Story = {
|
||||
)
|
||||
|
||||
const configSchema = z.object({
|
||||
apiUrl: z.url('Must be valid URL'),
|
||||
apiUrl: z.string().url('Must be valid URL'),
|
||||
timeout: z.number().min(0).max(30000),
|
||||
retries: z.number().min(0).max(5),
|
||||
debug: z.boolean(),
|
||||
@@ -430,7 +430,7 @@ export const UsageDocumentation: Story = {
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-semibold text-gray-900">Usage Example</h4>
|
||||
<pre className="overflow-x-auto rounded-lg bg-gray-900 p-4 text-xs text-gray-100">
|
||||
{`import * as z from 'zod'
|
||||
{`import { z } from 'zod'
|
||||
import withValidation from './withValidation'
|
||||
|
||||
// Define your component
|
||||
|
||||
@@ -5,7 +5,6 @@ import { useTranslation } from 'react-i18next'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { InputNumber } from '@/app/components/base/input-number'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { env } from '@/env'
|
||||
|
||||
const TextLabel: FC<PropsWithChildren> = (props) => {
|
||||
return <label className="text-xs font-semibold leading-none text-text-secondary">{props.children}</label>
|
||||
@@ -47,7 +46,7 @@ export const DelimiterInput: FC<InputProps & { tooltip?: string }> = (props) =>
|
||||
}
|
||||
|
||||
export const MaxLengthInput: FC<InputNumberProps> = (props) => {
|
||||
const maxValue = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
|
||||
const maxValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000', 10)
|
||||
|
||||
const { t } = useTranslation()
|
||||
return (
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import type { ParentMode, PreProcessingRule, ProcessRule, Rules, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { env } from '@/env'
|
||||
import { ChunkingMode, ProcessMode } from '@/models/datasets'
|
||||
import escape from './escape'
|
||||
import unescape from './unescape'
|
||||
@@ -9,7 +8,10 @@ import unescape from './unescape'
|
||||
export const DEFAULT_SEGMENT_IDENTIFIER = '\\n\\n'
|
||||
export const DEFAULT_MAXIMUM_CHUNK_LENGTH = 1024
|
||||
export const DEFAULT_OVERLAP = 50
|
||||
export const MAXIMUM_CHUNK_TOKEN_LENGTH = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
|
||||
export const MAXIMUM_CHUNK_TOKEN_LENGTH = Number.parseInt(
|
||||
globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000',
|
||||
10,
|
||||
)
|
||||
|
||||
export type ParentChildConfig = {
|
||||
chunkForContext: ParentMode
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import * as z from 'zod'
|
||||
import { z } from 'zod'
|
||||
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import Actions from './actions'
|
||||
@@ -53,7 +53,7 @@ const createFailingSchema = () => {
|
||||
issues: [{ path: ['field1'], message: 'is required' }],
|
||||
},
|
||||
}),
|
||||
} as unknown as z.ZodType
|
||||
} as unknown as z.ZodSchema
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
|
||||
@@ -1,129 +0,0 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { DocType } from '@/models/datasets'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Radio from '@/app/components/base/radio'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { useMetadataMap } from '@/hooks/use-metadata'
|
||||
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import s from '../style.module.css'
|
||||
|
||||
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
|
||||
return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
|
||||
}
|
||||
|
||||
const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
|
||||
const metadataMap = useMetadataMap()
|
||||
return (
|
||||
<Tooltip popupContent={metadataMap[type].text}>
|
||||
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
|
||||
<TypeIcon
|
||||
iconName={metadataMap[type].iconName || ''}
|
||||
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
type DocTypeSelectorProps = {
|
||||
docType: DocType | ''
|
||||
documentType?: DocType | ''
|
||||
tempDocType: DocType | ''
|
||||
onTempDocTypeChange: (type: DocType | '') => void
|
||||
onConfirm: () => void
|
||||
onCancel: () => void
|
||||
}
|
||||
|
||||
const DocTypeSelector: FC<DocTypeSelectorProps> = ({
|
||||
docType,
|
||||
documentType,
|
||||
tempDocType,
|
||||
onTempDocTypeChange,
|
||||
onConfirm,
|
||||
onCancel,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const isFirstTime = !docType && !documentType
|
||||
const currValue = tempDocType ?? documentType
|
||||
|
||||
return (
|
||||
<>
|
||||
{isFirstTime && (
|
||||
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
|
||||
)}
|
||||
<div className={s.operationWrapper}>
|
||||
{isFirstTime && (
|
||||
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
|
||||
)}
|
||||
{documentType && (
|
||||
<>
|
||||
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
|
||||
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
|
||||
</>
|
||||
)}
|
||||
<Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
|
||||
{CUSTOMIZABLE_DOC_TYPES.map(type => (
|
||||
<Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
|
||||
<IconButton type={type} isChecked={currValue === type} />
|
||||
</Radio>
|
||||
))}
|
||||
</Radio.Group>
|
||||
{isFirstTime && (
|
||||
<Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
|
||||
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
|
||||
</Button>
|
||||
)}
|
||||
{documentType && (
|
||||
<div className={s.opBtnWrapper}>
|
||||
<Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
|
||||
{t('operation.save', { ns: 'common' })}
|
||||
</Button>
|
||||
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
|
||||
{t('operation.cancel', { ns: 'common' })}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
type DocumentTypeDisplayProps = {
|
||||
displayType: DocType | ''
|
||||
showChangeLink?: boolean
|
||||
onChangeClick?: () => void
|
||||
}
|
||||
|
||||
export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
|
||||
displayType,
|
||||
showChangeLink = false,
|
||||
onChangeClick,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const metadataMap = useMetadataMap()
|
||||
const effectiveType = displayType || 'book'
|
||||
|
||||
return (
|
||||
<div className={s.documentTypeShow}>
|
||||
{(displayType || !showChangeLink) && (
|
||||
<>
|
||||
<TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
|
||||
{metadataMap[effectiveType].text}
|
||||
{showChangeLink && (
|
||||
<div className="ml-1 inline-flex items-center gap-1">
|
||||
·
|
||||
<div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
|
||||
{t('operation.change', { ns: 'common' })}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default DocTypeSelector
|
||||
@@ -1,89 +0,0 @@
|
||||
'use client'
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { inputType } from '@/hooks/use-metadata'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import { getTextWidthWithCanvas } from '@/utils'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import s from '../style.module.css'
|
||||
|
||||
type FieldInfoProps = {
|
||||
label: string
|
||||
value?: string
|
||||
valueIcon?: ReactNode
|
||||
displayedValue?: string
|
||||
defaultValue?: string
|
||||
showEdit?: boolean
|
||||
inputType?: inputType
|
||||
selectOptions?: Array<{ value: string, name: string }>
|
||||
onUpdate?: (v: string) => void
|
||||
}
|
||||
|
||||
const FieldInfo: FC<FieldInfoProps> = ({
|
||||
label,
|
||||
value = '',
|
||||
valueIcon,
|
||||
displayedValue = '',
|
||||
defaultValue,
|
||||
showEdit = false,
|
||||
inputType = 'input',
|
||||
selectOptions = [],
|
||||
onUpdate,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
|
||||
const editAlignTop = showEdit && inputType === 'textarea'
|
||||
const readAlignTop = !showEdit && textNeedWrap
|
||||
|
||||
const renderContent = () => {
|
||||
if (!showEdit)
|
||||
return displayedValue
|
||||
|
||||
if (inputType === 'select') {
|
||||
return (
|
||||
<SimpleSelect
|
||||
onSelect={({ value }) => onUpdate?.(value as string)}
|
||||
items={selectOptions}
|
||||
defaultValue={value}
|
||||
className={s.select}
|
||||
wrapperClassName={s.selectWrapper}
|
||||
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
if (inputType === 'textarea') {
|
||||
return (
|
||||
<AutoHeightTextarea
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
className={s.textArea}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Input
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
defaultValue={defaultValue}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
|
||||
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
|
||||
<div className="flex grow items-center gap-1 text-text-secondary">
|
||||
{valueIcon}
|
||||
{renderContent()}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default FieldInfo
|
||||
@@ -1,88 +0,0 @@
'use client'
import type { FC } from 'react'
import type { metadataType } from '@/hooks/use-metadata'
import type { FullDocumentDetail } from '@/models/datasets'
import { get } from 'es-toolkit/compat'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import FieldInfo from './field-info'

const map2Options = (map: Record<string, string>) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}

function useCategoryMapResolver(mainField: metadataType | '') {
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()

return (field: string): Record<string, string> => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {}
}
}

type MetadataFieldListProps = {
mainField: metadataType | ''
canEdit?: boolean
metadata?: Record<string, string>
docDetail?: FullDocumentDetail
onFieldUpdate?: (field: string, value: string) => void
}

const MetadataFieldList: FC<MetadataFieldListProps> = ({
mainField,
canEdit = false,
metadata,
docDetail,
onFieldUpdate,
}) => {
const metadataMap = useMetadataMap()
const getCategoryMap = useCategoryMapResolver(mainField)

if (!mainField)
return null

const fieldMap = metadataMap[mainField]?.subFieldsMap
const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
const sourceData = isFixedField ? docDetail : metadata

const getDisplayValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getCategoryMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}

return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map(field => (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getDisplayValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={val => onFieldUpdate?.(field, val)}
selectOptions={map2Options(getCategoryMap(field))}
/>
))}
</div>
)
}

export default MetadataFieldList
@@ -1,137 +0,0 @@
'use client'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe } from '@/utils'
import { useDocumentContext } from '../../context'

type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}

/**
* Normalize raw doc_type: treat 'others' as empty string.
*/
const normalizeDocType = (rawDocType: string): DocType | '' => {
return rawDocType === 'others' ? '' : rawDocType as DocType | ''
}

type UseMetadataStateOptions = {
docDetail?: FullDocumentDetail
onUpdate?: () => void
}

export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const docType = normalizeDocType(rawDocType)

const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)

// If no documentType yet, start in editing + showDocTypes mode
const [editStatus, setEditStatus] = useState(!docType)
const [metadataParams, setMetadataParams] = useState<MetadataState>(
docType
? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!docType)
const [tempDocType, setTempDocType] = useState<DocType | ''>('')
const [saveLoading, setSaveLoading] = useState(false)

// Sync local state when the upstream docDetail changes (e.g. after save or navigation).
// These setters are intentionally called together to batch-reset multiple pieces
// of derived editing state that cannot be expressed as pure derived values.
useEffect(() => {
if (docDetail?.doc_type) {
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setEditStatus(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setShowDocTypes(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setTempDocType(docType)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setMetadataParams({
documentType: docType,
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, docType])

const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
// Clear metadata when switching to a different doc type
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
})
setEditStatus(true)
setShowDocTypes(false)
}

const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}

const enableEdit = () => {
setEditStatus(true)
}

const cancelEdit = () => {
setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
setEditStatus(!docType)
if (!docType)
setShowDocTypes(true)
}

const saveMetadata = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || docType || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}

const updateMetadataField = (field: string, value: string) => {
setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
}

return {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
}
}
@@ -1,6 +1,7 @@
import type { FullDocumentDetail } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

import Metadata, { FieldInfo } from './index'

// Mock document context
@@ -120,6 +121,7 @@ vi.mock('@/hooks/use-metadata', () => ({
}),
}))

// Mock getTextWidthWithCanvas
vi.mock('@/utils', () => ({
asyncRunSafe: async (promise: Promise<unknown>) => {
try {
@@ -133,32 +135,33 @@ vi.mock('@/utils', () => ({
getTextWidthWithCanvas: () => 100,
}))

const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)

describe('Metadata', () => {
beforeEach(() => {
vi.clearAllMocks()
})

const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)

const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}

// Rendering tests
describe('Rendering', () => {
it('should render without crashing', () => {
// Arrange & Act
@@ -188,7 +191,7 @@ describe('Metadata', () => {
// Arrange & Act
render(<Metadata {...defaultProps} loading={true} />)

// Assert - Loading component should be rendered, title should not
// Assert - Loading component should be rendered
expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
})

@@ -201,7 +204,7 @@ describe('Metadata', () => {
})
})

// Edit mode (tests useMetadataState hook integration)
// Edit mode tests
describe('Edit Mode', () => {
it('should enter edit mode when edit button is clicked', () => {
// Arrange
@@ -300,7 +303,7 @@ describe('Metadata', () => {
})
})

// Document type selection (tests DocTypeSelector sub-component integration)
// Document type selection
describe('Document Type Selection', () => {
it('should show doc type selection when no doc_type exists', () => {
// Arrange
@@ -350,13 +353,13 @@ describe('Metadata', () => {
})
})

// Fixed fields (tests MetadataFieldList sub-component integration)
// Origin info and technical parameters
describe('Fixed Fields', () => {
it('should render origin info fields', () => {
// Arrange & Act
render(<Metadata {...defaultProps} />)

// Assert
// Assert - Origin info fields should be displayed
expect(screen.getByText('Data Source Type')).toBeInTheDocument()
})

@@ -379,7 +382,7 @@ describe('Metadata', () => {
// Act
const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)

// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})

@@ -387,7 +390,7 @@ describe('Metadata', () => {
// Arrange & Act
const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)

// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})

@@ -422,6 +425,7 @@ describe('Metadata', () => {
})
})

// FieldInfo component tests
describe('FieldInfo', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -539,149 +543,3 @@ describe('FieldInfo', () => {
})
})
})

// --- useMetadataState hook coverage tests (via component interactions) ---
describe('useMetadataState coverage', () => {
beforeEach(() => {
vi.clearAllMocks()
})

const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}

describe('cancelDocType', () => {
it('should cancel doc type change and return to edit mode', () => {
// Arrange
render(<Metadata {...defaultProps} />)

// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))

// Now in doc type selector mode — should show cancel button
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()

// Act — cancel the doc type change
fireEvent.click(screen.getByText(/operation\.cancel/i))

// Assert — should be back to edit mode (cancel + save buttons visible)
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})

describe('confirmDocType', () => {
it('should confirm same doc type and return to edit mode keeping metadata', () => {
// Arrange — useEffect syncs tempDocType='book' from docDetail
render(<Metadata {...defaultProps} />)

// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))

// DocTypeSelector shows save/cancel buttons
expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()

// Act — click save to confirm same doc type (tempDocType='book')
fireEvent.click(screen.getByText(/operation\.save/i))

// Assert — should return to edit mode with metadata fields visible
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})

describe('cancelEdit when no docType', () => {
it('should show doc type selection when cancel is clicked with doc_type others', () => {
// Arrange — doc with 'others' type normalizes to '' internally.
// The useEffect sees doc_type='others' (truthy) and syncs state,
// so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
const docDetail = createMockDocDetail({ doc_type: 'others' })
render(<Metadata {...defaultProps} docDetail={docDetail} />)

// 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
// The rendered type uses default 'book' fallback for display
expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()

// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()

// Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
fireEvent.click(screen.getByText(/operation\.cancel/i))

// Assert — should show doc type selection since normalized docType was ''
expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
})
})

describe('updateMetadataField', () => {
it('should update metadata field value via input', () => {
// Arrange
render(<Metadata {...defaultProps} />)

// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))

// Act — find an input and change its value (Title field)
const inputs = screen.getAllByRole('textbox')
expect(inputs.length).toBeGreaterThan(0)
fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })

// Assert — the input should have the new value
expect(inputs[0]).toHaveValue('Updated Title')
})
})

describe('saveMetadata calls modifyDocMetadata with correct body', () => {
it('should pass doc_type and doc_metadata in save request', async () => {
// Arrange
mockModifyDocMetadata.mockResolvedValueOnce({})
render(<Metadata {...defaultProps} />)

// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))

// Act — save
fireEvent.click(screen.getByText(/operation\.save/i))

// Assert
await waitFor(() => {
expect(mockModifyDocMetadata).toHaveBeenCalledWith(
expect.objectContaining({
datasetId: 'test-dataset-id',
documentId: 'test-document-id',
body: expect.objectContaining({
doc_type: 'book',
}),
}),
)
})
})
})

describe('useEffect sync', () => {
it('should handle doc_metadata being null in effect sync', () => {
// Arrange — first render with null metadata
const { rerender } = render(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_metadata: null })}
/>,
)

// Act — rerender with a different doc_type to trigger useEffect sync
rerender(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
/>,
)

// Assert — should render without crashing, showing Paper type
expect(screen.getByText('Paper')).toBeInTheDocument()
})
})
})
@@ -1,124 +1,422 @@
'use client'
import type { FC } from 'react'
import type { FullDocumentDetail } from '@/models/datasets'
import type { FC, ReactNode } from 'react'
import type { inputType, metadataType } from '@/hooks/use-metadata'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { PencilIcon } from '@heroicons/react/24/outline'
import { get } from 'es-toolkit/compat'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
import Input from '@/app/components/base/input'
import Loading from '@/app/components/base/loading'
import { useMetadataMap } from '@/hooks/use-metadata'
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
import MetadataFieldList from './components/metadata-field-list'
import { useMetadataState } from './hooks/use-metadata-state'
import Radio from '@/app/components/base/radio'
import { SimpleSelect } from '@/app/components/base/select'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import { useDocumentContext } from '../context'
import s from './style.module.css'

export { default as FieldInfo } from './components/field-info'
const map2Options = (map: { [key: string]: string }) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}

type MetadataProps = {
type IFieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: any) => void
}

export const FieldInfo: FC<IFieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap

const renderContent = () => {
if (!showEdit)
return displayedValue

if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}

if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}

return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}

return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}

const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return (
<div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
)
}

const IconButton: FC<{
type: DocType
isChecked: boolean
}> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()

return (
<Tooltip
popupContent={metadataMap[type].text}
>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}

type IMetadataProps = {
docDetail?: FullDocumentDetail
loading: boolean
onUpdate: () => void
}

const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}

const Metadata: FC<IMetadataProps> = ({ docDetail, loading, onUpdate }) => {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const doc_type = rawDocType === 'others' ? '' : rawDocType

const { t } = useTranslation()
const metadataMap = useMetadataMap()
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
const [editStatus, setEditStatus] = useState(!doc_type) // if no documentType, in editing status by default
// the initial values are according to the documentType
const [metadataParams, setMetadataParams] = useState<MetadataState>(
doc_type
? {
documentType: doc_type as DocType,
metadata: (doc_metadata || {}) as Record<string, string>,
}
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!doc_type) // whether show doc types
const [tempDocType, setTempDocType] = useState<DocType | ''>('') // for remember icon click
const [saveLoading, setSaveLoading] = useState(false)

const {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
} = useMetadataState({ docDetail, onUpdate })
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)

useEffect(() => {
if (docDetail?.doc_type) {
setEditStatus(false)
setShowDocTypes(false)
setTempDocType(doc_type as DocType | '')
setMetadataParams({
documentType: doc_type as DocType | '',
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, doc_type])

// confirm doc type
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {} as Record<string, string>, // change doc type, clear metadata
})
setEditStatus(true)
setShowDocTypes(false)
}

// cancel doc type
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}

// show doc type select
const renderSelectDocType = () => {
const { documentType } = metadataParams

if (loading) {
return (
<div className={`${s.main} bg-gray-25`}>
<Loading type="app" />
<>
{!doc_type && !documentType && (
<>
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
</>
)}
<div className={s.operationWrapper}>
{!doc_type && !documentType && (
<>
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
</>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={tempDocType ?? documentType ?? ''} onChange={setTempDocType} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map((type, index) => {
const currValue = tempDocType ?? documentType
return (
<Radio key={index} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton
type={type}
isChecked={currValue === type}
/>
</Radio>
)
})}
</Radio.Group>
{!doc_type && !documentType && (
<Button
variant="primary"
onClick={confirmDocType}
disabled={!tempDocType}
>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={confirmDocType} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">{t('operation.save', { ns: 'common' })}</Button>
<Button onClick={cancelDocType} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
</div>
)}
</div>
</>
)
}

// show metadata info and edit
const renderFieldInfos = ({ mainField = 'book', canEdit }: { mainField?: metadataType | '', canEdit?: boolean }) => {
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const sourceData = ['originInfo', 'technicalParameters'].includes(mainField) ? docDetail : metadataParams.metadata

const getTargetMap = (field: string) => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap

if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {} as any
}

const getTargetValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getTargetMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}

return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map((field) => {
return (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getTargetValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={(val) => {
setMetadataParams(pre => ({ ...pre, metadata: { ...pre.metadata, [field]: val } }))
}}
selectOptions={map2Options(getTargetMap(field))}
/>
)
})}
</div>
)
}

const enabledEdit = () => {
setEditStatus(true)
}

const onCancel = () => {
setMetadataParams({ documentType: doc_type || '', metadata: { ...docDetail?.doc_metadata } })
setEditStatus(!doc_type)
if (!doc_type)
setShowDocTypes(true)
}

const onSave = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || doc_type || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}

return (
<div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
{/* Header: title + action buttons */}
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: !showDocTypes && (
<div className={s.opBtnWrapper}>
<Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
<Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>

{/* Document type display / selector */}
{!editStatus
? <DocumentTypeDisplay displayType={docType} />
: showDocTypes
? null
: (
<DocumentTypeDisplay
displayType={metadataParams.documentType || ''}
showChangeLink={editStatus}
onChangeClick={() => setShowDocTypes(true)}
/>
)}

{/* Divider between type display and fields (skip when in first-time selection) */}
{(!docType && showDocTypes) ? null : <Divider />}

{/* Doc type selector or editable metadata fields */}
{showDocTypes
? (
<DocTypeSelector
docType={docType}
documentType={metadataParams.documentType}
tempDocType={tempDocType}
onTempDocTypeChange={setTempDocType}
onConfirm={confirmDocType}
onCancel={cancelDocType}
/>
)
{loading
? (<Loading type="app" />)
: (
<MetadataFieldList
mainField={metadataParams.documentType || ''}
canEdit={editStatus}
metadata={metadataParams.metadata}
docDetail={docDetail}
onFieldUpdate={updateMetadataField}
/>
<>
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enabledEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: showDocTypes
? null
: (
<div className={s.opBtnWrapper}>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
<Button
onClick={onSave}
className={`${s.opBtn} ${s.opSaveBtn}`}
variant="primary"
loading={saveLoading}
>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>
{/* show selected doc type and changing entry */}
{!editStatus
? (
<div className={s.documentTypeShow}>
<TypeIcon iconName={metadataMap[doc_type || 'book']?.iconName || ''} className={s.iconShow} />
{metadataMap[doc_type || 'book'].text}
</div>
)
: showDocTypes
? null
: (
<div className={s.documentTypeShow}>
{metadataParams.documentType && (
<>
<TypeIcon iconName={metadataMap[metadataParams.documentType || 'book'].iconName || ''} className={s.iconShow} />
{metadataMap[metadataParams.documentType || 'book'].text}
{editStatus && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div
onClick={() => { setShowDocTypes(true) }}
className="cursor-pointer hover:text-text-accent"
>
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)}
{(!doc_type && showDocTypes) ? null : <Divider />}
{showDocTypes ? renderSelectDocType() : renderFieldInfos({ mainField: metadataParams.documentType, canEdit: editStatus })}
{/* show fixed fields */}
<Divider />
{renderFieldInfos({ mainField: 'originInfo', canEdit: false })}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
{renderFieldInfos({ mainField: 'technicalParameters', canEdit: false })}
</>
)}

{/* Fixed fields: origin info */}
<Divider />
<MetadataFieldList mainField="originInfo" docDetail={docDetail} />

{/* Fixed fields: technical parameters */}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
<MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
</div>
)
}
@@ -28,7 +28,6 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { env } from '@/env'
import { useLogout } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import AccountAbout from '../account-about'
@@ -179,7 +178,7 @@ export default function AppSelector() {
</Link>
</MenuItem>
{
env.NEXT_PUBLIC_SITE_ABOUT !== 'hide' && (
document?.body?.getAttribute('data-public-site-about') !== 'hide' && (
<MenuItem>
<div
className={cn(itemClassName, 'justify-between', 'data-[active]:bg-state-base-hover')}

@@ -104,7 +104,7 @@ const MembersPage = () => {
<UpgradeBtn className="mr-2" loc="member-invite" />
)}
<div className="shrink-0">
{isCurrentWorkspaceManager && <InviteButton disabled={isMemberFull} onClick={() => setInviteModalVisible(true)} />}
<InviteButton disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)} />
</div>
</div>
<div className="overflow-visible lg:overflow-visible">
@@ -1,59 +1,10 @@
import { renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PLUGIN_PAGE_TABS_MAP, useCategories, usePluginPageTabs, useTags } from './hooks'

// Create mock translation function
const mockT = vi.fn((key: string, _options?: Record<string, string>) => {
const translations: Record<string, string> = {
'tags.agent': 'Agent',
'tags.rag': 'RAG',
'tags.search': 'Search',
'tags.image': 'Image',
'tags.videos': 'Videos',
'tags.weather': 'Weather',
'tags.finance': 'Finance',
'tags.design': 'Design',
'tags.travel': 'Travel',
'tags.social': 'Social',
'tags.news': 'News',
'tags.medical': 'Medical',
'tags.productivity': 'Productivity',
'tags.education': 'Education',
'tags.business': 'Business',
'tags.entertainment': 'Entertainment',
'tags.utilities': 'Utilities',
'tags.other': 'Other',
'category.models': 'Models',
'category.tools': 'Tools',
'category.datasources': 'Datasources',
'category.agents': 'Agents',
'category.extensions': 'Extensions',
'category.bundles': 'Bundles',
'category.triggers': 'Triggers',
'categorySingle.model': 'Model',
'categorySingle.tool': 'Tool',
'categorySingle.datasource': 'Datasource',
'categorySingle.agent': 'Agent',
'categorySingle.extension': 'Extension',
'categorySingle.bundle': 'Bundle',
'categorySingle.trigger': 'Trigger',
'menus.plugins': 'Plugins',
'menus.exploreMarketplace': 'Explore Marketplace',
}
return translations[key] || key
})

// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: mockT,
}),
}))
import { PLUGIN_PAGE_TABS_MAP, useCategories, usePluginPageTabs, useTags } from '../hooks'

describe('useTags', () => {
beforeEach(() => {
vi.clearAllMocks()
mockT.mockClear()
})

describe('Rendering', () => {
@@ -65,13 +16,12 @@ describe('useTags', () => {
expect(result.current.tags.length).toBeGreaterThan(0)
})

it('should call translation function for each tag', () => {
renderHook(() => useTags())
it('should return tags with translated labels', () => {
const { result } = renderHook(() => useTags())

// Verify t() was called for tag translations
expect(mockT).toHaveBeenCalled()
const tagCalls = mockT.mock.calls.filter(call => call[0].startsWith('tags.'))
expect(tagCalls.length).toBeGreaterThan(0)
result.current.tags.forEach((tag) => {
expect(tag.label).toBe(`pluginTags.tags.${tag.name}`)
})
})

it('should return tags with name and label properties', () => {
@@ -99,7 +49,7 @@ describe('useTags', () => {

expect(result.current.tagsMap.agent).toBeDefined()
expect(result.current.tagsMap.agent.name).toBe('agent')
expect(result.current.tagsMap.agent.label).toBe('Agent')
expect(result.current.tagsMap.agent.label).toBe('pluginTags.tags.agent')
})

it('should contain all tags from tags array', () => {
@@ -116,9 +66,8 @@ describe('useTags', () => {
it('should return label for existing tag', () => {
const { result } = renderHook(() => useTags())

// Test existing tags - this covers the branch where tagsMap[name] exists
expect(result.current.getTagLabel('agent')).toBe('Agent')
expect(result.current.getTagLabel('search')).toBe('Search')
expect(result.current.getTagLabel('agent')).toBe('pluginTags.tags.agent')
expect(result.current.getTagLabel('search')).toBe('pluginTags.tags.search')
})

it('should return name for non-existing tag', () => {
@@ -132,11 +81,9 @@ describe('useTags', () => {
it('should cover both branches of getTagLabel conditional', () => {
const { result } = renderHook(() => useTags())

// Branch 1: tag exists in tagsMap - returns label
const existingTagResult = result.current.getTagLabel('rag')
expect(existingTagResult).toBe('RAG')
expect(existingTagResult).toBe('pluginTags.tags.rag')

// Branch 2: tag does not exist in tagsMap - returns name itself
const nonExistingTagResult = result.current.getTagLabel('unknown-tag-xyz')
expect(nonExistingTagResult).toBe('unknown-tag-xyz')
})
@@ -150,23 +97,22 @@ describe('useTags', () => {
it('should return correct labels for all predefined tags', () => {
const { result } = renderHook(() => useTags())

// Test all predefined tags
expect(result.current.getTagLabel('rag')).toBe('RAG')
expect(result.current.getTagLabel('image')).toBe('Image')
expect(result.current.getTagLabel('videos')).toBe('Videos')
expect(result.current.getTagLabel('weather')).toBe('Weather')
expect(result.current.getTagLabel('finance')).toBe('Finance')
expect(result.current.getTagLabel('design')).toBe('Design')
expect(result.current.getTagLabel('travel')).toBe('Travel')
expect(result.current.getTagLabel('social')).toBe('Social')
expect(result.current.getTagLabel('news')).toBe('News')
expect(result.current.getTagLabel('medical')).toBe('Medical')
expect(result.current.getTagLabel('productivity')).toBe('Productivity')
expect(result.current.getTagLabel('education')).toBe('Education')
expect(result.current.getTagLabel('business')).toBe('Business')
expect(result.current.getTagLabel('entertainment')).toBe('Entertainment')
expect(result.current.getTagLabel('utilities')).toBe('Utilities')
expect(result.current.getTagLabel('other')).toBe('Other')
expect(result.current.getTagLabel('rag')).toBe('pluginTags.tags.rag')
expect(result.current.getTagLabel('image')).toBe('pluginTags.tags.image')
expect(result.current.getTagLabel('videos')).toBe('pluginTags.tags.videos')
expect(result.current.getTagLabel('weather')).toBe('pluginTags.tags.weather')
expect(result.current.getTagLabel('finance')).toBe('pluginTags.tags.finance')
expect(result.current.getTagLabel('design')).toBe('pluginTags.tags.design')
expect(result.current.getTagLabel('travel')).toBe('pluginTags.tags.travel')
expect(result.current.getTagLabel('social')).toBe('pluginTags.tags.social')
expect(result.current.getTagLabel('news')).toBe('pluginTags.tags.news')
expect(result.current.getTagLabel('medical')).toBe('pluginTags.tags.medical')
expect(result.current.getTagLabel('productivity')).toBe('pluginTags.tags.productivity')
expect(result.current.getTagLabel('education')).toBe('pluginTags.tags.education')
expect(result.current.getTagLabel('business')).toBe('pluginTags.tags.business')
expect(result.current.getTagLabel('entertainment')).toBe('pluginTags.tags.entertainment')
expect(result.current.getTagLabel('utilities')).toBe('pluginTags.tags.utilities')
expect(result.current.getTagLabel('other')).toBe('pluginTags.tags.other')
})

it('should handle empty string tag name', () => {
@@ -255,27 +201,27 @@ describe('useCategories', () => {
it('should use plural labels when isSingle is false', () => {
const { result } = renderHook(() => useCategories(false))

expect(result.current.categoriesMap.tool.label).toBe('Tools')
expect(result.current.categoriesMap.tool.label).toBe('plugin.category.tools')
})

it('should use plural labels when isSingle is undefined', () => {
const { result } = renderHook(() => useCategories())

expect(result.current.categoriesMap.tool.label).toBe('Tools')
expect(result.current.categoriesMap.tool.label).toBe('plugin.category.tools')
})

it('should use singular labels when isSingle is true', () => {
const { result } = renderHook(() => useCategories(true))

expect(result.current.categoriesMap.tool.label).toBe('Tool')
expect(result.current.categoriesMap.tool.label).toBe('plugin.categorySingle.tool')
})

it('should handle agent category specially', () => {
const { result: resultPlural } = renderHook(() => useCategories(false))
const { result: resultSingle } = renderHook(() => useCategories(true))

expect(resultPlural.current.categoriesMap['agent-strategy'].label).toBe('Agents')
expect(resultSingle.current.categoriesMap['agent-strategy'].label).toBe('Agent')
expect(resultPlural.current.categoriesMap['agent-strategy'].label).toBe('plugin.category.agents')
expect(resultSingle.current.categoriesMap['agent-strategy'].label).toBe('plugin.categorySingle.agent')
})
})

@@ -298,7 +244,6 @@ describe('useCategories', () => {
describe('usePluginPageTabs', () => {
beforeEach(() => {
vi.clearAllMocks()
mockT.mockClear()
})

describe('Rendering', () => {
@@ -326,12 +271,11 @@ describe('usePluginPageTabs', () => {
})
})

it('should call translation function for tab texts', () => {
renderHook(() => usePluginPageTabs())
it('should return tabs with translated texts', () => {
const { result } = renderHook(() => usePluginPageTabs())

// Verify t() was called for menu translations
expect(mockT).toHaveBeenCalledWith('menus.plugins', { ns: 'common' })
expect(mockT).toHaveBeenCalledWith('menus.exploreMarketplace', { ns: 'common' })
expect(result.current[0].text).toBe('common.menus.plugins')
expect(result.current[1].text).toBe('common.menus.exploreMarketplace')
})
})

@@ -342,7 +286,7 @@ describe('usePluginPageTabs', () => {
const pluginsTab = result.current.find(tab => tab.value === PLUGIN_PAGE_TABS_MAP.plugins)
expect(pluginsTab).toBeDefined()
expect(pluginsTab?.value).toBe('plugins')
expect(pluginsTab?.text).toBe('Plugins')
expect(pluginsTab?.text).toBe('common.menus.plugins')
})

it('should have marketplace tab with correct value', () => {
@@ -351,7 +295,7 @@ describe('usePluginPageTabs', () => {
const marketplaceTab = result.current.find(tab => tab.value === PLUGIN_PAGE_TABS_MAP.marketplace)
expect(marketplaceTab).toBeDefined()
expect(marketplaceTab?.value).toBe('discover')
expect(marketplaceTab?.text).toBe('Explore Marketplace')
expect(marketplaceTab?.text).toBe('common.menus.exploreMarketplace')
})
})

@@ -360,14 +304,14 @@ describe('usePluginPageTabs', () => {
const { result } = renderHook(() => usePluginPageTabs())

expect(result.current[0].value).toBe('plugins')
expect(result.current[0].text).toBe('Plugins')
expect(result.current[0].text).toBe('common.menus.plugins')
})

it('should return marketplace tab as second tab', () => {
const { result } = renderHook(() => usePluginPageTabs())

expect(result.current[1].value).toBe('discover')
expect(result.current[1].text).toBe('Explore Marketplace')
expect(result.current[1].text).toBe('common.menus.exploreMarketplace')
})
})
50
web/app/components/plugins/__tests__/utils.spec.ts
Normal file
@@ -0,0 +1,50 @@
import type { TagKey } from '../constants'
import { describe, expect, it } from 'vitest'
import { PluginCategoryEnum } from '../types'
import { getValidCategoryKeys, getValidTagKeys } from '../utils'

describe('plugins/utils', () => {
describe('getValidTagKeys', () => {
it('returns only valid tag keys from the predefined set', () => {
const input = ['agent', 'rag', 'invalid-tag', 'search'] as TagKey[]
const result = getValidTagKeys(input)
expect(result).toEqual(['agent', 'rag', 'search'])
})

it('returns empty array when no valid tags', () => {
const result = getValidTagKeys(['foo', 'bar'] as unknown as TagKey[])
expect(result).toEqual([])
})

it('returns empty array for empty input', () => {
expect(getValidTagKeys([])).toEqual([])
})

it('preserves all valid tags when all are valid', () => {
const input: TagKey[] = ['agent', 'rag', 'search', 'image']
const result = getValidTagKeys(input)
expect(result).toEqual(input)
})
})

describe('getValidCategoryKeys', () => {
it('returns matching category for valid key', () => {
expect(getValidCategoryKeys(PluginCategoryEnum.model)).toBe(PluginCategoryEnum.model)
expect(getValidCategoryKeys(PluginCategoryEnum.tool)).toBe(PluginCategoryEnum.tool)
expect(getValidCategoryKeys(PluginCategoryEnum.agent)).toBe(PluginCategoryEnum.agent)
expect(getValidCategoryKeys('bundle')).toBe('bundle')
})

it('returns undefined for invalid category', () => {
expect(getValidCategoryKeys('nonexistent')).toBeUndefined()
})

it('returns undefined for undefined input', () => {
expect(getValidCategoryKeys(undefined)).toBeUndefined()
})

it('returns undefined for empty string', () => {
expect(getValidCategoryKeys('')).toBeUndefined()
})
})
})
@@ -0,0 +1,92 @@
import { cleanup, render, screen } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import DeprecationNotice from '../deprecation-notice'

vi.mock('next/link', () => ({
default: ({ children, href }: { children: React.ReactNode, href: string }) => (
<a data-testid="link" href={href}>{children}</a>
),
}))

describe('DeprecationNotice', () => {
beforeEach(() => {
vi.clearAllMocks()
})

afterEach(() => {
cleanup()
})

it('returns null when status is not "deleted"', () => {
const { container } = render(
<DeprecationNotice
status="active"
deprecatedReason="business_adjustments"
alternativePluginId="alt-plugin"
alternativePluginURL="/plugins/alt-plugin"
/>,
)
expect(container.firstChild).toBeNull()
})

it('renders deprecation notice when status is "deleted"', () => {
render(
<DeprecationNotice
status="deleted"
deprecatedReason=""
alternativePluginId=""
alternativePluginURL=""
/>,
)
expect(screen.getByText('plugin.detailPanel.deprecation.noReason')).toBeInTheDocument()
})

it('renders with valid reason and alternative plugin', () => {
render(
<DeprecationNotice
status="deleted"
deprecatedReason="business_adjustments"
alternativePluginId="better-plugin"
alternativePluginURL="/plugins/better-plugin"
/>,
)
expect(screen.getByText('detailPanel.deprecation.fullMessage')).toBeInTheDocument()
})

it('renders only reason without alternative plugin', () => {
render(
<DeprecationNotice
status="deleted"
deprecatedReason="no_maintainer"
alternativePluginId=""
alternativePluginURL=""
/>,
)
expect(screen.getByText(/plugin\.detailPanel\.deprecation\.onlyReason/)).toBeInTheDocument()
})

it('renders no-reason message for invalid reason', () => {
render(
<DeprecationNotice
status="deleted"
deprecatedReason="unknown_reason"
alternativePluginId=""
alternativePluginURL=""
/>,
)
expect(screen.getByText('plugin.detailPanel.deprecation.noReason')).toBeInTheDocument()
})

it('applies custom className', () => {
const { container } = render(
<DeprecationNotice
status="deleted"
deprecatedReason=""
alternativePluginId=""
alternativePluginURL=""
className="my-custom-class"
/>,
)
expect((container.firstChild as HTMLElement).className).toContain('my-custom-class')
})
})
@@ -0,0 +1,59 @@
import { cleanup, fireEvent, render, screen } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import KeyValueItem from '../key-value-item'

vi.mock('../../../base/icons/src/vender/line/files', () => ({
CopyCheck: () => <span data-testid="copy-check-icon" />,
}))

vi.mock('../../../base/tooltip', () => ({
default: ({ children, popupContent }: { children: React.ReactNode, popupContent: string }) => (
<div data-testid="tooltip" data-content={popupContent}>{children}</div>
),
}))

vi.mock('@/app/components/base/action-button', () => ({
default: ({ children, onClick }: { children: React.ReactNode, onClick: () => void }) => (
<button data-testid="action-button" onClick={onClick}>{children}</button>
),
}))

const mockCopy = vi.fn()
vi.mock('copy-to-clipboard', () => ({
default: (...args: unknown[]) => mockCopy(...args),
}))

describe('KeyValueItem', () => {
beforeEach(() => {
vi.clearAllMocks()
vi.useFakeTimers()
})

afterEach(() => {
vi.useRealTimers()
cleanup()
})

it('renders label and value', () => {
render(<KeyValueItem label="ID" value="abc-123" />)
expect(screen.getByText('ID')).toBeInTheDocument()
expect(screen.getByText('abc-123')).toBeInTheDocument()
})

it('renders maskedValue instead of value when provided', () => {
render(<KeyValueItem label="Key" value="sk-secret" maskedValue="sk-***" />)
expect(screen.getByText('sk-***')).toBeInTheDocument()
expect(screen.queryByText('sk-secret')).not.toBeInTheDocument()
})

it('copies actual value (not masked) when copy button is clicked', () => {
render(<KeyValueItem label="Key" value="sk-secret" maskedValue="sk-***" />)
fireEvent.click(screen.getByTestId('action-button'))
expect(mockCopy).toHaveBeenCalledWith('sk-secret')
})

it('renders copy tooltip', () => {
render(<KeyValueItem label="ID" value="123" />)
expect(screen.getByTestId('tooltip')).toHaveAttribute('data-content', 'common.operation.copy')
})
})
@@ -1,7 +1,7 @@
import { render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { Theme } from '@/types/app'
import IconWithTooltip from './icon-with-tooltip'
import IconWithTooltip from '../icon-with-tooltip'

// Mock Tooltip component
vi.mock('@/app/components/base/tooltip', () => ({
@@ -2,7 +2,7 @@ import type { ComponentProps } from 'react'
import { render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { Theme } from '@/types/app'
import Partner from './partner'
import Partner from '../partner'

// Mock useTheme hook
const mockUseTheme = vi.fn()
@@ -11,9 +11,9 @@ vi.mock('@/hooks/use-theme', () => ({
}))

// Mock IconWithTooltip to directly test Partner's behavior
type IconWithTooltipProps = ComponentProps<typeof import('./icon-with-tooltip').default>
type IconWithTooltipProps = ComponentProps<typeof import('../icon-with-tooltip').default>
const mockIconWithTooltip = vi.fn()
vi.mock('./icon-with-tooltip', () => ({
vi.mock('../icon-with-tooltip', () => ({
default: (props: IconWithTooltipProps) => {
mockIconWithTooltip(props)
const { theme, BadgeIconLight, BadgeIconDark, className, popupContent } = props
@@ -0,0 +1,52 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/app/components/base/icons/src/public/plugins/VerifiedDark', () => ({
default: () => <span data-testid="verified-dark" />,
}))

vi.mock('@/app/components/base/icons/src/public/plugins/VerifiedLight', () => ({
default: () => <span data-testid="verified-light" />,
}))

vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: 'light' }),
}))

vi.mock('../icon-with-tooltip', () => ({
default: ({ popupContent, BadgeIconLight, BadgeIconDark, theme }: {
popupContent: string
BadgeIconLight: React.FC
BadgeIconDark: React.FC
theme: string
[key: string]: unknown
}) => (
<div data-testid="icon-with-tooltip" data-popup={popupContent}>
{theme === 'light' ? <BadgeIconLight /> : <BadgeIconDark />}
</div>
),
}))

describe('Verified', () => {
let Verified: (typeof import('../verified'))['default']

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../verified')
Verified = mod.default
})

it('should render with tooltip text', () => {
render(<Verified text="Verified Plugin" />)

const tooltip = screen.getByTestId('icon-with-tooltip')
expect(tooltip).toHaveAttribute('data-popup', 'Verified Plugin')
})

it('should render light theme icon by default', () => {
render(<Verified text="Verified" />)

expect(screen.getByTestId('verified-light')).toBeInTheDocument()
})
})
@@ -0,0 +1,50 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import CardMoreInfo from '../card-more-info'

vi.mock('../base/download-count', () => ({
default: ({ downloadCount }: { downloadCount: number }) => (
<span data-testid="download-count">{downloadCount}</span>
),
}))

describe('CardMoreInfo', () => {
it('renders tags with # prefix', () => {
render(<CardMoreInfo tags={['search', 'agent']} />)
expect(screen.getByText('search')).toBeInTheDocument()
expect(screen.getByText('agent')).toBeInTheDocument()
// # prefixes
const hashmarks = screen.getAllByText('#')
expect(hashmarks).toHaveLength(2)
})

it('renders download count when provided', () => {
render(<CardMoreInfo downloadCount={1000} tags={[]} />)
expect(screen.getByTestId('download-count')).toHaveTextContent('1000')
})

it('does not render download count when undefined', () => {
render(<CardMoreInfo tags={['tag1']} />)
expect(screen.queryByTestId('download-count')).not.toBeInTheDocument()
})

it('renders separator between download count and tags', () => {
render(<CardMoreInfo downloadCount={500} tags={['test']} />)
expect(screen.getByText('·')).toBeInTheDocument()
})

it('does not render separator when no tags', () => {
render(<CardMoreInfo downloadCount={500} tags={[]} />)
expect(screen.queryByText('·')).not.toBeInTheDocument()
})

it('does not render separator when no download count', () => {
render(<CardMoreInfo tags={['tag1']} />)
expect(screen.queryByText('·')).not.toBeInTheDocument()
})

it('handles empty tags array', () => {
const { container } = render(<CardMoreInfo tags={[]} />)
expect(container.firstChild).toBeInTheDocument()
})
})
589 web/app/components/plugins/card/__tests__/index.spec.tsx (new file)
@@ -0,0 +1,589 @@
import type { Plugin } from '../../types'
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PluginCategoryEnum } from '../../types'
import Card from '../index'

let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: mockTheme }),
}))

vi.mock('@/i18n-config', () => ({
renderI18nObject: (obj: Record<string, string>, locale: string) => {
return obj?.[locale] || obj?.['en-US'] || ''
},
}))

vi.mock('@/i18n-config/language', () => ({
getLanguage: (locale: string) => locale || 'en-US',
}))

const mockCategoriesMap: Record<string, { label: string }> = {
'tool': { label: 'Tool' },
'model': { label: 'Model' },
'extension': { label: 'Extension' },
'agent-strategy': { label: 'Agent' },
'datasource': { label: 'Datasource' },
'trigger': { label: 'Trigger' },
'bundle': { label: 'Bundle' },
}

vi.mock('../../hooks', () => ({
useCategories: () => ({
categoriesMap: mockCategoriesMap,
}),
}))

vi.mock('@/utils/format', () => ({
formatNumber: (num: number) => num.toLocaleString(),
}))

vi.mock('@/utils/mcp', () => ({
shouldUseMcpIcon: (src: unknown) => typeof src === 'object' && src !== null && (src as { content?: string })?.content === '🔗',
}))

vi.mock('@/app/components/base/app-icon', () => ({
default: ({ icon, background, innerIcon, size, iconType }: {
icon?: string
background?: string
innerIcon?: React.ReactNode
size?: string
iconType?: string
}) => (
<div
data-testid="app-icon"
data-icon={icon}
data-background={background}
data-size={size}
data-icon-type={iconType}
>
{!!innerIcon && <div data-testid="inner-icon">{innerIcon}</div>}
</div>
),
}))

vi.mock('@/app/components/base/icons/src/vender/other', () => ({
Mcp: ({ className }: { className?: string }) => (
<div data-testid="mcp-icon" className={className}>MCP</div>
),
Group: ({ className }: { className?: string }) => (
<div data-testid="group-icon" className={className}>Group</div>
),
}))

vi.mock('../../../base/icons/src/vender/plugin', () => ({
LeftCorner: ({ className }: { className?: string }) => (
<div data-testid="left-corner" className={className}>LeftCorner</div>
),
}))

vi.mock('../../base/badges/partner', () => ({
default: ({ className, text }: { className?: string, text?: string }) => (
<div data-testid="partner-badge" className={className} title={text}>Partner</div>
),
}))

vi.mock('../../base/badges/verified', () => ({
default: ({ className, text }: { className?: string, text?: string }) => (
<div data-testid="verified-badge" className={className} title={text}>Verified</div>
),
}))

vi.mock('@/app/components/base/skeleton', () => ({
SkeletonContainer: ({ children }: { children: React.ReactNode }) => (
<div data-testid="skeleton-container">{children}</div>
),
SkeletonPoint: () => <div data-testid="skeleton-point" />,
SkeletonRectangle: ({ className }: { className?: string }) => (
<div data-testid="skeleton-rectangle" className={className} />
),
SkeletonRow: ({ children, className }: { children: React.ReactNode, className?: string }) => (
<div data-testid="skeleton-row" className={className}>{children}</div>
),
}))

const createMockPlugin = (overrides?: Partial<Plugin>): Plugin => ({
type: 'plugin',
org: 'test-org',
name: 'test-plugin',
plugin_id: 'plugin-123',
version: '1.0.0',
latest_version: '1.0.0',
latest_package_identifier: 'test-org/test-plugin:1.0.0',
icon: '/test-icon.png',
verified: false,
label: { 'en-US': 'Test Plugin' },
brief: { 'en-US': 'Test plugin description' },
description: { 'en-US': 'Full test plugin description' },
introduction: 'Test plugin introduction',
repository: 'https://github.com/test/plugin',
category: PluginCategoryEnum.tool,
install_count: 1000,
endpoint: { settings: [] },
tags: [{ name: 'search' }],
badges: [],
verification: { authorized_category: 'community' },
from: 'marketplace',
...overrides,
})

describe('Card', () => {
beforeEach(() => {
vi.clearAllMocks()
})

// ================================
// Rendering Tests
// ================================
describe('Rendering', () => {
it('should render without crashing', () => {
const plugin = createMockPlugin()
render(<Card payload={plugin} />)

expect(document.body).toBeInTheDocument()
})

it('should render plugin title from label', () => {
const plugin = createMockPlugin({
label: { 'en-US': 'My Plugin Title' },
})

render(<Card payload={plugin} />)

expect(screen.getByText('My Plugin Title')).toBeInTheDocument()
})

it('should render plugin description from brief', () => {
const plugin = createMockPlugin({
brief: { 'en-US': 'This is a brief description' },
})

render(<Card payload={plugin} />)

expect(screen.getByText('This is a brief description')).toBeInTheDocument()
})

it('should render organization info with org name and package name', () => {
const plugin = createMockPlugin({
org: 'my-org',
name: 'my-plugin',
})

render(<Card payload={plugin} />)

expect(screen.getByText('my-org')).toBeInTheDocument()
expect(screen.getByText('my-plugin')).toBeInTheDocument()
})

it('should render plugin icon', () => {
const plugin = createMockPlugin({
icon: '/custom-icon.png',
})

const { container } = render(<Card payload={plugin} />)

// Check for background image style on icon element
const iconElement = container.querySelector('[style*="background-image"]')
expect(iconElement).toBeInTheDocument()
})

it('should use icon_dark when theme is dark and icon_dark is provided', () => {
// Set theme to dark
mockTheme = 'dark'

const plugin = createMockPlugin({
icon: '/light-icon.png',
icon_dark: '/dark-icon.png',
})

const { container } = render(<Card payload={plugin} />)

// Check that icon uses dark icon
const iconElement = container.querySelector('[style*="background-image"]')
expect(iconElement).toBeInTheDocument()
expect(iconElement).toHaveStyle({ backgroundImage: 'url(/dark-icon.png)' })

// Reset theme
mockTheme = 'light'
})

it('should use icon when theme is dark but icon_dark is not provided', () => {
mockTheme = 'dark'

const plugin = createMockPlugin({
icon: '/light-icon.png',
})

const { container } = render(<Card payload={plugin} />)

// Should fallback to light icon
const iconElement = container.querySelector('[style*="background-image"]')
expect(iconElement).toBeInTheDocument()
expect(iconElement).toHaveStyle({ backgroundImage: 'url(/light-icon.png)' })

mockTheme = 'light'
})

it('should render corner mark with category label', () => {
const plugin = createMockPlugin({
category: PluginCategoryEnum.tool,
})

render(<Card payload={plugin} />)

expect(screen.getByText('Tool')).toBeInTheDocument()
})
})

// ================================
// Props Testing
// ================================
describe('Props', () => {
it('should apply custom className', () => {
const plugin = createMockPlugin()
const { container } = render(
<Card payload={plugin} className="custom-class" />,
)

expect(container.querySelector('.custom-class')).toBeInTheDocument()
})

it('should hide corner mark when hideCornerMark is true', () => {
const plugin = createMockPlugin({
category: PluginCategoryEnum.tool,
})

render(<Card payload={plugin} hideCornerMark={true} />)

expect(screen.queryByTestId('left-corner')).not.toBeInTheDocument()
})

it('should show corner mark by default', () => {
const plugin = createMockPlugin()

render(<Card payload={plugin} />)

expect(screen.getByTestId('left-corner')).toBeInTheDocument()
})

it('should pass installed prop to Icon component', () => {
const plugin = createMockPlugin()
const { container } = render(<Card payload={plugin} installed={true} />)

expect(container.querySelector('.bg-state-success-solid')).toBeInTheDocument()
})

it('should pass installFailed prop to Icon component', () => {
const plugin = createMockPlugin()
const { container } = render(<Card payload={plugin} installFailed={true} />)

expect(container.querySelector('.bg-state-destructive-solid')).toBeInTheDocument()
})

it('should render footer when provided', () => {
const plugin = createMockPlugin()
render(
<Card payload={plugin} footer={<div data-testid="custom-footer">Footer Content</div>} />,
)

expect(screen.getByTestId('custom-footer')).toBeInTheDocument()
expect(screen.getByText('Footer Content')).toBeInTheDocument()
})

it('should render titleLeft when provided', () => {
const plugin = createMockPlugin()
render(
<Card payload={plugin} titleLeft={<span data-testid="title-left">v1.0</span>} />,
)

expect(screen.getByTestId('title-left')).toBeInTheDocument()
})

it('should use custom descriptionLineRows', () => {
const plugin = createMockPlugin()

const { container } = render(
<Card payload={plugin} descriptionLineRows={1} />,
)

// Check for h-4 truncate class when descriptionLineRows is 1
expect(container.querySelector('.h-4.truncate')).toBeInTheDocument()
})

it('should use default descriptionLineRows of 2', () => {
const plugin = createMockPlugin()

const { container } = render(<Card payload={plugin} />)

// Check for h-8 line-clamp-2 class when descriptionLineRows is 2 (default)
expect(container.querySelector('.h-8.line-clamp-2')).toBeInTheDocument()
})
})

// ================================
// Loading State Tests
// ================================
describe('Loading State', () => {
it('should render Placeholder when isLoading is true', () => {
const plugin = createMockPlugin()

render(<Card payload={plugin} isLoading={true} loadingFileName="loading.txt" />)

// Should render skeleton elements
expect(screen.getByTestId('skeleton-container')).toBeInTheDocument()
})

it('should render loadingFileName in Placeholder', () => {
const plugin = createMockPlugin()

render(<Card payload={plugin} isLoading={true} loadingFileName="my-plugin.zip" />)

expect(screen.getByText('my-plugin.zip')).toBeInTheDocument()
})

it('should not render card content when loading', () => {
const plugin = createMockPlugin({
label: { 'en-US': 'Plugin Title' },
})

render(<Card payload={plugin} isLoading={true} loadingFileName="file.txt" />)

// Plugin content should not be visible during loading
expect(screen.queryByText('Plugin Title')).not.toBeInTheDocument()
})

it('should not render loading state by default', () => {
const plugin = createMockPlugin()

render(<Card payload={plugin} />)

expect(screen.queryByTestId('skeleton-container')).not.toBeInTheDocument()
})
})

// ================================
// Badges Tests
// ================================
describe('Badges', () => {
it('should render Partner badge when badges includes partner', () => {
const plugin = createMockPlugin({
badges: ['partner'],
})

render(<Card payload={plugin} />)

expect(screen.getByTestId('partner-badge')).toBeInTheDocument()
})

it('should render Verified badge when verified is true', () => {
const plugin = createMockPlugin({
verified: true,
})

render(<Card payload={plugin} />)

expect(screen.getByTestId('verified-badge')).toBeInTheDocument()
})

it('should render both Partner and Verified badges', () => {
const plugin = createMockPlugin({
badges: ['partner'],
verified: true,
})

render(<Card payload={plugin} />)

expect(screen.getByTestId('partner-badge')).toBeInTheDocument()
expect(screen.getByTestId('verified-badge')).toBeInTheDocument()
})

it('should not render Partner badge when badges is empty', () => {
const plugin = createMockPlugin({
badges: [],
})

render(<Card payload={plugin} />)

expect(screen.queryByTestId('partner-badge')).not.toBeInTheDocument()
})

it('should not render Verified badge when verified is false', () => {
const plugin = createMockPlugin({
verified: false,
})

render(<Card payload={plugin} />)

expect(screen.queryByTestId('verified-badge')).not.toBeInTheDocument()
})

it('should handle undefined badges gracefully', () => {
const plugin = createMockPlugin()
// @ts-expect-error - Testing undefined badges
plugin.badges = undefined

render(<Card payload={plugin} />)

expect(screen.queryByTestId('partner-badge')).not.toBeInTheDocument()
})
})

// ================================
// Limited Install Warning Tests
// ================================
describe('Limited Install Warning', () => {
it('should render warning when limitedInstall is true', () => {
const plugin = createMockPlugin()

const { container } = render(<Card payload={plugin} limitedInstall={true} />)

expect(container.querySelector('.text-text-warning-secondary')).toBeInTheDocument()
})

it('should not render warning by default', () => {
const plugin = createMockPlugin()

const { container } = render(<Card payload={plugin} />)

expect(container.querySelector('.text-text-warning-secondary')).not.toBeInTheDocument()
})

it('should apply limited padding when limitedInstall is true', () => {
const plugin = createMockPlugin()

const { container } = render(<Card payload={plugin} limitedInstall={true} />)

expect(container.querySelector('.pb-1')).toBeInTheDocument()
})
})

// ================================
// Category Type Tests
// ================================
describe('Category Types', () => {
it('should display bundle label for bundle type', () => {
const plugin = createMockPlugin({
type: 'bundle',
category: PluginCategoryEnum.tool,
})

render(<Card payload={plugin} />)

// For bundle type, should show 'Bundle' instead of category
expect(screen.getByText('Bundle')).toBeInTheDocument()
})

it('should display category label for non-bundle types', () => {
const plugin = createMockPlugin({
type: 'plugin',
category: PluginCategoryEnum.model,
})

render(<Card payload={plugin} />)

expect(screen.getByText('Model')).toBeInTheDocument()
})
})

// ================================
// Memoization Tests
// ================================
describe('Memoization', () => {
it('should be memoized with React.memo', () => {
// Card is wrapped with React.memo
expect(Card).toBeDefined()
// The component should have the memo display name characteristic
expect(typeof Card).toBe('object')
})

it('should not re-render when props are the same', () => {
const plugin = createMockPlugin()
const renderCount = vi.fn()

const TestWrapper = ({ p }: { p: Plugin }) => {
renderCount()
return <Card payload={p} />
}

const { rerender } = render(<TestWrapper p={plugin} />)
expect(renderCount).toHaveBeenCalledTimes(1)

// Re-render with same plugin reference
rerender(<TestWrapper p={plugin} />)
expect(renderCount).toHaveBeenCalledTimes(2)
})
})

// ================================
// Edge Cases Tests
// ================================
describe('Edge Cases', () => {
it('should handle empty label object', () => {
const plugin = createMockPlugin({
label: {},
})

render(<Card payload={plugin} />)

// Should render without crashing
expect(document.body).toBeInTheDocument()
})

it('should handle empty brief object', () => {
const plugin = createMockPlugin({
brief: {},
})

render(<Card payload={plugin} />)

expect(document.body).toBeInTheDocument()
})

it('should handle undefined label', () => {
const plugin = createMockPlugin()
// @ts-expect-error - Testing undefined label
plugin.label = undefined

render(<Card payload={plugin} />)

expect(document.body).toBeInTheDocument()
})

it('should handle special characters in plugin name', () => {
const plugin = createMockPlugin({
name: 'plugin-with-special-chars!@#$%',
org: 'org<script>alert(1)</script>',
})

render(<Card payload={plugin} />)

expect(screen.getByText('plugin-with-special-chars!@#$%')).toBeInTheDocument()
})

it('should handle very long title', () => {
const longTitle = 'A'.repeat(500)
const plugin = createMockPlugin({
label: { 'en-US': longTitle },
})

const { container } = render(<Card payload={plugin} />)

// Should have truncate class for long text
expect(container.querySelector('.truncate')).toBeInTheDocument()
})

it('should handle very long description', () => {
const longDescription = 'B'.repeat(1000)
const plugin = createMockPlugin({
brief: { 'en-US': longDescription },
})

const { container } = render(<Card payload={plugin} />)

// Should have line-clamp class for long text
expect(container.querySelector('.line-clamp-2')).toBeInTheDocument()
})
})
})
@@ -0,0 +1,61 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import Icon from '../card-icon'

vi.mock('@/app/components/base/app-icon', () => ({
default: ({ icon, background }: { icon: string, background: string }) => (
<div data-testid="app-icon" data-icon={icon} data-bg={background} />
),
}))

vi.mock('@/app/components/base/icons/src/vender/other', () => ({
Mcp: () => <span data-testid="mcp-icon" />,
}))

vi.mock('@/utils/mcp', () => ({
shouldUseMcpIcon: () => false,
}))

describe('Icon', () => {
it('renders string src as background image', () => {
const { container } = render(<Icon src="https://example.com/icon.png" />)
const el = container.firstChild as HTMLElement
expect(el.style.backgroundImage).toContain('https://example.com/icon.png')
})

it('renders emoji src using AppIcon', () => {
render(<Icon src={{ content: '🔍', background: '#fff' }} />)
expect(screen.getByTestId('app-icon')).toBeInTheDocument()
expect(screen.getByTestId('app-icon')).toHaveAttribute('data-icon', '🔍')
expect(screen.getByTestId('app-icon')).toHaveAttribute('data-bg', '#fff')
})

it('shows check icon when installed', () => {
const { container } = render(<Icon src="icon.png" installed />)
expect(container.querySelector('.bg-state-success-solid')).toBeInTheDocument()
})

it('shows close icon when installFailed', () => {
const { container } = render(<Icon src="icon.png" installFailed />)
expect(container.querySelector('.bg-state-destructive-solid')).toBeInTheDocument()
})

it('does not show status icons by default', () => {
const { container } = render(<Icon src="icon.png" />)
expect(container.querySelector('.bg-state-success-solid')).not.toBeInTheDocument()
expect(container.querySelector('.bg-state-destructive-solid')).not.toBeInTheDocument()
})

it('applies custom className', () => {
const { container } = render(<Icon src="icon.png" className="my-class" />)
const el = container.firstChild as HTMLElement
expect(el.className).toContain('my-class')
})

it('applies correct size class', () => {
const { container } = render(<Icon src="icon.png" size="small" />)
const el = container.firstChild as HTMLElement
expect(el.className).toContain('w-8')
expect(el.className).toContain('h-8')
})
})
@@ -0,0 +1,27 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import CornerMark from '../corner-mark'

vi.mock('../../../../base/icons/src/vender/plugin', () => ({
LeftCorner: ({ className }: { className: string }) => <svg data-testid="left-corner" className={className} />,
}))

describe('CornerMark', () => {
it('renders the text content', () => {
render(<CornerMark text="NEW" />)
expect(screen.getByText('NEW')).toBeInTheDocument()
})

it('renders the LeftCorner icon', () => {
render(<CornerMark text="BETA" />)
expect(screen.getByTestId('left-corner')).toBeInTheDocument()
})

it('renders with absolute positioning', () => {
const { container } = render(<CornerMark text="TAG" />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper.className).toContain('absolute')
expect(wrapper.className).toContain('right-0')
expect(wrapper.className).toContain('top-0')
})
})
@@ -0,0 +1,37 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import Description from '../description'

describe('Description', () => {
it('renders description text', () => {
render(<Description text="A great plugin" descriptionLineRows={1} />)
expect(screen.getByText('A great plugin')).toBeInTheDocument()
})

it('applies truncate class for 1 line', () => {
render(<Description text="Single line" descriptionLineRows={1} />)
const el = screen.getByText('Single line')
expect(el.className).toContain('truncate')
expect(el.className).toContain('h-4')
})

it('applies line-clamp-2 class for 2 lines', () => {
render(<Description text="Two lines" descriptionLineRows={2} />)
const el = screen.getByText('Two lines')
expect(el.className).toContain('line-clamp-2')
expect(el.className).toContain('h-8')
})

it('applies line-clamp-3 class for 3 lines', () => {
render(<Description text="Three lines" descriptionLineRows={3} />)
const el = screen.getByText('Three lines')
expect(el.className).toContain('line-clamp-3')
expect(el.className).toContain('h-12')
})

it('applies custom className', () => {
render(<Description text="test" descriptionLineRows={1} className="mt-2" />)
const el = screen.getByText('test')
expect(el.className).toContain('mt-2')
})
})
@@ -0,0 +1,28 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import DownloadCount from '../download-count'

vi.mock('@/utils/format', () => ({
formatNumber: (n: number) => {
if (n >= 1000)
return `${(n / 1000).toFixed(1)}k`
return String(n)
},
}))

describe('DownloadCount', () => {
it('renders formatted download count', () => {
render(<DownloadCount downloadCount={1500} />)
expect(screen.getByText('1.5k')).toBeInTheDocument()
})

it('renders small numbers directly', () => {
render(<DownloadCount downloadCount={42} />)
expect(screen.getByText('42')).toBeInTheDocument()
})

it('renders zero download count', () => {
render(<DownloadCount downloadCount={0} />)
expect(screen.getByText('0')).toBeInTheDocument()
})
})
@@ -0,0 +1,34 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import OrgInfo from '../org-info'

describe('OrgInfo', () => {
it('renders package name', () => {
render(<OrgInfo packageName="my-plugin" />)
expect(screen.getByText('my-plugin')).toBeInTheDocument()
})

it('renders org name with separator when provided', () => {
render(<OrgInfo orgName="dify" packageName="search-tool" />)
expect(screen.getByText('dify')).toBeInTheDocument()
expect(screen.getByText('/')).toBeInTheDocument()
expect(screen.getByText('search-tool')).toBeInTheDocument()
})

it('does not render org name or separator when orgName is not provided', () => {
render(<OrgInfo packageName="standalone" />)
expect(screen.queryByText('/')).not.toBeInTheDocument()
expect(screen.getByText('standalone')).toBeInTheDocument()
})

it('applies custom className', () => {
const { container } = render(<OrgInfo packageName="pkg" className="custom-class" />)
expect((container.firstChild as HTMLElement).className).toContain('custom-class')
})

it('applies packageNameClassName to package name element', () => {
render(<OrgInfo packageName="pkg" packageNameClassName="w-auto" />)
const pkgEl = screen.getByText('pkg')
expect(pkgEl.className).toContain('w-auto')
})
})
@@ -0,0 +1,71 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('../title', () => ({
default: ({ title }: { title: string }) => <span data-testid="title">{title}</span>,
}))

vi.mock('../../../../base/icons/src/vender/other', () => ({
Group: ({ className }: { className: string }) => <span data-testid="group-icon" className={className} />,
}))

vi.mock('@/utils/classnames', () => ({
cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
}))

describe('Placeholder', () => {
let Placeholder: (typeof import('../placeholder'))['default']

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../placeholder')
Placeholder = mod.default
})

it('should render skeleton rows', () => {
const { container } = render(<Placeholder wrapClassName="w-full" />)

expect(container.querySelectorAll('.gap-2').length).toBeGreaterThanOrEqual(1)
})

it('should render group icon placeholder', () => {
render(<Placeholder wrapClassName="w-full" />)

expect(screen.getByTestId('group-icon')).toBeInTheDocument()
})

it('should render loading filename when provided', () => {
render(<Placeholder wrapClassName="w-full" loadingFileName="test-plugin.zip" />)

expect(screen.getByTestId('title')).toHaveTextContent('test-plugin.zip')
})

it('should render skeleton rectangles when no filename', () => {
const { container } = render(<Placeholder wrapClassName="w-full" />)

expect(container.querySelectorAll('.bg-text-quaternary').length).toBeGreaterThanOrEqual(1)
})
})

describe('LoadingPlaceholder', () => {
let LoadingPlaceholder: (typeof import('../placeholder'))['LoadingPlaceholder']

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../placeholder')
LoadingPlaceholder = mod.LoadingPlaceholder
})

it('should render as a simple div with background', () => {
const { container } = render(<LoadingPlaceholder />)

expect(container.firstChild).toBeTruthy()
})

it('should accept className prop', () => {
const { container } = render(<LoadingPlaceholder className="mt-3 w-[420px]" />)

expect(container.firstChild).toBeTruthy()
})
})
@@ -0,0 +1,21 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import Title from '../title'

describe('Title', () => {
it('renders the title text', () => {
render(<Title title="Test Plugin" />)
expect(screen.getByText('Test Plugin')).toBeInTheDocument()
})

it('renders with truncate class for long text', () => {
render(<Title title="A very long title that should be truncated" />)
const el = screen.getByText('A very long title that should be truncated')
expect(el.className).toContain('truncate')
})

it('renders empty string without error', () => {
const { container } = render(<Title title="" />)
expect(container.firstChild).toBeInTheDocument()
})
})
(File diff suppressed because it is too large.)
@@ -0,0 +1,166 @@
import { renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useGitHubReleases, useGitHubUpload } from '../hooks'

const mockNotify = vi.fn()
vi.mock('@/app/components/base/toast', () => ({
default: { notify: (...args: unknown[]) => mockNotify(...args) },
}))

vi.mock('@/config', () => ({
GITHUB_ACCESS_TOKEN: '',
}))

const mockUploadGitHub = vi.fn()
vi.mock('@/service/plugins', () => ({
uploadGitHub: (...args: unknown[]) => mockUploadGitHub(...args),
}))

vi.mock('@/utils/semver', () => ({
compareVersion: (a: string, b: string) => {
const parseVersion = (v: string) => v.replace(/^v/, '').split('.').map(Number)
const va = parseVersion(a)
const vb = parseVersion(b)
for (let i = 0; i < Math.max(va.length, vb.length); i++) {
const diff = (va[i] || 0) - (vb[i] || 0)
if (diff > 0)
return 1
if (diff < 0)
return -1
}
return 0
},
getLatestVersion: (versions: string[]) => {
return versions.sort((a, b) => {
const pa = a.replace(/^v/, '').split('.').map(Number)
const pb = b.replace(/^v/, '').split('.').map(Number)
for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
const diff = (pa[i] || 0) - (pb[i] || 0)
if (diff !== 0)
return diff
}
return 0
}).pop()!
},
}))

const mockFetch = vi.fn()
globalThis.fetch = mockFetch

describe('install-plugin/hooks', () => {
beforeEach(() => {
vi.clearAllMocks()
})

describe('useGitHubReleases', () => {
describe('fetchReleases', () => {
it('fetches releases from GitHub API and formats them', async () => {
mockFetch.mockResolvedValue({
ok: true,
json: () => Promise.resolve([
{
tag_name: 'v1.0.0',
assets: [{ browser_download_url: 'https://example.com/v1.zip', name: 'plugin.zip' }],
body: 'Release notes',
},
]),
})

const { result } = renderHook(() => useGitHubReleases())
const releases = await result.current.fetchReleases('owner', 'repo')

expect(releases).toHaveLength(1)
expect(releases[0].tag_name).toBe('v1.0.0')
expect(releases[0].assets[0].name).toBe('plugin.zip')
expect(releases[0]).not.toHaveProperty('body')
})

it('returns empty array and shows toast on fetch error', async () => {
mockFetch.mockResolvedValue({
ok: false,
})

const { result } = renderHook(() => useGitHubReleases())
const releases = await result.current.fetchReleases('owner', 'repo')

expect(releases).toEqual([])
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})

describe('checkForUpdates', () => {
it('detects newer version available', () => {
const { result } = renderHook(() => useGitHubReleases())
const releases = [
{ tag_name: 'v1.0.0', assets: [] },
{ tag_name: 'v2.0.0', assets: [] },
]
const { needUpdate, toastProps } = result.current.checkForUpdates(releases, 'v1.0.0')
expect(needUpdate).toBe(true)
expect(toastProps.message).toContain('v2.0.0')
})

it('returns no update when current is latest', () => {
const { result } = renderHook(() => useGitHubReleases())
const releases = [
{ tag_name: 'v1.0.0', assets: [] },
]
const { needUpdate, toastProps } = result.current.checkForUpdates(releases, 'v1.0.0')
expect(needUpdate).toBe(false)
expect(toastProps.type).toBe('info')
})

it('returns error for empty releases', () => {
const { result } = renderHook(() => useGitHubReleases())
const { needUpdate, toastProps } = result.current.checkForUpdates([], 'v1.0.0')
expect(needUpdate).toBe(false)
expect(toastProps.type).toBe('error')
expect(toastProps.message).toContain('empty')
})
})
})

describe('useGitHubUpload', () => {
it('uploads successfully and calls onSuccess', async () => {
const mockManifest = { name: 'test-plugin' }
mockUploadGitHub.mockResolvedValue({
manifest: mockManifest,
unique_identifier: 'uid-123',
})
const onSuccess = vi.fn()

const { result } = renderHook(() => useGitHubUpload())
const pkg = await result.current.handleUpload(
'https://github.com/owner/repo',
'v1.0.0',
'plugin.difypkg',
onSuccess,
)

expect(mockUploadGitHub).toHaveBeenCalledWith(
'https://github.com/owner/repo',
'v1.0.0',
'plugin.difypkg',
)
expect(onSuccess).toHaveBeenCalledWith({
manifest: mockManifest,
unique_identifier: 'uid-123',
})
expect(pkg.unique_identifier).toBe('uid-123')
})

it('shows toast on upload error', async () => {
mockUploadGitHub.mockRejectedValue(new Error('Upload failed'))

const { result } = renderHook(() => useGitHubUpload())
await expect(
result.current.handleUpload('url', 'v1', 'pkg'),
).rejects.toThrow('Upload failed')
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error', message: 'Error uploading package' }),
)
})
})
})
@@ -1,12 +1,12 @@
import type { PluginDeclaration, PluginManifestInMarket } from '../types'
import type { PluginDeclaration, PluginManifestInMarket } from '../../types'
import { describe, expect, it, vi } from 'vitest'
import { PluginCategoryEnum } from '../types'
import { PluginCategoryEnum } from '../../types'
import {
convertRepoToUrl,
parseGitHubUrl,
pluginManifestInMarketToPluginProps,
pluginManifestToCardPluginProps,
} from './utils'
} from '../utils'

// Mock es-toolkit/compat
vi.mock('es-toolkit/compat', () => ({
@@ -0,0 +1,125 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { TaskStatus } from '../../../types'
import checkTaskStatus from '../check-task-status'

const mockCheckTaskStatus = vi.fn()
vi.mock('@/service/plugins', () => ({
checkTaskStatus: (...args: unknown[]) => mockCheckTaskStatus(...args),
}))

// Mock sleep to avoid actual waiting in tests
vi.mock('@/utils', () => ({
sleep: vi.fn().mockResolvedValue(undefined),
}))

describe('checkTaskStatus', () => {
beforeEach(() => {
vi.clearAllMocks()
})

afterEach(() => {
vi.restoreAllMocks()
})

it('returns success when plugin status is success', async () => {
mockCheckTaskStatus.mockResolvedValue({
task: {
plugins: [
{ plugin_unique_identifier: 'test-plugin', status: TaskStatus.success, message: '' },
],
},
})

const { check } = checkTaskStatus()
const result = await check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
expect(result.status).toBe(TaskStatus.success)
})

it('returns failed when plugin status is failed', async () => {
mockCheckTaskStatus.mockResolvedValue({
task: {
plugins: [
{ plugin_unique_identifier: 'test-plugin', status: TaskStatus.failed, message: 'Install failed' },
],
},
})

const { check } = checkTaskStatus()
const result = await check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
expect(result.status).toBe(TaskStatus.failed)
expect(result.error).toBe('Install failed')
})

it('returns failed when plugin is not found in task', async () => {
mockCheckTaskStatus.mockResolvedValue({
task: {
plugins: [
{ plugin_unique_identifier: 'other-plugin', status: TaskStatus.success, message: '' },
],
},
})

const { check } = checkTaskStatus()
const result = await check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
expect(result.status).toBe(TaskStatus.failed)
expect(result.error).toBe('Plugin package not found')
})

it('polls recursively when status is running, then resolves on success', async () => {
let callCount = 0
mockCheckTaskStatus.mockImplementation(() => {
callCount++
if (callCount < 3) {
return Promise.resolve({
task: {
plugins: [
{ plugin_unique_identifier: 'test-plugin', status: TaskStatus.running, message: '' },
],
},
})
}
return Promise.resolve({
task: {
plugins: [
{ plugin_unique_identifier: 'test-plugin', status: TaskStatus.success, message: '' },
],
},
})
})

const { check } = checkTaskStatus()
const result = await check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
expect(result.status).toBe(TaskStatus.success)
expect(mockCheckTaskStatus).toHaveBeenCalledTimes(3)
})

it('stop() causes early return with success', async () => {
const { check, stop } = checkTaskStatus()
stop()
const result = await check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
expect(result.status).toBe(TaskStatus.success)
expect(mockCheckTaskStatus).not.toHaveBeenCalled()
})

it('returns different instances with independent state', async () => {
const checker1 = checkTaskStatus()
const checker2 = checkTaskStatus()

checker1.stop()

mockCheckTaskStatus.mockResolvedValue({
task: {
plugins: [
{ plugin_unique_identifier: 'test-plugin', status: TaskStatus.success, message: '' },
],
},
})

const result1 = await checker1.check({ taskId: 'task-1', pluginUniqueIdentifier: 'test-plugin' })
const result2 = await checker2.check({ taskId: 'task-2', pluginUniqueIdentifier: 'test-plugin' })

expect(result1.status).toBe(TaskStatus.success)
expect(result2.status).toBe(TaskStatus.success)
expect(mockCheckTaskStatus).toHaveBeenCalledTimes(1)
})
})
@@ -0,0 +1,81 @@
import { fireEvent, render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('../../../card', () => ({
default: ({ installed, installFailed, titleLeft }: { installed: boolean, installFailed: boolean, titleLeft?: React.ReactNode }) => (
<div data-testid="card" data-installed={installed} data-failed={installFailed}>{titleLeft}</div>
),
}))

vi.mock('../../utils', () => ({
pluginManifestInMarketToPluginProps: (p: unknown) => p,
pluginManifestToCardPluginProps: (p: unknown) => p,
}))

describe('Installed', () => {
let Installed: (typeof import('../installed'))['default']

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../installed')
Installed = mod.default
})

it('should render success message when not failed', () => {
render(<Installed isFailed={false} onCancel={vi.fn()} />)

expect(screen.getByText('plugin.installModal.installedSuccessfullyDesc')).toBeInTheDocument()
})

it('should render failure message when failed', () => {
render(<Installed isFailed={true} onCancel={vi.fn()} />)

expect(screen.getByText('plugin.installModal.installFailedDesc')).toBeInTheDocument()
})

it('should render custom error message when provided', () => {
render(<Installed isFailed={true} errMsg="Custom error" onCancel={vi.fn()} />)

expect(screen.getByText('Custom error')).toBeInTheDocument()
})

it('should render card with payload', () => {
const payload = { version: '1.0.0', name: 'test-plugin' } as never
render(<Installed payload={payload} isFailed={false} onCancel={vi.fn()} />)

const card = screen.getByTestId('card')
expect(card).toHaveAttribute('data-installed', 'true')
expect(card).toHaveAttribute('data-failed', 'false')
})

it('should render card as failed when isFailed', () => {
const payload = { version: '1.0.0', name: 'test-plugin' } as never
render(<Installed payload={payload} isFailed={true} onCancel={vi.fn()} />)

const card = screen.getByTestId('card')
expect(card).toHaveAttribute('data-installed', 'false')
expect(card).toHaveAttribute('data-failed', 'true')
})

it('should call onCancel when close button clicked', () => {
const mockOnCancel = vi.fn()
render(<Installed isFailed={false} onCancel={mockOnCancel} />)

fireEvent.click(screen.getByText('common.operation.close'))
expect(mockOnCancel).toHaveBeenCalled()
})

it('should show version badge in card', () => {
const payload = { version: '1.0.0', name: 'test-plugin' } as never
render(<Installed payload={payload} isFailed={false} onCancel={vi.fn()} />)

expect(screen.getByText('1.0.0')).toBeInTheDocument()
})

it('should not render card when no payload', () => {
render(<Installed isFailed={false} onCancel={vi.fn()} />)

expect(screen.queryByTestId('card')).not.toBeInTheDocument()
})
})
@@ -0,0 +1,46 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/app/components/plugins/card/base/placeholder', () => ({
LoadingPlaceholder: () => <div data-testid="loading-placeholder" />,
}))

vi.mock('../../../../base/icons/src/vender/other', () => ({
Group: ({ className }: { className: string }) => <span data-testid="group-icon" className={className} />,
}))

describe('LoadingError', () => {
let LoadingError: React.FC

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../loading-error')
LoadingError = mod.default
})

it('should render error message', () => {
render(<LoadingError />)

expect(screen.getByText('plugin.installModal.pluginLoadError')).toBeInTheDocument()
expect(screen.getByText('plugin.installModal.pluginLoadErrorDesc')).toBeInTheDocument()
})

it('should render disabled checkbox', () => {
render(<LoadingError />)

expect(screen.getByTestId('checkbox-undefined')).toBeInTheDocument()
})

it('should render error icon with close indicator', () => {
render(<LoadingError />)

expect(screen.getByTestId('group-icon')).toBeInTheDocument()
})

it('should render loading placeholder', () => {
render(<LoadingError />)

expect(screen.getByTestId('loading-placeholder')).toBeInTheDocument()
})
})
@@ -0,0 +1,29 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('../../../card/base/placeholder', () => ({
default: () => <div data-testid="placeholder" />,
}))

describe('Loading', () => {
let Loading: React.FC

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../loading')
Loading = mod.default
})

it('should render disabled unchecked checkbox', () => {
render(<Loading />)

expect(screen.getByTestId('checkbox-undefined')).toBeInTheDocument()
})

it('should render placeholder', () => {
render(<Loading />)

expect(screen.getByTestId('placeholder')).toBeInTheDocument()
})
})
@@ -0,0 +1,43 @@
import { render, screen } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

describe('Version', () => {
let Version: (typeof import('../version'))['default']

beforeEach(async () => {
vi.clearAllMocks()
const mod = await import('../version')
Version = mod.default
})

it('should show simple version badge for new install', () => {
render(<Version hasInstalled={false} toInstallVersion="1.0.0" />)

expect(screen.getByText('1.0.0')).toBeInTheDocument()
})

it('should show upgrade version badge for existing install', () => {
render(
<Version
hasInstalled={true}
installedVersion="1.0.0"
toInstallVersion="2.0.0"
/>,
)

expect(screen.getByText('1.0.0 -> 2.0.0')).toBeInTheDocument()
})

it('should handle downgrade version display', () => {
render(
<Version
hasInstalled={true}
installedVersion="2.0.0"
toInstallVersion="1.0.0"
/>,
)

expect(screen.getByText('2.0.0 -> 1.0.0')).toBeInTheDocument()
})
})
@@ -0,0 +1,79 @@
import { renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import useCheckInstalled from '../use-check-installed'

const mockPlugins = [
{
plugin_id: 'plugin-1',
id: 'installed-1',
declaration: { version: '1.0.0' },
plugin_unique_identifier: 'org/plugin-1',
},
{
plugin_id: 'plugin-2',
id: 'installed-2',
declaration: { version: '2.0.0' },
plugin_unique_identifier: 'org/plugin-2',
},
]

vi.mock('@/service/use-plugins', () => ({
useCheckInstalled: ({ pluginIds, enabled }: { pluginIds: string[], enabled: boolean }) => ({
data: enabled && pluginIds.length > 0 ? { plugins: mockPlugins } : undefined,
isLoading: false,
error: null,
}),
}))

describe('useCheckInstalled', () => {
beforeEach(() => {
vi.clearAllMocks()
})

it('should return installed info when enabled and has plugin IDs', () => {
const { result } = renderHook(() => useCheckInstalled({
pluginIds: ['plugin-1', 'plugin-2'],
enabled: true,
}))

expect(result.current.installedInfo).toBeDefined()
expect(result.current.installedInfo?.['plugin-1']).toEqual({
installedId: 'installed-1',
installedVersion: '1.0.0',
uniqueIdentifier: 'org/plugin-1',
})
expect(result.current.installedInfo?.['plugin-2']).toEqual({
installedId: 'installed-2',
installedVersion: '2.0.0',
uniqueIdentifier: 'org/plugin-2',
})
})

it('should return undefined installedInfo when disabled', () => {
const { result } = renderHook(() => useCheckInstalled({
pluginIds: ['plugin-1'],
enabled: false,
}))

expect(result.current.installedInfo).toBeUndefined()
})

it('should return undefined installedInfo with empty plugin IDs', () => {
const { result } = renderHook(() => useCheckInstalled({
pluginIds: [],
enabled: true,
}))

expect(result.current.installedInfo).toBeUndefined()
})

it('should return isLoading and error states', () => {
const { result } = renderHook(() => useCheckInstalled({
pluginIds: ['plugin-1'],
enabled: true,
}))

expect(result.current.isLoading).toBe(false)
expect(result.current.error).toBeNull()
})
})
@@ -0,0 +1,76 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import useHideLogic from '../use-hide-logic'

const mockFoldAnimInto = vi.fn()
const mockClearCountDown = vi.fn()
const mockCountDownFoldIntoAnim = vi.fn()

vi.mock('../use-fold-anim-into', () => ({
default: () => ({
modalClassName: 'test-modal-class',
foldIntoAnim: mockFoldAnimInto,
clearCountDown: mockClearCountDown,
countDownFoldIntoAnim: mockCountDownFoldIntoAnim,
}),
}))

describe('useHideLogic', () => {
const mockOnClose = vi.fn()

beforeEach(() => {
vi.clearAllMocks()
})

it('should return initial state with modalClassName', () => {
const { result } = renderHook(() => useHideLogic(mockOnClose))

expect(result.current.modalClassName).toBe('test-modal-class')
})

it('should call onClose directly when not installing', () => {
const { result } = renderHook(() => useHideLogic(mockOnClose))

act(() => {
result.current.foldAnimInto()
})

expect(mockOnClose).toHaveBeenCalled()
expect(mockFoldAnimInto).not.toHaveBeenCalled()
})

it('should call doFoldAnimInto when installing', () => {
const { result } = renderHook(() => useHideLogic(mockOnClose))

act(() => {
result.current.handleStartToInstall()
})

act(() => {
result.current.foldAnimInto()
})

expect(mockFoldAnimInto).toHaveBeenCalled()
expect(mockOnClose).not.toHaveBeenCalled()
})

it('should set installing and start countdown on handleStartToInstall', () => {
const { result } = renderHook(() => useHideLogic(mockOnClose))

act(() => {
result.current.handleStartToInstall()
})

expect(mockCountDownFoldIntoAnim).toHaveBeenCalled()
})

it('should clear countdown when setIsInstalling to false', () => {
const { result } = renderHook(() => useHideLogic(mockOnClose))

act(() => {
result.current.setIsInstalling(false)
})

expect(mockClearCountDown).toHaveBeenCalled()
})
})
(Some files were not shown because too many files have changed in this diff.)