Compare commits

...

37 Commits

Author SHA1 Message Date
CodingOnStar
cb8440925e Merge remote-tracking branch 'origin/main' into feat/tracking-1203 2025-12-04 18:25:51 +08:00
Wu Tianwei
2219b93d6b fix: modify usePluginTaskList initialization and dependencies in use-plugins.ts (#29130) 2025-12-04 17:19:31 +08:00
非法操作
e8c47ec8ac fix: incorrect last run result (#29128) 2025-12-04 16:23:22 +08:00
wangxiaolei
e904c65a9d perf: decrease heavy db operation (#29125) 2025-12-04 16:09:47 +08:00
kenwoodjw
f62926f0ca fix: bump pyarrow to 17.0.0, werkzeug to 3.1.4, urllib3 to 2.5.0 (#29089)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-12-04 15:39:31 +08:00
NFish
b033bb02fc chore: upgrade React to 19.2.1,fix cve-2025-55182 (#29121)
Co-authored-by: zhsama <torvalds@linux.do>
2025-12-04 14:44:52 +08:00
zyssyz123
031cba81b4 Fix/app list compatible (#29123) 2025-12-04 14:44:24 +08:00
yangzheli
693ab6ad82 fix(web): disable tooltip delay to avoid tooltip flickering (#29104) 2025-12-04 14:16:56 +08:00
NFish
541fd7daa2 chore: update Next.js dev dependencies to 15.5.7 (#29120) 2025-12-04 14:16:45 +08:00
Boris Polonsky
61d79a1502 feat: Unify environment variables for database connection and authentication (#29092) 2025-12-04 14:16:11 +08:00
Yunlu Wen
03357ff1ec fix: catch error in response converter (#29056)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-12-04 11:25:16 +08:00
dependabot[bot]
b4bed94cc5 chore(deps): bump next from 15.5.6 to 15.5.7 in /web (#29105)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-04 10:14:50 +08:00
wangxiaolei
e924dc7b30 chore: ignore redis lock not owned error (#29064)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-12-04 10:14:28 +08:00
longbingljw
4b969bdce3 fix:mysql does not support 'returning' (#29069) 2025-12-04 10:14:19 +08:00
非法操作
d07afb38a0 fix: trigger call workflow_as_tool error (#29058) 2025-12-04 10:13:18 +08:00
hj24
5bb715ee2f fix: remove chat conversation api dead arg message_count_gte (#29097) 2025-12-04 10:12:47 +08:00
非法操作
3e5f683e90 feat: dark theme icon support (#28858) 2025-12-04 09:29:00 +08:00
zhsama
31481581e8 refactor: simplify marketplace component structure by removing unused… (#29095) 2025-12-03 21:30:24 +08:00
yyh
2e0c2e8482 refactor/marketplace react query (#29028)
Co-authored-by: zhsama <torvalds@linux.do>
2025-12-03 18:30:20 +08:00
zhsama
0343374d52 feat: add ReactScan component for enhanced development scanning (#29086) 2025-12-03 18:19:12 +08:00
Joel
c1fe394c0e fix: check education verify api slow may cause page redirects when modal closes (#29078) 2025-12-03 17:11:57 +08:00
Joel
876f48df76 chore: remove useless mock files (#29068) 2025-12-03 15:34:11 +08:00
Coding On Star
fbb2d076f4 integrate Amplitude analytics into the application (#29049)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
Co-authored-by: Joel <iamjoel007@gmail.com>
2025-12-03 14:22:12 +08:00
CodingOnStar
f8f4bf0efb Update AmplitudeProvider to use IS_CLOUD_EDITION for conditional initialization 2025-12-03 14:02:39 +08:00
Joel
727720b59e Merge branch 'main' into feat/tracking-1203 2025-12-03 13:55:33 +08:00
非法操作
c7d2a13524 fix: improve chat message log feedback (#29045)
Co-authored-by: yyh <yuanyouhuilyz@gmail.com>
2025-12-03 13:42:40 +08:00
CodingOnStar
9a9935f018 Refactor user tracking and event logging across components
- Added resetUser function call on logout in avatar and account dropdown components to ensure user state is cleared.
- Removed redundant time logging from event tracking in various components to streamline data sent to Amplitude.
- Updated user properties reporting in AppContextProvider to consolidate user and workspace information into a single properties object.
2025-12-03 11:10:54 +08:00
CodingOnStar
72e3aad6e1 Refactor AmplitudeProvider to enable conditional initialization based on CE edition
- Added check for IS_CE_EDITION to prevent Amplitude initialization in the CE edition.
- Updated warning log to inform users when Amplitude is disabled in CE edition.
2025-12-03 10:45:27 +08:00
CodingOnStar
4037487064 Update Amplitude integration and dependencies
- Replaced @amplitude/unified with @amplitude/analytics-browser and added @amplitude/plugin-session-replay-browser for enhanced tracking capabilities.
- Modified AmplitudeProvider to include session replay functionality with a configurable sample rate.
- Updated user identification in AppContextProvider to use user email instead of ID for tracking.
- Bumped various dependencies in package.json and pnpm-lock.yaml for compatibility and improvements.
2025-12-03 10:42:12 +08:00
CodingOnStar
837b6cfc19 Add Amplitude analytics integration and tracking (#27890)
- Introduced AmplitudeProvider component for initializing Amplitude analytics.
- Integrated user tracking in AppContextProvider to report user and workspace information.
- Added event tracking for workflow log filter selections.
- Updated package.json and pnpm-lock.yaml to include @amplitude/unified dependency.
2025-12-03 10:39:15 +08:00
CodingOnStar
1416d71ece feat: add registration and create_app tracking 2025-12-03 10:38:38 +08:00
CodingOnStar
adf8ae79d5 Enhance Amplitude tracking by validating user ID and properties
- Updated middleware to include Amplitude domains.
- Improved setUserId function to handle empty user IDs and log warnings in development.
- Enhanced setUserProperties function to filter out invalid properties and log results in development.
- Refactored user tracking in AppContextProvider to streamline setting user and workspace properties.
2025-12-03 10:38:33 +08:00
CodingOnStar
d7d0fad436 Implement event tracking for app creation and user actions across various components 2025-12-03 10:38:26 +08:00
CodingOnStar
478aee1c07 feat: integrate Amplitude analytics into the application
- Added AmplitudeProvider component to initialize Amplitude analytics.
- Integrated Amplitude tracking in layout components and workflow log filter.
- Implemented user tracking in AppContextProvider to report user and workspace information to Amplitude.
- Added utility functions for tracking events and managing user properties.
2025-12-03 10:38:10 +08:00
kenwoodjw
9b9588f20d fix: CVE-2025-64718 (#29027)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-12-02 21:49:57 +08:00
wangxiaolei
d6bbf0f975 chore: enhance test (#29002)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-12-02 21:49:08 +08:00
wangxiaolei
f48522e923 feat: add x-trace-id to http responses and logs (#29015)
Introduce trace id to http responses and logs to facilitate debugging process.
2025-12-02 17:22:34 +08:00
93 changed files with 6612 additions and 5578 deletions

View File

@@ -1,6 +1,8 @@
import logging
import time
from opentelemetry.trace import get_current_span
from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from dify_app import DifyApp
@@ -26,8 +28,25 @@ def create_flask_app_with_configs() -> DifyApp:
# add an unique identifier to each request
RecyclableContextVar.increment_thread_recycles()
# add after request hook for injecting X-Trace-Id header from OpenTelemetry span context
@dify_app.after_request
def add_trace_id_header(response):
try:
span = get_current_span()
ctx = span.get_span_context() if span else None
if ctx and ctx.is_valid:
trace_id_hex = format(ctx.trace_id, "032x")
# Avoid duplicates if some middleware added it
if "X-Trace-Id" not in response.headers:
response.headers["X-Trace-Id"] = trace_id_hex
except Exception:
# Never break the response due to tracing header injection
logger.warning("Failed to add trace ID to response header", exc_info=True)
return response
# Capture the decorator's return value to avoid pyright reportUnusedFunction
_ = before_request
_ = add_trace_id_header
return dify_app
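
The hunk above attaches the current OpenTelemetry trace id to every response as an X-Trace-Id header. A minimal client-side sketch (hypothetical URL and API key; the requests package assumed) of how that header can be used to correlate a failed call with the server logs:

import requests

# Hypothetical service-api call; base URL and key are placeholders.
resp = requests.get(
    "https://dify.example.com/v1/messages",
    headers={"Authorization": "Bearer app-xxxx"},
    timeout=10,
)
trace_id = resp.headers.get("X-Trace-Id")
if resp.status_code >= 500 and trace_id:
    # The same 32-hex-char id appears in the server logs (see the LOG_FORMAT
    # change below), so it can be grepped directly when reporting the failure.
    print(f"request failed, server trace id: {trace_id}")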

View File

@@ -553,7 +553,10 @@ class LoggingConfig(BaseSettings):
LOG_FORMAT: str = Field(
description="Format string for log messages",
default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
default=(
"%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] "
"[%(filename)s:%(lineno)d] %(trace_id)s - %(message)s"
),
)
LOG_DATEFORMAT: str | None = Field(
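
The new default LOG_FORMAT references %(trace_id)s, so every record must carry that attribute; the RequestIdFilter change further down in this compare provides it. A self-contained sketch of the mechanism, with a simplified filter standing in for the real one:

import logging

class TraceIdFilter(logging.Filter):
    # Simplified stand-in for the project's filter: default trace_id to ""
    # so records emitted outside a request context still format cleanly.
    def filter(self, record):
        if not hasattr(record, "trace_id"):
            record.trace_id = ""
        return True

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    "%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] "
    "[%(filename)s:%(lineno)d] %(trace_id)s - %(message)s"
))
handler.addFilter(TraceIdFilter())
root = logging.getLogger()
root.setLevel(logging.INFO)
root.addHandler(handler)
logging.getLogger(__name__).info("hello")  # trace_id renders as an empty field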

View File

@@ -324,10 +324,13 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
try:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
continue
for app in app_pagination.items:
app.has_draft_trigger = str(app.id) in draft_trigger_app_ids

View File

@@ -49,7 +49,6 @@ class CompletionConversationQuery(BaseConversationQuery):
class ChatConversationQuery(BaseConversationQuery):
message_count_gte: int | None = Field(default=None, ge=1, description="Minimum message count")
sort_by: Literal["created_at", "-created_at", "updated_at", "-updated_at"] = Field(
default="-updated_at", description="Sort field and direction"
)
@@ -509,14 +508,6 @@ class ChatConversationApi(Resource):
.having(func.count(MessageAnnotation.id) == 0)
)
if args.message_count_gte and args.message_count_gte >= 1:
query = (
query.options(joinedload(Conversation.messages)) # type: ignore
.join(Message, Message.conversation_id == Conversation.id)
.group_by(Conversation.id)
.having(func.count(Message.id) >= args.message_count_gte)
)
if app_model.mode == AppMode.ADVANCED_CHAT:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER)

View File

@@ -316,18 +316,16 @@ def validate_and_get_api_token(scope: str | None = None):
ApiToken.type == scope,
)
.values(last_used_at=current_time)
.returning(ApiToken)
)
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
result = session.execute(update_stmt)
api_token = result.scalar_one_or_none()
api_token = session.scalar(stmt)
if hasattr(result, "rowcount") and result.rowcount > 0:
session.commit()
if not api_token:
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
api_token = session.scalar(stmt)
if not api_token:
raise Unauthorized("Access token is invalid")
else:
session.commit()
raise Unauthorized("Access token is invalid")
return api_token
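
This rewrite drops the UPDATE ... RETURNING clause (from "fix: mysql does not support 'returning'") in favor of a plain UPDATE followed by a SELECT. A minimal, self-contained sketch of the portable pattern; the ApiToken model here is a simplified stand-in, not the project's actual model:

from datetime import datetime

from sqlalchemy import DateTime, String, select, update
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ApiToken(Base):  # simplified stand-in for the project's model
    __tablename__ = "api_tokens"
    token: Mapped[str] = mapped_column(String(255), primary_key=True)
    type: Mapped[str] = mapped_column(String(32))
    last_used_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)


def touch_and_fetch_token(session: Session, auth_token: str, scope: str) -> ApiToken | None:
    # UPDATE without RETURNING, then a separate SELECT: works on MySQL as well as PostgreSQL.
    result = session.execute(
        update(ApiToken)
        .where(ApiToken.token == auth_token, ApiToken.type == scope)
        .values(last_used_at=datetime.utcnow())
    )
    api_token = session.scalar(
        select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
    )
    if api_token is None:
        return None
    if result.rowcount > 0:
        session.commit()
    return api_token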

View File

@@ -1,3 +1,4 @@
import logging
import time
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
@@ -55,6 +56,7 @@ from models import Account, EndUser
from services.variable_truncator import BaseTruncator, DummyVariableTruncator, VariableTruncator
NodeExecutionId = NewType("NodeExecutionId", str)
logger = logging.getLogger(__name__)
@dataclass(slots=True)
@@ -289,26 +291,30 @@ class WorkflowResponseConverter:
),
)
if event.node_type == NodeType.TOOL:
response.data.extras["icon"] = ToolManager.get_tool_icon(
tenant_id=self._application_generate_entity.app_config.tenant_id,
provider_type=ToolProviderType(event.provider_type),
provider_id=event.provider_id,
)
elif event.node_type == NodeType.DATASOURCE:
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
response.data.extras["icon"] = provider_entity.declaration.identity.generate_datasource_icon_url(
self._application_generate_entity.app_config.tenant_id
)
elif event.node_type == NodeType.TRIGGER_PLUGIN:
response.data.extras["icon"] = TriggerManager.get_trigger_plugin_icon(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
try:
if event.node_type == NodeType.TOOL:
response.data.extras["icon"] = ToolManager.get_tool_icon(
tenant_id=self._application_generate_entity.app_config.tenant_id,
provider_type=ToolProviderType(event.provider_type),
provider_id=event.provider_id,
)
elif event.node_type == NodeType.DATASOURCE:
manager = PluginDatasourceManager()
provider_entity = manager.fetch_datasource_provider(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
response.data.extras["icon"] = provider_entity.declaration.identity.generate_datasource_icon_url(
self._application_generate_entity.app_config.tenant_id
)
elif event.node_type == NodeType.TRIGGER_PLUGIN:
response.data.extras["icon"] = TriggerManager.get_trigger_plugin_icon(
self._application_generate_entity.app_config.tenant_id,
event.provider_id,
)
except Exception:
# metadata fetch may fail, for example, the plugin daemon is down or plugin is uninstalled.
logger.warning("failed to fetch icon for %s", event.provider_id)
return response

View File

@@ -156,78 +156,82 @@ class MessageBasedAppGenerator(BaseAppGenerator):
query = application_generate_entity.query or "New conversation"
conversation_name = (query[:20] + "…") if len(query) > 20 else query
if not conversation:
conversation = Conversation(
with db.session.begin():
if not conversation:
conversation = Conversation(
app_id=app_config.app_id,
app_model_config_id=app_model_config_id,
model_provider=model_provider,
model_id=model_id,
override_model_configs=json.dumps(override_model_configs) if override_model_configs else None,
mode=app_config.app_mode.value,
name=conversation_name,
inputs=application_generate_entity.inputs,
introduction=introduction,
system_instruction="",
system_instruction_tokens=0,
status="normal",
invoke_from=application_generate_entity.invoke_from.value,
from_source=from_source,
from_end_user_id=end_user_id,
from_account_id=account_id,
)
db.session.add(conversation)
db.session.flush()
db.session.refresh(conversation)
else:
conversation.updated_at = naive_utc_now()
message = Message(
app_id=app_config.app_id,
app_model_config_id=app_model_config_id,
model_provider=model_provider,
model_id=model_id,
override_model_configs=json.dumps(override_model_configs) if override_model_configs else None,
mode=app_config.app_mode.value,
name=conversation_name,
conversation_id=conversation.id,
inputs=application_generate_entity.inputs,
introduction=introduction,
system_instruction="",
system_instruction_tokens=0,
status="normal",
query=application_generate_entity.query,
message="",
message_tokens=0,
message_unit_price=0,
message_price_unit=0,
answer="",
answer_tokens=0,
answer_unit_price=0,
answer_price_unit=0,
parent_message_id=getattr(application_generate_entity, "parent_message_id", None),
provider_response_latency=0,
total_price=0,
currency="USD",
invoke_from=application_generate_entity.invoke_from.value,
from_source=from_source,
from_end_user_id=end_user_id,
from_account_id=account_id,
app_mode=app_config.app_mode,
)
db.session.add(conversation)
db.session.add(message)
db.session.flush()
db.session.refresh(message)
message_files = []
for file in application_generate_entity.files:
message_file = MessageFile(
message_id=message.id,
type=file.type,
transfer_method=file.transfer_method,
belongs_to="user",
url=file.remote_url,
upload_file_id=file.related_id,
created_by_role=(CreatorUserRole.ACCOUNT if account_id else CreatorUserRole.END_USER),
created_by=account_id or end_user_id or "",
)
message_files.append(message_file)
if message_files:
db.session.add_all(message_files)
db.session.commit()
db.session.refresh(conversation)
else:
conversation.updated_at = naive_utc_now()
db.session.commit()
message = Message(
app_id=app_config.app_id,
model_provider=model_provider,
model_id=model_id,
override_model_configs=json.dumps(override_model_configs) if override_model_configs else None,
conversation_id=conversation.id,
inputs=application_generate_entity.inputs,
query=application_generate_entity.query,
message="",
message_tokens=0,
message_unit_price=0,
message_price_unit=0,
answer="",
answer_tokens=0,
answer_unit_price=0,
answer_price_unit=0,
parent_message_id=getattr(application_generate_entity, "parent_message_id", None),
provider_response_latency=0,
total_price=0,
currency="USD",
invoke_from=application_generate_entity.invoke_from.value,
from_source=from_source,
from_end_user_id=end_user_id,
from_account_id=account_id,
app_mode=app_config.app_mode,
)
db.session.add(message)
db.session.commit()
db.session.refresh(message)
for file in application_generate_entity.files:
message_file = MessageFile(
message_id=message.id,
type=file.type,
transfer_method=file.transfer_method,
belongs_to="user",
url=file.remote_url,
upload_file_id=file.related_id,
created_by_role=(CreatorUserRole.ACCOUNT if account_id else CreatorUserRole.END_USER),
created_by=account_id or end_user_id or "",
)
db.session.add(message_file)
db.session.commit()
return conversation, message
def _get_conversation_introduction(self, application_generate_entity: AppGenerateEntity) -> str:
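
The restructuring above (part of "perf: decrease heavy db operation") replaces several commit/refresh round trips with a single transaction opened via with db.session.begin():. A minimal sketch of the pattern, assuming a fresh SQLAlchemy session and hypothetical ORM objects:

from sqlalchemy.orm import Session

def create_conversation_with_message(session: Session, conversation, message, files):
    # One transaction instead of commit-per-object; flush() assigns primary keys
    # (so message.conversation_id can be set) without ending the transaction.
    with session.begin():
        session.add(conversation)
        session.flush()
        message.conversation_id = conversation.id
        session.add(message)
        session.flush()
        for f in files:
            f.message_id = message.id
        session.add_all(files)
    # The context manager commits on success and rolls back if anything raises.
    return conversation, message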

View File

@@ -29,6 +29,7 @@ class SimpleModelProviderEntity(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: list[ModelType]
@@ -42,6 +43,7 @@ class SimpleModelProviderEntity(BaseModel):
provider=provider_entity.provider,
label=provider_entity.label,
icon_small=provider_entity.icon_small,
icon_small_dark=provider_entity.icon_small_dark,
icon_large=provider_entity.icon_large,
supported_model_types=provider_entity.supported_model_types,
)

View File

@@ -99,6 +99,7 @@ class SimpleProviderEntity(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: Sequence[ModelType]
models: list[AIModelEntity] = []
@@ -124,7 +125,6 @@ class ProviderEntity(BaseModel):
icon_small: I18nObject | None = None
icon_large: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large_dark: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
supported_model_types: Sequence[ModelType]

View File

@@ -300,6 +300,14 @@ class ModelProviderFactory:
file_name = provider_schema.icon_small.zh_Hans
else:
file_name = provider_schema.icon_small.en_US
elif icon_type.lower() == "icon_small_dark":
if not provider_schema.icon_small_dark:
raise ValueError(f"Provider {provider} does not have small dark icon.")
if lang.lower() == "zh_hans":
file_name = provider_schema.icon_small_dark.zh_Hans
else:
file_name = provider_schema.icon_small_dark.en_US
else:
if not provider_schema.icon_large:
raise ValueError(f"Provider {provider} does not have large icon.")

View File

@@ -203,7 +203,7 @@ class WorkflowTool(Tool):
Resolve user object in both HTTP and worker contexts.
In HTTP context: dereference the current_user LocalProxy (can return Account or EndUser).
In worker context: load Account from database by user_id (only returns Account, never EndUser).
In worker context: load Account(knowledge pipeline) or EndUser(trigger) from database by user_id.
Returns:
Account | EndUser | None: The resolved user object, or None if resolution fails.
@@ -224,24 +224,28 @@ class WorkflowTool(Tool):
logger.warning("Failed to resolve user from request context: %s", e)
return None
def _resolve_user_from_database(self, user_id: str) -> Account | None:
def _resolve_user_from_database(self, user_id: str) -> Account | EndUser | None:
"""
Resolve user from database (worker/Celery context).
"""
user_stmt = select(Account).where(Account.id == user_id)
user = db.session.scalar(user_stmt)
if not user:
return None
tenant_stmt = select(Tenant).where(Tenant.id == self.runtime.tenant_id)
tenant = db.session.scalar(tenant_stmt)
if not tenant:
return None
user.current_tenant = tenant
user_stmt = select(Account).where(Account.id == user_id)
user = db.session.scalar(user_stmt)
if user:
user.current_tenant = tenant
return user
return user
end_user_stmt = select(EndUser).where(EndUser.id == user_id, EndUser.tenant_id == tenant.id)
end_user = db.session.scalar(end_user_stmt)
if end_user:
return end_user
return None
def _get_workflow(self, app_id: str, version: str) -> Workflow:
"""

View File

@@ -6,6 +6,7 @@ BASE_CORS_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE, HEAD
SERVICE_API_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, "Authorization")
AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF_TOKEN)
FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN)
EXPOSED_HEADERS: tuple[str, ...] = ("X-Version", "X-Env", "X-Trace-Id")
def init_app(app: DifyApp):
@@ -25,6 +26,7 @@ def init_app(app: DifyApp):
service_api_bp,
allow_headers=list(SERVICE_API_HEADERS),
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=list(EXPOSED_HEADERS),
)
app.register_blueprint(service_api_bp)
@@ -34,7 +36,7 @@ def init_app(app: DifyApp):
supports_credentials=True,
allow_headers=list(AUTHENTICATED_HEADERS),
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
expose_headers=list(EXPOSED_HEADERS),
)
app.register_blueprint(web_bp)
@@ -44,7 +46,7 @@ def init_app(app: DifyApp):
supports_credentials=True,
allow_headers=list(AUTHENTICATED_HEADERS),
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
expose_headers=list(EXPOSED_HEADERS),
)
app.register_blueprint(console_app_bp)
@@ -52,6 +54,7 @@ def init_app(app: DifyApp):
files_bp,
allow_headers=list(FILES_HEADERS),
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=list(EXPOSED_HEADERS),
)
app.register_blueprint(files_bp)
@@ -63,5 +66,6 @@ def init_app(app: DifyApp):
trigger_bp,
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH", "HEAD"],
expose_headers=list(EXPOSED_HEADERS),
)
app.register_blueprint(trigger_bp)
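
Exposing X-Trace-Id here matters because, under CORS, browser code can only read response headers listed in Access-Control-Expose-Headers. A minimal sketch, assuming the blueprints are registered through flask-cors (as the allow_headers/expose_headers keywords above suggest):

from flask import Flask
from flask_cors import CORS

app = Flask(__name__)
# expose_headers populates the Access-Control-Expose-Headers response header,
# which is what lets front-end JavaScript read X-Trace-Id from a cross-origin response.
CORS(app, expose_headers=["X-Version", "X-Env", "X-Trace-Id"])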

View File

@@ -7,6 +7,7 @@ from logging.handlers import RotatingFileHandler
import flask
from configs import dify_config
from core.helper.trace_id_helper import get_trace_id_from_otel_context
from dify_app import DifyApp
@@ -76,7 +77,9 @@ class RequestIdFilter(logging.Filter):
# the logging format. Note that we're checking if we're in a request
# context, as we may want to log things before Flask is fully loaded.
def filter(self, record):
trace_id = get_trace_id_from_otel_context() or ""
record.req_id = get_request_id() if flask.has_request_context() else ""
record.trace_id = trace_id
return True
@@ -84,6 +87,8 @@ class RequestIdFormatter(logging.Formatter):
def format(self, record):
if not hasattr(record, "req_id"):
record.req_id = ""
if not hasattr(record, "trace_id"):
record.trace_id = ""
return super().format(record)
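
get_trace_id_from_otel_context is a project helper not shown in this compare; a plausible minimal implementation, following the same span-context pattern as the after_request hook earlier in this diff (an illustrative sketch, not the project's actual code):

from opentelemetry.trace import get_current_span

def get_trace_id_from_otel_context() -> str | None:
    # Format the active span's 128-bit trace id as 32 lowercase hex characters,
    # or return None when there is no valid span (e.g. outside a traced request).
    span = get_current_span()
    ctx = span.get_span_context() if span else None
    if ctx and ctx.is_valid:
        return format(ctx.trace_id, "032x")
    return None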

View File

@@ -1,12 +1,14 @@
import json
import logging
import time
import flask
import werkzeug.http
from flask import Flask
from flask import Flask, g
from flask.signals import request_finished, request_started
from configs import dify_config
from core.helper.trace_id_helper import get_trace_id_from_otel_context
logger = logging.getLogger(__name__)
@@ -20,6 +22,9 @@ def _is_content_type_json(content_type: str) -> bool:
def _log_request_started(_sender, **_extra):
"""Log the start of a request."""
# Record start time for access logging
g.__request_started_ts = time.perf_counter()
if not logger.isEnabledFor(logging.DEBUG):
return
@@ -42,8 +47,39 @@ def _log_request_started(_sender, **_extra):
def _log_request_finished(_sender, response, **_extra):
"""Log the end of a request."""
if not logger.isEnabledFor(logging.DEBUG) or response is None:
"""Log the end of a request.
Safe to call with or without an active Flask request context.
"""
if response is None:
return
# Always emit a compact access line at INFO with trace_id so it can be grepped
has_ctx = flask.has_request_context()
start_ts = getattr(g, "__request_started_ts", None) if has_ctx else None
duration_ms = None
if start_ts is not None:
duration_ms = round((time.perf_counter() - start_ts) * 1000, 3)
# Request attributes are available only when a request context exists
if has_ctx:
req_method = flask.request.method
req_path = flask.request.path
else:
req_method = "-"
req_path = "-"
trace_id = get_trace_id_from_otel_context() or response.headers.get("X-Trace-Id") or ""
logger.info(
"%s %s %s %s %s",
req_method,
req_path,
getattr(response, "status_code", "-"),
duration_ms if duration_ms is not None else "-",
trace_id,
)
if not logger.isEnabledFor(logging.DEBUG):
return
if not _is_content_type_json(response.content_type):
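
With the hooks above, every request now emits a compact INFO access line of the form METHOD PATH STATUS DURATION_MS TRACE_ID (the unit tests later in this compare assert exactly these fields), for example:

GET /foo 200 123.456 trace-xyz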

View File

@@ -10,6 +10,7 @@ from collections.abc import Sequence
from typing import Any, Literal
import sqlalchemy as sa
from redis.exceptions import LockNotOwnedError
from sqlalchemy import exists, func, select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
@@ -1593,173 +1594,176 @@ class DocumentService:
db.session.add(dataset_process_rule)
db.session.flush()
lock_name = f"add_document_lock_dataset_id_{dataset.id}"
with redis_client.lock(lock_name, timeout=600):
assert dataset_process_rule
position = DocumentService.get_documents_position(dataset.id)
document_ids = []
duplicate_document_ids = []
if knowledge_config.data_source.info_list.data_source_type == "upload_file":
if not knowledge_config.data_source.info_list.file_info_list:
raise ValueError("File source info is required")
upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
for file_id in upload_file_list:
file = (
db.session.query(UploadFile)
.where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
.first()
)
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info: dict[str, str | bool] = {
"upload_file_id": file_id,
}
# check duplicate
if knowledge_config.duplicate:
document = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="upload_file",
enabled=True,
name=file_name,
)
try:
with redis_client.lock(lock_name, timeout=600):
assert dataset_process_rule
position = DocumentService.get_documents_position(dataset.id)
document_ids = []
duplicate_document_ids = []
if knowledge_config.data_source.info_list.data_source_type == "upload_file":
if not knowledge_config.data_source.info_list.file_info_list:
raise ValueError("File source info is required")
upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
for file_id in upload_file_list:
file = (
db.session.query(UploadFile)
.where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
.first()
)
if document:
document.dataset_process_rule_id = dataset_process_rule.id
document.updated_at = naive_utc_now()
document.created_from = created_from
document.doc_form = knowledge_config.doc_form
document.doc_language = knowledge_config.doc_language
document.data_source_info = json.dumps(data_source_info)
document.batch = batch
document.indexing_status = "waiting"
db.session.add(document)
documents.append(document)
duplicate_document_ids.append(document.id)
continue
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
file_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore
if not notion_info_list:
raise ValueError("No notion info list found.")
exist_page_ids = []
exist_document = {}
documents = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="notion_import",
enabled=True,
)
.all()
)
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info["notion_page_id"])
exist_document[data_source_info["notion_page_id"]] = document.id
for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id
for page in notion_info.pages:
if page.page_id not in exist_page_ids:
data_source_info = {
"credential_id": notion_info.credential_id,
"notion_workspace_id": workspace_id,
"notion_page_id": page.page_id,
"notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore
"type": page.type,
}
# Truncate page name to 255 characters to prevent DB field length errors
truncated_page_name = page.page_name[:255] if page.page_name else "nopagename"
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
truncated_page_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
else:
exist_document.pop(page.page_id)
# delete not selected documents
if len(exist_document) > 0:
clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
website_info = knowledge_config.data_source.info_list.website_info_list
if not website_info:
raise ValueError("No website info list found.")
urls = website_info.urls
for url in urls:
data_source_info = {
"url": url,
"provider": website_info.provider,
"job_id": website_info.job_id,
"only_main_content": website_info.only_main_content,
"mode": "crawl",
}
if len(url) > 255:
document_name = url[:200] + "..."
else:
document_name = url
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
document_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
db.session.commit()
# trigger async task
if document_ids:
DocumentIndexingTaskProxy(dataset.tenant_id, dataset.id, document_ids).delay()
if duplicate_document_ids:
duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids)
# raise error if file not found
if not file:
raise FileNotExistsError()
file_name = file.name
data_source_info: dict[str, str | bool] = {
"upload_file_id": file_id,
}
# check duplicate
if knowledge_config.duplicate:
document = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="upload_file",
enabled=True,
name=file_name,
)
.first()
)
if document:
document.dataset_process_rule_id = dataset_process_rule.id
document.updated_at = naive_utc_now()
document.created_from = created_from
document.doc_form = knowledge_config.doc_form
document.doc_language = knowledge_config.doc_language
document.data_source_info = json.dumps(data_source_info)
document.batch = batch
document.indexing_status = "waiting"
db.session.add(document)
documents.append(document)
duplicate_document_ids.append(document.id)
continue
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
file_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
elif knowledge_config.data_source.info_list.data_source_type == "notion_import":
notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore
if not notion_info_list:
raise ValueError("No notion info list found.")
exist_page_ids = []
exist_document = {}
documents = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type="notion_import",
enabled=True,
)
.all()
)
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
exist_page_ids.append(data_source_info["notion_page_id"])
exist_document[data_source_info["notion_page_id"]] = document.id
for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id
for page in notion_info.pages:
if page.page_id not in exist_page_ids:
data_source_info = {
"credential_id": notion_info.credential_id,
"notion_workspace_id": workspace_id,
"notion_page_id": page.page_id,
"notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore
"type": page.type,
}
# Truncate page name to 255 characters to prevent DB field length errors
truncated_page_name = page.page_name[:255] if page.page_name else "nopagename"
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
truncated_page_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
else:
exist_document.pop(page.page_id)
# delete not selected documents
if len(exist_document) > 0:
clean_notion_document_task.delay(list(exist_document.values()), dataset.id)
elif knowledge_config.data_source.info_list.data_source_type == "website_crawl":
website_info = knowledge_config.data_source.info_list.website_info_list
if not website_info:
raise ValueError("No website info list found.")
urls = website_info.urls
for url in urls:
data_source_info = {
"url": url,
"provider": website_info.provider,
"job_id": website_info.job_id,
"only_main_content": website_info.only_main_content,
"mode": "crawl",
}
if len(url) > 255:
document_name = url[:200] + "..."
else:
document_name = url
document = DocumentService.build_document(
dataset,
dataset_process_rule.id,
knowledge_config.data_source.info_list.data_source_type,
knowledge_config.doc_form,
knowledge_config.doc_language,
data_source_info,
created_from,
position,
account,
document_name,
batch,
)
db.session.add(document)
db.session.flush()
document_ids.append(document.id)
documents.append(document)
position += 1
db.session.commit()
# trigger async task
if document_ids:
DocumentIndexingTaskProxy(dataset.tenant_id, dataset.id, document_ids).delay()
if duplicate_document_ids:
duplicate_document_indexing_task.delay(dataset.id, duplicate_document_ids)
except LockNotOwnedError:
pass
return documents, batch
@@ -2699,50 +2703,55 @@ class SegmentService:
# calc embedding use tokens
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
lock_name = f"add_segment_lock_document_id_{document.id}"
with redis_client.lock(lock_name, timeout=600):
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.word_count += len(args["answer"])
segment_document.answer = args["answer"]
try:
with redis_client.lock(lock_name, timeout=600):
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.word_count += len(args["answer"])
segment_document.answer = args["answer"]
db.session.add(segment_document)
# update document word count
assert document.word_count is not None
document.word_count += segment_document.word_count
db.session.add(document)
db.session.commit()
# save vector index
try:
VectorService.create_segments_vector([args["keywords"]], [segment_document], dataset, document.doc_form)
except Exception as e:
logger.exception("create segment index failed")
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.add(segment_document)
# update document word count
assert document.word_count is not None
document.word_count += segment_document.word_count
db.session.add(document)
db.session.commit()
segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
return segment
# save vector index
try:
VectorService.create_segments_vector(
[args["keywords"]], [segment_document], dataset, document.doc_form
)
except Exception as e:
logger.exception("create segment index failed")
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
return segment
except LockNotOwnedError:
pass
@classmethod
def multi_create_segment(cls, segments: list, document: Document, dataset: Dataset):
@@ -2751,84 +2760,89 @@ class SegmentService:
lock_name = f"multi_add_segment_lock_document_id_{document.id}"
increment_word_count = 0
with redis_client.lock(lock_name, timeout=600):
embedding_model = None
if dataset.indexing_technique == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
try:
with redis_client.lock(lock_name, timeout=600):
embedding_model = None
if dataset.indexing_technique == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=current_user.current_tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
)
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
)
pre_segment_data_list = []
segment_data_list = []
keywords_list = []
position = max_position + 1 if max_position else 1
for segment_item in segments:
content = segment_item["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
tokens = 0
if dataset.indexing_technique == "high_quality" and embedding_model:
# calc embedding use tokens
pre_segment_data_list = []
segment_data_list = []
keywords_list = []
position = max_position + 1 if max_position else 1
for segment_item in segments:
content = segment_item["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
tokens = 0
if dataset.indexing_technique == "high_quality" and embedding_model:
# calc embedding use tokens
if document.doc_form == "qa_model":
tokens = embedding_model.get_text_embedding_num_tokens(
texts=[content + segment_item["answer"]]
)[0]
else:
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=position,
content=content,
word_count=len(content),
tokens=tokens,
keywords=segment_item.get("keywords", []),
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
tokens = embedding_model.get_text_embedding_num_tokens(
texts=[content + segment_item["answer"]]
)[0]
segment_document.answer = segment_item["answer"]
segment_document.word_count += len(segment_item["answer"])
increment_word_count += segment_document.word_count
db.session.add(segment_document)
segment_data_list.append(segment_document)
position += 1
pre_segment_data_list.append(segment_document)
if "keywords" in segment_item:
keywords_list.append(segment_item["keywords"])
else:
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0]
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
dataset_id=document.dataset_id,
document_id=document.id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=position,
content=content,
word_count=len(content),
tokens=tokens,
keywords=segment_item.get("keywords", []),
status="completed",
indexing_at=naive_utc_now(),
completed_at=naive_utc_now(),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
segment_document.answer = segment_item["answer"]
segment_document.word_count += len(segment_item["answer"])
increment_word_count += segment_document.word_count
db.session.add(segment_document)
segment_data_list.append(segment_document)
position += 1
pre_segment_data_list.append(segment_document)
if "keywords" in segment_item:
keywords_list.append(segment_item["keywords"])
else:
keywords_list.append(None)
# update document word count
assert document.word_count is not None
document.word_count += increment_word_count
db.session.add(document)
try:
# save vector index
VectorService.create_segments_vector(keywords_list, pre_segment_data_list, dataset, document.doc_form)
except Exception as e:
logger.exception("create segment index failed")
for segment_document in segment_data_list:
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
return segment_data_list
keywords_list.append(None)
# update document word count
assert document.word_count is not None
document.word_count += increment_word_count
db.session.add(document)
try:
# save vector index
VectorService.create_segments_vector(
keywords_list, pre_segment_data_list, dataset, document.doc_form
)
except Exception as e:
logger.exception("create segment index failed")
for segment_document in segment_data_list:
segment_document.enabled = False
segment_document.disabled_at = naive_utc_now()
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
return segment_data_list
except LockNotOwnedError:
pass
@classmethod
def update_segment(cls, args: SegmentUpdateArgs, segment: DocumentSegment, document: Document, dataset: Dataset):
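
The hunks above (from "chore: ignore redis lock not owned error") wrap each redis_client.lock(...) block so that LockNotOwnedError, raised when the lock expires before it is released, no longer bubbles up to the caller. A standalone sketch of the pattern with redis-py (placeholder connection and work):

import redis
from redis.exceptions import LockNotOwnedError

client = redis.Redis(host="localhost", port=6379)  # placeholder connection

def add_documents_guarded(dataset_id: str) -> None:
    lock_name = f"add_document_lock_dataset_id_{dataset_id}"
    try:
        # If the guarded work outlives the 600s timeout, releasing the lock on
        # exit raises LockNotOwnedError even though the work itself completed.
        with client.lock(lock_name, timeout=600):
            ...  # create documents, flush, commit
    except LockNotOwnedError:
        # Treat "lock expired before release" as non-fatal, matching the diff above.
        pass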

View File

@@ -69,6 +69,7 @@ class ProviderResponse(BaseModel):
label: I18nObject
description: I18nObject | None = None
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
@@ -92,6 +93,11 @@ class ProviderResponse(BaseModel):
self.icon_small = I18nObject(
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US",
zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans",
)
if self.icon_large is not None:
self.icon_large = I18nObject(
@@ -109,6 +115,7 @@ class ProviderWithModelsResponse(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
status: CustomConfigurationStatus
models: list[ProviderModelWithStatusEntity]
@@ -123,6 +130,11 @@ class ProviderWithModelsResponse(BaseModel):
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
@@ -147,6 +159,11 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
en_US=f"{url_prefix}/icon_small/en_US", zh_Hans=f"{url_prefix}/icon_small/zh_Hans"
)
if self.icon_small_dark is not None:
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"

View File

@@ -79,6 +79,7 @@ class ModelProviderService:
label=provider_configuration.provider.label,
description=provider_configuration.provider.description,
icon_small=provider_configuration.provider.icon_small,
icon_small_dark=provider_configuration.provider.icon_small_dark,
icon_large=provider_configuration.provider.icon_large,
background=provider_configuration.provider.background,
help=provider_configuration.provider.help,
@@ -402,6 +403,7 @@ class ModelProviderService:
provider=provider,
label=first_model.provider.label,
icon_small=first_model.provider.icon_small,
icon_small_dark=first_model.provider.icon_small_dark,
icon_large=first_model.provider.icon_large,
status=CustomConfigurationStatus.ACTIVE,
models=[

View File

@@ -233,7 +233,7 @@ workflow:
- value_selector:
- iteration_node
- output
value_type: array[array[number]]
value_type: array[number]
variable: output
selected: false
title: End

View File

@@ -227,6 +227,7 @@ class TestModelProviderService:
mock_provider_entity.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"}
mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity.icon_small_dark = None
mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity.background = "#FF6B6B"
mock_provider_entity.help = None
@@ -300,6 +301,7 @@ class TestModelProviderService:
mock_provider_entity_llm.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"}
mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_llm.icon_small_dark = None
mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_llm.background = "#FF6B6B"
mock_provider_entity_llm.help = None
@@ -313,6 +315,7 @@ class TestModelProviderService:
mock_provider_entity_embedding.label = {"en_US": "Cohere", "zh_Hans": "Cohere"}
mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"}
mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_embedding.icon_small_dark = None
mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_embedding.background = "#4ECDC4"
mock_provider_entity_embedding.help = None
@@ -1023,6 +1026,7 @@ class TestModelProviderService:
provider="openai",
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-3.5-turbo",
@@ -1040,6 +1044,7 @@ class TestModelProviderService:
provider="openai",
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-4",

View File

@@ -1,3 +1,5 @@
from types import SimpleNamespace
import pytest
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -214,3 +216,76 @@ def test_create_variable_message():
assert message.message.variable_name == var_name
assert message.message.variable_value == var_value
assert message.message.stream is False
def test_resolve_user_from_database_falls_back_to_end_user(monkeypatch: pytest.MonkeyPatch):
"""Ensure worker context can resolve EndUser when Account is missing."""
class StubSession:
def __init__(self, results: list):
self.results = results
def scalar(self, _stmt):
return self.results.pop(0)
tenant = SimpleNamespace(id="tenant_id")
end_user = SimpleNamespace(id="end_user_id", tenant_id="tenant_id")
db_stub = SimpleNamespace(session=StubSession([tenant, None, end_user]))
monkeypatch.setattr("core.tools.workflow_as_tool.tool.db", db_stub)
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="tenant_id", invoke_from=InvokeFrom.SERVICE_API)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
resolved_user = tool._resolve_user_from_database(user_id=end_user.id)
assert resolved_user is end_user
def test_resolve_user_from_database_returns_none_when_no_tenant(monkeypatch: pytest.MonkeyPatch):
"""Return None if tenant cannot be found in worker context."""
class StubSession:
def __init__(self, results: list):
self.results = results
def scalar(self, _stmt):
return self.results.pop(0)
db_stub = SimpleNamespace(session=StubSession([None]))
monkeypatch.setattr("core.tools.workflow_as_tool.tool.db", db_stub)
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="missing_tenant", invoke_from=InvokeFrom.SERVICE_API)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
resolved_user = tool._resolve_user_from_database(user_id="any")
assert resolved_user is None

View File

@@ -7,9 +7,31 @@ This module tests the iteration node's ability to:
"""
from .test_database_utils import skip_if_database_unavailable
from .test_mock_config import MockConfigBuilder, NodeMockConfig
from .test_table_runner import TableTestRunner, WorkflowTestCase
def _create_iteration_mock_config():
"""Helper to create a mock config for iteration tests."""
def code_inner_handler(node):
pool = node.graph_runtime_state.variable_pool
item_seg = pool.get(["iteration_node", "item"])
if item_seg is not None:
item = item_seg.to_object()
return {"result": [item, item * 2]}
# This fallback is likely unreachable, but if it is,
# it doesn't simulate iteration with different values as the comment suggests.
return {"result": [1, 2]}
return (
MockConfigBuilder()
.with_node_output("code_node", {"result": [1, 2, 3]})
.with_node_config(NodeMockConfig(node_id="code_inner_node", custom_handler=code_inner_handler))
.build()
)
@skip_if_database_unavailable()
def test_iteration_with_flatten_output_enabled():
"""
@@ -27,7 +49,8 @@ def test_iteration_with_flatten_output_enabled():
inputs={},
expected_outputs={"output": [1, 2, 2, 4, 3, 6]},
description="Iteration with flatten_output=True flattens nested arrays",
use_auto_mock=False, # Run code nodes directly
use_auto_mock=True, # Use auto-mock to avoid sandbox service
mock_config=_create_iteration_mock_config(),
)
result = runner.run_test_case(test_case)
@@ -56,7 +79,8 @@ def test_iteration_with_flatten_output_disabled():
inputs={},
expected_outputs={"output": [[1, 2], [2, 4], [3, 6]]},
description="Iteration with flatten_output=False preserves nested structure",
use_auto_mock=False, # Run code nodes directly
use_auto_mock=True, # Use auto-mock to avoid sandbox service
mock_config=_create_iteration_mock_config(),
)
result = runner.run_test_case(test_case)
@@ -81,14 +105,16 @@ def test_iteration_flatten_output_comparison():
inputs={},
expected_outputs={"output": [1, 2, 2, 4, 3, 6]},
description="flatten_output=True: Flattened output",
use_auto_mock=False, # Run code nodes directly
use_auto_mock=True, # Use auto-mock to avoid sandbox service
mock_config=_create_iteration_mock_config(),
),
WorkflowTestCase(
fixture_path="iteration_flatten_output_disabled_workflow",
inputs={},
expected_outputs={"output": [[1, 2], [2, 4], [3, 6]]},
description="flatten_output=False: Nested output",
use_auto_mock=False, # Run code nodes directly
use_auto_mock=True, # Use auto-mock to avoid sandbox service
mock_config=_create_iteration_mock_config(),
),
]

View File

@@ -263,3 +263,62 @@ class TestResponseUnmodified:
)
assert response.text == _RESPONSE_NEEDLE
assert response.status_code == 200
class TestRequestFinishedInfoAccessLine:
def test_info_access_log_includes_method_path_status_duration_trace_id(self, monkeypatch, caplog):
"""Ensure INFO access line contains expected fields with computed duration and trace id."""
app = _get_test_app()
# Push a real request context so flask.request and g are available
with app.test_request_context("/foo", method="GET"):
# Seed start timestamp via the extension's own start hook and control perf_counter deterministically
seq = iter([100.0, 100.123456])
monkeypatch.setattr(ext_request_logging.time, "perf_counter", lambda: next(seq))
# Provide a deterministic trace id
monkeypatch.setattr(
ext_request_logging,
"get_trace_id_from_otel_context",
lambda: "trace-xyz",
)
# Simulate request_started to record start timestamp on g
ext_request_logging._log_request_started(app)
# Capture logs from the real logger at INFO level only (skip DEBUG branch)
caplog.set_level(logging.INFO, logger=ext_request_logging.__name__)
response = Response(json.dumps({"ok": True}), mimetype="application/json", status=200)
_log_request_finished(app, response)
# Verify a single INFO record with the five fields in order
info_records = [rec for rec in caplog.records if rec.levelno == logging.INFO]
assert len(info_records) == 1
msg = info_records[0].getMessage()
# Expected format: METHOD PATH STATUS DURATION_MS TRACE_ID
assert "GET" in msg
assert "/foo" in msg
assert "200" in msg
assert "123.456" in msg # rounded to 3 decimals
assert "trace-xyz" in msg
def test_info_access_log_uses_dash_without_start_timestamp(self, monkeypatch, caplog):
app = _get_test_app()
with app.test_request_context("/bar", method="POST"):
# No g.__request_started_ts set -> duration should be '-'
monkeypatch.setattr(
ext_request_logging,
"get_trace_id_from_otel_context",
lambda: "tid-no-start",
)
caplog.set_level(logging.INFO, logger=ext_request_logging.__name__)
response = Response("OK", mimetype="text/plain", status=204)
_log_request_finished(app, response)
info_records = [rec for rec in caplog.records if rec.levelno == logging.INFO]
assert len(info_records) == 1
msg = info_records[0].getMessage()
assert "POST" in msg
assert "/bar" in msg
assert "204" in msg
# Duration placeholder
# The fields are space separated; ensure a standalone '-' appears
assert " - " in msg or msg.endswith(" -")
assert "tid-no-start" in msg

View File

@@ -0,0 +1,177 @@
import types
from unittest.mock import Mock, create_autospec
import pytest
from redis.exceptions import LockNotOwnedError
from models.account import Account
from models.dataset import Dataset, Document
from services.dataset_service import DocumentService, SegmentService
class FakeLock:
"""Lock that always fails on enter with LockNotOwnedError."""
def __enter__(self):
raise LockNotOwnedError("simulated")
def __exit__(self, exc_type, exc, tb):
# Normal contextmanager signature; return False so exceptions propagate
return False
@pytest.fixture
def fake_current_user(monkeypatch):
user = create_autospec(Account, instance=True)
user.id = "user-1"
user.current_tenant_id = "tenant-1"
monkeypatch.setattr("services.dataset_service.current_user", user)
return user
@pytest.fixture
def fake_features(monkeypatch):
"""Features.billing.enabled == False to skip quota logic."""
features = types.SimpleNamespace(
billing=types.SimpleNamespace(enabled=False, subscription=types.SimpleNamespace(plan="ENTERPRISE")),
documents_upload_quota=types.SimpleNamespace(limit=10_000, size=0),
)
monkeypatch.setattr(
"services.dataset_service.FeatureService.get_features",
lambda tenant_id: features,
)
return features
@pytest.fixture
def fake_lock(monkeypatch):
"""Patch redis_client.lock to always raise LockNotOwnedError on enter."""
def _fake_lock(name, timeout=None, *args, **kwargs):
return FakeLock()
# DatasetService imports redis_client directly from extensions.ext_redis
monkeypatch.setattr("services.dataset_service.redis_client.lock", _fake_lock)
# ---------------------------------------------------------------------------
# 1. Knowledge Pipeline document creation (save_document_with_dataset_id)
# ---------------------------------------------------------------------------
def test_save_document_with_dataset_id_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_features,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.data_source_type = "upload_file"
dataset.indexing_technique = "high_quality" # so we skip re-initialization branch
# Minimal knowledge_config stub that satisfies pre-lock code
info_list = types.SimpleNamespace(data_source_type="upload_file")
data_source = types.SimpleNamespace(info_list=info_list)
knowledge_config = types.SimpleNamespace(
doc_form="qa_model",
original_document_id=None, # go into "new document" branch
data_source=data_source,
indexing_technique="high_quality",
embedding_model=None,
embedding_model_provider=None,
retrieval_model=None,
process_rule=None,
duplicate=False,
doc_language="en",
)
account = fake_current_user
# Avoid touching real doc_form logic
monkeypatch.setattr("services.dataset_service.DatasetService.check_doc_form", lambda *a, **k: None)
# Avoid real DB interactions
monkeypatch.setattr("services.dataset_service.db", Mock())
# Act: this would hit the redis lock, whose __enter__ raises LockNotOwnedError.
# Our implementation should catch it and still return (documents, batch).
documents, batch = DocumentService.save_document_with_dataset_id(
dataset=dataset,
knowledge_config=knowledge_config,
account=account,
)
# Assert
# We mainly care that:
# - No exception is raised
# - The function returns a sensible tuple
assert isinstance(documents, list)
assert isinstance(batch, str)
# ---------------------------------------------------------------------------
# 2. Single-segment creation (add_segment)
# ---------------------------------------------------------------------------
def test_add_segment_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.indexing_technique = "economy" # skip embedding/token calculation branch
document = create_autospec(Document, instance=True)
document.id = "doc-1"
document.dataset_id = dataset.id
document.word_count = 0
document.doc_form = "qa_model"
# Minimal args required by add_segment
args = {
"content": "question text",
"answer": "answer text",
"keywords": ["k1", "k2"],
}
# Avoid real DB operations
db_mock = Mock()
db_mock.session = Mock()
monkeypatch.setattr("services.dataset_service.db", db_mock)
monkeypatch.setattr("services.dataset_service.VectorService", Mock())
# Act
result = SegmentService.create_segment(args=args, document=document, dataset=dataset)
# Assert
# When the lock raises LockNotOwnedError, create_segment should swallow the error and return None.
assert result is None
# ---------------------------------------------------------------------------
# 3. Multi-segment creation (multi_create_segment)
# ---------------------------------------------------------------------------
def test_multi_create_segment_ignores_lock_not_owned(
monkeypatch,
fake_current_user,
fake_lock,
):
# Arrange
dataset = create_autospec(Dataset, instance=True)
dataset.id = "ds-1"
dataset.tenant_id = fake_current_user.current_tenant_id
dataset.indexing_technique = "economy" # again, skip high_quality path
document = create_autospec(Document, instance=True)
document.id = "doc-1"
document.dataset_id = dataset.id
document.word_count = 0
document.doc_form = "qa_model"

api/uv.lock (generated) — 4623 changes; diff suppressed because it is too large

View File

@@ -233,7 +233,7 @@ NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
# Database type, supported values are `postgresql` and `mysql`
DB_TYPE=postgresql
# For MySQL, only `root` user is supported for now
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
@@ -1076,24 +1076,10 @@ MAX_TREE_DEPTH=50
# ------------------------------
# Environment Variables for database Service
# ------------------------------
# The name of the default postgres user.
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
POSTGRES_DB=${DB_DATABASE}
# Postgres data directory
PGDATA=/var/lib/postgresql/data/pgdata
# MySQL Default Configuration
# The name of the default mysql user.
MYSQL_USERNAME=${DB_USERNAME}
# The password for the default mysql user.
MYSQL_PASSWORD=${DB_PASSWORD}
# The name of the default mysql database.
MYSQL_DATABASE=${DB_DATABASE}
# MySQL data directory
MYSQL_HOST_VOLUME=./volumes/mysql/data
# ------------------------------

View File

@@ -139,9 +139,9 @@ services:
- postgresql
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_USER: ${DB_USERNAME:-postgres}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@@ -161,7 +161,7 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${DB_DATABASE:-dify}",
]
@@ -176,8 +176,8 @@ services:
- mysql
restart: always
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@@ -193,7 +193,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s

View File

@@ -9,8 +9,8 @@ services:
env_file:
- ./middleware.env
environment:
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@@ -32,9 +32,9 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${POSTGRES_DB:-dify}",
"${DB_DATABASE:-dify}",
]
interval: 1s
timeout: 3s
@@ -48,8 +48,8 @@ services:
env_file:
- ./middleware.env
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@@ -67,7 +67,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s

View File

@@ -455,13 +455,7 @@ x-shared-env: &shared-api-worker-env
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}
POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
MYSQL_USERNAME: ${MYSQL_USERNAME:-${DB_USERNAME}}
MYSQL_PASSWORD: ${MYSQL_PASSWORD:-${DB_PASSWORD}}
MYSQL_DATABASE: ${MYSQL_DATABASE:-${DB_DATABASE}}
MYSQL_HOST_VOLUME: ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}
SANDBOX_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
SANDBOX_GIN_MODE: ${SANDBOX_GIN_MODE:-release}
@@ -774,9 +768,9 @@ services:
- postgresql
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
POSTGRES_DB: ${POSTGRES_DB:-dify}
POSTGRES_USER: ${DB_USERNAME:-postgres}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
@@ -796,7 +790,7 @@ services:
"-h",
"db_postgres",
"-U",
"${PGUSER:-postgres}",
"${DB_USERNAME:-postgres}",
"-d",
"${DB_DATABASE:-dify}",
]
@@ -811,8 +805,8 @@ services:
- mysql
restart: always
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${MYSQL_DATABASE:-dify}
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
@@ -828,7 +822,7 @@ services:
"ping",
"-u",
"root",
"-p${MYSQL_PASSWORD:-difyai123456}",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s

View File

@@ -4,6 +4,7 @@
# Database Configuration
# Database type, supported values are `postgresql` and `mysql`
DB_TYPE=postgresql
# For MySQL, only `root` user is supported for now
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
@@ -11,11 +12,6 @@ DB_PORT=5432
DB_DATABASE=dify
# PostgreSQL Configuration
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
POSTGRES_DB=${DB_DATABASE}
# postgres data directory
PGDATA=/var/lib/postgresql/data/pgdata
PGDATA_HOST_VOLUME=./volumes/db/data
@@ -65,11 +61,6 @@ POSTGRES_STATEMENT_TIMEOUT=0
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
# MySQL Configuration
MYSQL_USERNAME=${DB_USERNAME}
# MySQL password
MYSQL_PASSWORD=${DB_PASSWORD}
# MySQL database name
MYSQL_DATABASE=${DB_DATABASE}
# MySQL data directory host volume
MYSQL_HOST_VOLUME=./volumes/mysql/data

View File

@@ -3,6 +3,7 @@ import type { ReactNode } from 'react'
import SwrInitializer from '@/app/components/swr-initializer'
import { AppContextProvider } from '@/context/app-context'
import GA, { GaType } from '@/app/components/base/ga'
import AmplitudeProvider from '@/app/components/base/amplitude'
import HeaderWrapper from '@/app/components/header/header-wrapper'
import Header from '@/app/components/header'
import { EventEmitterContextProvider } from '@/context/event-emitter'
@@ -18,6 +19,7 @@ const Layout = ({ children }: { children: ReactNode }) => {
return (
<>
<GA gaType={GaType.admin} />
<AmplitudeProvider />
<SwrInitializer>
<AppContextProvider>
<EventEmitterContextProvider>

View File

@@ -8,7 +8,7 @@ const PluginList = async () => {
return (
<PluginPage
plugins={<PluginsPanel />}
marketplace={<Marketplace locale={locale} pluginTypeSwitchClassName='top-[60px]' searchBoxAutoAnimate={false} showSearchParams={false} />}
marketplace={<Marketplace locale={locale} pluginTypeSwitchClassName='top-[60px]' showSearchParams={false} />}
/>
)
}

View File

@@ -12,6 +12,7 @@ import { useProviderContext } from '@/context/provider-context'
import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general'
import PremiumBadge from '@/app/components/base/premium-badge'
import { useLogout } from '@/service/use-common'
import { resetUser } from '@/app/components/base/amplitude/utils'
export type IAppSelector = {
isMobile: boolean
@@ -28,6 +29,7 @@ export default function AppSelector() {
await logout()
localStorage.removeItem('setup_status')
resetUser()
// Tokens are now stored in cookies and cleared by backend
router.push('/signin')

View File

@@ -4,6 +4,7 @@ import Header from './header'
import SwrInitor from '@/app/components/swr-initializer'
import { AppContextProvider } from '@/context/app-context'
import GA, { GaType } from '@/app/components/base/ga'
import AmplitudeProvider from '@/app/components/base/amplitude'
import HeaderWrapper from '@/app/components/header/header-wrapper'
import { EventEmitterContextProvider } from '@/context/event-emitter'
import { ProviderContextProvider } from '@/context/provider-context'
@@ -13,6 +14,7 @@ const Layout = ({ children }: { children: ReactNode }) => {
return (
<>
<GA gaType={GaType.admin} />
<AmplitudeProvider />
<SwrInitor>
<AppContextProvider>
<EventEmitterContextProvider>

View File

@@ -251,6 +251,7 @@ const AgentTools: FC = () => {
{!item.notAuthor && (
<Tooltip
popupContent={t('tools.setBuiltInTools.infoAndSetting')}
needsDelay={false}
>
<div className='cursor-pointer rounded-md p-1 hover:bg-black/5' onClick={() => {
setCurrentTool(item)

View File

@@ -28,6 +28,7 @@ import Input from '@/app/components/base/input'
import { AppModeEnum } from '@/types/app'
import { DSLImportMode } from '@/models/app'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { trackEvent } from '@/app/components/base/amplitude'
type AppsProps = {
onSuccess?: () => void
@@ -141,6 +142,15 @@ const Apps = ({
icon_background,
description,
})
// Track app creation from template
trackEvent('create_app_with_template', {
app_mode: mode,
template_id: currApp?.app.id,
template_name: currApp?.app.name,
description,
})
setIsShowCreateModal(false)
Toast.notify({
type: 'success',

View File

@@ -30,6 +30,7 @@ import { getRedirection } from '@/utils/app-redirection'
import FullScreenModal from '@/app/components/base/fullscreen-modal'
import useTheme from '@/hooks/use-theme'
import { useDocLink } from '@/context/i18n'
import { trackEvent } from '@/app/components/base/amplitude'
type CreateAppProps = {
onSuccess: () => void
@@ -82,6 +83,13 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate, defaultAppMode }:
icon_background: appIcon.type === 'emoji' ? appIcon.background : undefined,
mode: appMode,
})
// Track app creation success
trackEvent('create_app', {
app_mode: appMode,
description,
})
notify({ type: 'success', message: t('app.newApp.appCreated') })
onSuccess()
onClose()

View File

@@ -28,6 +28,7 @@ import { getRedirection } from '@/utils/app-redirection'
import cn from '@/utils/classnames'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { noop } from 'lodash-es'
import { trackEvent } from '@/app/components/base/amplitude'
type CreateFromDSLModalProps = {
show: boolean
@@ -112,6 +113,13 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
return
const { id, status, app_id, app_mode, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
// Track app creation from DSL import
trackEvent('create_app_with_dsl', {
app_mode,
creation_method: currentTab === CreateFromDSLModalTab.FROM_FILE ? 'dsl_file' : 'dsl_url',
has_warnings: status === DSLImportStatus.COMPLETED_WITH_WARNINGS,
})
if (onSuccess)
onSuccess()
if (onClose)

View File

@@ -8,6 +8,7 @@ import quarterOfYear from 'dayjs/plugin/quarterOfYear'
import type { QueryParam } from './index'
import Chip from '@/app/components/base/chip'
import Input from '@/app/components/base/input'
import { trackEvent } from '@/app/components/base/amplitude/utils'
dayjs.extend(quarterOfYear)
const today = dayjs()
@@ -37,6 +38,9 @@ const Filter: FC<IFilterProps> = ({ queryParams, setQueryParams }: IFilterProps)
value={queryParams.status || 'all'}
onSelect={(item) => {
setQueryParams({ ...queryParams, status: item.value as string })
trackEvent('workflow_log_filter_status_selected', {
workflow_log_filter_status: item.value as string,
})
}}
onClear={() => setQueryParams({ ...queryParams, status: 'all' })}
items={[{ value: 'all', name: 'All' },

View File

@@ -0,0 +1,46 @@
'use client'
import type { FC } from 'react'
import React, { useEffect } from 'react'
import * as amplitude from '@amplitude/analytics-browser'
import { sessionReplayPlugin } from '@amplitude/plugin-session-replay-browser'
import { IS_CLOUD_EDITION } from '@/config'
export type IAmplitudeProps = {
apiKey?: string
sessionReplaySampleRate?: number
}
const AmplitudeProvider: FC<IAmplitudeProps> = ({
apiKey = process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY ?? '',
sessionReplaySampleRate = 1,
}) => {
useEffect(() => {
// Only enable in the SaaS (cloud) edition
if (!IS_CLOUD_EDITION)
return
// Initialize Amplitude
amplitude.init(apiKey, {
defaultTracking: {
sessions: true,
pageViews: true,
formInteractions: true,
fileDownloads: true,
},
// Limit SDK logging to warnings to keep the console quiet
logLevel: amplitude.Types.LogLevel.Warn,
})
// Add Session Replay plugin
const sessionReplay = sessionReplayPlugin({
sampleRate: sessionReplaySampleRate,
})
amplitude.add(sessionReplay)
}, [])
// This is a client component that renders nothing
return null
}
export default React.memo(AmplitudeProvider)

View File

@@ -0,0 +1,2 @@
export { default } from './AmplitudeProvider'
export { resetUser, setUserId, setUserProperties, trackEvent } from './utils'

View File

@@ -0,0 +1,37 @@
import * as amplitude from '@amplitude/analytics-browser'
/**
* Track custom event
* @param eventName Event name
* @param eventProperties Event properties (optional)
*/
export const trackEvent = (eventName: string, eventProperties?: Record<string, any>) => {
amplitude.track(eventName, eventProperties)
}
/**
* Set user ID
* @param userId User ID
*/
export const setUserId = (userId: string) => {
amplitude.setUserId(userId)
}
/**
* Set user properties
* @param properties User properties
*/
export const setUserProperties = (properties: Record<string, any>) => {
const identifyEvent = new amplitude.Identify()
Object.entries(properties).forEach(([key, value]) => {
identifyEvent.set(key, value)
})
amplitude.identify(identifyEvent)
}
/**
* Reset user (e.g., when user logs out)
*/
export const resetUser = () => {
amplitude.reset()
}
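Taken together, the helpers above are the tracking surface the rest of this change set calls into. A minimal consumer-side sketch, assuming a sign-in handler that already has the account id and email at hand (handleLogin, handleLogout, the account shape and the 'sign_in' event name below are hypothetical, not part of this diff):
// Hypothetical wiring for the Amplitude helpers exported above
import { resetUser, setUserId, setUserProperties, trackEvent } from '@/app/components/base/amplitude'
const handleLogin = (account: { id: string; email: string }) => {
  // Identify the user once after a successful sign-in
  setUserId(account.id)
  setUserProperties({ email: account.email })
  trackEvent('sign_in')
}
const handleLogout = () => {
  // Mirrors the avatar/account-dropdown change in this diff: clear identity on logout
  resetUser()
}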

View File

@@ -11,7 +11,10 @@ import {
RiThumbDownLine,
RiThumbUpLine,
} from '@remixicon/react'
import type { ChatItem } from '../../types'
import type {
ChatItem,
Feedback,
} from '../../types'
import { useChatContext } from '../context'
import copy from 'copy-to-clipboard'
import Toast from '@/app/components/base/toast'
@@ -22,6 +25,7 @@ import ActionButton, { ActionButtonState } from '@/app/components/base/action-bu
import NewAudioButton from '@/app/components/base/new-audio-button'
import Modal from '@/app/components/base/modal/modal'
import Textarea from '@/app/components/base/textarea'
import Tooltip from '@/app/components/base/tooltip'
import cn from '@/utils/classnames'
type OperationProps = {
@@ -66,8 +70,9 @@ const Operation: FC<OperationProps> = ({
adminFeedback,
agent_thoughts,
} = item
const [localFeedback, setLocalFeedback] = useState(config?.supportAnnotation ? adminFeedback : feedback)
const [userLocalFeedback, setUserLocalFeedback] = useState(feedback)
const [adminLocalFeedback, setAdminLocalFeedback] = useState(adminFeedback)
const [feedbackTarget, setFeedbackTarget] = useState<'user' | 'admin'>('user')
// Separate feedback types for display
const userFeedback = feedback
@@ -79,24 +84,68 @@ const Operation: FC<OperationProps> = ({
return messageContent
}, [agent_thoughts, messageContent])
const handleFeedback = async (rating: 'like' | 'dislike' | null, content?: string) => {
const displayUserFeedback = userLocalFeedback ?? userFeedback
const hasUserFeedback = !!displayUserFeedback?.rating
const hasAdminFeedback = !!adminLocalFeedback?.rating
const shouldShowUserFeedbackBar = !isOpeningStatement && config?.supportFeedback && !!onFeedback && !config?.supportAnnotation
const shouldShowAdminFeedbackBar = !isOpeningStatement && config?.supportFeedback && !!onFeedback && !!config?.supportAnnotation
const userFeedbackLabel = t('appLog.table.header.userRate') || 'User feedback'
const adminFeedbackLabel = t('appLog.table.header.adminRate') || 'Admin feedback'
const feedbackTooltipClassName = 'max-w-[260px]'
const buildFeedbackTooltip = (feedbackData?: Feedback | null, label = userFeedbackLabel) => {
if (!feedbackData?.rating)
return label
const ratingLabel = feedbackData.rating === 'like'
? (t('appLog.detail.operation.like') || 'like')
: (t('appLog.detail.operation.dislike') || 'dislike')
const feedbackText = feedbackData.content?.trim()
if (feedbackText)
return `${label}: ${ratingLabel} - ${feedbackText}`
return `${label}: ${ratingLabel}`
}
const handleFeedback = async (rating: 'like' | 'dislike' | null, content?: string, target: 'user' | 'admin' = 'user') => {
if (!config?.supportFeedback || !onFeedback)
return
await onFeedback?.(id, { rating, content })
setLocalFeedback({ rating })
// Update admin feedback state separately if annotation is supported
if (config?.supportAnnotation)
setAdminLocalFeedback(rating ? { rating } : undefined)
const nextFeedback = rating === null ? { rating: null } : { rating, content }
if (target === 'admin')
setAdminLocalFeedback(nextFeedback)
else
setUserLocalFeedback(nextFeedback)
}
const handleThumbsDown = () => {
const handleLikeClick = (target: 'user' | 'admin') => {
const currentRating = target === 'admin' ? adminLocalFeedback?.rating : displayUserFeedback?.rating
if (currentRating === 'like') {
handleFeedback(null, undefined, target)
return
}
handleFeedback('like', undefined, target)
}
const handleDislikeClick = (target: 'user' | 'admin') => {
const currentRating = target === 'admin' ? adminLocalFeedback?.rating : displayUserFeedback?.rating
if (currentRating === 'dislike') {
handleFeedback(null, undefined, target)
return
}
setFeedbackTarget(target)
setIsShowFeedbackModal(true)
}
const handleFeedbackSubmit = async () => {
await handleFeedback('dislike', feedbackContent)
await handleFeedback('dislike', feedbackContent, feedbackTarget)
setFeedbackContent('')
setIsShowFeedbackModal(false)
}
@@ -116,12 +165,13 @@ const Operation: FC<OperationProps> = ({
width += 26
if (!isOpeningStatement && config?.supportAnnotation && config?.annotation_reply?.enabled)
width += 26
if (config?.supportFeedback && !localFeedback?.rating && onFeedback && !isOpeningStatement)
width += 60 + 8
if (config?.supportFeedback && localFeedback?.rating && onFeedback && !isOpeningStatement)
width += 28 + 8
if (shouldShowUserFeedbackBar)
width += hasUserFeedback ? 28 + 8 : 60 + 8
if (shouldShowAdminFeedbackBar)
width += (hasAdminFeedback ? 28 : 60) + 8 + (hasUserFeedback ? 28 : 0)
return width
}, [isOpeningStatement, showPromptLog, config?.text_to_speech?.enabled, config?.supportAnnotation, config?.annotation_reply?.enabled, config?.supportFeedback, localFeedback?.rating, onFeedback])
}, [config?.annotation_reply?.enabled, config?.supportAnnotation, config?.text_to_speech?.enabled, hasAdminFeedback, hasUserFeedback, isOpeningStatement, shouldShowAdminFeedbackBar, shouldShowUserFeedbackBar, showPromptLog])
const positionRight = useMemo(() => operationWidth < maxSize, [operationWidth, maxSize])
@@ -136,6 +186,110 @@ const Operation: FC<OperationProps> = ({
)}
style={(!hasWorkflowProcess && positionRight) ? { left: contentWidth + 8 } : {}}
>
{shouldShowUserFeedbackBar && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
hasUserFeedback ? 'flex' : 'hidden group-hover:flex',
)}>
{hasUserFeedback ? (
<Tooltip
popupContent={buildFeedbackTooltip(displayUserFeedback, userFeedbackLabel)}
popupClassName={feedbackTooltipClassName}
>
<ActionButton
state={displayUserFeedback?.rating === 'like' ? ActionButtonState.Active : ActionButtonState.Destructive}
onClick={() => handleFeedback(null, undefined, 'user')}
>
{displayUserFeedback?.rating === 'like'
? <RiThumbUpLine className='h-4 w-4' />
: <RiThumbDownLine className='h-4 w-4' />}
</ActionButton>
</Tooltip>
) : (
<>
<ActionButton
state={displayUserFeedback?.rating === 'like' ? ActionButtonState.Active : ActionButtonState.Default}
onClick={() => handleLikeClick('user')}
>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
<ActionButton
state={displayUserFeedback?.rating === 'dislike' ? ActionButtonState.Destructive : ActionButtonState.Default}
onClick={() => handleDislikeClick('user')}
>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
</>
)}
</div>
)}
{shouldShowAdminFeedbackBar && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
(hasAdminFeedback || hasUserFeedback) ? 'flex' : 'hidden group-hover:flex',
)}>
{/* User Feedback Display */}
{displayUserFeedback?.rating && (
<Tooltip
popupContent={buildFeedbackTooltip(displayUserFeedback, userFeedbackLabel)}
popupClassName={feedbackTooltipClassName}
>
{displayUserFeedback.rating === 'like' ? (
<ActionButton state={ActionButtonState.Active}>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
) : (
<ActionButton state={ActionButtonState.Destructive}>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
)}
</Tooltip>
)}
{/* Admin Feedback Controls */}
{displayUserFeedback?.rating && <div className='mx-1 h-3 w-[0.5px] bg-components-actionbar-border' />}
{hasAdminFeedback ? (
<Tooltip
popupContent={buildFeedbackTooltip(adminLocalFeedback, adminFeedbackLabel)}
popupClassName={feedbackTooltipClassName}
>
<ActionButton
state={adminLocalFeedback?.rating === 'like' ? ActionButtonState.Active : ActionButtonState.Destructive}
onClick={() => handleFeedback(null, undefined, 'admin')}
>
{adminLocalFeedback?.rating === 'like'
? <RiThumbUpLine className='h-4 w-4' />
: <RiThumbDownLine className='h-4 w-4' />}
</ActionButton>
</Tooltip>
) : (
<>
<Tooltip
popupContent={buildFeedbackTooltip(adminLocalFeedback, adminFeedbackLabel)}
popupClassName={feedbackTooltipClassName}
>
<ActionButton
state={adminLocalFeedback?.rating === 'like' ? ActionButtonState.Active : ActionButtonState.Default}
onClick={() => handleLikeClick('admin')}
>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
</Tooltip>
<Tooltip
popupContent={buildFeedbackTooltip(adminLocalFeedback, adminFeedbackLabel)}
popupClassName={feedbackTooltipClassName}
>
<ActionButton
state={adminLocalFeedback?.rating === 'dislike' ? ActionButtonState.Destructive : ActionButtonState.Default}
onClick={() => handleDislikeClick('admin')}
>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
</Tooltip>
</>
)}
</div>
)}
{showPromptLog && !isOpeningStatement && (
<div className='hidden group-hover:block'>
<Log logItem={item} />
@@ -174,69 +328,6 @@ const Operation: FC<OperationProps> = ({
)}
</div>
)}
{!isOpeningStatement && config?.supportFeedback && !localFeedback?.rating && onFeedback && (
<div className='ml-1 hidden items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm group-hover:flex'>
{!localFeedback?.rating && (
<>
<ActionButton onClick={() => handleFeedback('like')}>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
<ActionButton onClick={handleThumbsDown}>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
</>
)}
</div>
)}
{!isOpeningStatement && config?.supportFeedback && onFeedback && (
<div className='ml-1 flex items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm'>
{/* User Feedback Display */}
{userFeedback?.rating && (
<div className='flex items-center'>
<span className='mr-1 text-xs text-text-tertiary'>User</span>
{userFeedback.rating === 'like' ? (
<ActionButton state={ActionButtonState.Active} title={userFeedback.content ? `User liked this response: ${userFeedback.content}` : 'User liked this response'}>
<RiThumbUpLine className='h-3 w-3' />
</ActionButton>
) : (
<ActionButton state={ActionButtonState.Destructive} title={userFeedback.content ? `User disliked this response: ${userFeedback.content}` : 'User disliked this response'}>
<RiThumbDownLine className='h-3 w-3' />
</ActionButton>
)}
</div>
)}
{/* Admin Feedback Controls */}
{config?.supportAnnotation && (
<div className='flex items-center'>
{userFeedback?.rating && <div className='mx-1 h-3 w-[0.5px] bg-components-actionbar-border' />}
{!adminLocalFeedback?.rating ? (
<>
<ActionButton onClick={() => handleFeedback('like')}>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
<ActionButton onClick={handleThumbsDown}>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
</>
) : (
<>
{adminLocalFeedback.rating === 'like' ? (
<ActionButton state={ActionButtonState.Active} onClick={() => handleFeedback(null)}>
<RiThumbUpLine className='h-4 w-4' />
</ActionButton>
) : (
<ActionButton state={ActionButtonState.Destructive} onClick={() => handleFeedback(null)}>
<RiThumbDownLine className='h-4 w-4' />
</ActionButton>
)}
</>
)}
</div>
)}
</div>
)}
</div>
<EditReplyModal
isShow={isShowReplyModal}

View File

@@ -1,8 +1,8 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import React, { useEffect } from 'react'
import { useTranslation } from 'react-i18next'
import { useRouter } from 'next/navigation'
import { usePathname, useRouter } from 'next/navigation'
import {
RiBook2Line,
RiFileEditLine,
@@ -25,6 +25,8 @@ import { EDUCATION_VERIFYING_LOCALSTORAGE_ITEM } from '@/app/education-apply/con
import { useEducationVerify } from '@/service/use-education'
import { useModalContextSelector } from '@/context/modal-context'
import { Enterprise, Professional, Sandbox, Team } from './assets'
import { Loading } from '../../base/icons/src/public/thought'
import { useUnmountedRef } from 'ahooks'
type Props = {
loc: string
@@ -35,6 +37,7 @@ const PlanComp: FC<Props> = ({
}) => {
const { t } = useTranslation()
const router = useRouter()
const path = usePathname()
const { userProfile } = useAppContext()
const { plan, enableEducationPlan, allowRefreshEducationVerify, isEducationAccount } = useProviderContext()
const isAboutToExpire = allowRefreshEducationVerify
@@ -61,17 +64,24 @@ const PlanComp: FC<Props> = ({
})()
const [showModal, setShowModal] = React.useState(false)
const { mutateAsync } = useEducationVerify()
const { mutateAsync, isPending } = useEducationVerify()
const setShowAccountSettingModal = useModalContextSelector(s => s.setShowAccountSettingModal)
const unmountedRef = useUnmountedRef()
const handleVerify = () => {
if (isPending) return
mutateAsync().then((res) => {
localStorage.removeItem(EDUCATION_VERIFYING_LOCALSTORAGE_ITEM)
if (unmountedRef.current) return
router.push(`/education-apply?token=${res.token}`)
setShowAccountSettingModal(null)
}).catch(() => {
setShowModal(true)
})
}
useEffect(() => {
// The open account-setting modal would block the redirect, so close it once we land on /education-apply
if (path.startsWith('/education-apply'))
setShowAccountSettingModal(null)
}, [path, setShowAccountSettingModal])
return (
<div className='relative rounded-2xl border-[0.5px] border-effects-highlight-lightmode-off bg-background-section-burn'>
<div className='p-6 pb-2'>
@@ -96,9 +106,10 @@ const PlanComp: FC<Props> = ({
</div>
<div className='flex shrink-0 items-center gap-1'>
{enableEducationPlan && (!isEducationAccount || isAboutToExpire) && (
<Button variant='ghost' onClick={handleVerify}>
<Button variant='ghost' onClick={handleVerify} disabled={isPending} >
<RiGraduationCapLine className='mr-1 h-4 w-4' />
{t('education.toVerified')}
{isPending && <Loading className='ml-1 animate-spin-slow' />}
</Button>
)}
{(plan.type as any) !== SelfHostedPlan.enterprise && (

View File

@@ -1,24 +0,0 @@
import type { CrawlResultItem } from '@/models/datasets'
const result: CrawlResultItem[] = [
{
title: 'Start the frontend Docker container separately',
content: 'Markdown 1',
description: 'Description 1',
source_url: 'https://example.com/1',
},
{
title: 'Advanced Tool Integration',
content: 'Markdown 2',
description: 'Description 2',
source_url: 'https://example.com/2',
},
{
title: 'Local Source Code Start | English | Dify',
content: 'Markdown 3',
description: 'Description 3',
source_url: 'https://example.com/3',
},
]
export default result

View File

@@ -34,6 +34,7 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import { useLogout } from '@/service/use-common'
import { ACCOUNT_SETTING_TAB } from '@/app/components/header/account-setting/constants'
import { resetUser } from '@/app/components/base/amplitude/utils'
export default function AppSelector() {
const itemClassName = `
@@ -53,7 +54,7 @@ export default function AppSelector() {
const { mutateAsync: logout } = useLogout()
const handleLogout = async () => {
await logout()
resetUser()
localStorage.removeItem('setup_status')
// Tokens are now stored in cookies and cleared by backend

View File

@@ -1,39 +1,28 @@
import {
useCallback,
useEffect,
useMemo,
useState,
} from 'react'
import {
useMarketplacePlugins,
useMarketplacePluginsByCollectionId,
} from '@/app/components/plugins/marketplace/hooks'
import type { Plugin } from '@/app/components/plugins/types'
import { PluginCategoryEnum } from '@/app/components/plugins/types'
import { getMarketplacePluginsByCollectionId } from '@/app/components/plugins/marketplace/utils'
export const useMarketplaceAllPlugins = (providers: any[], searchText: string) => {
const exclude = useMemo(() => {
return providers.map(provider => provider.plugin_id)
}, [providers])
const [collectionPlugins, setCollectionPlugins] = useState<Plugin[]>([])
const {
plugins: collectionPlugins = [],
isLoading: isCollectionLoading,
} = useMarketplacePluginsByCollectionId('__datasource-settings-pinned-datasources')
const {
plugins,
queryPlugins,
queryPluginsWithDebounced,
isLoading,
isLoading: isPluginsLoading,
} = useMarketplacePlugins()
const getCollectionPlugins = useCallback(async () => {
const collectionPlugins = await getMarketplacePluginsByCollectionId('__datasource-settings-pinned-datasources')
setCollectionPlugins(collectionPlugins)
}, [])
useEffect(() => {
getCollectionPlugins()
}, [getCollectionPlugins])
useEffect(() => {
if (searchText) {
queryPluginsWithDebounced({
@@ -75,6 +64,6 @@ export const useMarketplaceAllPlugins = (providers: any[], searchText: string) =
return {
plugins: allPlugins,
isLoading,
isLoading: isCollectionLoading || isPluginsLoading,
}
}

View File

@@ -217,6 +217,7 @@ export type ModelProvider = {
url: TypeWithI18N
}
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
icon_large: TypeWithI18N
background?: string
supported_model_types: ModelTypeEnum[]
@@ -255,6 +256,7 @@ export type Model = {
provider: string
icon_large: TypeWithI18N
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
label: TypeWithI18N
models: ModelItem[]
status: ModelStatusEnum

View File

@@ -33,10 +33,9 @@ import {
import { useProviderContext } from '@/context/provider-context'
import {
useMarketplacePlugins,
useMarketplacePluginsByCollectionId,
} from '@/app/components/plugins/marketplace/hooks'
import type { Plugin } from '@/app/components/plugins/types'
import { PluginCategoryEnum } from '@/app/components/plugins/types'
import { getMarketplacePluginsByCollectionId } from '@/app/components/plugins/marketplace/utils'
import { useModalContextSelector } from '@/context/modal-context'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import { UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST } from './provider-added-card'
@@ -255,25 +254,17 @@ export const useMarketplaceAllPlugins = (providers: ModelProvider[], searchText:
const exclude = useMemo(() => {
return providers.map(provider => provider.provider.replace(/(.+)\/([^/]+)$/, '$1'))
}, [providers])
const [collectionPlugins, setCollectionPlugins] = useState<Plugin[]>([])
const {
plugins: collectionPlugins = [],
isLoading: isCollectionLoading,
} = useMarketplacePluginsByCollectionId('__model-settings-pinned-models')
const {
plugins,
queryPlugins,
queryPluginsWithDebounced,
isLoading,
isLoading: isPluginsLoading,
} = useMarketplacePlugins()
const getCollectionPlugins = useCallback(async () => {
const collectionPlugins = await getMarketplacePluginsByCollectionId('__model-settings-pinned-models')
setCollectionPlugins(collectionPlugins)
}, [])
useEffect(() => {
getCollectionPlugins()
}, [getCollectionPlugins])
useEffect(() => {
if (searchText) {
queryPluginsWithDebounced({
@@ -315,7 +306,7 @@ export const useMarketplaceAllPlugins = (providers: ModelProvider[], searchText:
return {
plugins: allPlugins,
isLoading,
isLoading: isCollectionLoading || isPluginsLoading,
}
}

View File

@@ -6,8 +6,10 @@ import type {
import { useLanguage } from '../hooks'
import { Group } from '@/app/components/base/icons/src/vender/other'
import { OpenaiBlue, OpenaiTeal, OpenaiViolet, OpenaiYellow } from '@/app/components/base/icons/src/public/llm'
import cn from '@/utils/classnames'
import { renderI18nObject } from '@/i18n-config'
import { Theme } from '@/types/app'
import cn from '@/utils/classnames'
import useTheme from '@/hooks/use-theme'
type ModelIconProps = {
provider?: Model | ModelProvider
@@ -23,6 +25,7 @@ const ModelIcon: FC<ModelIconProps> = ({
iconClassName,
isDeprecated = false,
}) => {
const { theme } = useTheme()
const language = useLanguage()
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.startsWith('o'))
return <div className='flex items-center justify-center'><OpenaiYellow className={cn('h-5 w-5', className)} /></div>
@@ -36,7 +39,16 @@ const ModelIcon: FC<ModelIconProps> = ({
if (provider?.icon_small) {
return (
<div className={cn('flex h-5 w-5 items-center justify-center', isDeprecated && 'opacity-50', className)}>
<img alt='model-icon' src={renderI18nObject(provider.icon_small, language)} className={iconClassName} />
<img
alt='model-icon'
src={renderI18nObject(
theme === Theme.dark && provider.icon_small_dark
? provider.icon_small_dark
: provider.icon_small,
language,
)}
className={iconClassName}
/>
</div>
)
}
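ProviderIcon and the marketplace Card below repeat the same theme-aware fallback. As a sketch of the shared selection rule (assuming TypeWithI18N resolves to a plain locale-to-string map; the pickIcon helper is illustrative, not part of the diff):
import { Theme } from '@/types/app'
// Illustrative helper: use the dark variant only when the dark theme is active
// and a dark icon is actually provided; otherwise fall back to the light icon.
type I18nIcon = Record<string, string>
const pickIcon = (theme: Theme, icon: I18nIcon, iconDark?: I18nIcon): I18nIcon =>
  (theme === Theme.dark && iconDark) ? iconDark : icon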

View File

@@ -40,7 +40,12 @@ const ProviderIcon: FC<ProviderIconProps> = ({
<div className={cn('inline-flex items-center gap-2', className)}>
<img
alt='provider-icon'
src={renderI18nObject(provider.icon_small, language)}
src={renderI18nObject(
theme === Theme.dark && provider.icon_small_dark
? provider.icon_small_dark
: provider.icon_small,
language,
)}
className='h-6 w-6'
/>
<div className='system-md-semibold text-text-primary'>

View File

@@ -6,6 +6,8 @@ import { getLanguage } from '@/i18n-config/language'
import cn from '@/utils/classnames'
import { RiAlertFill } from '@remixicon/react'
import React from 'react'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import Partner from '../base/badges/partner'
import Verified from '../base/badges/verified'
import Icon from '../card/base/card-icon'
@@ -50,7 +52,9 @@ const Card = ({
const locale = localeFromProps ? getLanguage(localeFromProps) : defaultLocale
const { t } = useMixedTranslation(localeFromProps)
const { categoriesMap } = useCategories(t, true)
const { category, type, name, org, label, brief, icon, verified, badges = [] } = payload
const { category, type, name, org, label, brief, icon, icon_dark, verified, badges = [] } = payload
const { theme } = useTheme()
const iconSrc = theme === Theme.dark && icon_dark ? icon_dark : icon
const getLocalizedText = (obj: Record<string, string> | undefined) =>
obj ? renderI18nObject(obj, locale) : ''
const isPartner = badges.includes('partner')
@@ -71,7 +75,7 @@ const Card = ({
{!hideCornerMark && <CornerMark text={categoriesMap[type === 'bundle' ? type : category]?.label} />}
{/* Header */}
<div className="flex">
<Icon src={icon} installed={installed} installFailed={installFailed} />
<Icon src={iconSrc} installed={installed} installFailed={installFailed} />
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">
<Title title={getLocalizedText(label)} />

View File

@@ -64,10 +64,12 @@ const InstallFromLocalPackage: React.FC<InstallFromLocalPackageProps> = ({
uniqueIdentifier,
} = result
const icon = await getIconUrl(manifest!.icon)
const iconDark = manifest.icon_dark ? await getIconUrl(manifest.icon_dark) : undefined
setUniqueIdentifier(uniqueIdentifier)
setManifest({
...manifest,
icon,
icon_dark: iconDark,
})
setStep(InstallStep.readyToInstall)
}, [getIconUrl])

View File

@@ -17,6 +17,7 @@ export const pluginManifestToCardPluginProps = (pluginManifest: PluginDeclaratio
brief: pluginManifest.description,
description: pluginManifest.description,
icon: pluginManifest.icon,
icon_dark: pluginManifest.icon_dark,
verified: pluginManifest.verified,
introduction: '',
repository: '',

View File

@@ -2,3 +2,5 @@ export const DEFAULT_SORT = {
sortBy: 'install_count',
sortOrder: 'DESC',
}
export const SCROLL_BOTTOM_THRESHOLD = 100

View File

@@ -41,8 +41,6 @@ import { useInstalledPluginList } from '@/service/use-plugins'
import { debounce, noop } from 'lodash-es'
export type MarketplaceContextValue = {
intersected: boolean
setIntersected: (intersected: boolean) => void
searchPluginText: string
handleSearchPluginTextChange: (text: string) => void
filterPluginTags: string[]
@@ -50,7 +48,7 @@ export type MarketplaceContextValue = {
activePluginType: string
handleActivePluginTypeChange: (type: string) => void
page: number
handlePageChange: (page: number) => void
handlePageChange: () => void
plugins?: Plugin[]
pluginsTotal?: number
resetPlugins: () => void
@@ -67,8 +65,6 @@ export type MarketplaceContextValue = {
}
export const MarketplaceContext = createContext<MarketplaceContextValue>({
intersected: true,
setIntersected: noop,
searchPluginText: '',
handleSearchPluginTextChange: noop,
filterPluginTags: [],
@@ -121,15 +117,12 @@ export const MarketplaceContextProvider = ({
const hasValidTags = !!tagsFromSearchParams.length
const hasValidCategory = getValidCategoryKeys(searchParams?.category)
const categoryFromSearchParams = hasValidCategory || PLUGIN_TYPE_SEARCH_MAP.all
const [intersected, setIntersected] = useState(true)
const [searchPluginText, setSearchPluginText] = useState(queryFromSearchParams)
const searchPluginTextRef = useRef(searchPluginText)
const [filterPluginTags, setFilterPluginTags] = useState<string[]>(tagsFromSearchParams)
const filterPluginTagsRef = useRef(filterPluginTags)
const [activePluginType, setActivePluginType] = useState(categoryFromSearchParams)
const activePluginTypeRef = useRef(activePluginType)
const [page, setPage] = useState(1)
const pageRef = useRef(page)
const [sort, setSort] = useState(DEFAULT_SORT)
const sortRef = useRef(sort)
const {
@@ -149,7 +142,11 @@ export const MarketplaceContextProvider = ({
queryPluginsWithDebounced,
cancelQueryPluginsWithDebounced,
isLoading: isPluginsLoading,
fetchNextPage: fetchNextPluginsPage,
hasNextPage: hasNextPluginsPage,
page: pluginsPage,
} = useMarketplacePlugins()
const page = Math.max(pluginsPage || 0, 1)
useEffect(() => {
if (queryFromSearchParams || hasValidTags || hasValidCategory) {
@@ -160,7 +157,6 @@ export const MarketplaceContextProvider = ({
sortBy: sortRef.current.sortBy,
sortOrder: sortRef.current.sortOrder,
type: getMarketplaceListFilterType(activePluginTypeRef.current),
page: pageRef.current,
})
const url = new URL(window.location.href)
if (searchParams?.language)
@@ -221,7 +217,6 @@ export const MarketplaceContextProvider = ({
sortOrder: sortRef.current.sortOrder,
exclude,
type: getMarketplaceListFilterType(activePluginTypeRef.current),
page: pageRef.current,
})
}
else {
@@ -233,7 +228,6 @@ export const MarketplaceContextProvider = ({
sortOrder: sortRef.current.sortOrder,
exclude,
type: getMarketplaceListFilterType(activePluginTypeRef.current),
page: pageRef.current,
})
}
}, [exclude, queryPluginsWithDebounced, queryPlugins, handleUpdateSearchParams])
@@ -252,8 +246,6 @@ export const MarketplaceContextProvider = ({
const handleSearchPluginTextChange = useCallback((text: string) => {
setSearchPluginText(text)
searchPluginTextRef.current = text
setPage(1)
pageRef.current = 1
handleQuery(true)
}, [handleQuery])
@@ -261,8 +253,6 @@ export const MarketplaceContextProvider = ({
const handleFilterPluginTagsChange = useCallback((tags: string[]) => {
setFilterPluginTags(tags)
filterPluginTagsRef.current = tags
setPage(1)
pageRef.current = 1
handleQuery()
}, [handleQuery])
@@ -270,8 +260,6 @@ export const MarketplaceContextProvider = ({
const handleActivePluginTypeChange = useCallback((type: string) => {
setActivePluginType(type)
activePluginTypeRef.current = type
setPage(1)
pageRef.current = 1
handleQuery()
}, [handleQuery])
@@ -279,20 +267,14 @@ export const MarketplaceContextProvider = ({
const handleSortChange = useCallback((sort: PluginsSort) => {
setSort(sort)
sortRef.current = sort
setPage(1)
pageRef.current = 1
handleQueryPlugins()
}, [handleQueryPlugins])
const handlePageChange = useCallback(() => {
if (pluginsTotal && plugins && pluginsTotal > plugins.length) {
setPage(pageRef.current + 1)
pageRef.current++
handleQueryPlugins()
}
}, [handleQueryPlugins, plugins, pluginsTotal])
if (hasNextPluginsPage)
fetchNextPluginsPage()
}, [fetchNextPluginsPage, hasNextPluginsPage])
const handleMoreClick = useCallback((searchParams: SearchParamsFromCollection) => {
setSearchPluginText(searchParams?.query || '')
@@ -305,9 +287,6 @@ export const MarketplaceContextProvider = ({
sortBy: searchParams?.sort_by || DEFAULT_SORT.sortBy,
sortOrder: searchParams?.sort_order || DEFAULT_SORT.sortOrder,
}
setPage(1)
pageRef.current = 1
handleQueryPlugins()
}, [handleQueryPlugins])
@@ -316,8 +295,6 @@ export const MarketplaceContextProvider = ({
return (
<MarketplaceContext.Provider
value={{
intersected,
setIntersected,
searchPluginText,
handleSearchPluginTextChange,
filterPluginTags,

View File

@@ -3,6 +3,11 @@ import {
useEffect,
useState,
} from 'react'
import {
useInfiniteQuery,
useQuery,
useQueryClient,
} from '@tanstack/react-query'
import { useTranslation } from 'react-i18next'
import { useDebounceFn } from 'ahooks'
import type {
@@ -16,39 +21,41 @@ import type {
import {
getFormattedPlugin,
getMarketplaceCollectionsAndPlugins,
getMarketplacePluginsByCollectionId,
} from './utils'
import { SCROLL_BOTTOM_THRESHOLD } from './constants'
import i18n from '@/i18n-config/i18next-config'
import {
useMutationPluginsFromMarketplace,
} from '@/service/use-plugins'
import { postMarketplace } from '@/service/base'
import type { PluginsFromMarketplaceResponse } from '@/app/components/plugins/types'
export const useMarketplaceCollectionsAndPlugins = () => {
const [isLoading, setIsLoading] = useState(false)
const [isSuccess, setIsSuccess] = useState(false)
const [marketplaceCollections, setMarketplaceCollections] = useState<MarketplaceCollection[]>()
const [marketplaceCollectionPluginsMap, setMarketplaceCollectionPluginsMap] = useState<Record<string, Plugin[]>>()
const [queryParams, setQueryParams] = useState<CollectionsAndPluginsSearchParams>()
const [marketplaceCollectionsOverride, setMarketplaceCollections] = useState<MarketplaceCollection[]>()
const [marketplaceCollectionPluginsMapOverride, setMarketplaceCollectionPluginsMap] = useState<Record<string, Plugin[]>>()
const queryMarketplaceCollectionsAndPlugins = useCallback(async (query?: CollectionsAndPluginsSearchParams) => {
try {
setIsLoading(true)
setIsSuccess(false)
const { marketplaceCollections, marketplaceCollectionPluginsMap } = await getMarketplaceCollectionsAndPlugins(query)
setIsLoading(false)
setIsSuccess(true)
setMarketplaceCollections(marketplaceCollections)
setMarketplaceCollectionPluginsMap(marketplaceCollectionPluginsMap)
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
setIsLoading(false)
setIsSuccess(false)
}
const {
data,
isFetching,
isSuccess,
isPending,
} = useQuery({
queryKey: ['marketplaceCollectionsAndPlugins', queryParams],
queryFn: ({ signal }) => getMarketplaceCollectionsAndPlugins(queryParams, { signal }),
enabled: queryParams !== undefined,
staleTime: 1000 * 60 * 5,
gcTime: 1000 * 60 * 10,
retry: false,
})
const queryMarketplaceCollectionsAndPlugins = useCallback((query?: CollectionsAndPluginsSearchParams) => {
setQueryParams(query ? { ...query } : {})
}, [])
const isLoading = !!queryParams && (isFetching || isPending)
return {
marketplaceCollections,
marketplaceCollections: marketplaceCollectionsOverride ?? data?.marketplaceCollections,
setMarketplaceCollections,
marketplaceCollectionPluginsMap,
marketplaceCollectionPluginsMap: marketplaceCollectionPluginsMapOverride ?? data?.marketplaceCollectionPluginsMap,
setMarketplaceCollectionPluginsMap,
queryMarketplaceCollectionsAndPlugins,
isLoading,
@@ -56,37 +63,128 @@ export const useMarketplaceCollectionsAndPlugins = () => {
}
}
export const useMarketplacePlugins = () => {
export const useMarketplacePluginsByCollectionId = (
collectionId?: string,
query?: CollectionsAndPluginsSearchParams,
) => {
const {
data,
mutateAsync,
reset,
isFetching,
isSuccess,
isPending,
} = useMutationPluginsFromMarketplace()
} = useQuery({
queryKey: ['marketplaceCollectionPlugins', collectionId, query],
queryFn: ({ signal }) => {
if (!collectionId)
return Promise.resolve<Plugin[]>([])
return getMarketplacePluginsByCollectionId(collectionId, query, { signal })
},
enabled: !!collectionId,
staleTime: 1000 * 60 * 5,
gcTime: 1000 * 60 * 10,
retry: false,
})
const [prevPlugins, setPrevPlugins] = useState<Plugin[] | undefined>()
return {
plugins: data || [],
isLoading: !!collectionId && (isFetching || isPending),
isSuccess,
}
}
export const useMarketplacePlugins = () => {
const queryClient = useQueryClient()
const [queryParams, setQueryParams] = useState<PluginsSearchParams>()
const normalizeParams = useCallback((pluginsSearchParams: PluginsSearchParams) => {
const pageSize = pluginsSearchParams.pageSize || 40
return {
...pluginsSearchParams,
pageSize,
}
}, [])
const marketplacePluginsQuery = useInfiniteQuery({
queryKey: ['marketplacePlugins', queryParams],
queryFn: async ({ pageParam = 1, signal }) => {
if (!queryParams) {
return {
plugins: [] as Plugin[],
total: 0,
page: 1,
pageSize: 40,
}
}
const params = normalizeParams(queryParams)
const {
query,
sortBy,
sortOrder,
category,
tags,
exclude,
type,
pageSize,
} = params
const pluginOrBundle = type === 'bundle' ? 'bundles' : 'plugins'
try {
const res = await postMarketplace<{ data: PluginsFromMarketplaceResponse }>(`/${pluginOrBundle}/search/advanced`, {
body: {
page: pageParam,
page_size: pageSize,
query,
sort_by: sortBy,
sort_order: sortOrder,
category: category !== 'all' ? category : '',
tags,
exclude,
type,
},
signal,
})
const resPlugins = res.data.bundles || res.data.plugins || []
return {
plugins: resPlugins.map(plugin => getFormattedPlugin(plugin)),
total: res.data.total,
page: pageParam,
pageSize,
}
}
catch {
return {
plugins: [],
total: 0,
page: pageParam,
pageSize,
}
}
},
getNextPageParam: (lastPage) => {
const nextPage = lastPage.page + 1
const loaded = lastPage.page * lastPage.pageSize
return loaded < (lastPage.total || 0) ? nextPage : undefined
},
initialPageParam: 1,
enabled: !!queryParams,
staleTime: 1000 * 60 * 5,
gcTime: 1000 * 60 * 10,
retry: false,
})
const resetPlugins = useCallback(() => {
reset()
setPrevPlugins(undefined)
}, [reset])
setQueryParams(undefined)
queryClient.removeQueries({
queryKey: ['marketplacePlugins'],
})
}, [queryClient])
const handleUpdatePlugins = useCallback((pluginsSearchParams: PluginsSearchParams) => {
mutateAsync(pluginsSearchParams).then((res) => {
const currentPage = pluginsSearchParams.page || 1
const resPlugins = res.data.bundles || res.data.plugins
if (currentPage > 1) {
setPrevPlugins(prevPlugins => [...(prevPlugins || []), ...resPlugins.map((plugin) => {
return getFormattedPlugin(plugin)
})])
}
else {
setPrevPlugins(resPlugins.map((plugin) => {
return getFormattedPlugin(plugin)
}))
}
})
}, [mutateAsync])
setQueryParams(normalizeParams(pluginsSearchParams))
}, [normalizeParams])
const { run: queryPluginsWithDebounced, cancel: cancelQueryPluginsWithDebounced } = useDebounceFn((pluginsSearchParams: PluginsSearchParams) => {
handleUpdatePlugins(pluginsSearchParams)
@@ -94,14 +192,29 @@ export const useMarketplacePlugins = () => {
wait: 500,
})
const hasQuery = !!queryParams
const hasData = marketplacePluginsQuery.data !== undefined
const plugins = hasQuery && hasData
? marketplacePluginsQuery.data.pages.flatMap(page => page.plugins)
: undefined
const total = hasQuery && hasData ? marketplacePluginsQuery.data.pages?.[0]?.total : undefined
const isPluginsLoading = hasQuery && (
marketplacePluginsQuery.isPending
|| (marketplacePluginsQuery.isFetching && !marketplacePluginsQuery.data)
)
return {
plugins: prevPlugins,
total: data?.data?.total,
plugins,
total,
resetPlugins,
queryPlugins: handleUpdatePlugins,
queryPluginsWithDebounced,
cancelQueryPluginsWithDebounced,
isLoading: isPending,
isLoading: isPluginsLoading,
isFetchingNextPage: marketplacePluginsQuery.isFetchingNextPage,
hasNextPage: marketplacePluginsQuery.hasNextPage,
fetchNextPage: marketplacePluginsQuery.fetchNextPage,
page: marketplacePluginsQuery.data?.pages?.length || (marketplacePluginsQuery.isPending && hasQuery ? 1 : 0),
}
}
@@ -131,7 +244,7 @@ export const useMarketplaceContainerScroll = (
scrollHeight,
clientHeight,
} = target
if (scrollTop + clientHeight >= scrollHeight - 5 && scrollTop > 0)
if (scrollTop + clientHeight >= scrollHeight - SCROLL_BOTTOM_THRESHOLD && scrollTop > 0)
callback()
}, [callback])
@@ -146,34 +259,3 @@ export const useMarketplaceContainerScroll = (
}
}, [handleScroll])
}
export const useSearchBoxAutoAnimate = (searchBoxAutoAnimate?: boolean) => {
const [searchBoxCanAnimate, setSearchBoxCanAnimate] = useState(true)
const handleSearchBoxCanAnimateChange = useCallback(() => {
if (!searchBoxAutoAnimate) {
const clientWidth = document.documentElement.clientWidth
if (clientWidth < 1400)
setSearchBoxCanAnimate(false)
else
setSearchBoxCanAnimate(true)
}
}, [searchBoxAutoAnimate])
useEffect(() => {
handleSearchBoxCanAnimateChange()
}, [handleSearchBoxCanAnimateChange])
useEffect(() => {
window.addEventListener('resize', handleSearchBoxCanAnimateChange)
return () => {
window.removeEventListener('resize', handleSearchBoxCanAnimateChange)
}
}, [handleSearchBoxCanAnimateChange])
return {
searchBoxCanAnimate,
}
}
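With the hooks now backed by useInfiniteQuery, pagination is driven by hasNextPage/fetchNextPage instead of an explicit page argument. A minimal consumer sketch under those assumptions (MarketplaceList and the search-param values are illustrative; useMarketplaceContainerScroll is assumed to take just the bottom-reached callback shown in the hunk above):
'use client'
import { useEffect } from 'react'
import {
  useMarketplaceContainerScroll,
  useMarketplacePlugins,
} from '@/app/components/plugins/marketplace/hooks'
const MarketplaceList = () => {
  const { plugins, total, isLoading, hasNextPage, fetchNextPage, queryPlugins } = useMarketplacePlugins()
  useEffect(() => {
    // First page; page/pageSize bookkeeping is handled inside the infinite query
    queryPlugins({ query: '', tags: [], category: 'all', sortBy: 'install_count', sortOrder: 'DESC' })
  }, [queryPlugins])
  // Request the next page when the scroll container nears the bottom
  useMarketplaceContainerScroll(() => {
    if (hasNextPage)
      fetchNextPage()
  })
  return null // render plugins / total / isLoading as needed
}
export default MarketplaceList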

View File

@@ -1,37 +1,32 @@
import { MarketplaceContextProvider } from './context'
import Description from './description'
import IntersectionLine from './intersection-line'
import SearchBoxWrapper from './search-box/search-box-wrapper'
import PluginTypeSwitch from './plugin-type-switch'
import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper'
import ListWrapper from './list/list-wrapper'
import type { SearchParams } from './types'
import type { MarketplaceCollection, SearchParams } from './types'
import type { Plugin } from '@/app/components/plugins/types'
import { getMarketplaceCollectionsAndPlugins } from './utils'
import { TanstackQueryInitializer } from '@/context/query-client'
type MarketplaceProps = {
locale: string
searchBoxAutoAnimate?: boolean
showInstallButton?: boolean
shouldExclude?: boolean
searchParams?: SearchParams
pluginTypeSwitchClassName?: string
intersectionContainerId?: string
scrollContainerId?: string
showSearchParams?: boolean
}
const Marketplace = async ({
locale,
searchBoxAutoAnimate = true,
showInstallButton = true,
shouldExclude,
searchParams,
pluginTypeSwitchClassName,
intersectionContainerId,
scrollContainerId,
showSearchParams = true,
}: MarketplaceProps) => {
let marketplaceCollections: any = []
let marketplaceCollectionPluginsMap = {}
let marketplaceCollections: MarketplaceCollection[] = []
let marketplaceCollectionPluginsMap: Record<string, Plugin[]> = {}
if (!shouldExclude) {
const marketplaceCollectionsAndPluginsData = await getMarketplaceCollectionsAndPlugins()
marketplaceCollections = marketplaceCollectionsAndPluginsData.marketplaceCollections
@@ -47,15 +42,9 @@ const Marketplace = async ({
showSearchParams={showSearchParams}
>
<Description locale={locale} />
<IntersectionLine intersectionContainerId={intersectionContainerId} />
<SearchBoxWrapper
<StickySearchAndSwitchWrapper
locale={locale}
searchBoxAutoAnimate={searchBoxAutoAnimate}
/>
<PluginTypeSwitch
locale={locale}
className={pluginTypeSwitchClassName}
searchBoxAutoAnimate={searchBoxAutoAnimate}
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
showSearchParams={showSearchParams}
/>
<ListWrapper

View File

@@ -1,30 +0,0 @@
import { useEffect } from 'react'
import { useMarketplaceContext } from '@/app/components/plugins/marketplace/context'
export const useScrollIntersection = (
anchorRef: React.RefObject<HTMLDivElement | null>,
intersectionContainerId = 'marketplace-container',
) => {
const intersected = useMarketplaceContext(v => v.intersected)
const setIntersected = useMarketplaceContext(v => v.setIntersected)
useEffect(() => {
const container = document.getElementById(intersectionContainerId)
let observer: IntersectionObserver | undefined
if (container && anchorRef.current) {
observer = new IntersectionObserver((entries) => {
const isIntersecting = entries[0].isIntersecting
if (isIntersecting && !intersected)
setIntersected(true)
if (!isIntersecting && intersected)
setIntersected(false)
}, {
root: container,
})
observer.observe(anchorRef.current)
}
return () => observer?.disconnect()
}, [anchorRef, intersected, setIntersected, intersectionContainerId])
}

View File

@@ -1,21 +0,0 @@
'use client'
import { useRef } from 'react'
import { useScrollIntersection } from './hooks'
type IntersectionLineProps = {
intersectionContainerId?: string
}
const IntersectionLine = ({
intersectionContainerId,
}: IntersectionLineProps) => {
const ref = useRef<HTMLDivElement>(null)
useScrollIntersection(ref, intersectionContainerId)
return (
<div ref={ref} className='mb-4 h-px shrink-0 bg-transparent'></div>
)
}
export default IntersectionLine

View File

@@ -28,13 +28,20 @@ const ListWrapper = ({
const isLoading = useMarketplaceContext(v => v.isLoading)
const isSuccessCollections = useMarketplaceContext(v => v.isSuccessCollections)
const handleQueryPlugins = useMarketplaceContext(v => v.handleQueryPlugins)
const searchPluginText = useMarketplaceContext(v => v.searchPluginText)
const filterPluginTags = useMarketplaceContext(v => v.filterPluginTags)
const page = useMarketplaceContext(v => v.page)
const handleMoreClick = useMarketplaceContext(v => v.handleMoreClick)
useEffect(() => {
if (!marketplaceCollectionsFromClient?.length && isSuccessCollections)
if (
!marketplaceCollectionsFromClient?.length
&& isSuccessCollections
&& !searchPluginText
&& !filterPluginTags.length
)
handleQueryPlugins()
}, [handleQueryPlugins, marketplaceCollections, marketplaceCollectionsFromClient, isSuccessCollections])
}, [handleQueryPlugins, marketplaceCollections, marketplaceCollectionsFromClient, isSuccessCollections, searchPluginText, filterPluginTags])
return (
<div

View File

@@ -12,10 +12,7 @@ import {
import { useCallback, useEffect } from 'react'
import { PluginCategoryEnum } from '../types'
import { useMarketplaceContext } from './context'
import {
useMixedTranslation,
useSearchBoxAutoAnimate,
} from './hooks'
import { useMixedTranslation } from './hooks'
export const PLUGIN_TYPE_SEARCH_MAP = {
all: 'all',
@@ -30,19 +27,16 @@ export const PLUGIN_TYPE_SEARCH_MAP = {
type PluginTypeSwitchProps = {
locale?: string
className?: string
searchBoxAutoAnimate?: boolean
showSearchParams?: boolean
}
const PluginTypeSwitch = ({
locale,
className,
searchBoxAutoAnimate,
showSearchParams,
}: PluginTypeSwitchProps) => {
const { t } = useMixedTranslation(locale)
const activePluginType = useMarketplaceContext(s => s.activePluginType)
const handleActivePluginTypeChange = useMarketplaceContext(s => s.handleActivePluginTypeChange)
const { searchBoxCanAnimate } = useSearchBoxAutoAnimate(searchBoxAutoAnimate)
const options = [
{
@@ -105,7 +99,6 @@ const PluginTypeSwitch = ({
return (
<div className={cn(
'flex shrink-0 items-center justify-center space-x-2 bg-background-body py-3',
searchBoxCanAnimate && 'sticky top-[56px] z-10',
className,
)}>
{

View File

@@ -1,36 +1,24 @@
'use client'
import { useMarketplaceContext } from '../context'
import {
useMixedTranslation,
useSearchBoxAutoAnimate,
} from '../hooks'
import { useMixedTranslation } from '../hooks'
import SearchBox from './index'
import cn from '@/utils/classnames'
type SearchBoxWrapperProps = {
locale?: string
searchBoxAutoAnimate?: boolean
}
const SearchBoxWrapper = ({
locale,
searchBoxAutoAnimate,
}: SearchBoxWrapperProps) => {
const { t } = useMixedTranslation(locale)
const intersected = useMarketplaceContext(v => v.intersected)
const searchPluginText = useMarketplaceContext(v => v.searchPluginText)
const handleSearchPluginTextChange = useMarketplaceContext(v => v.handleSearchPluginTextChange)
const filterPluginTags = useMarketplaceContext(v => v.filterPluginTags)
const handleFilterPluginTagsChange = useMarketplaceContext(v => v.handleFilterPluginTagsChange)
const { searchBoxCanAnimate } = useSearchBoxAutoAnimate(searchBoxAutoAnimate)
return (
<SearchBox
wrapperClassName={cn(
'z-[0] mx-auto w-[640px] shrink-0',
searchBoxCanAnimate && 'sticky top-3 z-[11]',
!intersected && searchBoxCanAnimate && 'w-[508px] transition-[width] duration-300',
)}
wrapperClassName='z-[11] mx-auto w-[640px] shrink-0'
inputClassName='w-full'
search={searchPluginText}
onSearchChange={handleSearchPluginTextChange}

View File

@@ -0,0 +1,37 @@
'use client'
import SearchBoxWrapper from './search-box/search-box-wrapper'
import PluginTypeSwitch from './plugin-type-switch'
import cn from '@/utils/classnames'
type StickySearchAndSwitchWrapperProps = {
locale?: string
pluginTypeSwitchClassName?: string
showSearchParams?: boolean
}
const StickySearchAndSwitchWrapper = ({
locale,
pluginTypeSwitchClassName,
showSearchParams,
}: StickySearchAndSwitchWrapperProps) => {
const hasCustomTopClass = pluginTypeSwitchClassName?.includes('top-')
return (
<div
className={cn(
'mt-4 bg-background-body',
hasCustomTopClass && 'sticky z-10',
pluginTypeSwitchClassName,
)}
>
<SearchBoxWrapper locale={locale} />
<PluginTypeSwitch
locale={locale}
showSearchParams={showSearchParams}
/>
</div>
)
}
export default StickySearchAndSwitchWrapper
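The wrapper only turns sticky when the caller passes a `top-` offset through `pluginTypeSwitchClassName` (the `hasCustomTopClass` check above). An illustrative call site; the `top-[60px]` value is an assumption, not taken from this diff:

```tsx
import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper'

// Sticky variant: a `top-` class switches on `sticky z-10` inside the wrapper.
const ExampleMarketplaceHeader = ({ locale }: { locale: string }) => (
  <StickySearchAndSwitchWrapper
    locale={locale}
    pluginTypeSwitchClassName='top-[60px]'
    showSearchParams
  />
)

export default ExampleMarketplaceHeader
```

Without a `top-` class the wrapper simply flows with the page and nothing is pinned.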

View File

@@ -13,6 +13,14 @@ import {
} from '@/config'
import { getMarketplaceUrl } from '@/utils/var'
type MarketplaceFetchOptions = {
signal?: AbortSignal
}
const getMarketplaceHeaders = () => new Headers({
'X-Dify-Version': !IS_MARKETPLACE ? APP_VERSION : '999.0.0',
})
export const getPluginIconInMarketplace = (plugin: Plugin) => {
if (plugin.type === 'bundle')
return `${MARKETPLACE_API_PREFIX}/bundles/${plugin.org}/${plugin.name}/icon`
@@ -46,20 +54,23 @@ export const getPluginDetailLinkInMarketplace = (plugin: Plugin) => {
return `/plugins/${plugin.org}/${plugin.name}`
}
export const getMarketplacePluginsByCollectionId = async (collectionId: string, query?: CollectionsAndPluginsSearchParams) => {
let plugins: Plugin[]
export const getMarketplacePluginsByCollectionId = async (
collectionId: string,
query?: CollectionsAndPluginsSearchParams,
options?: MarketplaceFetchOptions,
) => {
let plugins: Plugin[] = []
try {
const url = `${MARKETPLACE_API_PREFIX}/collections/${collectionId}/plugins`
const headers = new Headers({
'X-Dify-Version': !IS_MARKETPLACE ? APP_VERSION : '999.0.0',
})
const headers = getMarketplaceHeaders()
const marketplaceCollectionPluginsData = await globalThis.fetch(
url,
{
cache: 'no-store',
method: 'POST',
headers,
signal: options?.signal,
body: JSON.stringify({
category: query?.category,
exclude: query?.exclude,
@@ -68,9 +79,7 @@ export const getMarketplacePluginsByCollectionId = async (collectionId: string,
},
)
const marketplaceCollectionPluginsDataJson = await marketplaceCollectionPluginsData.json()
plugins = marketplaceCollectionPluginsDataJson.data.plugins.map((plugin: Plugin) => {
return getFormattedPlugin(plugin)
})
plugins = (marketplaceCollectionPluginsDataJson.data.plugins || []).map((plugin: Plugin) => getFormattedPlugin(plugin))
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
@@ -80,23 +89,31 @@ export const getMarketplacePluginsByCollectionId = async (collectionId: string,
return plugins
}
export const getMarketplaceCollectionsAndPlugins = async (query?: CollectionsAndPluginsSearchParams) => {
let marketplaceCollections = [] as MarketplaceCollection[]
let marketplaceCollectionPluginsMap = {} as Record<string, Plugin[]>
export const getMarketplaceCollectionsAndPlugins = async (
query?: CollectionsAndPluginsSearchParams,
options?: MarketplaceFetchOptions,
) => {
let marketplaceCollections: MarketplaceCollection[] = []
let marketplaceCollectionPluginsMap: Record<string, Plugin[]> = {}
try {
let marketplaceUrl = `${MARKETPLACE_API_PREFIX}/collections?page=1&page_size=100`
if (query?.condition)
marketplaceUrl += `&condition=${query.condition}`
if (query?.type)
marketplaceUrl += `&type=${query.type}`
const headers = new Headers({
'X-Dify-Version': !IS_MARKETPLACE ? APP_VERSION : '999.0.0',
})
const marketplaceCollectionsData = await globalThis.fetch(marketplaceUrl, { headers, cache: 'no-store' })
const headers = getMarketplaceHeaders()
const marketplaceCollectionsData = await globalThis.fetch(
marketplaceUrl,
{
headers,
cache: 'no-store',
signal: options?.signal,
},
)
const marketplaceCollectionsDataJson = await marketplaceCollectionsData.json()
marketplaceCollections = marketplaceCollectionsDataJson.data.collections
marketplaceCollections = marketplaceCollectionsDataJson.data.collections || []
await Promise.all(marketplaceCollections.map(async (collection: MarketplaceCollection) => {
const plugins = await getMarketplacePluginsByCollectionId(collection.name, query)
const plugins = await getMarketplacePluginsByCollectionId(collection.name, query, options)
marketplaceCollectionPluginsMap[collection.name] = plugins
}))
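Both fetchers now accept an optional `{ signal }`, so callers can cancel in-flight marketplace requests. A minimal sketch of threading an `AbortController` through the new options parameter; the surrounding hook is hypothetical:

```ts
import { useEffect, useState } from 'react'
import type { MarketplaceCollection } from './types'
import { getMarketplaceCollectionsAndPlugins } from './utils'

// Hypothetical hook: abort the request when the component unmounts or
// when `condition` changes, using the new MarketplaceFetchOptions.signal.
export const useAbortableCollections = (condition?: string) => {
  const [collections, setCollections] = useState<MarketplaceCollection[]>([])

  useEffect(() => {
    const controller = new AbortController()
    getMarketplaceCollectionsAndPlugins({ condition }, { signal: controller.signal })
      .then(({ marketplaceCollections }) => {
        if (!controller.signal.aborted)
          setCollections(marketplaceCollections)
      })
      .catch(() => {}) // aborted or failed; the fetchers also swallow errors internally

    return () => controller.abort()
  }, [condition])

  return collections
}
```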

View File

@@ -28,9 +28,9 @@ import {
RiHardDrive3Line,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { useTheme } from 'next-themes'
import React, { useCallback, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import useTheme from '@/hooks/use-theme'
import Verified from '../base/badges/verified'
import { AutoUpdateLine } from '../../base/icons/src/vender/system'
import DeprecationNotice from '../base/deprecation-notice'
@@ -86,7 +86,7 @@ const DetailHeader = ({
alternative_plugin_id,
} = detail
const { author, category, name, label, description, icon, verified, tool } = detail.declaration || detail
const { author, category, name, label, description, icon, icon_dark, verified, tool } = detail.declaration || detail
const isTool = category === PluginCategoryEnum.tool
const providerBriefInfo = tool?.identity
const providerKey = `${plugin_id}/${providerBriefInfo?.name}`
@@ -109,6 +109,11 @@ const DetailHeader = ({
return false
}, [isFromMarketplace, latest_version, version])
const iconFileName = theme === 'dark' && icon_dark ? icon_dark : icon
const iconSrc = iconFileName
? (iconFileName.startsWith('http') ? iconFileName : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${iconFileName}`)
: ''
const detailUrl = useMemo(() => {
if (isFromGitHub)
return `https://github.com/${meta!.repo}`
@@ -214,7 +219,7 @@ const DetailHeader = ({
<div className={cn('shrink-0 border-b border-divider-subtle bg-components-panel-bg p-4 pb-3', isReadmeView && 'border-b-0 bg-transparent p-0')}>
<div className="flex">
<div className={cn('overflow-hidden rounded-xl border border-components-panel-border-subtle', isReadmeView && 'bg-components-panel-bg')}>
<Icon src={icon.startsWith('http') ? icon : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${icon}`} />
<Icon src={iconSrc} />
</div>
<div className="ml-3 w-0 grow">
<div className="flex h-5 items-center">

View File

@@ -14,11 +14,11 @@ import {
RiHardDrive3Line,
RiLoginCircleLine,
} from '@remixicon/react'
import { useTheme } from 'next-themes'
import type { FC } from 'react'
import React, { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { gte } from 'semver'
import useTheme from '@/hooks/use-theme'
import Verified from '../base/badges/verified'
import Badge from '../../base/badge'
import { Github } from '../../base/icons/src/public/common'
@@ -58,7 +58,7 @@ const PluginItem: FC<Props> = ({
status,
deprecated_reason,
} = plugin
const { category, author, name, label, description, icon, verified, meta: declarationMeta } = plugin.declaration
const { category, author, name, label, description, icon, icon_dark, verified, meta: declarationMeta } = plugin.declaration
const orgName = useMemo(() => {
return [PluginSource.github, PluginSource.marketplace].includes(source) ? author : ''
@@ -84,6 +84,10 @@ const PluginItem: FC<Props> = ({
const title = getValueFromI18nObject(label)
const descriptionText = getValueFromI18nObject(description)
const { enable_marketplace } = useGlobalPublicStore(s => s.systemFeatures)
const iconFileName = theme === 'dark' && icon_dark ? icon_dark : icon
const iconSrc = iconFileName
? (iconFileName.startsWith('http') ? iconFileName : `${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${iconFileName}`)
: ''
return (
<div
@@ -105,7 +109,7 @@ const PluginItem: FC<Props> = ({
<div className='flex h-10 w-10 items-center justify-center overflow-hidden rounded-xl border-[1px] border-components-panel-border-subtle'>
<img
className='h-full w-full'
src={`${API_PREFIX}/workspaces/current/plugin/icon?tenant_id=${tenant_id}&filename=${icon}`}
src={iconSrc}
alt={`plugin-${plugin_unique_identifier}-logo`}
/>
</div>

View File

@@ -71,6 +71,7 @@ export type PluginDeclaration = {
version: string
author: string
icon: string
icon_dark?: string
name: string
category: PluginCategoryEnum
label: Record<Locale, string>
@@ -248,7 +249,7 @@ export type PluginInfoFromMarketPlace = {
}
export type Plugin = {
type: 'plugin' | 'bundle' | 'model' | 'extension' | 'tool' | 'agent_strategy'
type: 'plugin' | 'bundle' | 'model' | 'extension' | 'tool' | 'agent_strategy' | 'datasource' | 'trigger'
org: string
author?: string
name: string
@@ -257,6 +258,7 @@ export type Plugin = {
latest_version: string
latest_package_identifier: string
icon: string
icon_dark?: string
verified: boolean
label: Record<Locale, string>
brief: Record<Locale, string>

View File

@@ -0,0 +1,22 @@
'use client'
import { scan } from 'react-scan'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
export function ReactScan() {
useEffect(() => {
if (IS_DEV) {
scan({
enabled: true,
// HACK: react-scan's getIsProduction() incorrectly detects Next.js dev as production
// because Next.js devtools overlay uses production React build
// Issue: https://github.com/aidenybai/react-scan/issues/402
// TODO: remove this option after upstream fix
dangerouslyForceRunInProduction: true,
})
}
}, [])
return null
}

View File

@@ -3,12 +3,12 @@ import {
useEffect,
useMemo,
useRef,
useState,
} from 'react'
import {
useMarketplaceCollectionsAndPlugins,
useMarketplacePlugins,
} from '@/app/components/plugins/marketplace/hooks'
import { SCROLL_BOTTOM_THRESHOLD } from '@/app/components/plugins/marketplace/constants'
import { PluginCategoryEnum } from '@/app/components/plugins/types'
import { getMarketplaceListCondition } from '@/app/components/plugins/marketplace/utils'
import { useAllToolProviders } from '@/service/use-tools'
@@ -31,10 +31,10 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
queryPlugins,
queryPluginsWithDebounced,
isLoading: isPluginsLoading,
total: pluginsTotal,
fetchNextPage,
hasNextPage,
page: pluginsPage,
} = useMarketplacePlugins()
const [page, setPage] = useState(1)
const pageRef = useRef(page)
const searchPluginTextRef = useRef(searchPluginText)
const filterPluginTagsRef = useRef(filterPluginTags)
@@ -44,9 +44,6 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
}, [searchPluginText, filterPluginTags])
useEffect(() => {
if ((searchPluginText || filterPluginTags.length) && isSuccess) {
setPage(1)
pageRef.current = 1
if (searchPluginText) {
queryPluginsWithDebounced({
category: PluginCategoryEnum.tool,
@@ -54,7 +51,6 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
tags: filterPluginTags,
exclude,
type: 'plugin',
page: pageRef.current,
})
return
}
@@ -64,7 +60,6 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
tags: filterPluginTags,
exclude,
type: 'plugin',
page: pageRef.current,
})
}
else {
@@ -87,24 +82,13 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
scrollHeight,
clientHeight,
} = target
if (scrollTop + clientHeight >= scrollHeight - 5 && scrollTop > 0) {
if (scrollTop + clientHeight >= scrollHeight - SCROLL_BOTTOM_THRESHOLD && scrollTop > 0) {
const searchPluginText = searchPluginTextRef.current
const filterPluginTags = filterPluginTagsRef.current
if (pluginsTotal && plugins && pluginsTotal > plugins.length && (!!searchPluginText || !!filterPluginTags.length)) {
setPage(pageRef.current + 1)
pageRef.current++
queryPlugins({
category: PluginCategoryEnum.tool,
query: searchPluginText,
tags: filterPluginTags,
exclude,
type: 'plugin',
page: pageRef.current,
})
}
if (hasNextPage && (!!searchPluginText || !!filterPluginTags.length))
fetchNextPage()
}
}, [exclude, plugins, pluginsTotal, queryPlugins])
}, [exclude, fetchNextPage, hasNextPage, plugins, queryPlugins])
return {
isLoading: isLoading || isPluginsLoading,
@@ -112,6 +96,6 @@ export const useMarketplace = (searchPluginText: string, filterPluginTags: strin
marketplaceCollectionPluginsMap,
plugins,
handleScroll,
page,
page: Math.max(pluginsPage || 0, 1),
}
}
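The manual `page`/`pageRef` bookkeeping is gone; pagination now comes from `useMarketplacePlugins` itself (`fetchNextPage`, `hasNextPage`, `page`). That hook isn't shown in this compare view, but a plausible shape built on TanStack Query's `useInfiniteQuery` looks roughly like the sketch below; the names, endpoint, and page size are assumptions:

```ts
import { useInfiniteQuery } from '@tanstack/react-query'
import type { Plugin } from '@/app/components/plugins/types'

type PluginsPage = { plugins: Plugin[]; total: number; page: number }

const PAGE_SIZE = 40 // placeholder; the real page size is not visible in this diff

// Hypothetical sketch of an infinite query exposing the fields the
// refactored useMarketplace hook consumes: plugins, fetchNextPage,
// hasNextPage and page. The endpoint below is a placeholder.
export const useMarketplacePluginsSketch = (params: Record<string, unknown>) => {
  const query = useInfiniteQuery({
    queryKey: ['marketplace', 'plugins', params],
    queryFn: async ({ pageParam }): Promise<PluginsPage> => {
      const res = await fetch('/marketplace/plugins/search', {
        method: 'POST',
        body: JSON.stringify({ ...params, page: pageParam, page_size: PAGE_SIZE }),
      })
      return res.json()
    },
    initialPageParam: 1,
    getNextPageParam: lastPage =>
      lastPage.page * PAGE_SIZE < lastPage.total ? lastPage.page + 1 : undefined,
  })

  const pages = query.data?.pages ?? []
  return {
    plugins: pages.flatMap(p => p.plugins),
    total: pages.length ? pages[pages.length - 1].total : 0,
    fetchNextPage: query.fetchNextPage,
    hasNextPage: query.hasNextPage,
    page: pages.length,
    isLoading: query.isFetching,
  }
}
```

With that shape, the scroll handler above only needs `hasNextPage`/`fetchNextPage`, which is exactly what the `handleScroll` change reflects.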

View File

@@ -1,154 +0,0 @@
const tools = [
{
author: 'Novice',
name: 'NOTION_ADD_PAGE_CONTENT',
label: {
en_US: 'NOTION_ADD_PAGE_CONTENT',
zh_Hans: 'NOTION_ADD_PAGE_CONTENT',
pt_BR: 'NOTION_ADD_PAGE_CONTENT',
ja_JP: 'NOTION_ADD_PAGE_CONTENT',
},
description: {
en_US: 'Adds a single content block to a notion page. multiple calls needed for multiple blocks. note: only supports adding to notion pages. blocks that can contain children: - page (any block type) - toggle (any nested content) - to-do (nested to-dos/blocks) - bulleted list (nested lists/blocks) - numbered list (nested lists/blocks) - callout (child blocks) - quote (nested blocks)',
zh_Hans: 'Adds a single content block to a notion page. multiple calls needed for multiple blocks. note: only supports adding to notion pages. blocks that can contain children: - page (any block type) - toggle (any nested content) - to-do (nested to-dos/blocks) - bulleted list (nested lists/blocks) - numbered list (nested lists/blocks) - callout (child blocks) - quote (nested blocks)',
pt_BR: 'Adds a single content block to a notion page. multiple calls needed for multiple blocks. note: only supports adding to notion pages. blocks that can contain children: - page (any block type) - toggle (any nested content) - to-do (nested to-dos/blocks) - bulleted list (nested lists/blocks) - numbered list (nested lists/blocks) - callout (child blocks) - quote (nested blocks)',
ja_JP: 'Adds a single content block to a notion page. multiple calls needed for multiple blocks. note: only supports adding to notion pages. blocks that can contain children: - page (any block type) - toggle (any nested content) - to-do (nested to-dos/blocks) - bulleted list (nested lists/blocks) - numbered list (nested lists/blocks) - callout (child blocks) - quote (nested blocks)',
},
parameters: [
{
name: 'after',
label: {
en_US: 'after',
zh_Hans: 'after',
pt_BR: 'after',
ja_JP: 'after',
},
placeholder: null,
scope: null,
auto_generate: null,
template: null,
required: false,
default: null,
min: null,
max: null,
precision: null,
options: [],
type: 'string',
human_description: {
en_US: 'The ID of the existing block that the new block should be appended after. If not provided, content will be appended at the end of the page.',
zh_Hans: 'The ID of the existing block that the new block should be appended after. If not provided, content will be appended at the end of the page.',
pt_BR: 'The ID of the existing block that the new block should be appended after. If not provided, content will be appended at the end of the page.',
ja_JP: 'The ID of the existing block that the new block should be appended after. If not provided, content will be appended at the end of the page.',
},
form: 'llm',
llm_description: 'The ID of the existing block that the new block should be appended after. If not provided, content will be appended at the end of the page.',
},
{
name: 'content_block',
label: {
en_US: 'content_block',
zh_Hans: 'content_block',
pt_BR: 'content_block',
ja_JP: 'content_block',
},
placeholder: null,
scope: null,
auto_generate: null,
template: null,
required: false,
default: null,
min: null,
max: null,
precision: null,
options: [],
type: 'string',
human_description: {
en_US: 'Child content to append to a page.',
zh_Hans: 'Child content to append to a page.',
pt_BR: 'Child content to append to a page.',
ja_JP: 'Child content to append to a page.',
},
form: 'llm',
llm_description: 'Child content to append to a page.',
},
{
name: 'parent_block_id',
label: {
en_US: 'parent_block_id',
zh_Hans: 'parent_block_id',
pt_BR: 'parent_block_id',
ja_JP: 'parent_block_id',
},
placeholder: null,
scope: null,
auto_generate: null,
template: null,
required: false,
default: null,
min: null,
max: null,
precision: null,
options: [],
type: 'string',
human_description: {
en_US: 'The ID of the page which the children will be added.',
zh_Hans: 'The ID of the page which the children will be added.',
pt_BR: 'The ID of the page which the children will be added.',
ja_JP: 'The ID of the page which the children will be added.',
},
form: 'llm',
llm_description: 'The ID of the page which the children will be added.',
},
],
labels: [],
output_schema: null,
},
]
export const listData = [
{
id: 'fdjklajfkljadslf111',
author: 'KVOJJJin',
name: 'GOGOGO',
icon: 'https://cloud.dify.dev/console/api/workspaces/694cc430-fa36-4458-86a0-4a98c09c4684/model-providers/langgenius/openai/openai/icon_small/en_US',
server_url: 'https://mcp.composio.dev/notion/****/abc',
type: 'mcp',
is_team_authorization: true,
tools,
update_elapsed_time: 1744793369,
label: {
en_US: 'GOGOGO',
zh_Hans: 'GOGOGO',
},
},
{
id: 'fdjklajfkljadslf222',
author: 'KVOJJJin',
name: 'GOGOGO2',
icon: 'https://cloud.dify.dev/console/api/workspaces/694cc430-fa36-4458-86a0-4a98c09c4684/model-providers/langgenius/openai/openai/icon_small/en_US',
server_url: 'https://mcp.composio.dev/notion/****/abc',
type: 'mcp',
is_team_authorization: false,
tools: [],
update_elapsed_time: 1744793369,
label: {
en_US: 'GOGOGO2',
zh_Hans: 'GOGOGO2',
},
},
{
id: 'fdjklajfkljadslf333',
author: 'KVOJJJin',
name: 'GOGOGO3',
icon: 'https://cloud.dify.dev/console/api/workspaces/694cc430-fa36-4458-86a0-4a98c09c4684/model-providers/langgenius/openai/openai/icon_small/en_US',
server_url: 'https://mcp.composio.dev/notion/****/abc',
type: 'mcp',
is_team_authorization: true,
tools,
update_elapsed_time: 1744793369,
label: {
en_US: 'GOGOGO3',
zh_Hans: 'GOGOGO3',
},
},
]

View File

@@ -49,6 +49,7 @@ export type Collection = {
author: string
description: TypeWithI18N
icon: string | Emoji
icon_dark?: string | Emoji
label: TypeWithI18N
type: CollectionType | string
team_credentials: Record<string, any>

View File

@@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import React, { useMemo } from 'react'
import type { ToolWithProvider } from '../../types'
import { BlockEnum } from '../../types'
import type { ToolDefaultValue } from '../types'
@@ -10,9 +10,13 @@ import { useGetLanguage } from '@/context/i18n'
import BlockIcon from '../../block-icon'
import cn from '@/utils/classnames'
import { useTranslation } from 'react-i18next'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon: ToolWithProvider['icon']) => {
const normalizeProviderIcon = (icon?: ToolWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
@@ -36,6 +40,20 @@ const ToolItem: FC<Props> = ({
const { t } = useTranslation()
const language = useGetLanguage()
const { theme } = useTheme()
const normalizedIcon = useMemo<ToolWithProvider['icon']>(() => {
return normalizeProviderIcon(provider.icon) ?? provider.icon
}, [provider.icon])
const normalizedIconDark = useMemo(() => {
if (!provider.icon_dark)
return undefined
return normalizeProviderIcon(provider.icon_dark) ?? provider.icon_dark
}, [provider.icon_dark])
const providerIcon = useMemo(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [theme, normalizedIcon, normalizedIconDark])
return (
<Tooltip
@@ -49,7 +67,7 @@ const ToolItem: FC<Props> = ({
size='md'
className='mb-2'
type={BlockEnum.Tool}
toolIcon={provider.icon}
toolIcon={providerIcon}
/>
<div className='mb-1 text-sm leading-5 text-text-primary'>{payload.label[language]}</div>
<div className='text-xs leading-[18px] text-text-secondary'>{payload.description[language]}</div>
@@ -73,7 +91,8 @@ const ToolItem: FC<Props> = ({
provider_name: provider.name,
plugin_id: provider.plugin_id,
plugin_unique_identifier: provider.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(provider.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: payload.name,
tool_label: payload.label[language],
tool_description: payload.description[language],

View File

@@ -14,11 +14,15 @@ import ActionItem from './action-item'
import BlockIcon from '../../block-icon'
import { useTranslation } from 'react-i18next'
import { useHover } from 'ahooks'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import McpToolNotSupportTooltip from '../../nodes/_base/components/mcp-tool-not-support-tooltip'
import { Mcp } from '@/app/components/base/icons/src/vender/other'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon: ToolWithProvider['icon']) => {
const normalizeProviderIcon = (icon?: ToolWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
@@ -59,6 +63,20 @@ const Tool: FC<Props> = ({
const isHovering = useHover(ref)
const isMCPTool = payload.type === CollectionType.mcp
const isShowCanNotChooseMCPTip = !canChooseMCPTool && isMCPTool
const { theme } = useTheme()
const normalizedIcon = useMemo<ToolWithProvider['icon']>(() => {
return normalizeProviderIcon(payload.icon) ?? payload.icon
}, [payload.icon])
const normalizedIconDark = useMemo(() => {
if (!payload.icon_dark)
return undefined
return normalizeProviderIcon(payload.icon_dark) ?? payload.icon_dark
}, [payload.icon_dark])
const providerIcon = useMemo<ToolWithProvider['icon']>(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [theme, normalizedIcon, normalizedIconDark])
const getIsDisabled = useCallback((tool: ToolType) => {
if (!selectedTools || !selectedTools.length) return false
return selectedTools.some(selectedTool => (selectedTool.provider_name === payload.name || selectedTool.provider_name === payload.id) && selectedTool.tool_name === tool.name)
@@ -95,7 +113,8 @@ const Tool: FC<Props> = ({
provider_name: payload.name,
plugin_id: payload.plugin_id,
plugin_unique_identifier: payload.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(payload.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: tool.name,
tool_label: tool.label[language],
tool_description: tool.description[language],
@@ -177,7 +196,8 @@ const Tool: FC<Props> = ({
provider_name: payload.name,
plugin_id: payload.plugin_id,
plugin_unique_identifier: payload.plugin_unique_identifier,
provider_icon: normalizeProviderIcon(payload.icon),
provider_icon: normalizedIcon,
provider_icon_dark: normalizedIconDark,
tool_name: tool.name,
tool_label: tool.label[language],
tool_description: tool.description[language],
@@ -192,7 +212,7 @@ const Tool: FC<Props> = ({
<BlockIcon
className='shrink-0'
type={BlockEnum.Tool}
toolIcon={payload.icon}
toolIcon={providerIcon}
/>
<div className='ml-2 flex w-0 grow items-center text-sm text-text-primary'>
<span className='max-w-[250px] truncate'>{notShowProvider ? actions[0]?.label[language] : payload.label[language]}</span>

View File

@@ -10,6 +10,17 @@ import BlockIcon from '@/app/components/workflow/block-icon'
import { BlockEnum } from '@/app/components/workflow/types'
import type { TriggerDefaultValue, TriggerWithProvider } from '@/app/components/workflow/block-selector/types'
import TriggerPluginActionItem from './action-item'
import { Theme } from '@/types/app'
import useTheme from '@/hooks/use-theme'
import { basePath } from '@/utils/var'
const normalizeProviderIcon = (icon?: TriggerWithProvider['icon']) => {
if (!icon)
return icon
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
return `${basePath}${icon}`
return icon
}
type Props = {
className?: string
@@ -26,6 +37,7 @@ const TriggerPluginItem: FC<Props> = ({
}) => {
const { t } = useTranslation()
const language = useGetLanguage()
const { theme } = useTheme()
const notShowProvider = payload.type === CollectionType.workflow
const actions = payload.events
const hasAction = !notShowProvider
@@ -55,6 +67,23 @@ const TriggerPluginItem: FC<Props> = ({
return payload.author || ''
}, [payload.author, payload.type, t])
const normalizedIcon = useMemo<TriggerWithProvider['icon']>(() => {
return normalizeProviderIcon(payload.icon) ?? payload.icon
}, [payload.icon])
const normalizedIconDark = useMemo(() => {
if (!payload.icon_dark)
return undefined
return normalizeProviderIcon(payload.icon_dark) ?? payload.icon_dark
}, [payload.icon_dark])
const providerIcon = useMemo<TriggerWithProvider['icon']>(() => {
if (theme === Theme.dark && normalizedIconDark)
return normalizedIconDark
return normalizedIcon
}, [normalizedIcon, normalizedIconDark, theme])
const providerWithResolvedIcon = useMemo(() => ({
...payload,
icon: providerIcon,
}), [payload, providerIcon])
return (
<div
@@ -99,7 +128,7 @@ const TriggerPluginItem: FC<Props> = ({
<BlockIcon
className='shrink-0'
type={BlockEnum.TriggerPlugin}
toolIcon={payload.icon}
toolIcon={providerIcon}
/>
<div className='ml-2 flex min-w-0 flex-1 items-center text-sm text-text-primary'>
<span className='max-w-[200px] truncate'>{notShowProvider ? actions[0]?.label[language] : payload.label[language]}</span>
@@ -118,7 +147,7 @@ const TriggerPluginItem: FC<Props> = ({
actions.map(action => (
<TriggerPluginActionItem
key={action.name}
provider={payload}
provider={providerWithResolvedIcon}
payload={action}
onSelect={onSelect}
disabled={false}

View File

@@ -59,6 +59,7 @@ export type ToolDefaultValue = PluginCommonDefaultValue & {
meta?: PluginMeta
plugin_id?: string
provider_icon?: Collection['icon']
provider_icon_dark?: Collection['icon']
plugin_unique_identifier?: string
}

View File

@@ -15,6 +15,7 @@ import type { PluginTriggerNodeType } from '../nodes/trigger-plugin/types'
import type { ToolNodeType } from '../nodes/tool/types'
import type { DataSourceNodeType } from '../nodes/data-source/types'
import type { TriggerWithProvider } from '../block-selector/types'
import useTheme from '@/hooks/use-theme'
const isTriggerPluginNode = (data: Node['data']): data is PluginTriggerNodeType => data.type === BlockEnum.TriggerPlugin
@@ -22,17 +23,30 @@ const isToolNode = (data: Node['data']): data is ToolNodeType => data.type === B
const isDataSourceNode = (data: Node['data']): data is DataSourceNodeType => data.type === BlockEnum.DataSource
type IconValue = ToolWithProvider['icon']
const resolveIconByTheme = (
currentTheme: string | undefined,
icon?: IconValue,
iconDark?: IconValue,
) => {
if (currentTheme === 'dark' && iconDark)
return iconDark
return icon
}
const findTriggerPluginIcon = (
identifiers: (string | undefined)[],
triggers: TriggerWithProvider[] | undefined,
currentTheme?: string,
) => {
const targetTriggers = triggers || []
for (const identifier of identifiers) {
if (!identifier)
continue
const matched = targetTriggers.find(trigger => trigger.id === identifier || canFindTool(trigger.id, identifier))
if (matched?.icon)
return matched.icon
if (matched)
return resolveIconByTheme(currentTheme, matched.icon, matched.icon_dark)
}
return undefined
}
@@ -44,6 +58,7 @@ export const useToolIcon = (data?: Node['data']) => {
const { data: mcpTools } = useAllMCPTools()
const dataSourceList = useStore(s => s.dataSourceList)
const { data: triggerPlugins } = useAllTriggerPlugins()
const { theme } = useTheme()
const toolIcon = useMemo(() => {
if (!data)
@@ -57,6 +72,7 @@ export const useToolIcon = (data?: Node['data']) => {
data.provider_name,
],
triggerPlugins,
theme,
)
if (icon)
return icon
@@ -100,12 +116,16 @@ export const useToolIcon = (data?: Node['data']) => {
return true
return data.provider_name === toolWithProvider.name
})
if (matched?.icon)
return matched.icon
if (matched) {
const icon = resolveIconByTheme(theme, matched.icon, matched.icon_dark)
if (icon)
return icon
}
}
if (data.provider_icon)
return data.provider_icon
const fallbackIcon = resolveIconByTheme(theme, data.provider_icon, data.provider_icon_dark)
if (fallbackIcon)
return fallbackIcon
return ''
}
@@ -114,7 +134,7 @@ export const useToolIcon = (data?: Node['data']) => {
return dataSourceList?.find(toolWithProvider => toolWithProvider.plugin_id === data.plugin_id)?.icon || ''
return ''
}, [data, dataSourceList, buildInTools, customTools, workflowTools, mcpTools, triggerPlugins])
}, [data, dataSourceList, buildInTools, customTools, workflowTools, mcpTools, triggerPlugins, theme])
return toolIcon
}
@@ -126,6 +146,7 @@ export const useGetToolIcon = () => {
const { data: mcpTools } = useAllMCPTools()
const { data: triggerPlugins } = useAllTriggerPlugins()
const workflowStore = useWorkflowStore()
const { theme } = useTheme()
const getToolIcon = useCallback((data: Node['data']) => {
const {
@@ -144,6 +165,7 @@ export const useGetToolIcon = () => {
data.provider_name,
],
triggerPlugins,
theme,
)
}
@@ -182,12 +204,16 @@ export const useGetToolIcon = () => {
return true
return data.provider_name === toolWithProvider.name
})
if (matched?.icon)
return matched.icon
if (matched) {
const icon = resolveIconByTheme(theme, matched.icon, matched.icon_dark)
if (icon)
return icon
}
}
if (data.provider_icon)
return data.provider_icon
const fallbackIcon = resolveIconByTheme(theme, data.provider_icon, data.provider_icon_dark)
if (fallbackIcon)
return fallbackIcon
return undefined
}
@@ -196,7 +222,7 @@ export const useGetToolIcon = () => {
return dataSourceList?.find(toolWithProvider => toolWithProvider.plugin_id === data.plugin_id)?.icon
return undefined
}, [workflowStore, triggerPlugins, buildInTools, customTools, workflowTools, mcpTools])
}, [workflowStore, triggerPlugins, buildInTools, customTools, workflowTools, mcpTools, theme])
return getToolIcon
}

View File

@@ -22,5 +22,6 @@ export type ToolNodeType = CommonNodeType & {
params?: Record<string, any>
plugin_id?: string
provider_icon?: Collection['icon']
provider_icon_dark?: Collection['icon_dark']
plugin_unique_identifier?: string
}

View File

@@ -1,90 +0,0 @@
import { VarType } from '../../../types'
import type { VarInInspect } from '@/types/workflow'
import { VarInInspectType } from '@/types/workflow'
export const vars: VarInInspect[] = [
{
id: 'xxx',
type: VarInInspectType.node,
name: 'text00',
description: '',
selector: ['1745476079387', 'text'],
value_type: VarType.string,
value: 'text value...',
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
{
id: 'fdklajljgldjglkagjlk',
type: VarInInspectType.node,
name: 'text',
description: '',
selector: ['1712386917734', 'text'],
value_type: VarType.string,
value: 'made zhizhang',
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
]
export const conversationVars: VarInInspect[] = [
{
id: 'con1',
type: VarInInspectType.conversation,
name: 'conversationVar 1',
description: '',
selector: ['conversation', 'var1'],
value_type: VarType.string,
value: 'conversation var value...',
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
{
id: 'con2',
type: VarInInspectType.conversation,
name: 'conversationVar 2',
description: '',
selector: ['conversation', 'var2'],
value_type: VarType.number,
value: 456,
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
]
export const systemVars: VarInInspect[] = [
{
id: 'sys1',
type: VarInInspectType.system,
name: 'query',
description: '',
selector: ['sys', 'query'],
value_type: VarType.string,
value: 'Hello robot!',
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
{
id: 'sys2',
type: VarInInspectType.system,
name: 'user_id',
description: '',
selector: ['sys', 'user_id'],
value_type: VarType.string,
value: 'djflakjerlkjdlksfjslakjsdfl',
edited: false,
visible: true,
is_truncated: false,
full_content: { size_bytes: 0, download_url: '' },
},
]

View File

@@ -1,3 +1,4 @@
import { ReactScan } from './components/react-scan'
import RoutePrefixHandle from './routePrefixHandle'
import type { Viewport } from 'next'
import I18nServer from './components/i18n-server'
@@ -86,6 +87,7 @@ const LocaleLayout = async ({
className='color-scheme h-full select-auto'
{...datasetMap}
>
<ReactScan />
<ThemeProvider
attribute='data-theme'
defaultTheme='system'

View File

@@ -11,6 +11,7 @@ import Toast from '@/app/components/base/toast'
import { emailLoginWithCode, sendEMailLoginCode } from '@/service/common'
import I18NContext from '@/context/i18n'
import { resolvePostLoginRedirect } from '../utils/post-login-redirect'
import { trackEvent } from '@/app/components/base/amplitude'
export default function CheckCode() {
const { t, i18n } = useTranslation()
@@ -44,6 +45,12 @@ export default function CheckCode() {
setIsLoading(true)
const ret = await emailLoginWithCode({ email, code, token, language })
if (ret.result === 'success') {
// Track login success event
trackEvent('user_login_success', {
method: 'email_code',
is_invite: !!invite_token,
})
if (invite_token) {
router.replace(`/signin/invite-settings?${searchParams.toString()}`)
}

View File

@@ -12,6 +12,7 @@ import I18NContext from '@/context/i18n'
import { noop } from 'lodash-es'
import { resolvePostLoginRedirect } from '../utils/post-login-redirect'
import type { ResponseError } from '@/service/fetch'
import { trackEvent } from '@/app/components/base/amplitude'
type MailAndPasswordAuthProps = {
isInvite: boolean
@@ -63,6 +64,12 @@ export default function MailAndPasswordAuth({ isInvite, isEmailSetup, allowRegis
body: loginData,
})
if (res.result === 'success') {
// Track login success event
trackEvent('user_login_success', {
method: 'email_password',
is_invite: isInvite,
})
if (isInvite) {
router.replace(`/signin/invite-settings?${searchParams.toString()}`)
}

View File

@@ -42,7 +42,6 @@ export default function CheckCode() {
}
setIsLoading(true)
const res = await verifyCode({ email, code, token })
console.log(res)
if ((res as MailValidityResponse).is_valid) {
const params = new URLSearchParams(searchParams)
params.set('token', encodeURIComponent((res as MailValidityResponse).token))

View File

@@ -9,6 +9,7 @@ import Input from '@/app/components/base/input'
import { validPassword } from '@/config'
import type { MailRegisterResponse } from '@/service/use-common'
import { useMailRegister } from '@/service/use-common'
import { trackEvent } from '@/app/components/base/amplitude'
const ChangePasswordForm = () => {
const { t } = useTranslation()
@@ -54,6 +55,11 @@ const ChangePasswordForm = () => {
})
const { result } = res as MailRegisterResponse
if (result === 'success') {
// Track registration success event
trackEvent('user_registration_success', {
method: 'email',
})
Toast.notify({
type: 'success',
message: t('common.api.actionSuccess'),

View File

@@ -77,6 +77,9 @@ const EDITION = getStringConfig(
export const IS_CE_EDITION = EDITION === 'SELF_HOSTED'
export const IS_CLOUD_EDITION = EDITION === 'CLOUD'
export const IS_DEV = process.env.NODE_ENV === 'development'
export const IS_PROD = process.env.NODE_ENV === 'production'
export const SUPPORT_MAIL_LOGIN = !!(
process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN
|| globalThis.document?.body?.getAttribute('data-public-support-mail-login')

View File

@@ -11,6 +11,7 @@ import { noop } from 'lodash-es'
import { setZendeskConversationFields } from '@/app/components/base/zendesk/utils'
import { ZENDESK_FIELD_IDS } from '@/config'
import { useGlobalPublicStore } from './global-public-context'
import { setUserId, setUserProperties } from '@/app/components/base/amplitude'
export type AppContextValue = {
userProfile: UserProfileResponse
@@ -159,6 +160,28 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
}, [currentWorkspace?.id])
// #endregion Zendesk conversation fields
useEffect(() => {
// Report user and workspace info to Amplitude when loaded
if (userProfile?.id) {
setUserId(userProfile.email)
const properties: Record<string, any> = {
email: userProfile.email,
name: userProfile.name,
has_password: userProfile.is_password_set,
}
if (currentWorkspace?.id) {
properties.workspace_id = currentWorkspace.id
properties.workspace_name = currentWorkspace.name
properties.workspace_plan = currentWorkspace.plan
properties.workspace_status = currentWorkspace.status
properties.workspace_role = currentWorkspace.role
}
setUserProperties(properties)
}
}, [userProfile, currentWorkspace])
return (
<AppContext.Provider value={{
userProfile,
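`setUserId` and `setUserProperties` come from the new `@/app/components/base/amplitude` module, which this compare view doesn't show. A minimal sketch of what such a wrapper might look like on top of `@amplitude/analytics-browser`; the bodies are assumptions, only the imported names are taken from the diff:

```ts
import * as amplitude from '@amplitude/analytics-browser'

// Hypothetical wrapper sketch; the real module in this change may differ.
export const setUserId = (userId?: string) => {
  amplitude.setUserId(userId)
}

export const setUserProperties = (properties: Record<string, any>) => {
  const identifyEvent = new amplitude.Identify()
  Object.entries(properties).forEach(([key, value]) => identifyEvent.set(key, value))
  amplitude.identify(identifyEvent)
}

export const trackEvent = (eventName: string, eventProperties?: Record<string, any>) => {
  amplitude.track(eventName, eventProperties)
}
```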

View File

@@ -1,7 +1,7 @@
import type { NextRequest } from 'next/server'
import { NextResponse } from 'next/server'
const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://api.github.com'
const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://api.github.com https://api2.amplitude.com *.amplitude.com'
const wrapResponseWithXFrameOptions = (response: NextResponse, pathname: string) => {
// prevent clickjacking: https://owasp.org/www-community/attacks/Clickjacking

View File

@@ -21,9 +21,6 @@ export type ConversationListResponse = {
logs: Conversation[]
}
export const fetchLogs = (url: string) =>
fetch(url).then<ConversationListResponse>(r => r.json())
export const CompletionParams = ['temperature', 'top_p', 'presence_penalty', 'max_token', 'stop', 'frequency_penalty'] as const
export type CompletionParamType = typeof CompletionParams[number]

View File

@@ -1,17 +0,0 @@
export type User = {
id: string
firstName: string
lastName: string
name: string
phone: string
username: string
email: string
avatar: string
}
export type UserResponse = {
users: User[]
}
export const fetchUsers = (url: string) =>
fetch(url).then<UserResponse>(r => r.json())

View File

@@ -45,6 +45,8 @@
"knip": "knip"
},
"dependencies": {
"@amplitude/analytics-browser": "^2.31.3",
"@amplitude/plugin-session-replay-browser": "^1.23.6",
"@emoji-mart/data": "^1.2.1",
"@floating-ui/react": "^0.26.28",
"@formatjs/intl-localematcher": "^0.5.10",
@@ -102,15 +104,15 @@
"mime": "^4.1.0",
"mitt": "^3.0.1",
"negotiator": "^1.0.0",
"next": "~15.5.6",
"next": "~15.5.7",
"next-pwa": "^5.6.0",
"next-themes": "^0.4.6",
"pinyin-pro": "^3.27.0",
"qrcode.react": "^4.2.0",
"qs": "^6.14.0",
"react": "19.1.1",
"react": "19.2.1",
"react-18-input-autosize": "^3.0.0",
"react-dom": "19.1.1",
"react-dom": "19.2.1",
"react-easy-crop": "^5.5.3",
"react-hook-form": "^7.65.0",
"react-hotkeys-hook": "^4.6.2",
@@ -151,9 +153,9 @@
"@happy-dom/jest-environment": "^20.0.8",
"@mdx-js/loader": "^3.1.1",
"@mdx-js/react": "^3.1.1",
"@next/bundle-analyzer": "15.5.4",
"@next/eslint-plugin-next": "15.5.4",
"@next/mdx": "15.5.4",
"@next/bundle-analyzer": "15.5.7",
"@next/eslint-plugin-next": "15.5.7",
"@next/mdx": "15.5.7",
"@rgrove/parse-xml": "^4.2.0",
"@storybook/addon-docs": "9.1.13",
"@storybook/addon-links": "9.1.13",
@@ -171,8 +173,8 @@
"@types/negotiator": "^0.6.4",
"@types/node": "18.15.0",
"@types/qs": "^6.14.0",
"@types/react": "~19.1.17",
"@types/react-dom": "~19.1.11",
"@types/react": "~19.2.7",
"@types/react-dom": "~19.2.3",
"@types/react-slider": "^1.3.6",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/react-window": "^1.8.8",
@@ -199,6 +201,7 @@
"lodash": "^4.17.21",
"magicast": "^0.3.5",
"postcss": "^8.5.6",
"react-scan": "^0.4.3",
"sass": "^1.93.2",
"storybook": "9.1.13",
"tailwindcss": "^3.4.18",
@@ -207,8 +210,8 @@
"uglify-js": "^3.19.3"
},
"resolutions": {
"@types/react": "~19.1.17",
"@types/react-dom": "~19.1.11",
"@types/react": "~19.2.7",
"@types/react-dom": "~19.2.3",
"string-width": "~4.2.3",
"@eslint/plugin-kit": "~0.3",
"canvas": "^3.2.0",

web/pnpm-lock.yaml (generated) — 4670 changed lines

File diff suppressed because it is too large.

View File

@@ -612,12 +612,11 @@ export const usePluginTaskList = (category?: PluginCategoryEnum | string) => {
const taskAllFailed = lastData?.tasks.every(task => task.status === TaskStatus.failed)
if (taskDone && lastData?.tasks.length && !taskAllFailed)
refreshPluginList(category ? { category } as any : undefined, !category)
}, [initialized, isRefetching, data, category, refreshPluginList])
}, [isRefetching])
useEffect(() => {
if (isFetched && !initialized)
setInitialized(true)
}, [isFetched, initialized])
setInitialized(true)
}, [])
const handleRefetch = useCallback(() => {
refetch()

View File

@@ -25,6 +25,7 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
author: provider.author,
description: provider.description,
icon: provider.icon || '',
icon_dark: provider.icon_dark || '',
label: provider.label,
type: CollectionType.trigger,
team_credentials: {},

View File

@@ -123,7 +123,7 @@ export const useInvalidLastRun = (flowType: FlowType, flowId: string, nodeId: st
// Rerun workflow or change the version of workflow
export const useInvalidAllLastRun = (flowType?: FlowType, flowId?: string) => {
return useInvalid([NAME_SPACE, flowType, 'last-run', flowId])
return useInvalid([...useLastRunKey, flowType, flowId])
}
export const useConversationVarValues = (flowType?: FlowType, flowId?: string) => {