Merge remote-tracking branch 'origin/main' into feat/support-agent-sandbox
# Conflicts:
#   api/app.py
#   api/controllers/console/app/generator.py
#   api/core/llm_generator/llm_generator.py
#   web/eslint-suppressions.json
#   web/pnpm-lock.yaml
#   web/tailwind-common-config.ts
api/app.py
@@ -1,5 +1,13 @@
from __future__ import annotations

import os
import sys
from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    from celery import Celery

    celery: Celery


def is_db_command() -> bool:
@@ -30,7 +38,7 @@ else:

    socketio_app, flask_app = create_app()
    app = flask_app
    celery = flask_app.extensions["celery"]
    celery = cast("Celery", flask_app.extensions["celery"])

if __name__ == "__main__":
    from gevent import pywsgi

@@ -155,7 +155,7 @@ def initialize_extensions(app: DifyApp):
        logger.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))


def create_migrations_app():
def create_migrations_app() -> DifyApp:
    app = create_flask_app_with_configs()
    from extensions import ext_database, ext_migrate

@@ -12,6 +12,7 @@ from controllers.console.app.error import (
|
||||
ProviderQuotaExceededError,
|
||||
)
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from core.app.app_config.entities import ModelConfig
|
||||
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
|
||||
from core.helper.code_executor.code_node_provider import CodeNodeProvider
|
||||
from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider
|
||||
@@ -21,6 +22,7 @@ from core.llm_generator.context_models import (
|
||||
CodeContextPayload,
|
||||
ParameterInfoPayload,
|
||||
)
|
||||
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
|
||||
from core.llm_generator.llm_generator import LLMGenerator
|
||||
from core.model_runtime.errors.invoke import InvokeError
|
||||
from extensions.ext_database import db
|
||||
@@ -31,28 +33,13 @@ from services.workflow_service import WorkflowService
|
||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||
|
||||
|
||||
class RuleGeneratePayload(BaseModel):
|
||||
instruction: str = Field(..., description="Rule generation instruction")
|
||||
model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
|
||||
no_variable: bool = Field(default=False, description="Whether to exclude variables")
|
||||
|
||||
|
||||
class RuleCodeGeneratePayload(RuleGeneratePayload):
|
||||
code_language: str = Field(default="javascript", description="Programming language for code generation")
|
||||
|
||||
|
||||
class RuleStructuredOutputPayload(BaseModel):
|
||||
instruction: str = Field(..., description="Structured output generation instruction")
|
||||
model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
|
||||
|
||||
|
||||
class InstructionGeneratePayload(BaseModel):
|
||||
flow_id: str = Field(..., description="Workflow/Flow ID")
|
||||
node_id: str = Field(default="", description="Node ID for workflow context")
|
||||
current: str = Field(default="", description="Current instruction text")
|
||||
language: str = Field(default="javascript", description="Programming language (javascript/python)")
|
||||
instruction: str = Field(..., description="Instruction for generation")
|
||||
model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
|
||||
model_config_data: ModelConfig = Field(..., alias="model_config", description="Model configuration")
|
||||
ideal_output: str = Field(default="", description="Expected ideal output")
|
||||
|
||||
|
||||
@@ -99,6 +86,7 @@ reg(InstructionGeneratePayload)
|
||||
reg(InstructionTemplatePayload)
|
||||
reg(ContextGeneratePayload)
|
||||
reg(SuggestedQuestionsPayload)
|
||||
reg(ModelConfig)
|
||||
|
||||
|
||||
@console_ns.route("/rule-generate")
|
||||
@@ -117,12 +105,7 @@ class RuleGenerateApi(Resource):
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
try:
|
||||
rules = LLMGenerator.generate_rule_config(
|
||||
tenant_id=current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
no_variable=args.no_variable,
|
||||
)
|
||||
rules = LLMGenerator.generate_rule_config(tenant_id=current_tenant_id, args=args)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
except QuotaExceededError:
|
||||
@@ -153,9 +136,7 @@ class RuleCodeGenerateApi(Resource):
|
||||
try:
|
||||
code_result = LLMGenerator.generate_code(
|
||||
tenant_id=current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
code_language=args.code_language,
|
||||
args=args,
|
||||
)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
@@ -187,8 +168,7 @@ class RuleStructuredOutputGenerateApi(Resource):
|
||||
try:
|
||||
structured_output = LLMGenerator.generate_structured_output(
|
||||
tenant_id=current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
args=args,
|
||||
)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
@@ -239,23 +219,29 @@ class InstructionGenerateApi(Resource):
|
||||
case "llm":
|
||||
return LLMGenerator.generate_rule_config(
|
||||
current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
no_variable=True,
|
||||
args=RuleGeneratePayload(
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
no_variable=True,
|
||||
),
|
||||
)
|
||||
case "agent":
|
||||
return LLMGenerator.generate_rule_config(
|
||||
current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
no_variable=True,
|
||||
args=RuleGeneratePayload(
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
no_variable=True,
|
||||
),
|
||||
)
|
||||
case "code":
|
||||
return LLMGenerator.generate_code(
|
||||
tenant_id=current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
code_language=args.language,
|
||||
args=RuleCodeGeneratePayload(
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
code_language=args.language,
|
||||
),
|
||||
)
|
||||
case _:
|
||||
return {"error": f"invalid node type: {node_type}"}
|
||||
|
||||
@@ -1,60 +1,58 @@
|
||||
from flask_restx import Resource, fields
|
||||
from pydantic import BaseModel, Field
|
||||
from werkzeug.exceptions import Unauthorized
|
||||
|
||||
from controllers.fastopenapi import console_router
|
||||
from libs.login import current_account_with_tenant, current_user, login_required
|
||||
from services.feature_service import FeatureService
|
||||
from services.feature_service import FeatureModel, FeatureService, SystemFeatureModel
|
||||
|
||||
from . import console_ns
|
||||
from .wraps import account_initialization_required, cloud_utm_record, setup_required
|
||||
|
||||
|
||||
@console_ns.route("/features")
|
||||
class FeatureApi(Resource):
|
||||
@console_ns.doc("get_tenant_features")
|
||||
@console_ns.doc(description="Get feature configuration for current tenant")
|
||||
@console_ns.response(
|
||||
200,
|
||||
"Success",
|
||||
console_ns.model("FeatureResponse", {"features": fields.Raw(description="Feature configuration object")}),
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@cloud_utm_record
|
||||
def get(self):
|
||||
"""Get feature configuration for current tenant"""
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
return FeatureService.get_features(current_tenant_id).model_dump()
|
||||
class FeatureResponse(BaseModel):
|
||||
features: FeatureModel = Field(description="Feature configuration object")
|
||||
|
||||
|
||||
@console_ns.route("/system-features")
|
||||
class SystemFeatureApi(Resource):
|
||||
@console_ns.doc("get_system_features")
|
||||
@console_ns.doc(description="Get system-wide feature configuration")
|
||||
@console_ns.response(
|
||||
200,
|
||||
"Success",
|
||||
console_ns.model(
|
||||
"SystemFeatureResponse", {"features": fields.Raw(description="System feature configuration object")}
|
||||
),
|
||||
)
|
||||
def get(self):
|
||||
"""Get system-wide feature configuration
|
||||
class SystemFeatureResponse(BaseModel):
|
||||
features: SystemFeatureModel = Field(description="System feature configuration object")
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design, as it provides system features
|
||||
data required for dashboard initialization.
|
||||
|
||||
Authentication would create a circular dependency (can't log in without the dashboard loading).
|
||||
@console_router.get(
|
||||
"/features",
|
||||
response_model=FeatureResponse,
|
||||
tags=["console"],
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@cloud_utm_record
|
||||
def get_tenant_features() -> FeatureResponse:
|
||||
"""Get feature configuration for current tenant."""
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
Only non-sensitive configuration data should be returned by this endpoint.
|
||||
"""
|
||||
# NOTE(QuantumGhost): ideally we should access `current_user.is_authenticated`
|
||||
# without a try-catch. However, due to the implementation of user loader (the `load_user_from_request`
|
||||
# in api/extensions/ext_login.py), accessing `current_user.is_authenticated` will
|
||||
# raise `Unauthorized` exception if authentication token is not provided.
|
||||
try:
|
||||
is_authenticated = current_user.is_authenticated
|
||||
except Unauthorized:
|
||||
is_authenticated = False
|
||||
return FeatureService.get_system_features(is_authenticated=is_authenticated).model_dump()
|
||||
return FeatureResponse(features=FeatureService.get_features(current_tenant_id))
|
||||
|
||||
|
||||
@console_router.get(
|
||||
"/system-features",
|
||||
response_model=SystemFeatureResponse,
|
||||
tags=["console"],
|
||||
)
|
||||
def get_system_features() -> SystemFeatureResponse:
|
||||
"""Get system-wide feature configuration
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design, as it provides system features
|
||||
data required for dashboard initialization.
|
||||
|
||||
Authentication would create a circular dependency (can't log in without the dashboard loading).
|
||||
|
||||
Only non-sensitive configuration data should be returned by this endpoint.
|
||||
"""
|
||||
# NOTE(QuantumGhost): ideally we should access `current_user.is_authenticated`
|
||||
# without a try-catch. However, due to the implementation of user loader (the `load_user_from_request`
|
||||
# in api/extensions/ext_login.py), accessing `current_user.is_authenticated` will
|
||||
# raise `Unauthorized` exception if authentication token is not provided.
|
||||
try:
|
||||
is_authenticated = current_user.is_authenticated
|
||||
except Unauthorized:
|
||||
is_authenticated = False
|
||||
return SystemFeatureResponse(features=FeatureService.get_system_features(is_authenticated=is_authenticated))
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import urllib.parse
|
||||
|
||||
import httpx
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
import services
|
||||
@@ -11,7 +10,7 @@ from controllers.common.errors import (
|
||||
RemoteFileUploadError,
|
||||
UnsupportedFileTypeError,
|
||||
)
|
||||
from controllers.common.schema import register_schema_models
|
||||
from controllers.fastopenapi import console_router
|
||||
from core.file import helpers as file_helpers
|
||||
from core.helper import ssrf_proxy
|
||||
from extensions.ext_database import db
|
||||
@@ -19,84 +18,74 @@ from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
|
||||
from libs.login import current_account_with_tenant
|
||||
from services.file_service import FileService
|
||||
|
||||
from . import console_ns
|
||||
|
||||
register_schema_models(console_ns, RemoteFileInfo, FileWithSignedUrl)
|
||||
|
||||
|
||||
@console_ns.route("/remote-files/<path:url>")
|
||||
class RemoteFileInfoApi(Resource):
|
||||
@console_ns.response(200, "Remote file info", console_ns.models[RemoteFileInfo.__name__])
|
||||
def get(self, url):
|
||||
decoded_url = urllib.parse.unquote(url)
|
||||
resp = ssrf_proxy.head(decoded_url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
# fall back to GET method
|
||||
resp = ssrf_proxy.get(decoded_url, timeout=3)
|
||||
resp.raise_for_status()
|
||||
info = RemoteFileInfo(
|
||||
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
|
||||
file_length=int(resp.headers.get("Content-Length", 0)),
|
||||
)
|
||||
return info.model_dump(mode="json")
|
||||
|
||||
|
||||
class RemoteFileUploadPayload(BaseModel):
|
||||
url: str = Field(..., description="URL to fetch")
|
||||
|
||||
|
||||
console_ns.schema_model(
|
||||
RemoteFileUploadPayload.__name__,
|
||||
RemoteFileUploadPayload.model_json_schema(ref_template="#/definitions/{model}"),
|
||||
@console_router.get(
|
||||
"/remote-files/<path:url>",
|
||||
response_model=RemoteFileInfo,
|
||||
tags=["console"],
|
||||
)
|
||||
def get_remote_file_info(url: str) -> RemoteFileInfo:
|
||||
decoded_url = urllib.parse.unquote(url)
|
||||
resp = ssrf_proxy.head(decoded_url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(decoded_url, timeout=3)
|
||||
resp.raise_for_status()
|
||||
return RemoteFileInfo(
|
||||
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
|
||||
file_length=int(resp.headers.get("Content-Length", 0)),
|
||||
)
|
||||
|
||||
|
||||
@console_ns.route("/remote-files/upload")
|
||||
class RemoteFileUploadApi(Resource):
|
||||
@console_ns.expect(console_ns.models[RemoteFileUploadPayload.__name__])
|
||||
@console_ns.response(201, "Remote file uploaded", console_ns.models[FileWithSignedUrl.__name__])
|
||||
def post(self):
|
||||
args = RemoteFileUploadPayload.model_validate(console_ns.payload)
|
||||
url = args.url
|
||||
@console_router.post(
|
||||
"/remote-files/upload",
|
||||
response_model=FileWithSignedUrl,
|
||||
tags=["console"],
|
||||
status_code=201,
|
||||
)
|
||||
def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
|
||||
url = payload.url
|
||||
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError
|
||||
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
)
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
payload = FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
)
|
||||
return payload.model_dump(mode="json"), 201
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
return FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
)
|
||||
|
||||
api/core/llm_generator/entities.py (new file)
@@ -0,0 +1,20 @@
|
||||
"""Shared payload models for LLM generator helpers and controllers."""
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from core.app.app_config.entities import ModelConfig
|
||||
|
||||
|
||||
class RuleGeneratePayload(BaseModel):
|
||||
instruction: str = Field(..., description="Rule generation instruction")
|
||||
model_config_data: ModelConfig = Field(..., alias="model_config", description="Model configuration")
|
||||
no_variable: bool = Field(default=False, description="Whether to exclude variables")
|
||||
|
||||
|
||||
class RuleCodeGeneratePayload(RuleGeneratePayload):
|
||||
code_language: str = Field(default="javascript", description="Programming language for code generation")
|
||||
|
||||
|
||||
class RuleStructuredOutputPayload(BaseModel):
|
||||
instruction: str = Field(..., description="Structured output generation instruction")
|
||||
model_config_data: ModelConfig = Field(..., alias="model_config", description="Model configuration")
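
A brief usage sketch of the new payload entities (illustrative only; the exact ModelConfig fields are assumptions inferred from the attribute accesses shown in llm_generator.py below):

# Illustrative sketch, not part of this change. ModelConfig is assumed to
# accept provider, name, mode, and completion_params, matching how the
# generator code in this diff reads it.
from core.llm_generator.entities import RuleGeneratePayload
from core.llm_generator.llm_generator import LLMGenerator

payload = RuleGeneratePayload.model_validate(
    {
        "instruction": "Summarize the user's request in one sentence.",
        # Supplied under the "model_config" alias declared on model_config_data,
        # because the name model_config itself is reserved by Pydantic v2.
        "model_config": {
            "provider": "openai",
            "name": "gpt-4o-mini",
            "mode": "chat",
            "completion_params": {"temperature": 0.2},
        },
        "no_variable": True,
    }
)

# The refactored classmethod now takes the whole payload instead of loose kwargs.
rules = LLMGenerator.generate_rule_config(tenant_id="tenant-id", args=payload)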
|
||||
@@ -6,11 +6,13 @@ from typing import Protocol
|
||||
|
||||
import json_repair
|
||||
|
||||
from core.app.app_config.entities import ModelConfig
|
||||
from core.llm_generator.context_models import (
|
||||
AvailableVarPayload,
|
||||
CodeContextPayload,
|
||||
ParameterInfoPayload,
|
||||
)
|
||||
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
|
||||
from core.llm_generator.output_models import (
|
||||
CodeNodeStructuredOutput,
|
||||
InstructionModifyOutput,
|
||||
@@ -158,19 +160,19 @@ class LLMGenerator:
|
||||
return questions
|
||||
|
||||
@classmethod
|
||||
def generate_rule_config(cls, tenant_id: str, instruction: str, model_config: dict, no_variable: bool):
|
||||
def generate_rule_config(cls, tenant_id: str, args: RuleGeneratePayload):
|
||||
output_parser = RuleConfigGeneratorOutputParser()
|
||||
|
||||
error = ""
|
||||
error_step = ""
|
||||
rule_config = {"prompt": "", "variables": [], "opening_statement": "", "error": ""}
|
||||
model_parameters = model_config.get("completion_params", {})
|
||||
if no_variable:
|
||||
model_parameters = args.model_config_data.completion_params
|
||||
if args.no_variable:
|
||||
prompt_template = PromptTemplateParser(WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE)
|
||||
|
||||
prompt_generate = prompt_template.format(
|
||||
inputs={
|
||||
"TASK_DESCRIPTION": instruction,
|
||||
"TASK_DESCRIPTION": args.instruction,
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
@@ -182,8 +184,8 @@ class LLMGenerator:
|
||||
model_instance = model_manager.get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
provider=args.model_config_data.provider,
|
||||
model=args.model_config_data.name,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -197,7 +199,7 @@ class LLMGenerator:
|
||||
error = str(e)
|
||||
error_step = "generate rule config"
|
||||
except Exception as e:
|
||||
logger.exception("Failed to generate rule config, model: %s", model_config.get("name"))
|
||||
logger.exception("Failed to generate rule config, model: %s", args.model_config_data.name)
|
||||
rule_config["error"] = str(e)
|
||||
|
||||
rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""
|
||||
@@ -216,7 +218,7 @@ class LLMGenerator:
|
||||
# format the prompt_generate_prompt
|
||||
prompt_generate_prompt = prompt_template.format(
|
||||
inputs={
|
||||
"TASK_DESCRIPTION": instruction,
|
||||
"TASK_DESCRIPTION": args.instruction,
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
@@ -227,8 +229,8 @@ class LLMGenerator:
|
||||
model_instance = model_manager.get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
provider=args.model_config_data.provider,
|
||||
model=args.model_config_data.name,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -257,7 +259,7 @@ class LLMGenerator:
|
||||
# the second step to generate the task_parameter and task_statement
|
||||
statement_generate_prompt = statement_template.format(
|
||||
inputs={
|
||||
"TASK_DESCRIPTION": instruction,
|
||||
"TASK_DESCRIPTION": args.instruction,
|
||||
"INPUT_TEXT": prompt_content.message.get_text_content(),
|
||||
},
|
||||
remove_template_variables=False,
|
||||
@@ -283,7 +285,7 @@ class LLMGenerator:
|
||||
error_step = "generate conversation opener"
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to generate rule config, model: %s", model_config.get("name"))
|
||||
logger.exception("Failed to generate rule config, model: %s", args.model_config_data.name)
|
||||
rule_config["error"] = str(e)
|
||||
|
||||
rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""
|
||||
@@ -291,16 +293,20 @@ class LLMGenerator:
|
||||
return rule_config
|
||||
|
||||
@classmethod
|
||||
def generate_code(cls, tenant_id: str, instruction: str, model_config: dict, code_language: str = "javascript"):
|
||||
if code_language == "python":
|
||||
def generate_code(
|
||||
cls,
|
||||
tenant_id: str,
|
||||
args: RuleCodeGeneratePayload,
|
||||
):
|
||||
if args.code_language == "python":
|
||||
prompt_template = PromptTemplateParser(PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE)
|
||||
else:
|
||||
prompt_template = PromptTemplateParser(JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE)
|
||||
|
||||
prompt = prompt_template.format(
|
||||
inputs={
|
||||
"INSTRUCTION": instruction,
|
||||
"CODE_LANGUAGE": code_language,
|
||||
"INSTRUCTION": args.instruction,
|
||||
"CODE_LANGUAGE": args.code_language,
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
@@ -309,28 +315,28 @@ class LLMGenerator:
|
||||
model_instance = model_manager.get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
provider=args.model_config_data.provider,
|
||||
model=args.model_config_data.name,
|
||||
)
|
||||
|
||||
prompt_messages = [UserPromptMessage(content=prompt)]
|
||||
model_parameters = model_config.get("completion_params", {})
|
||||
model_parameters = args.model_config_data.completion_params
|
||||
try:
|
||||
response: LLMResult = model_instance.invoke_llm(
|
||||
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
|
||||
)
|
||||
|
||||
generated_code = response.message.get_text_content()
|
||||
return {"code": generated_code, "language": code_language, "error": ""}
|
||||
return {"code": generated_code, "language": args.code_language, "error": ""}
|
||||
|
||||
except InvokeError as e:
|
||||
error = str(e)
|
||||
return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"}
|
||||
return {"code": "", "language": args.code_language, "error": f"Failed to generate code. Error: {error}"}
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"Failed to invoke LLM model, model: %s, language: %s", model_config.get("name"), code_language
|
||||
"Failed to invoke LLM model, model: %s, language: %s", args.model_config_data.name, args.code_language
|
||||
)
|
||||
return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"}
|
||||
return {"code": "", "language": args.code_language, "error": f"An unexpected error occurred: {str(e)}"}
|
||||
|
||||
@classmethod
|
||||
def generate_qa_document(cls, tenant_id: str, query, document_language: str):
|
||||
@@ -360,20 +366,20 @@ class LLMGenerator:
|
||||
return answer.strip()
|
||||
|
||||
@classmethod
|
||||
def generate_structured_output(cls, tenant_id: str, instruction: str, model_config: dict):
|
||||
def generate_structured_output(cls, tenant_id: str, args: RuleStructuredOutputPayload):
|
||||
model_manager = ModelManager()
|
||||
model_instance = model_manager.get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
provider=args.model_config_data.provider,
|
||||
model=args.model_config_data.name,
|
||||
)
|
||||
|
||||
prompt_messages = [
|
||||
SystemPromptMessage(content=SYSTEM_STRUCTURED_OUTPUT_GENERATE),
|
||||
UserPromptMessage(content=instruction),
|
||||
UserPromptMessage(content=args.instruction),
|
||||
]
|
||||
model_parameters = model_config.get("model_parameters", {})
|
||||
model_parameters = args.model_config_data.completion_params
|
||||
|
||||
try:
|
||||
response: LLMResult = model_instance.invoke_llm(
|
||||
@@ -397,7 +403,7 @@ class LLMGenerator:
|
||||
error = str(e)
|
||||
return {"output": "", "error": f"Failed to generate JSON Schema. Error: {error}"}
|
||||
except Exception as e:
|
||||
logger.exception("Failed to invoke LLM model, model: %s", model_config.get("name"))
|
||||
logger.exception("Failed to invoke LLM model, model: %s", args.model_config_data.name)
|
||||
return {"output": "", "error": f"An unexpected error occurred: {str(e)}"}
|
||||
|
||||
@classmethod
|
||||
@@ -670,7 +676,12 @@ Generate {language} code to extract/transform available variables for the target
|
||||
|
||||
@staticmethod
|
||||
def instruction_modify_legacy(
|
||||
tenant_id: str, flow_id: str, current: str, instruction: str, model_config: dict, ideal_output: str | None
|
||||
tenant_id: str,
|
||||
flow_id: str,
|
||||
current: str,
|
||||
instruction: str,
|
||||
model_config: ModelConfig,
|
||||
ideal_output: str | None,
|
||||
):
|
||||
last_run: Message | None = (
|
||||
db.session.query(Message).where(Message.app_id == flow_id).order_by(Message.created_at.desc()).first()
|
||||
@@ -709,7 +720,7 @@ Generate {language} code to extract/transform available variables for the target
|
||||
node_id: str,
|
||||
current: str,
|
||||
instruction: str,
|
||||
model_config: dict,
|
||||
model_config: ModelConfig,
|
||||
ideal_output: str | None,
|
||||
workflow_service: WorkflowServiceInterface,
|
||||
):
|
||||
@@ -782,7 +793,7 @@ Generate {language} code to extract/transform available variables for the target
|
||||
@staticmethod
|
||||
def __instruction_modify_common(
|
||||
tenant_id: str,
|
||||
model_config: dict,
|
||||
model_config: ModelConfig,
|
||||
last_run: dict | None,
|
||||
current: str | None,
|
||||
error_message: str | None,
|
||||
@@ -803,8 +814,8 @@ Generate {language} code to extract/transform available variables for the target
|
||||
model_instance = ModelManager().get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
provider=model_config.provider,
|
||||
model=model_config.name,
|
||||
)
|
||||
model_name = model_config.get("name", "")
|
||||
model_schema = model_instance.model_type_instance.get_model_schema(model_name, model_instance.credentials)
|
||||
@@ -845,7 +856,5 @@ Generate {language} code to extract/transform available variables for the target
|
||||
error = str(e)
|
||||
return {"error": f"Failed to generate code. Error: {error}"}
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"Failed to invoke LLM model, model: %s", json.dumps(model_config.get("name")), exc_info=True
|
||||
)
|
||||
logger.exception("Failed to invoke LLM model, model: %s", json.dumps(model_config.name), exc_info=True)
|
||||
return {"error": f"An unexpected error occurred: {str(e)}"}
|
||||
|
||||
@@ -347,7 +347,7 @@ class BaseSession(
|
||||
message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
|
||||
)
|
||||
|
||||
responder = RequestResponder(
|
||||
responder = RequestResponder[ReceiveRequestT, SendResultT](
|
||||
request_id=message.message.root.id,
|
||||
request_meta=validated_request.root.params.meta if validated_request.root.params else None,
|
||||
request=validated_request,
|
||||
|
||||
@@ -92,6 +92,10 @@ def _build_llm_result_from_first_chunk(
|
||||
Build a single `LLMResult` from the first returned chunk.
|
||||
|
||||
This is used for `stream=False` because the plugin side may still implement the response via a chunked stream.
|
||||
|
||||
Note:
|
||||
This function always drains the `chunks` iterator after reading the first chunk to ensure any underlying
|
||||
streaming resources are released (e.g., HTTP connections owned by the plugin runtime).
|
||||
"""
|
||||
content = ""
|
||||
content_list: list[PromptMessageContentUnionTypes] = []
|
||||
@@ -99,18 +103,25 @@ def _build_llm_result_from_first_chunk(
|
||||
system_fingerprint: str | None = None
|
||||
tools_calls: list[AssistantPromptMessage.ToolCall] = []
|
||||
|
||||
first_chunk = next(chunks, None)
|
||||
if first_chunk is not None:
|
||||
if isinstance(first_chunk.delta.message.content, str):
|
||||
content += first_chunk.delta.message.content
|
||||
elif isinstance(first_chunk.delta.message.content, list):
|
||||
content_list.extend(first_chunk.delta.message.content)
|
||||
try:
|
||||
first_chunk = next(chunks, None)
|
||||
if first_chunk is not None:
|
||||
if isinstance(first_chunk.delta.message.content, str):
|
||||
content += first_chunk.delta.message.content
|
||||
elif isinstance(first_chunk.delta.message.content, list):
|
||||
content_list.extend(first_chunk.delta.message.content)
|
||||
|
||||
if first_chunk.delta.message.tool_calls:
|
||||
_increase_tool_call(first_chunk.delta.message.tool_calls, tools_calls)
|
||||
if first_chunk.delta.message.tool_calls:
|
||||
_increase_tool_call(first_chunk.delta.message.tool_calls, tools_calls)
|
||||
|
||||
usage = first_chunk.delta.usage or LLMUsage.empty_usage()
|
||||
system_fingerprint = first_chunk.system_fingerprint
|
||||
usage = first_chunk.delta.usage or LLMUsage.empty_usage()
|
||||
system_fingerprint = first_chunk.system_fingerprint
|
||||
finally:
|
||||
try:
|
||||
for _ in chunks:
|
||||
pass
|
||||
except Exception:
|
||||
logger.debug("Failed to drain non-stream plugin chunk iterator.", exc_info=True)
|
||||
|
||||
return LLMResult(
|
||||
model=model,
|
||||
@@ -283,7 +294,7 @@ class LargeLanguageModel(AIModel):
|
||||
# TODO
|
||||
raise self._transform_invoke_error(e)
|
||||
|
||||
if stream and isinstance(result, Generator):
|
||||
if stream and not isinstance(result, LLMResult):
|
||||
return self._invoke_result_generator(
|
||||
model=model,
|
||||
result=result,
|
||||
|
||||
@@ -314,6 +314,8 @@ class ModelProviderFactory:
|
||||
elif model_type == ModelType.TTS:
|
||||
return TTSModel.model_validate(init_params)
|
||||
|
||||
raise ValueError(f"Unsupported model type: {model_type}")
|
||||
|
||||
def get_provider_icon(self, provider: str, icon_type: str, lang: str) -> tuple[bytes, str]:
|
||||
"""
|
||||
Get provider icon
|
||||
|
||||
@@ -23,8 +23,8 @@ class TriggerDebugEventBus:
|
||||
"""
|
||||
|
||||
# LUA_SELECT: Atomic poll or register for event
|
||||
# KEYS[1] = trigger_debug_inbox:{tenant_id}:{address_id}
|
||||
# KEYS[2] = trigger_debug_waiting_pool:{tenant_id}:...
|
||||
# KEYS[1] = trigger_debug_inbox:{<tenant_id>}:<address_id>
|
||||
# KEYS[2] = trigger_debug_waiting_pool:{<tenant_id>}:...
|
||||
# ARGV[1] = address_id
|
||||
LUA_SELECT = (
|
||||
"local v=redis.call('GET',KEYS[1]);"
|
||||
@@ -35,7 +35,7 @@ class TriggerDebugEventBus:
|
||||
)
|
||||
|
||||
# LUA_DISPATCH: Dispatch event to all waiting addresses
|
||||
# KEYS[1] = trigger_debug_waiting_pool:{tenant_id}:...
|
||||
# KEYS[1] = trigger_debug_waiting_pool:{<tenant_id>}:...
|
||||
# ARGV[1] = tenant_id
|
||||
# ARGV[2] = event_json
|
||||
LUA_DISPATCH = (
|
||||
@@ -43,7 +43,7 @@ class TriggerDebugEventBus:
|
||||
"if #a==0 then return 0 end;"
|
||||
"redis.call('DEL',KEYS[1]);"
|
||||
"for i=1,#a do "
|
||||
f"redis.call('SET','trigger_debug_inbox:'..ARGV[1]..':'..a[i],ARGV[2],'EX',{TRIGGER_DEBUG_EVENT_TTL});"
|
||||
f"redis.call('SET','trigger_debug_inbox:{{'..ARGV[1]..'}}'..':'..a[i],ARGV[2],'EX',{TRIGGER_DEBUG_EVENT_TTL});"
|
||||
"end;"
|
||||
"return #a"
|
||||
)
|
||||
@@ -108,7 +108,7 @@ class TriggerDebugEventBus:
|
||||
Event object if available, None otherwise
|
||||
"""
|
||||
address_id: str = hashlib.sha256(f"{user_id}|{app_id}|{node_id}".encode()).hexdigest()
|
||||
address: str = f"trigger_debug_inbox:{tenant_id}:{address_id}"
|
||||
address: str = f"trigger_debug_inbox:{{{tenant_id}}}:{address_id}"
|
||||
|
||||
try:
|
||||
event_data = redis_client.eval(
|
||||
|
||||
@@ -42,7 +42,7 @@ def build_webhook_pool_key(tenant_id: str, app_id: str, node_id: str) -> str:
|
||||
app_id: App ID
|
||||
node_id: Node ID
|
||||
"""
|
||||
return f"{TriggerDebugPoolKey.WEBHOOK}:{tenant_id}:{app_id}:{node_id}"
|
||||
return f"{TriggerDebugPoolKey.WEBHOOK}:{{{tenant_id}}}:{app_id}:{node_id}"
|
||||
|
||||
|
||||
class PluginTriggerDebugEvent(BaseDebugEvent):
|
||||
@@ -64,4 +64,4 @@ def build_plugin_pool_key(tenant_id: str, provider_id: str, subscription_id: str
|
||||
provider_id: Provider ID
|
||||
subscription_id: Subscription ID
|
||||
"""
|
||||
return f"{TriggerDebugPoolKey.PLUGIN}:{tenant_id}:{str(provider_id)}:{subscription_id}:{name}"
|
||||
return f"{TriggerDebugPoolKey.PLUGIN}:{{{tenant_id}}}:{str(provider_id)}:{subscription_id}:{name}"
|
||||
|
||||
@@ -28,9 +28,10 @@ def init_app(app: DifyApp) -> None:
|
||||
|
||||
# Ensure route decorators are evaluated.
|
||||
import controllers.console.ping as ping_module
|
||||
from controllers.console import setup
|
||||
from controllers.console import remote_files, setup
|
||||
|
||||
_ = ping_module
|
||||
_ = remote_files
|
||||
_ = setup
|
||||
|
||||
router.include_router(console_router, prefix="/console/api")
|
||||
|
||||
@@ -136,7 +136,7 @@ class PKCS1OAepCipher:
|
||||
# Step 3a (OS2IP)
|
||||
em_int = bytes_to_long(em)
|
||||
# Step 3b (RSAEP)
|
||||
m_int = gmpy2.powmod(em_int, self._key.e, self._key.n)
|
||||
m_int: int = gmpy2.powmod(em_int, self._key.e, self._key.n) # type: ignore[attr-defined]
|
||||
# Step 3c (I2OSP)
|
||||
c = long_to_bytes(m_int, k)
|
||||
return c
|
||||
@@ -169,7 +169,7 @@ class PKCS1OAepCipher:
|
||||
ct_int = bytes_to_long(ciphertext)
|
||||
# Step 2b (RSADP)
|
||||
# m_int = self._key._decrypt(ct_int)
|
||||
m_int = gmpy2.powmod(ct_int, self._key.d, self._key.n)
|
||||
m_int: int = gmpy2.powmod(ct_int, self._key.d, self._key.n) # type: ignore[attr-defined]
|
||||
# Complete step 2c (I2OSP)
|
||||
em = long_to_bytes(m_int, k)
|
||||
# Step 3a
|
||||
|
||||
@@ -51,7 +51,7 @@ def upgrade():
|
||||
batch_op.add_column(sa.Column('summary_index_setting', models.types.AdjustedJSON(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('documents', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('need_summary', sa.Boolean(), server_default=sa.text('false'), nullable=True))
|
||||
batch_op.add_column(sa.Column('need_summary', sa.Boolean(), server_default=sa.text('false'), nullable=False))
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
op.create_table(
|
||||
@@ -83,7 +83,7 @@ def upgrade():
|
||||
batch_op.add_column(sa.Column('summary_index_setting', models.types.AdjustedJSON(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('documents', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('need_summary', sa.Boolean(), server_default=sa.text('false'), nullable=True))
|
||||
batch_op.add_column(sa.Column('need_summary', sa.Boolean(), server_default=sa.text('false'), nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@@ -420,7 +420,7 @@ class Document(Base):
|
||||
doc_metadata = mapped_column(AdjustedJSON, nullable=True)
|
||||
doc_form = mapped_column(String(255), nullable=False, server_default=sa.text("'text_model'"))
|
||||
doc_language = mapped_column(String(255), nullable=True)
|
||||
need_summary: Mapped[bool | None] = mapped_column(sa.Boolean, nullable=True, server_default=sa.text("false"))
|
||||
need_summary: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))
|
||||
|
||||
DATA_SOURCES = ["upload_file", "notion_import", "website_crawl"]
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "dify-api"
|
||||
version = "1.11.4"
|
||||
version = "1.12.0"
|
||||
requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
|
||||
@@ -0,0 +1,291 @@
|
||||
import builtins
|
||||
import contextlib
|
||||
import importlib
|
||||
import sys
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
from werkzeug.exceptions import Unauthorized
|
||||
|
||||
from extensions import ext_fastopenapi
|
||||
from extensions.ext_database import db
|
||||
from services.feature_service import FeatureModel, SystemFeatureModel
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app():
|
||||
"""
|
||||
Creates a Flask application instance configured for testing.
|
||||
"""
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.config["SECRET_KEY"] = "test-secret"
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
|
||||
|
||||
# Initialize the database with the app
|
||||
db.init_app(app)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def fix_method_view_issue(monkeypatch):
|
||||
"""
|
||||
Automatic fixture to patch 'builtins.MethodView'.
|
||||
|
||||
Why this is needed:
|
||||
The official legacy codebase contains a global patch in its initialization logic:
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView
|
||||
|
||||
Some dependencies (like ext_fastopenapi or older Flask extensions) might implicitly
|
||||
rely on 'MethodView' being available in the global builtins namespace.
|
||||
|
||||
Refactoring Note:
|
||||
While patching builtins is generally discouraged due to global side effects,
|
||||
this fixture reproduces the production environment's state to ensure tests are realistic.
|
||||
We use 'monkeypatch' to ensure that this change is undone after the test finishes,
|
||||
keeping other tests isolated.
|
||||
"""
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
# 'raising=False' allows us to set an attribute that doesn't exist yet
|
||||
monkeypatch.setattr(builtins, "MethodView", MethodView, raising=False)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Helper Functions for Fixture Complexity Reduction
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _create_isolated_router():
|
||||
"""
|
||||
Creates a fresh, isolated router instance to prevent route pollution.
|
||||
"""
|
||||
import controllers.fastopenapi
|
||||
|
||||
# Dynamically get the class type (e.g., FlaskRouter) to avoid hardcoding dependencies
|
||||
RouterClass = type(controllers.fastopenapi.console_router)
|
||||
return RouterClass()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _patch_auth_and_router(temp_router):
|
||||
"""
|
||||
Context manager that applies all necessary patches for:
|
||||
1. The console_router (redirecting to our isolated temp_router)
|
||||
2. Authentication decorators (disabling them with no-ops)
|
||||
3. User/Account loaders (mocking authenticated state)
|
||||
"""
|
||||
|
||||
def noop(f):
|
||||
return f
|
||||
|
||||
# We patch the SOURCE of the decorators/functions, not the destination module.
|
||||
# This ensures that when 'controllers.console.feature' imports them, it gets the mocks.
|
||||
with (
|
||||
patch("controllers.fastopenapi.console_router", temp_router),
|
||||
patch("extensions.ext_fastopenapi.console_router", temp_router),
|
||||
patch("controllers.console.wraps.setup_required", side_effect=noop),
|
||||
patch("libs.login.login_required", side_effect=noop),
|
||||
patch("controllers.console.wraps.account_initialization_required", side_effect=noop),
|
||||
patch("controllers.console.wraps.cloud_utm_record", side_effect=noop),
|
||||
patch("libs.login.current_account_with_tenant", return_value=(MagicMock(), "tenant-id")),
|
||||
patch("libs.login.current_user", MagicMock(is_authenticated=True)),
|
||||
):
|
||||
# Explicitly reload ext_fastopenapi to ensure it uses the patched console_router
|
||||
import extensions.ext_fastopenapi
|
||||
|
||||
importlib.reload(extensions.ext_fastopenapi)
|
||||
|
||||
yield
|
||||
|
||||
|
||||
def _force_reload_module(target_module: str, alias_module: str):
|
||||
"""
|
||||
Forces a reload of the specified module and handles sys.modules aliasing.
|
||||
|
||||
Why reload?
|
||||
Python decorators (like @route, @login_required) run at IMPORT time.
|
||||
To apply our patches (mocks/no-ops) to these decorators, we must re-import
|
||||
the module while the patches are active.
|
||||
|
||||
Why alias?
|
||||
If 'ext_fastopenapi' imports the controller as 'api.controllers...', but we import
|
||||
it as 'controllers...', Python treats them as two separate modules. This causes:
|
||||
1. Double execution of decorators (registering routes twice -> AssertionError).
|
||||
2. Type mismatch errors (Class A from module X is not Class A from module Y).
|
||||
|
||||
This function ensures both names point to the SAME loaded module instance.
|
||||
"""
|
||||
# 1. Clean existing entries to force re-import
|
||||
if target_module in sys.modules:
|
||||
del sys.modules[target_module]
|
||||
if alias_module in sys.modules:
|
||||
del sys.modules[alias_module]
|
||||
|
||||
# 2. Import the module (triggering decorators with active patches)
|
||||
module = importlib.import_module(target_module)
|
||||
|
||||
# 3. Alias the module in sys.modules to prevent double loading
|
||||
sys.modules[alias_module] = sys.modules[target_module]
|
||||
|
||||
return module
|
||||
|
||||
|
||||
def _cleanup_modules(target_module: str, alias_module: str):
|
||||
"""
|
||||
Removes the module and its alias from sys.modules to prevent side effects
|
||||
on other tests.
|
||||
"""
|
||||
if target_module in sys.modules:
|
||||
del sys.modules[target_module]
|
||||
if alias_module in sys.modules:
|
||||
del sys.modules[alias_module]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_feature_module_env():
|
||||
"""
|
||||
Sets up a mocked environment for the feature module.
|
||||
|
||||
This fixture orchestrates:
|
||||
1. Creating an isolated router.
|
||||
2. Patching authentication and global dependencies.
|
||||
3. Reloading the controller module to apply patches to decorators.
|
||||
4. Cleaning up sys.modules afterwards.
|
||||
"""
|
||||
target_module = "controllers.console.feature"
|
||||
alias_module = "api.controllers.console.feature"
|
||||
|
||||
# 1. Prepare isolated router
|
||||
temp_router = _create_isolated_router()
|
||||
|
||||
# 2. Apply patches
|
||||
try:
|
||||
with _patch_auth_and_router(temp_router):
|
||||
# 3. Reload module to register routes on the temp_router
|
||||
feature_module = _force_reload_module(target_module, alias_module)
|
||||
|
||||
yield feature_module
|
||||
|
||||
finally:
|
||||
# 4. Teardown: Clean up sys.modules
|
||||
_cleanup_modules(target_module, alias_module)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Test Cases
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("url", "service_mock_path", "mock_model_instance", "json_key"),
|
||||
[
|
||||
(
|
||||
"/console/api/features",
|
||||
"controllers.console.feature.FeatureService.get_features",
|
||||
FeatureModel(can_replace_logo=True),
|
||||
"features",
|
||||
),
|
||||
(
|
||||
"/console/api/system-features",
|
||||
"controllers.console.feature.FeatureService.get_system_features",
|
||||
SystemFeatureModel(enable_marketplace=True),
|
||||
"features",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_console_features_success(app, mock_feature_module_env, url, service_mock_path, mock_model_instance, json_key):
|
||||
"""
|
||||
Tests that the feature APIs return a 200 OK status and correct JSON structure.
|
||||
"""
|
||||
# Patch the service layer to return our mock model instance
|
||||
with patch(service_mock_path, return_value=mock_model_instance):
|
||||
# Initialize the API extension
|
||||
ext_fastopenapi.init_app(app)
|
||||
|
||||
client = app.test_client()
|
||||
response = client.get(url)
|
||||
|
||||
# Assertions
|
||||
assert response.status_code == 200, f"Request failed with status {response.status_code}: {response.text}"
|
||||
|
||||
# Verify the JSON response matches the Pydantic model dump
|
||||
expected_data = mock_model_instance.model_dump(mode="json")
|
||||
assert response.get_json() == {json_key: expected_data}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("url", "service_mock_path"),
|
||||
[
|
||||
("/console/api/features", "controllers.console.feature.FeatureService.get_features"),
|
||||
("/console/api/system-features", "controllers.console.feature.FeatureService.get_system_features"),
|
||||
],
|
||||
)
|
||||
def test_console_features_service_error(app, mock_feature_module_env, url, service_mock_path):
|
||||
"""
|
||||
Tests how the application handles Service layer errors.
|
||||
|
||||
Note: When an exception occurs in the view, it is typically caught by the framework
|
||||
(Flask or the OpenAPI wrapper) and converted to a 500 error response.
|
||||
This test verifies that the application returns a 500 status code.
|
||||
"""
|
||||
# Simulate a service failure
|
||||
with patch(service_mock_path, side_effect=ValueError("Service Failure")):
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# When an exception occurs in the view, it is typically caught by the framework
|
||||
# (Flask or the OpenAPI wrapper) and converted to a 500 error response.
|
||||
response = client.get(url)
|
||||
|
||||
assert response.status_code == 500
|
||||
# Check if the error details are exposed in the response (depends on error handler config)
|
||||
# We accept either generic 500 or the specific error message
|
||||
assert "Service Failure" in response.text or "Internal Server Error" in response.text
|
||||
|
||||
|
||||
def test_system_features_unauthenticated(app, mock_feature_module_env):
|
||||
"""
|
||||
Tests that /console/api/system-features endpoint works without authentication.
|
||||
|
||||
This test verifies the try-except block in get_system_features that handles
|
||||
unauthenticated requests by passing is_authenticated=False to the service layer.
|
||||
"""
|
||||
feature_module = mock_feature_module_env
|
||||
|
||||
# Override the behavior of the current_user mock
|
||||
# The fixture patched 'libs.login.current_user', so 'controllers.console.feature.current_user'
|
||||
# refers to that same Mock object.
|
||||
mock_user = feature_module.current_user
|
||||
|
||||
# Simulate property access raising Unauthorized
|
||||
# Note: We must reset side_effect if it was set, or set it here.
|
||||
# The fixture initialized it as MagicMock(is_authenticated=True).
|
||||
# We want type(mock_user).is_authenticated to raise Unauthorized.
|
||||
type(mock_user).is_authenticated = PropertyMock(side_effect=Unauthorized)
|
||||
|
||||
# Patch the service layer for this specific test
|
||||
with patch("controllers.console.feature.FeatureService.get_system_features") as mock_service:
|
||||
# Setup mock service return value
|
||||
mock_model = SystemFeatureModel(enable_marketplace=True)
|
||||
mock_service.return_value = mock_model
|
||||
|
||||
# Initialize app
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.get("/console/api/system-features")
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200, f"Request failed: {response.text}"
|
||||
|
||||
# Verify service was called with is_authenticated=False
|
||||
mock_service.assert_called_once_with(is_authenticated=False)
|
||||
|
||||
# Verify response body
|
||||
expected_data = mock_model.model_dump(mode="json")
|
||||
assert response.get_json() == {"features": expected_data}
|
||||
@@ -0,0 +1,92 @@
|
||||
import builtins
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from extensions import ext_fastopenapi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app() -> Flask:
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
return app
|
||||
|
||||
|
||||
def test_console_remote_files_fastopenapi_get_info(app: Flask):
|
||||
ext_fastopenapi.init_app(app)
|
||||
|
||||
response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("HEAD", "http://example.com/file.txt"),
|
||||
headers={"Content-Type": "text/plain", "Content-Length": "10"},
|
||||
)
|
||||
|
||||
with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
|
||||
client = app.test_client()
|
||||
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
|
||||
resp = client.get(f"/console/api/remote-files/{encoded_url}")
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}
|
||||
|
||||
|
||||
def test_console_remote_files_fastopenapi_upload(app: Flask):
|
||||
ext_fastopenapi.init_app(app)
|
||||
|
||||
head_response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("GET", "http://example.com/file.txt"),
|
||||
content=b"hello",
|
||||
)
|
||||
file_info = SimpleNamespace(
|
||||
extension="txt",
|
||||
size=5,
|
||||
filename="file.txt",
|
||||
mimetype="text/plain",
|
||||
)
|
||||
uploaded = SimpleNamespace(
|
||||
id="file-id",
|
||||
name="file.txt",
|
||||
size=5,
|
||||
extension="txt",
|
||||
mime_type="text/plain",
|
||||
created_by="user-id",
|
||||
created_at=datetime(2024, 1, 1),
|
||||
)
|
||||
|
||||
with (
|
||||
patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
|
||||
patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
|
||||
patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
|
||||
patch("controllers.console.remote_files.FileService.__init__", return_value=None),
|
||||
patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
|
||||
patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
|
||||
patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
|
||||
):
|
||||
client = app.test_client()
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": "http://example.com/file.txt"},
|
||||
)
|
||||
|
||||
assert resp.status_code == 201
|
||||
assert resp.get_json() == {
|
||||
"id": "file-id",
|
||||
"name": "file.txt",
|
||||
"size": 5,
|
||||
"extension": "txt",
|
||||
"url": "signed-url",
|
||||
"mime_type": "text/plain",
|
||||
"created_by": "user-id",
|
||||
"created_at": int(uploaded.created_at.timestamp()),
|
||||
}
|
||||
@@ -101,3 +101,26 @@ def test__normalize_non_stream_plugin_result__empty_iterator_defaults():
|
||||
assert result.message.tool_calls == []
|
||||
assert result.usage == LLMUsage.empty_usage()
|
||||
assert result.system_fingerprint is None
|
||||
|
||||
|
||||
def test__normalize_non_stream_plugin_result__closes_chunk_iterator():
|
||||
prompt_messages = [UserPromptMessage(content="hi")]
|
||||
|
||||
chunk = _make_chunk(content="hello", usage=LLMUsage.empty_usage())
|
||||
closed: list[bool] = []
|
||||
|
||||
def _chunk_iter():
|
||||
try:
|
||||
yield chunk
|
||||
yield _make_chunk(content="ignored", usage=LLMUsage.empty_usage())
|
||||
finally:
|
||||
closed.append(True)
|
||||
|
||||
result = _normalize_non_stream_plugin_result(
|
||||
model="test-model",
|
||||
prompt_messages=prompt_messages,
|
||||
result=_chunk_iter(),
|
||||
)
|
||||
|
||||
assert result.message.content == "hello"
|
||||
assert closed == [True]
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
[src]
|
||||
exclude = [
|
||||
# deps groups (A1/A2/B/C/D/E)
|
||||
# A1: foundational runtime typing / provider plumbing
|
||||
"core/mcp/session",
|
||||
"core/model_runtime/model_providers",
|
||||
"core/workflow/nodes/protocols.py",
|
||||
"libs/gmpy2_pkcs10aep_cipher.py",
|
||||
# A2: workflow engine/nodes
|
||||
"core/workflow",
|
||||
"core/app/workflow",
|
||||
@@ -33,6 +28,3 @@ exclude = [
|
||||
"tests",
|
||||
]
|
||||
|
||||
[rules]
|
||||
missing-argument = "ignore" # TODO: restore when **args for constructor is supported properly
|
||||
possibly-unbound-attribute = "ignore"
|
||||
|
||||
api/uv.lock (generated)
@@ -1486,7 +1486,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "dify-api"
|
||||
version = "1.11.4"
|
||||
version = "1.12.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aliyun-log-python-sdk" },
|
||||
|
||||
@@ -21,7 +21,7 @@ services:

  # API service
  api:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -132,7 +132,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.11.4
    image: langgenius/dify-web:1.12.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -271,7 +271,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.2-local
    image: langgenius/dify-plugin-daemon:0.5.3-local
    restart: always
    environment:
      # Use the shared environment variables.

@@ -123,7 +123,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.2-local
    image: langgenius/dify-plugin-daemon:0.5.3-local
    restart: always
    env_file:
      - ./middleware.env

@@ -709,7 +709,7 @@ services:

  # API service
  api:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -751,7 +751,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -790,7 +790,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
    image: langgenius/dify-api:1.11.4
    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -820,7 +820,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.11.4
    image: langgenius/dify-web:1.12.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -959,7 +959,7 @@ services:

  # plugin daemon
  plugin_daemon:
    image: langgenius/dify-plugin-daemon:0.5.2-local
    image: langgenius/dify-plugin-daemon:0.5.3-local
    restart: always
    environment:
      # Use the shared environment variables.

@@ -2,6 +2,7 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { useDocLink } from '@/context/i18n'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
@@ -12,12 +13,13 @@ const i18nPrefix = 'sidebar.noApps'
const NoApps: FC = () => {
  const { t } = useTranslation()
  const { theme } = useTheme()
  const docLink = useDocLink()
  return (
    <div className="rounded-xl bg-background-default-subtle p-4">
      <div className={cn('h-[35px] w-[86px] bg-contain bg-center bg-no-repeat', theme === Theme.dark ? s.dark : s.light)}></div>
      <div className="system-sm-semibold mt-2 text-text-secondary">{t(`${i18nPrefix}.title`, { ns: 'explore' })}</div>
      <div className="system-xs-regular my-1 text-text-tertiary">{t(`${i18nPrefix}.description`, { ns: 'explore' })}</div>
      <a className="system-xs-regular text-text-accent" target="_blank" rel="noopener noreferrer" href="https://docs.dify.ai/en/guides/application-publishing/launch-your-webapp-quickly/README">{t(`${i18nPrefix}.learnMore`, { ns: 'explore' })}</a>
      <a className="system-xs-regular text-text-accent" target="_blank" rel="noopener noreferrer" href={docLink('/use-dify/publish/README')}>{t(`${i18nPrefix}.learnMore`, { ns: 'explore' })}</a>
    </div>
  )
}

@@ -2,6 +2,7 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { IS_CLOUD_EDITION } from '@/config'
import TabHeader from '../../base/tab-header'

export enum TypeEnum {
@@ -21,10 +22,13 @@ const Tab: FC<Props> = ({
  disableTry,
}) => {
  const { t } = useTranslation()
  const tabs = [
    { id: TypeEnum.TRY, name: t('tryApp.tabHeader.try', { ns: 'explore' }), disabled: disableTry },
    { id: TypeEnum.DETAIL, name: t('tryApp.tabHeader.detail', { ns: 'explore' }) },
  ]

  const tabs = React.useMemo(() => {
    return [
      IS_CLOUD_EDITION ? { id: TypeEnum.TRY, name: t('tryApp.tabHeader.try', { ns: 'explore' }), disabled: disableTry } : null,
      { id: TypeEnum.DETAIL, name: t('tryApp.tabHeader.detail', { ns: 'explore' }) },
    ].filter(item => item !== null) as { id: TypeEnum, name: string }[]
  }, [t, disableTry])
  return (
    <TabHeader
      items={tabs}

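A note on the filter in the hunk above: the trailing `as` cast can be avoided with a type predicate, which keeps the memoized list fully typed. This is a minimal sketch only; the `useTabs` helper, the `TabItem` shape, and the simplified `t` signature are illustrative and are not part of this change.

import { useMemo } from 'react'

type TabItem = { id: string, name: string, disabled?: boolean }

// Same pattern as the diff: optionally include the cloud-only "try" tab,
// then drop the null placeholder via a type predicate instead of a cast.
function useTabs(isCloud: boolean, disableTry: boolean, t: (key: string) => string): TabItem[] {
  return useMemo(() => {
    return [
      isCloud ? { id: 'try', name: t('tryApp.tabHeader.try'), disabled: disableTry } : null,
      { id: 'detail', name: t('tryApp.tabHeader.detail') },
    ].filter((item): item is TabItem => item !== null)
  }, [isCloud, disableTry, t])
}
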
@@ -221,7 +221,7 @@ const buildOutputVars = (schema: Record<string, any>, schemaTypeDefinitions?: Sc
const metaData = genNodeMetaData({
  sort: 1,
  type: BlockEnum.TriggerPlugin,
  helpLinkUri: 'plugin-trigger',
  helpLinkUri: 'trigger/plugin-trigger',
  isStart: true,
})

@@ -110,7 +110,7 @@ const validateVisualConfig = (payload: ScheduleTriggerNodeType, t: any): string
const metaData = genNodeMetaData({
  sort: 2,
  type: BlockEnum.TriggerSchedule,
  helpLinkUri: 'schedule-trigger',
  helpLinkUri: 'trigger/schedule-trigger',
  isStart: true,
})

@@ -8,7 +8,7 @@ import { createWebhookRawVariable } from './utils/raw-variable'
const metaData = genNodeMetaData({
  sort: 3,
  type: BlockEnum.TriggerWebhook,
  helpLinkUri: 'webhook-trigger',
  helpLinkUri: 'trigger/webhook-trigger',
  isStart: true,
})

@@ -1,4 +1,5 @@
import type { BlockEnum } from '@/app/components/workflow/types'
import type { UseDifyNodesPath } from '@/types/doc-paths'
import { BlockClassificationEnum } from '@/app/components/workflow/block-selector/types'

export type GenNodeMetaDataParams = {
@@ -7,7 +8,7 @@ export type GenNodeMetaDataParams = {
  type: BlockEnum
  title?: string
  author?: string
  helpLinkUri?: string
  helpLinkUri?: UseDifyNodesPath
  isRequired?: boolean
  isUndeletable?: boolean
  isStart?: boolean

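For context on the hunk above: narrowing `helpLinkUri` from `string` to the generated `UseDifyNodesPath` union is what forces the `'plugin-trigger'` → `'trigger/plugin-trigger'` style corrections in the trigger nodes earlier in this diff. A reduced, self-contained model of the effect follows; the two-member union stands in for the real generated type and is only illustrative.

type UseDifyNodesPath = 'trigger/plugin-trigger' | 'trigger/schedule-trigger'

type NodeMeta = {
  sort: number
  helpLinkUri?: UseDifyNodesPath
  isStart?: boolean
}

// Compiles: the value is a member of the generated union.
const pluginTrigger: NodeMeta = { sort: 1, helpLinkUri: 'trigger/plugin-trigger', isStart: true }

// Would no longer compile: the old, prefix-less value is not in the union,
// which is exactly the class of stale link the trigger nodes above fix.
// const stale: NodeMeta = { sort: 1, helpLinkUri: 'plugin-trigger' }
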
@@ -1,6 +1,7 @@
import type { Locale } from '@/i18n-config/language'
import type { DocPathWithoutLang } from '@/types/doc-paths'
import { useTranslation } from '#i18n'
import { useCallback } from 'react'
import { getDocLanguage, getLanguage, getPricingPageLanguage } from '@/i18n-config/language'
import { apiReferencePathTranslations } from '@/types/doc-paths'

@@ -27,21 +28,24 @@ export const useDocLink = (baseUrl?: string): ((path?: DocPathWithoutLang, pathM
  let baseDocUrl = baseUrl || defaultDocBaseUrl
  baseDocUrl = (baseDocUrl.endsWith('/')) ? baseDocUrl.slice(0, -1) : baseDocUrl
  const locale = useLocale()
  const docLanguage = getDocLanguage(locale)
  return (path?: DocPathWithoutLang, pathMap?: DocPathMap): string => {
    const pathUrl = path || ''
    let targetPath = (pathMap) ? pathMap[locale] || pathUrl : pathUrl
    let languagePrefix = `/${docLanguage}`
  return useCallback(
    (path?: DocPathWithoutLang, pathMap?: DocPathMap): string => {
      const docLanguage = getDocLanguage(locale)
      const pathUrl = path || ''
      let targetPath = (pathMap) ? pathMap[locale] || pathUrl : pathUrl
      let languagePrefix = `/${docLanguage}`

    // Translate API reference paths for non-English locales
    if (targetPath.startsWith('/api-reference/') && docLanguage !== 'en') {
      const translatedPath = apiReferencePathTranslations[targetPath]?.[docLanguage as 'zh' | 'ja']
      if (translatedPath) {
        targetPath = translatedPath
        languagePrefix = ''
      // Translate API reference paths for non-English locales
      if (targetPath.startsWith('/api-reference/') && docLanguage !== 'en') {
        const translatedPath = apiReferencePathTranslations[targetPath]?.[docLanguage as 'zh' | 'ja']
        if (translatedPath) {
          targetPath = translatedPath
          languagePrefix = ''
        }
      }
    }

    return `${baseDocUrl}${languagePrefix}${targetPath}`
  }
      return `${baseDocUrl}${languagePrefix}${targetPath}`
    },
    [baseDocUrl, locale],
  )
}

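Consumer-side view of the hunk above, as a minimal sketch: because the returned function is now wrapped in useCallback with [baseDocUrl, locale] as dependencies, callers get a referentially stable docLink and can list it in hook dependency arrays without retriggering on every render. The HelpFooter component is illustrative; useDocLink, its import path, and the '/use-dify/publish/README' path are taken from this diff.

import { useEffect } from 'react'
import { useDocLink } from '@/context/i18n'

function HelpFooter() {
  const docLink = useDocLink()

  useEffect(() => {
    // With the memoized hook this runs only when the locale (or base URL) changes,
    // not on every render as it would with the previous inline closure.
    console.log('docs link:', docLink('/use-dify/publish/README'))
  }, [docLink])

  return <a href={docLink('/use-dify/publish/README')} target="_blank" rel="noopener noreferrer">Docs</a>
}
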
@@ -3,14 +3,13 @@ import noAsAnyInT from './rules/no-as-any-in-t.js'
import noExtraKeys from './rules/no-extra-keys.js'
import noLegacyNamespacePrefix from './rules/no-legacy-namespace-prefix.js'
import noVersionPrefix from './rules/no-version-prefix.js'
import preferTailwindIcon from './rules/prefer-tailwind-icon.js'
import requireNsOption from './rules/require-ns-option.js'
import validI18nKeys from './rules/valid-i18n-keys.js'

/** @type {import('eslint').ESLint.Plugin} */
const plugin = {
  meta: {
    name: 'dify',
    name: 'dify-i18n',
    version: '1.0.0',
  },
  rules: {
@@ -19,7 +18,6 @@ const plugin = {
    'no-extra-keys': noExtraKeys,
    'no-legacy-namespace-prefix': noLegacyNamespacePrefix,
    'no-version-prefix': noVersionPrefix,
    'prefer-tailwind-icon': preferTailwindIcon,
    'require-ns-option': requireNsOption,
    'valid-i18n-keys': validI18nKeys,
  },

@@ -1,384 +0,0 @@
|
||||
/**
|
||||
* Default prop-to-class mappings
|
||||
* Maps component props to Tailwind class prefixes
|
||||
*/
|
||||
const DEFAULT_PROP_MAPPINGS = {
|
||||
size: 'size',
|
||||
width: 'w',
|
||||
height: 'h',
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert PascalCase/camelCase to kebab-case
|
||||
* @param {string} name
|
||||
* @returns {string} The kebab-case string
|
||||
*/
|
||||
function camelToKebab(name) {
|
||||
return name
|
||||
.replace(/([a-z])(\d)/g, '$1-$2')
|
||||
.replace(/(\d)([a-z])/gi, '$1-$2')
|
||||
.replace(/([a-z])([A-Z])/g, '$1-$2')
|
||||
.toLowerCase()
|
||||
}
|
||||
|
||||
/**
|
||||
* Default icon library configurations
|
||||
*
|
||||
* Config options:
|
||||
* - pattern: string | RegExp - Pattern to match import source
|
||||
* - prefix: string | ((match: RegExpMatchArray) => string) - Icon class prefix
|
||||
* - suffix: string | ((match: RegExpMatchArray) => string) - Icon class suffix
|
||||
* - extractSubPath: boolean - Extract subdirectory path and add to prefix
|
||||
* - iconFilter: (name: string) => boolean - Filter which imports to process
|
||||
* - stripPrefix: string - Prefix to remove from icon name before transform
|
||||
* - stripSuffix: string - Suffix to remove from icon name before transform
|
||||
*/
|
||||
const DEFAULT_ICON_CONFIGS = [
|
||||
{
|
||||
// @/app/components/base/icons/src/public/* and vender/*
|
||||
pattern: /^@\/app\/components\/base\/icons\/src\/(public|vender)/,
|
||||
prefix: match => `i-custom-${match[1]}-`,
|
||||
extractSubPath: true,
|
||||
},
|
||||
{
|
||||
// @remixicon/react
|
||||
pattern: '@remixicon/react',
|
||||
prefix: 'i-ri-',
|
||||
iconFilter: name => name.startsWith('Ri'),
|
||||
stripPrefix: 'Ri',
|
||||
},
|
||||
{
|
||||
// @heroicons/react/{size}/{variant}
|
||||
pattern: /^@heroicons\/react\/(\d+)\/(solid|outline)$/,
|
||||
prefix: 'i-heroicons-',
|
||||
suffix: match => `-${match[1]}-${match[2]}`,
|
||||
iconFilter: name => name.endsWith('Icon'),
|
||||
stripSuffix: 'Icon',
|
||||
},
|
||||
]
|
||||
|
||||
/**
|
||||
* Convert pixel value to Tailwind class
|
||||
* @param {number} pixels
|
||||
* @param {string} classPrefix - e.g., 'size', 'w', 'h'
|
||||
* @returns {string} The Tailwind class string
|
||||
*/
|
||||
function pixelToClass(pixels, classPrefix) {
|
||||
if (pixels % 4 === 0) {
|
||||
const units = pixels / 4
|
||||
return `${classPrefix}-${units}`
|
||||
}
|
||||
// For non-standard sizes, use Tailwind arbitrary value syntax
|
||||
return `${classPrefix}-[${pixels}px]`
|
||||
}
|
||||
|
||||
/**
|
||||
* Match source against config pattern
|
||||
* @param {string} source - The import source path
|
||||
* @param {object} config - The icon config
|
||||
* @returns {{ matched: boolean, match: RegExpMatchArray | null, basePath: string }} Match result
|
||||
*/
|
||||
function matchPattern(source, config) {
|
||||
const { pattern } = config
|
||||
if (pattern instanceof RegExp) {
|
||||
const match = source.match(pattern)
|
||||
if (match) {
|
||||
return { matched: true, match, basePath: match[0] }
|
||||
}
|
||||
return { matched: false, match: null, basePath: '' }
|
||||
}
|
||||
// String pattern: exact match or prefix match
|
||||
if (source === pattern || source.startsWith(`${pattern}/`)) {
|
||||
return { matched: true, match: null, basePath: pattern }
|
||||
}
|
||||
return { matched: false, match: null, basePath: '' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get icon class from config
|
||||
* @param {string} iconName
|
||||
* @param {object} config
|
||||
* @param {string} source - The import source path
|
||||
* @param {RegExpMatchArray | null} match - The regex match result
|
||||
* @returns {string} The full Tailwind icon class string
|
||||
*/
|
||||
function getIconClass(iconName, config, source, match) {
|
||||
// Strip prefix/suffix from icon name if configured
|
||||
let name = iconName
|
||||
if (config.stripPrefix && name.startsWith(config.stripPrefix)) {
|
||||
name = name.slice(config.stripPrefix.length)
|
||||
}
|
||||
if (config.stripSuffix && name.endsWith(config.stripSuffix)) {
|
||||
name = name.slice(0, -config.stripSuffix.length)
|
||||
}
|
||||
|
||||
// Transform name (use custom or default camelToKebab)
|
||||
const transformed = config.transformName ? config.transformName(name, source) : camelToKebab(name)
|
||||
|
||||
// Get prefix (can be string or function)
|
||||
const prefix = typeof config.prefix === 'function' ? config.prefix(match) : config.prefix
|
||||
|
||||
// Get suffix (can be string or function)
|
||||
const suffix = typeof config.suffix === 'function' ? config.suffix(match) : (config.suffix || '')
|
||||
|
||||
// Extract subdirectory path after the pattern to include in prefix (only if extractSubPath is enabled)
|
||||
let subPrefix = ''
|
||||
if (config.extractSubPath) {
|
||||
const basePath = match ? match[0] : config.pattern
|
||||
if (source.startsWith(`${basePath}/`)) {
|
||||
const subPath = source.slice(basePath.length + 1)
|
||||
if (subPath) {
|
||||
subPrefix = `${subPath.replace(/\//g, '-')}-`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return `${prefix}${subPrefix}${transformed}${suffix}`
|
||||
}
|
||||
|
||||
/** @type {import('eslint').Rule.RuleModule} */
|
||||
export default {
|
||||
meta: {
|
||||
type: 'suggestion',
|
||||
docs: {
|
||||
description: 'Prefer Tailwind CSS icon classes over icon library components',
|
||||
},
|
||||
hasSuggestions: true,
|
||||
schema: [
|
||||
{
|
||||
type: 'object',
|
||||
properties: {
|
||||
libraries: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
pattern: { type: 'string' },
|
||||
prefix: { type: 'string' },
|
||||
suffix: { type: 'string' },
|
||||
extractSubPath: { type: 'boolean' },
|
||||
},
|
||||
required: ['pattern', 'prefix'],
|
||||
},
|
||||
},
|
||||
propMappings: {
|
||||
type: 'object',
|
||||
additionalProperties: { type: 'string' },
|
||||
description: 'Maps component props to Tailwind class prefixes, e.g., { size: "size", width: "w", height: "h" }',
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
],
|
||||
messages: {
|
||||
preferTailwindIcon:
|
||||
'Prefer using Tailwind CSS icon class "{{iconClass}}" over "{{componentName}}" from "{{source}}"',
|
||||
preferTailwindIconImport:
|
||||
'Icon "{{importedName}}" from "{{source}}" can be replaced with Tailwind CSS class "{{iconClass}}"',
|
||||
},
|
||||
},
|
||||
create(context) {
|
||||
const options = context.options[0] || {}
|
||||
const iconConfigs = options.libraries || DEFAULT_ICON_CONFIGS
|
||||
const propMappings = options.propMappings || DEFAULT_PROP_MAPPINGS
|
||||
|
||||
// Track imports: localName -> { node, importedName, config, source, match, used }
|
||||
const iconImports = new Map()
|
||||
|
||||
return {
|
||||
ImportDeclaration(node) {
|
||||
const source = node.source.value
|
||||
|
||||
// Find matching config
|
||||
let matchedConfig = null
|
||||
let matchResult = null
|
||||
for (const config of iconConfigs) {
|
||||
const result = matchPattern(source, config)
|
||||
if (result.matched) {
|
||||
matchedConfig = config
|
||||
matchResult = result.match
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!matchedConfig)
|
||||
return
|
||||
|
||||
// Use default filter if not provided (for user-configured libraries)
|
||||
const iconFilter = matchedConfig.iconFilter || (() => true)
|
||||
|
||||
for (const specifier of node.specifiers) {
|
||||
if (specifier.type === 'ImportSpecifier') {
|
||||
const importedName = specifier.imported.name
|
||||
const localName = specifier.local.name
|
||||
|
||||
if (iconFilter(importedName)) {
|
||||
iconImports.set(localName, {
|
||||
node: specifier,
|
||||
importedName,
|
||||
localName,
|
||||
config: matchedConfig,
|
||||
source,
|
||||
match: matchResult,
|
||||
used: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
JSXOpeningElement(node) {
|
||||
if (node.name.type !== 'JSXIdentifier')
|
||||
return
|
||||
|
||||
const componentName = node.name.name
|
||||
const iconInfo = iconImports.get(componentName)
|
||||
|
||||
if (!iconInfo)
|
||||
return
|
||||
|
||||
iconInfo.used = true
|
||||
|
||||
const iconClass = getIconClass(iconInfo.importedName, iconInfo.config, iconInfo.source, iconInfo.match)
|
||||
|
||||
// Find className attribute
|
||||
const classNameAttr = node.attributes.find(
|
||||
attr => attr.type === 'JSXAttribute' && attr.name.name === 'className',
|
||||
)
|
||||
|
||||
// Process prop mappings (size, width, height, etc.)
|
||||
const mappedClasses = []
|
||||
const mappedPropNames = Object.keys(propMappings)
|
||||
|
||||
for (const propName of mappedPropNames) {
|
||||
const attr = node.attributes.find(
|
||||
a => a.type === 'JSXAttribute' && a.name.name === propName,
|
||||
)
|
||||
|
||||
if (attr && attr.value) {
|
||||
let pixelValue = null
|
||||
|
||||
if (attr.value.type === 'JSXExpressionContainer'
|
||||
&& attr.value.expression.type === 'Literal'
|
||||
&& typeof attr.value.expression.value === 'number') {
|
||||
pixelValue = attr.value.expression.value
|
||||
}
|
||||
else if (attr.value.type === 'Literal'
|
||||
&& typeof attr.value.value === 'number') {
|
||||
pixelValue = attr.value.value
|
||||
}
|
||||
|
||||
if (pixelValue !== null) {
|
||||
mappedClasses.push(pixelToClass(pixelValue, propMappings[propName]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build new className
|
||||
const sourceCode = context.sourceCode
|
||||
let newClassName
|
||||
const classesToAdd = [iconClass, ...mappedClasses].filter(Boolean).join(' ')
|
||||
|
||||
if (classNameAttr && classNameAttr.value) {
|
||||
if (classNameAttr.value.type === 'Literal') {
|
||||
newClassName = `${classesToAdd} ${classNameAttr.value.value}`
|
||||
}
|
||||
else if (classNameAttr.value.type === 'JSXExpressionContainer') {
|
||||
const expression = sourceCode.getText(classNameAttr.value.expression)
|
||||
newClassName = `\`${classesToAdd} \${${expression}}\``
|
||||
}
|
||||
}
|
||||
else {
|
||||
newClassName = classesToAdd
|
||||
}
|
||||
|
||||
const parent = node.parent
|
||||
const isSelfClosing = node.selfClosing
|
||||
const excludedAttrs = ['className', ...mappedPropNames]
|
||||
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'preferTailwindIcon',
|
||||
data: {
|
||||
iconClass,
|
||||
componentName,
|
||||
source: iconInfo.source,
|
||||
},
|
||||
suggest: [
|
||||
{
|
||||
messageId: 'preferTailwindIcon',
|
||||
data: {
|
||||
iconClass,
|
||||
componentName,
|
||||
source: iconInfo.source,
|
||||
},
|
||||
fix(fixer) {
|
||||
const fixes = []
|
||||
|
||||
const classValue = newClassName.startsWith('`')
|
||||
? `{${newClassName}}`
|
||||
: `"${newClassName}"`
|
||||
|
||||
const otherAttrs = node.attributes
|
||||
.filter(attr => !(attr.type === 'JSXAttribute' && excludedAttrs.includes(attr.name.name)))
|
||||
.map(attr => sourceCode.getText(attr))
|
||||
.join(' ')
|
||||
|
||||
const attrsStr = otherAttrs
|
||||
? `className=${classValue} ${otherAttrs}`
|
||||
: `className=${classValue}`
|
||||
|
||||
if (isSelfClosing) {
|
||||
fixes.push(fixer.replaceText(parent, `<span ${attrsStr} />`))
|
||||
}
|
||||
else {
|
||||
const closingElement = parent.closingElement
|
||||
fixes.push(fixer.replaceText(node, `<span ${attrsStr}>`))
|
||||
if (closingElement) {
|
||||
fixes.push(fixer.replaceText(closingElement, '</span>'))
|
||||
}
|
||||
}
|
||||
|
||||
return fixes
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
},
|
||||
|
||||
'Program:exit': function () {
|
||||
const sourceCode = context.sourceCode
|
||||
|
||||
// Report icons that were imported but not found in JSX
|
||||
for (const [, iconInfo] of iconImports) {
|
||||
if (!iconInfo.used) {
|
||||
// Verify the import is still referenced somewhere in the file (besides the import itself)
|
||||
try {
|
||||
const variables = sourceCode.getDeclaredVariables(iconInfo.node)
|
||||
const variable = variables[0]
|
||||
// Check if there are any references besides the import declaration
|
||||
const hasReferences = variable && variable.references.some(
|
||||
ref => ref.identifier !== iconInfo.node.local,
|
||||
)
|
||||
if (!hasReferences)
|
||||
continue
|
||||
}
|
||||
catch {
|
||||
continue
|
||||
}
|
||||
|
||||
const iconClass = getIconClass(iconInfo.importedName, iconInfo.config, iconInfo.source, iconInfo.match)
|
||||
context.report({
|
||||
node: iconInfo.node,
|
||||
messageId: 'preferTailwindIconImport',
|
||||
data: {
|
||||
importedName: iconInfo.importedName,
|
||||
source: iconInfo.source,
|
||||
iconClass,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -177,6 +177,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/annotation/add-annotation-modal/edit-item/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/annotation/batch-add-annotation-modal/csv-downloader.spec.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -186,6 +191,9 @@
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
@@ -193,6 +201,9 @@
|
||||
"app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/annotation/edit-annotation-modal/index.spec.tsx": {
|
||||
@@ -249,6 +260,11 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/app/configuration/base/var-highlight/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/configuration/config-prompt/advanced-prompt-input.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -419,6 +435,11 @@
|
||||
"count": 6
|
||||
}
|
||||
},
|
||||
"app/components/app/configuration/debug/debug-with-multiple-model/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/configuration/debug/debug-with-multiple-model/index.spec.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
@@ -501,6 +522,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/create-app-dialog/app-list/sidebar.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/create-app-modal/index.spec.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 7
|
||||
@@ -517,6 +543,14 @@
|
||||
"app/components/app/create-from-dsl-modal/index.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 2
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/log/filter.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/log/index.tsx": {
|
||||
@@ -585,6 +619,9 @@
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 3
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
}
|
||||
@@ -594,6 +631,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/app/workflow-log/filter.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/app/workflow-log/list.spec.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -645,6 +687,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/action-button/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/agent-log-modal/detail.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -673,6 +720,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/base/amplitude/AmplitudeProvider.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/amplitude/utils.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -721,6 +773,9 @@
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"react/no-nested-component-definitions": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -730,11 +785,21 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/button/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/button/sync-button.stories.tsx": {
|
||||
"no-console": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/carousel/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/chat/chat-with-history/chat-wrapper.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
@@ -814,6 +879,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/chat/chat/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/chat/chat/hooks.ts": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 2
|
||||
@@ -917,10 +987,18 @@
|
||||
}
|
||||
},
|
||||
"app/components/base/error-boundary/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/base/features/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/features/new-feature-panel/annotation-reply/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
@@ -986,6 +1064,11 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/base/file-uploader/store.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"app/components/base/file-uploader/utils.spec.ts": {
|
||||
"test/no-identical-title": {
|
||||
"count": 1
|
||||
@@ -1082,6 +1165,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/base/ga/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/icons/utils.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
@@ -1133,6 +1221,16 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/input/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/logo/dify-logo.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/base/markdown-blocks/audio-block.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
@@ -1283,6 +1381,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/node-status/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/notion-connector/index.stories.tsx": {
|
||||
"no-console": {
|
||||
"count": 1
|
||||
@@ -1314,6 +1417,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/base/portal-to-follow-elem/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -1480,6 +1586,16 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/textarea/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/base/toast/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/base/video-gallery/VideoPlayer.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
@@ -1523,6 +1639,16 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/billing/pricing/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/billing/pricing/plan-switcher/plan-range-switcher.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/billing/pricing/plans/cloud-plan-item/index.spec.tsx": {
|
||||
"test/prefer-hooks-in-order": {
|
||||
"count": 1
|
||||
@@ -1573,6 +1699,11 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/datasets/common/image-uploader/store.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"app/components/datasets/common/image-uploader/utils.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -1583,6 +1714,16 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/common/retrieval-method-info/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/create/file-preview/index.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
@@ -1613,6 +1754,11 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/datasets/create/step-two/preview-item/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/create/stop-embedding-modal/index.spec.tsx": {
|
||||
"test/prefer-hooks-in-order": {
|
||||
"count": 1
|
||||
@@ -1670,6 +1816,9 @@
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 2
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
@@ -1734,6 +1883,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/create-from-pipeline/data-source/store/provider.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/create-from-pipeline/data-source/store/slices/online-drive.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 4
|
||||
@@ -1779,6 +1933,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/detail/completed/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/detail/completed/new-child-segment.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -1802,6 +1961,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/detail/segment-add/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
@@ -1942,6 +2106,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/explore/try-app/tab.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/goto-anything/actions/commands/command-bus.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -1953,6 +2122,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/goto-anything/actions/commands/slash.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -1970,6 +2142,9 @@
|
||||
"app/components/goto-anything/context.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 4
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/goto-anything/index.spec.tsx": {
|
||||
@@ -2166,6 +2341,11 @@
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"app/components/plugins/install-plugin/install-bundle/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/plugins/install-plugin/install-bundle/item/github-item.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
@@ -2242,6 +2422,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/plugins/plugin-auth/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/plugins/plugin-auth/plugin-auth-in-agent.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -2326,6 +2511,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/plugins/plugin-detail-panel/subscription-list/create/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -2371,6 +2559,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/plugins/plugin-page/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -2725,6 +2916,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/block-selector/constants.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/block-selector/featured-tools.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 2
|
||||
@@ -2746,6 +2942,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/block-selector/index-bar.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/block-selector/market-place-plugin/action.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
@@ -2784,16 +2985,31 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/block-selector/view-type-select.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/candidate-node-main.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/workflow/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/custom-group-node/custom-group-node.tsx": {
|
||||
"unused-imports/no-unused-vars": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/datasets-detail-store/provider.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/header/run-mode.tsx": {
|
||||
"no-console": {
|
||||
"count": 1
|
||||
@@ -2802,11 +3018,21 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/header/test-run-menu.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/header/view-workflow-history.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/hooks-store/provider.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/hooks/use-checklist.ts": {
|
||||
"ts/no-empty-object-type": {
|
||||
"count": 2
|
||||
@@ -2871,6 +3097,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/workflow/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -2917,10 +3146,18 @@
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/editor/code-editor/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/entry-node-container.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/error-handle/default-value.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -2946,6 +3183,16 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/layout/index.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 7
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/mcp-tool-availability.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/memory-config.tsx": {
|
||||
"unicorn/prefer-number-properties": {
|
||||
"count": 1
|
||||
@@ -3029,6 +3276,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/_base/components/workflow-panel/tab.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -3082,6 +3332,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/agent/panel.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@@ -3370,6 +3623,11 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/edit-card/auto-width-input.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 1
|
||||
@@ -3662,6 +3920,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/note-node/note-editor/toolbar/color-picker.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/note-node/note-editor/utils.ts": {
|
||||
"regexp/no-useless-quantifier": {
|
||||
"count": 1
|
||||
@@ -3698,6 +3961,9 @@
|
||||
}
|
||||
},
|
||||
"app/components/workflow/panel/chat-variable-panel/components/object-value-item.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
},
|
||||
@@ -3852,6 +4118,16 @@
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"app/components/workflow/skill/editor/skill-editor/plugins/tool-block/tool-block-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/skill/hooks/use-skill-save-manager.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"app/components/workflow/store/workflow/debug/inspect-vars-slice.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
@@ -3959,6 +4235,11 @@
|
||||
"count": 8
|
||||
}
|
||||
},
|
||||
"app/components/workflow/workflow-history-store.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"app/components/workflow/workflow-preview/components/nodes/constants.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@@ -4020,30 +4301,79 @@
|
||||
}
|
||||
},
|
||||
"context/app-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/datasets-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/event-emitter.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/external-api-panel-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/external-knowledge-api-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/global-public-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"context/hooks/use-trigger-events-limit-modal.ts": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"context/mitt-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"context/modal-context.test.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"context/modal-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 2
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
}
|
||||
},
|
||||
"context/provider-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/web-app-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"context/workspace-context.tsx": {
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"hooks/use-async-window-open.spec.ts": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
@@ -4080,6 +4410,9 @@
|
||||
"hooks/use-pay.tsx": {
|
||||
"react-hooks-extra/no-direct-set-state-in-use-effect": {
|
||||
"count": 4
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"i18n-config/README.md": {
|
||||
|
||||
@@ -104,17 +104,8 @@ export default antfu(
      'tailwindcss/migration-from-tailwind-2': 'warn',
    },
  },
  // Dify custom rules
  {
    plugins: {
      dify,
    },
  },
  {
    files: ['**/*.tsx'],
    rules: {
      'dify/prefer-tailwind-icon': 'warn',
    },
    plugins: { dify },
  },
  {
    files: ['i18n/**/*.json'],

@@ -1,7 +1,7 @@
{
  "name": "dify-web",
  "type": "module",
  "version": "1.11.4",
  "version": "1.12.0",
  "private": true,
  "packageManager": "pnpm@10.28.1+sha512.7d7dbbca9e99447b7c3bf7a73286afaaf6be99251eb9498baefa7d406892f67b879adb3a1d7e687fc4ccc1a388c7175fbaae567a26ab44d1067b54fcb0d6a316",
  "imports": {
@@ -22,6 +22,9 @@
    "and_uc >= 15.5",
    "and_qq >= 14.9"
  ],
  "engines": {
    "node": ">=24"
  },
  "scripts": {
    "dev": "next dev",
    "dev:inspect": "next dev --inspect",
@@ -166,19 +169,13 @@
    "zustand": "5.0.9"
  },
  "devDependencies": {
    "@antfu/eslint-config": "7.0.1",
    "@antfu/eslint-config": "7.2.0",
    "@chromatic-com/storybook": "5.0.0",
    "@egoist/tailwindcss-icons": "1.9.0",
    "@eslint-react/eslint-plugin": "2.7.0",
    "@iconify-json/heroicons": "1.2.3",
    "@iconify-json/ri": "1.2.7",
    "@iconify/tools": "5.0.2",
    "@iconify/types": "2.0.0",
    "@iconify/utils": "3.1.0",
    "@eslint-react/eslint-plugin": "2.8.1",
    "@mdx-js/loader": "3.1.1",
    "@mdx-js/react": "3.1.1",
    "@next/bundle-analyzer": "16.1.5",
    "@next/eslint-plugin-next": "16.1.5",
    "@next/eslint-plugin-next": "16.1.6",
    "@next/mdx": "16.1.5",
    "@rgrove/parse-xml": "4.2.0",
    "@serwist/turbopack": "9.5.0",
@@ -188,7 +185,7 @@
    "@storybook/addon-themes": "10.2.0",
    "@storybook/nextjs-vite": "10.2.0",
    "@storybook/react": "10.2.0",
    "@tanstack/eslint-plugin-query": "5.91.2",
    "@tanstack/eslint-plugin-query": "5.91.3",
    "@tanstack/react-devtools": "0.9.2",
    "@tanstack/react-form-devtools": "0.2.12",
    "@tanstack/react-query-devtools": "5.90.2",
@@ -196,9 +193,9 @@
    "@testing-library/jest-dom": "6.9.1",
    "@testing-library/react": "16.3.0",
    "@testing-library/user-event": "14.6.1",
    "@tsslint/cli": "3.0.1",
    "@tsslint/compat-eslint": "3.0.1",
    "@tsslint/config": "3.0.1",
    "@tsslint/cli": "3.0.2",
    "@tsslint/compat-eslint": "3.0.2",
    "@tsslint/config": "3.0.2",
    "@types/js-cookie": "3.0.6",
    "@types/js-yaml": "4.0.9",
    "@types/negotiator": "0.6.4",
@@ -212,19 +209,19 @@
    "@types/semver": "7.7.1",
    "@types/sortablejs": "1.15.8",
    "@types/uuid": "10.0.0",
    "@typescript-eslint/parser": "8.53.0",
    "@typescript-eslint/parser": "8.54.0",
    "@typescript/native-preview": "7.0.0-dev.20251209.1",
    "@vitejs/plugin-react": "5.1.2",
    "@vitest/coverage-v8": "4.0.17",
    "autoprefixer": "10.4.21",
    "code-inspector-plugin": "1.4.1",
    "code-inspector-plugin": "1.3.6",
    "cross-env": "10.1.0",
    "esbuild-wasm": "0.27.2",
    "eslint": "9.39.2",
    "eslint-plugin-react-hooks": "7.0.1",
    "eslint-plugin-react-refresh": "0.4.26",
    "eslint-plugin-sonarjs": "3.0.5",
    "eslint-plugin-storybook": "10.2.0",
    "eslint-plugin-sonarjs": "3.0.6",
    "eslint-plugin-storybook": "10.2.1",
    "eslint-plugin-tailwindcss": "3.18.2",
    "husky": "9.1.7",
    "jsdom": "27.3.0",

919  web/pnpm-lock.yaml  generated
File diff suppressed because it is too large
@@ -282,6 +282,15 @@ function generateTypeDefinitions(
    }

    lines.push('')

    // Add UseDifyNodesPath helper type after UseDifyPath
    if (section === 'use-dify') {
      lines.push('// UseDify node paths (without prefix)')
      // eslint-disable-next-line no-template-curly-in-string
      lines.push('type ExtractNodesPath<T> = T extends `/use-dify/nodes/${infer Path}` ? Path : never')
      lines.push('export type UseDifyNodesPath = ExtractNodesPath<UseDifyPath>')
      lines.push('')
    }
  }

  // Generate API reference type (English paths only)

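How the emitted helper type behaves, as a self-contained illustration; the two UseDifyPath members below are stand-ins for the generated union, not the full generated file.

type UseDifyPath =
  | '/use-dify/nodes/trigger/webhook-trigger'
  | '/use-dify/workspace/team-members-management'

// Template-literal inference keeps only the suffix of node paths.
type ExtractNodesPath<T> = T extends `/use-dify/nodes/${infer Path}` ? Path : never
type UseDifyNodesPath = ExtractNodesPath<UseDifyPath>
// Resolves to 'trigger/webhook-trigger'; non-node paths become never and drop out of the union.

const ok: UseDifyNodesPath = 'trigger/webhook-trigger'
// const bad: UseDifyNodesPath = 'workspace/team-members-management' // type error
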
@@ -1,14 +1,8 @@
import path from 'node:path'
import { fileURLToPath } from 'node:url'
import tailwindTypography from '@tailwindcss/typography'
// @ts-expect-error workaround for turbopack issue
import tailwindThemeVarDefine from './themes/tailwind-theme-var-define.ts'
import typography from './typography.js'

const _dirname = typeof __dirname !== 'undefined'
  ? __dirname
  : path.dirname(fileURLToPath(import.meta.url))

const config = {
  theme: {
    typography,
@@ -159,21 +153,7 @@ const config = {
      },
    },
  },
  plugins: [
    tailwindTypography,
    // iconsPlugin({
    //   collections: {
    //     ...getCollectionsFromSubDirs(path.resolve(_dirname, 'app/components/base/icons/assets/public'), 'custom-public'),
    //     ...getCollectionsFromSubDirs(path.resolve(_dirname, 'app/components/base/icons/assets/vender'), 'custom-vender'),
    //     ...getIconCollections(['heroicons', 'ri']),
    //   },
    //   extraProperties: {
    //     width: '1rem',
    //     height: '1rem',
    //     display: 'block',
    //   },
    // }),
  ],
  plugins: [tailwindTypography],
  // https://github.com/tailwindlabs/tailwindcss/discussions/5969
  corePlugins: {
    preflight: false,

@@ -2,7 +2,7 @@
// DON NOT EDIT IT MANUALLY
//
// Generated from: https://raw.githubusercontent.com/langgenius/dify-docs/refs/heads/main/docs.json
// Generated at: 2026-01-21T07:24:02.413Z
// Generated at: 2026-01-30T09:14:29.304Z

// Language prefixes
export type DocLanguage = 'en' | 'zh' | 'ja'
@@ -104,6 +104,10 @@ export type UseDifyPath =
  | '/use-dify/workspace/subscription-management'
  | '/use-dify/workspace/team-members-management'

// UseDify node paths (without prefix)
type ExtractNodesPath<T> = T extends `/use-dify/nodes/${infer Path}` ? Path : never
export type UseDifyNodesPath = ExtractNodesPath<UseDifyPath>

// SelfHost paths
export type SelfHostPath =
  | '/self-host/advanced-deployments/local-source-code'
