Mirror of https://github.com/langgenius/dify.git (synced 2026-02-13 17:10:15 -05:00)

Compare commits: refactor/t... inject-con... (9 commits)

| SHA1 |
|---|
| d9b41aa525 |
| 1819bd72ef |
| 7dabc03a08 |
| 1a050c9f86 |
| 7fb6e0cdfe |
| e0fcf33979 |
| 898e09264b |
| c2a11ffa97 |
| 5eaf535a0d |
```diff
@@ -114,7 +114,6 @@ ignore_imports =
     core.workflow.nodes.datasource.datasource_node -> models.model
     core.workflow.nodes.datasource.datasource_node -> models.tools
     core.workflow.nodes.datasource.datasource_node -> services.datasource_provider_service
-    core.workflow.nodes.document_extractor.node -> configs
     core.workflow.nodes.document_extractor.node -> core.file.file_manager
     core.workflow.nodes.document_extractor.node -> core.helper.ssrf_proxy
     core.workflow.nodes.http_request.entities -> configs
```
```diff
@@ -1,5 +1,5 @@
 from collections.abc import Callable, Sequence
-from typing import TYPE_CHECKING, final
+from typing import TYPE_CHECKING, Any, cast, final

 from typing_extensions import override

@@ -16,6 +16,7 @@ from core.workflow.graph.graph import NodeFactory
 from core.workflow.nodes.base.node import Node
 from core.workflow.nodes.code.code_node import CodeNode
 from core.workflow.nodes.code.limits import CodeNodeLimits
+from core.workflow.nodes.document_extractor import DocumentExtractorNode, UnstructuredApiConfig
 from core.workflow.nodes.http_request.node import HttpRequestNode
 from core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node import KnowledgeRetrievalNode
 from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
@@ -53,6 +54,7 @@ class DifyNodeFactory(NodeFactory):
         http_request_http_client: HttpClientProtocol | None = None,
         http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
         http_request_file_manager: FileManagerProtocol | None = None,
+        document_extractor_unstructured_api_config: UnstructuredApiConfig | None = None,
     ) -> None:
         self.graph_init_params = graph_init_params
         self.graph_runtime_state = graph_runtime_state
@@ -78,6 +80,13 @@ class DifyNodeFactory(NodeFactory):
         self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
         self._http_request_file_manager = http_request_file_manager or file_manager
         self._rag_retrieval = DatasetRetrieval()
+        self._document_extractor_unstructured_api_config = (
+            document_extractor_unstructured_api_config
+            or UnstructuredApiConfig(
+                api_url=dify_config.UNSTRUCTURED_API_URL,
+                api_key=dify_config.UNSTRUCTURED_API_KEY or "",
+            )
+        )

     @override
     def create_node(self, node_config: NodeConfigDict) -> Node:
```
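The constructor hunk above adds injection of the Unstructured API settings, with `dify_config` as the fallback. A minimal, hedged sketch of what the injection point enables, such as pointing document-extractor nodes at a local Unstructured server in tests; `DifyNodeFactory`'s own import path is not shown in this diff, so the class is passed in rather than imported:

```python
from core.workflow.nodes.document_extractor import UnstructuredApiConfig

# Hypothetical local Unstructured server; not from the diff.
LOCAL_UNSTRUCTURED = UnstructuredApiConfig(
    api_url="http://localhost:8000",
    api_key="",
)


def build_factory(factory_cls, graph_init_params, graph_runtime_state):
    """Build a node factory whose document-extractor nodes use LOCAL_UNSTRUCTURED.

    factory_cls is DifyNodeFactory; it is taken as a parameter because its
    module path is not visible in this diff.
    """
    return factory_cls(
        graph_init_params=graph_init_params,
        graph_runtime_state=graph_runtime_state,
        document_extractor_unstructured_api_config=LOCAL_UNSTRUCTURED,
    )
```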
```diff
@@ -110,13 +119,17 @@ class DifyNodeFactory(NodeFactory):
         if not node_class:
             raise ValueError(f"No latest version class found for node type: {node_type}")

+        common_kwargs: dict[str, Any] = {
+            "id": node_id,
+            "config": node_config,
+            "graph_init_params": self.graph_init_params,
+            "graph_runtime_state": self.graph_runtime_state,
+        }
+
         # Create node instance
         if node_type == NodeType.CODE:
             return CodeNode(
-                id=node_id,
-                config=node_config,
-                graph_init_params=self.graph_init_params,
-                graph_runtime_state=self.graph_runtime_state,
+                **common_kwargs,
                 code_executor=self._code_executor,
                 code_providers=self._code_providers,
                 code_limits=self._code_limits,
@@ -124,20 +137,14 @@ class DifyNodeFactory(NodeFactory):

         if node_type == NodeType.TEMPLATE_TRANSFORM:
             return TemplateTransformNode(
-                id=node_id,
-                config=node_config,
-                graph_init_params=self.graph_init_params,
-                graph_runtime_state=self.graph_runtime_state,
+                **common_kwargs,
                 template_renderer=self._template_renderer,
                 max_output_length=self._template_transform_max_output_length,
             )

         if node_type == NodeType.HTTP_REQUEST:
             return HttpRequestNode(
-                id=node_id,
-                config=node_config,
-                graph_init_params=self.graph_init_params,
-                graph_runtime_state=self.graph_runtime_state,
+                **common_kwargs,
                 http_client=self._http_request_http_client,
                 tool_file_manager_factory=self._http_request_tool_file_manager_factory,
                 file_manager=self._http_request_file_manager,
@@ -145,16 +152,15 @@ class DifyNodeFactory(NodeFactory):

         if node_type == NodeType.KNOWLEDGE_RETRIEVAL:
             return KnowledgeRetrievalNode(
-                id=node_id,
-                config=node_config,
-                graph_init_params=self.graph_init_params,
-                graph_runtime_state=self.graph_runtime_state,
+                **common_kwargs,
                 rag_retrieval=self._rag_retrieval,
             )

-        return node_class(
-            id=node_id,
-            config=node_config,
-            graph_init_params=self.graph_init_params,
-            graph_runtime_state=self.graph_runtime_state,
-        )
+        if node_type == NodeType.DOCUMENT_EXTRACTOR:
+            document_extractor_class = cast(type[DocumentExtractorNode], node_class)
+            return document_extractor_class(
+                **common_kwargs,
+                unstructured_api_config=self._document_extractor_unstructured_api_config,
+            )
+
+        return node_class(**common_kwargs)
```
```diff
@@ -1,4 +1,4 @@
-from .entities import DocumentExtractorNodeData
+from .entities import DocumentExtractorNodeData, UnstructuredApiConfig
 from .node import DocumentExtractorNode

-__all__ = ["DocumentExtractorNode", "DocumentExtractorNodeData"]
+__all__ = ["DocumentExtractorNode", "DocumentExtractorNodeData", "UnstructuredApiConfig"]
```
```diff
@@ -1,7 +1,14 @@
 from collections.abc import Sequence
+from dataclasses import dataclass

 from core.workflow.nodes.base import BaseNodeData


 class DocumentExtractorNodeData(BaseNodeData):
     variable_selector: Sequence[str]
+
+
+@dataclass(frozen=True)
+class UnstructuredApiConfig:
+    api_url: str | None = None
+    api_key: str = ""
```
```diff
@@ -5,7 +5,7 @@ import logging
 import os
 import tempfile
 from collections.abc import Mapping, Sequence
-from typing import Any
+from typing import TYPE_CHECKING, Any

 import charset_normalizer
 import docx
@@ -20,7 +20,6 @@ from docx.oxml.text.paragraph import CT_P
 from docx.table import Table
 from docx.text.paragraph import Paragraph

-from configs import dify_config
 from core.file import File, FileTransferMethod, file_manager
 from core.helper import ssrf_proxy
 from core.variables import ArrayFileSegment
@@ -29,11 +28,15 @@ from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
 from core.workflow.node_events import NodeRunResult
 from core.workflow.nodes.base.node import Node

-from .entities import DocumentExtractorNodeData
+from .entities import DocumentExtractorNodeData, UnstructuredApiConfig
 from .exc import DocumentExtractorError, FileDownloadError, TextExtractionError, UnsupportedFileTypeError

 logger = logging.getLogger(__name__)

+if TYPE_CHECKING:
+    from core.workflow.entities import GraphInitParams
+    from core.workflow.runtime import GraphRuntimeState
+

 class DocumentExtractorNode(Node[DocumentExtractorNodeData]):
     """
@@ -47,6 +50,23 @@ class DocumentExtractorNode(Node[DocumentExtractorNodeData]):
     def version(cls) -> str:
         return "1"

+    def __init__(
+        self,
+        id: str,
+        config: Mapping[str, Any],
+        graph_init_params: "GraphInitParams",
+        graph_runtime_state: "GraphRuntimeState",
+        *,
+        unstructured_api_config: UnstructuredApiConfig | None = None,
+    ) -> None:
+        super().__init__(
+            id=id,
+            config=config,
+            graph_init_params=graph_init_params,
+            graph_runtime_state=graph_runtime_state,
+        )
+        self._unstructured_api_config = unstructured_api_config or UnstructuredApiConfig()
+
     def _run(self):
         variable_selector = self.node_data.variable_selector
         variable = self.graph_runtime_state.variable_pool.get(variable_selector)
@@ -64,7 +84,10 @@ class DocumentExtractorNode(Node[DocumentExtractorNodeData]):

         try:
             if isinstance(value, list):
-                extracted_text_list = list(map(_extract_text_from_file, value))
+                extracted_text_list = [
+                    _extract_text_from_file(file, unstructured_api_config=self._unstructured_api_config)
+                    for file in value
+                ]
                 return NodeRunResult(
                     status=WorkflowNodeExecutionStatus.SUCCEEDED,
                     inputs=inputs,
@@ -72,7 +95,7 @@ class DocumentExtractorNode(Node[DocumentExtractorNodeData]):
                     outputs={"text": ArrayStringSegment(value=extracted_text_list)},
                 )
             elif isinstance(value, File):
-                extracted_text = _extract_text_from_file(value)
+                extracted_text = _extract_text_from_file(value, unstructured_api_config=self._unstructured_api_config)
                 return NodeRunResult(
                     status=WorkflowNodeExecutionStatus.SUCCEEDED,
                     inputs=inputs,
@@ -103,7 +126,12 @@ class DocumentExtractorNode(Node[DocumentExtractorNodeData]):
         return {node_id + ".files": typed_node_data.variable_selector}


-def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
+def _extract_text_by_mime_type(
+    *,
+    file_content: bytes,
+    mime_type: str,
+    unstructured_api_config: UnstructuredApiConfig,
+) -> str:
     """Extract text from a file based on its MIME type."""
     match mime_type:
         case "text/plain" | "text/html" | "text/htm" | "text/markdown" | "text/xml":
@@ -111,7 +139,7 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
         case "application/pdf":
             return _extract_text_from_pdf(file_content)
         case "application/msword":
-            return _extract_text_from_doc(file_content)
+            return _extract_text_from_doc(file_content, unstructured_api_config=unstructured_api_config)
         case "application/vnd.openxmlformats-officedocument.wordprocessingml.document":
             return _extract_text_from_docx(file_content)
         case "text/csv":
@@ -119,11 +147,11 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
         case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" | "application/vnd.ms-excel":
             return _extract_text_from_excel(file_content)
         case "application/vnd.ms-powerpoint":
-            return _extract_text_from_ppt(file_content)
+            return _extract_text_from_ppt(file_content, unstructured_api_config=unstructured_api_config)
         case "application/vnd.openxmlformats-officedocument.presentationml.presentation":
-            return _extract_text_from_pptx(file_content)
+            return _extract_text_from_pptx(file_content, unstructured_api_config=unstructured_api_config)
         case "application/epub+zip":
-            return _extract_text_from_epub(file_content)
+            return _extract_text_from_epub(file_content, unstructured_api_config=unstructured_api_config)
         case "message/rfc822":
             return _extract_text_from_eml(file_content)
         case "application/vnd.ms-outlook":
@@ -140,7 +168,12 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
             raise UnsupportedFileTypeError(f"Unsupported MIME type: {mime_type}")


-def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) -> str:
+def _extract_text_by_file_extension(
+    *,
+    file_content: bytes,
+    file_extension: str,
+    unstructured_api_config: UnstructuredApiConfig,
+) -> str:
     """Extract text from a file based on its file extension."""
     match file_extension:
         case (
@@ -203,7 +236,7 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)
         case ".pdf":
             return _extract_text_from_pdf(file_content)
         case ".doc":
-            return _extract_text_from_doc(file_content)
+            return _extract_text_from_doc(file_content, unstructured_api_config=unstructured_api_config)
         case ".docx":
             return _extract_text_from_docx(file_content)
         case ".csv":
@@ -211,11 +244,11 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)
         case ".xls" | ".xlsx":
             return _extract_text_from_excel(file_content)
         case ".ppt":
-            return _extract_text_from_ppt(file_content)
+            return _extract_text_from_ppt(file_content, unstructured_api_config=unstructured_api_config)
         case ".pptx":
-            return _extract_text_from_pptx(file_content)
+            return _extract_text_from_pptx(file_content, unstructured_api_config=unstructured_api_config)
         case ".epub":
-            return _extract_text_from_epub(file_content)
+            return _extract_text_from_epub(file_content, unstructured_api_config=unstructured_api_config)
         case ".eml":
             return _extract_text_from_eml(file_content)
         case ".msg":
@@ -312,14 +345,14 @@ def _extract_text_from_pdf(file_content: bytes) -> str:
         raise TextExtractionError(f"Failed to extract text from PDF: {str(e)}") from e


-def _extract_text_from_doc(file_content: bytes) -> str:
+def _extract_text_from_doc(file_content: bytes, *, unstructured_api_config: UnstructuredApiConfig) -> str:
     """
     Extract text from a DOC file.
     """
     from unstructured.partition.api import partition_via_api

-    if not dify_config.UNSTRUCTURED_API_URL:
-        raise TextExtractionError("UNSTRUCTURED_API_URL must be set")
+    if not unstructured_api_config.api_url:
+        raise TextExtractionError("Unstructured API URL is not configured for DOC file processing.")

     try:
         with tempfile.NamedTemporaryFile(suffix=".doc", delete=False) as temp_file:
@@ -329,8 +362,8 @@ def _extract_text_from_doc(file_content: bytes) -> str:
                 elements = partition_via_api(
                     file=file,
                     metadata_filename=temp_file.name,
-                    api_url=dify_config.UNSTRUCTURED_API_URL,
-                    api_key=dify_config.UNSTRUCTURED_API_KEY,  # type: ignore
+                    api_url=unstructured_api_config.api_url,
+                    api_key=unstructured_api_config.api_key,
                 )
         os.unlink(temp_file.name)
         return "\n".join([getattr(element, "text", "") for element in elements])
```
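Since `_extract_text_from_doc` now reads the URL from the injected config instead of the global `dify_config`, the missing-URL failure path can be exercised without patching settings. A hedged pytest sketch, with module paths inferred from the package-relative imports above; it assumes the `unstructured` package is installed, since the function imports it before the URL check:

```python
import pytest

from core.workflow.nodes.document_extractor.entities import UnstructuredApiConfig
from core.workflow.nodes.document_extractor.exc import TextExtractionError
from core.workflow.nodes.document_extractor.node import _extract_text_from_doc


def test_doc_extraction_requires_api_url():
    # With no api_url configured, the function raises before any network call.
    with pytest.raises(TextExtractionError):
        _extract_text_from_doc(b"doc-bytes", unstructured_api_config=UnstructuredApiConfig())
```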
```diff
@@ -420,12 +453,20 @@ def _download_file_content(file: File) -> bytes:
         raise FileDownloadError(f"Error downloading file: {str(e)}") from e


-def _extract_text_from_file(file: File):
+def _extract_text_from_file(file: File, *, unstructured_api_config: UnstructuredApiConfig) -> str:
     file_content = _download_file_content(file)
     if file.extension:
-        extracted_text = _extract_text_by_file_extension(file_content=file_content, file_extension=file.extension)
+        extracted_text = _extract_text_by_file_extension(
+            file_content=file_content,
+            file_extension=file.extension,
+            unstructured_api_config=unstructured_api_config,
+        )
     elif file.mime_type:
-        extracted_text = _extract_text_by_mime_type(file_content=file_content, mime_type=file.mime_type)
+        extracted_text = _extract_text_by_mime_type(
+            file_content=file_content,
+            mime_type=file.mime_type,
+            unstructured_api_config=unstructured_api_config,
+        )
     else:
         raise UnsupportedFileTypeError("Unable to determine file type: MIME type or file extension is missing")
     return extracted_text
@@ -517,12 +558,12 @@ def _extract_text_from_excel(file_content: bytes) -> str:
         raise TextExtractionError(f"Failed to extract text from Excel file: {str(e)}") from e


-def _extract_text_from_ppt(file_content: bytes) -> str:
+def _extract_text_from_ppt(file_content: bytes, *, unstructured_api_config: UnstructuredApiConfig) -> str:
     from unstructured.partition.api import partition_via_api
     from unstructured.partition.ppt import partition_ppt

     try:
-        if dify_config.UNSTRUCTURED_API_URL:
+        if unstructured_api_config.api_url:
             with tempfile.NamedTemporaryFile(suffix=".ppt", delete=False) as temp_file:
                 temp_file.write(file_content)
                 temp_file.flush()
@@ -530,8 +571,8 @@ def _extract_text_from_ppt(file_content: bytes) -> str:
                     elements = partition_via_api(
                         file=file,
                         metadata_filename=temp_file.name,
-                        api_url=dify_config.UNSTRUCTURED_API_URL,
-                        api_key=dify_config.UNSTRUCTURED_API_KEY,  # type: ignore
+                        api_url=unstructured_api_config.api_url,
+                        api_key=unstructured_api_config.api_key,
                     )
             os.unlink(temp_file.name)
         else:
@@ -543,12 +584,12 @@ def _extract_text_from_ppt(file_content: bytes) -> str:
         raise TextExtractionError(f"Failed to extract text from PPTX: {str(e)}") from e


-def _extract_text_from_pptx(file_content: bytes) -> str:
+def _extract_text_from_pptx(file_content: bytes, *, unstructured_api_config: UnstructuredApiConfig) -> str:
     from unstructured.partition.api import partition_via_api
     from unstructured.partition.pptx import partition_pptx

     try:
-        if dify_config.UNSTRUCTURED_API_URL:
+        if unstructured_api_config.api_url:
             with tempfile.NamedTemporaryFile(suffix=".pptx", delete=False) as temp_file:
                 temp_file.write(file_content)
                 temp_file.flush()
@@ -556,8 +597,8 @@ def _extract_text_from_pptx(file_content: bytes) -> str:
                     elements = partition_via_api(
                         file=file,
                         metadata_filename=temp_file.name,
-                        api_url=dify_config.UNSTRUCTURED_API_URL,
-                        api_key=dify_config.UNSTRUCTURED_API_KEY,  # type: ignore
+                        api_url=unstructured_api_config.api_url,
+                        api_key=unstructured_api_config.api_key,
                     )
             os.unlink(temp_file.name)
         else:
@@ -568,12 +609,12 @@ def _extract_text_from_pptx(file_content: bytes) -> str:
         raise TextExtractionError(f"Failed to extract text from PPTX: {str(e)}") from e


-def _extract_text_from_epub(file_content: bytes) -> str:
+def _extract_text_from_epub(file_content: bytes, *, unstructured_api_config: UnstructuredApiConfig) -> str:
     from unstructured.partition.api import partition_via_api
     from unstructured.partition.epub import partition_epub

     try:
-        if dify_config.UNSTRUCTURED_API_URL:
+        if unstructured_api_config.api_url:
             with tempfile.NamedTemporaryFile(suffix=".epub", delete=False) as temp_file:
                 temp_file.write(file_content)
                 temp_file.flush()
@@ -581,8 +622,8 @@ def _extract_text_from_epub(file_content: bytes) -> str:
                     elements = partition_via_api(
                         file=file,
                         metadata_filename=temp_file.name,
-                        api_url=dify_config.UNSTRUCTURED_API_URL,
-                        api_key=dify_config.UNSTRUCTURED_API_KEY,  # type: ignore
+                        api_url=unstructured_api_config.api_url,
+                        api_key=unstructured_api_config.api_key,
                     )
             os.unlink(temp_file.name)
         else:
```
```diff
@@ -1,6 +1,6 @@
 import functools
 from collections.abc import Callable
-from typing import Any, TypeVar, cast
+from typing import ParamSpec, TypeVar, cast

 from opentelemetry.trace import get_tracer

@@ -8,7 +8,8 @@ from configs import dify_config
 from extensions.otel.decorators.handler import SpanHandler
 from extensions.otel.runtime import is_instrument_flag_enabled

-T = TypeVar("T", bound=Callable[..., Any])
+P = ParamSpec("P")
+R = TypeVar("R")

 _HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}

@@ -20,7 +21,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
     return _HANDLER_INSTANCES[handler_class]


-def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], T]:
+def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
     """
     Decorator that traces a function with an OpenTelemetry span.

@@ -30,9 +31,9 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
     :param handler_class: Optional handler class to use for this span. If None, uses the default SpanHandler.
     """

-    def decorator(func: T) -> T:
+    def decorator(func: Callable[P, R]) -> Callable[P, R]:
         @functools.wraps(func)
-        def wrapper(*args: Any, **kwargs: Any) -> Any:
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
             if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
                 return func(*args, **kwargs)

@@ -46,6 +47,6 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
                 kwargs=kwargs,
             )

-        return cast(T, wrapper)
+        return cast(Callable[P, R], wrapper)

     return decorator
```
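Both the old `TypeVar("T", bound=Callable[..., Any])` signature and the new `ParamSpec` one preserve the decorated function's outward signature; the gain is inside the wrapper, where `*args`/`**kwargs` and the return value are now precisely typed and the closing `cast` no longer launders `Any`-typed internals. A standalone sketch of the pattern (not Dify code):

```python
import functools
from collections.abc import Callable
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def traced(func: Callable[P, R]) -> Callable[P, R]:
    @functools.wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        # args/kwargs keep the decorated function's parameter types,
        # and the return type is R rather than Any.
        return func(*args, **kwargs)

    return wrapper


@traced
def add(x: int, y: int) -> int:
    return x + y


assert add(1, 2) == 3  # a type checker also knows this returns int, not Any
```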
```diff
@@ -1,9 +1,11 @@
 import inspect
 from collections.abc import Callable, Mapping
-from typing import Any
+from typing import Any, TypeVar

 from opentelemetry.trace import SpanKind, Status, StatusCode

+R = TypeVar("R")
+

 class SpanHandler:
     """
@@ -31,9 +33,9 @@ class SpanHandler:

     def _extract_arguments(
         self,
-        wrapped: Callable[..., Any],
-        args: tuple[Any, ...],
-        kwargs: Mapping[str, Any],
+        wrapped: Callable[..., R],
+        args: tuple[object, ...],
+        kwargs: Mapping[str, object],
     ) -> dict[str, Any] | None:
         """
         Extract function arguments using inspect.signature.
@@ -62,10 +64,10 @@
     def wrapper(
         self,
         tracer: Any,
-        wrapped: Callable[..., Any],
-        args: tuple[Any, ...],
-        kwargs: Mapping[str, Any],
-    ) -> Any:
+        wrapped: Callable[..., R],
+        args: tuple[object, ...],
+        kwargs: Mapping[str, object],
+    ) -> R:
         """
         Fully control the wrapper behavior.

```
```diff
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Callable, Mapping
-from typing import Any
+from typing import Any, TypeVar

 from opentelemetry.trace import SpanKind, Status, StatusCode
 from opentelemetry.util.types import AttributeValue
@@ -12,16 +12,19 @@ from models.model import Account
 logger = logging.getLogger(__name__)


+R = TypeVar("R")
+
+
 class AppGenerateHandler(SpanHandler):
     """Span handler for ``AppGenerateService.generate``."""

     def wrapper(
         self,
         tracer: Any,
-        wrapped: Callable[..., Any],
-        args: tuple[Any, ...],
-        kwargs: Mapping[str, Any],
-    ) -> Any:
+        wrapped: Callable[..., R],
+        args: tuple[object, ...],
+        kwargs: Mapping[str, object],
+    ) -> R:
         try:
             arguments = self._extract_arguments(wrapped, args, kwargs)
             if not arguments:
```
```diff
@@ -1225,7 +1225,12 @@ class TenantService:

     @staticmethod
     def remove_member_from_tenant(tenant: Tenant, account: Account, operator: Account):
-        """Remove member from tenant"""
+        """Remove member from tenant.
+
+        If the removed member has ``AccountStatus.PENDING`` (invited but never
+        activated) and no remaining workspace memberships, the orphaned account
+        record is deleted as well.
+        """
         if operator.id == account.id:
             raise CannotOperateSelfError("Cannot operate self.")

@@ -1235,9 +1240,31 @@ class TenantService:
         if not ta:
             raise MemberNotInTenantError("Member not in tenant.")

+        # Capture identifiers before any deletions; attribute access on the ORM
+        # object may fail after commit() expires the instance.
+        account_id = account.id
+        account_email = account.email
+
         db.session.delete(ta)
+
+        # Clean up orphaned pending accounts (invited but never activated)
+        should_delete_account = False
+        if account.status == AccountStatus.PENDING:
+            # autoflush flushes ta deletion before this query, so 0 means no remaining joins
+            remaining_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).count()
+            if remaining_joins == 0:
+                db.session.delete(account)
+                should_delete_account = True
+
         db.session.commit()
+
+        if should_delete_account:
+            logger.info(
+                "Deleted orphaned pending account: account_id=%s, email=%s",
+                account_id,
+                account_email,
+            )

         if dify_config.BILLING_ENABLED:
             BillingService.clean_billing_info_cache(tenant.id)
@@ -1245,13 +1272,13 @@ class TenantService:
         from services.enterprise.account_deletion_sync import sync_workspace_member_removal

         sync_success = sync_workspace_member_removal(
-            workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
+            workspace_id=tenant.id, member_id=account_id, source="workspace_member_removed"
         )
         if not sync_success:
             logger.warning(
                 "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
                 tenant.id,
-                account.id,
+                account_id,
             )

     @staticmethod
```
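The "autoflush" comment is load-bearing: with SQLAlchemy's default `autoflush=True`, the pending `delete(ta)` is flushed before the membership count query executes, so a count of 0 genuinely means no remaining joins. A self-contained sketch of that behavior against a hypothetical model (not the Dify schema):

```python
from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Join(Base):  # hypothetical stand-in for TenantAccountJoin
    __tablename__ = "joins"
    id: Mapped[int] = mapped_column(primary_key=True)
    account_id: Mapped[str] = mapped_column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:  # autoflush=True is the default
    session.add_all([Join(account_id="a"), Join(account_id="a")])
    session.commit()

    ta = session.query(Join).filter_by(account_id="a").first()
    session.delete(ta)
    # The pending delete is flushed before this query runs, so the count
    # already excludes it -- the same guarantee the service code relies on.
    assert session.query(Join).filter_by(account_id="a").count() == 1
    session.commit()
```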
```diff
@@ -1,6 +1,7 @@
 from flask_login import current_user

 from configs import dify_config
+from enums.cloud_plan import CloudPlan
 from extensions.ext_database import db
 from models.account import Tenant, TenantAccountJoin, TenantAccountRole
 from services.account_service import TenantService
@@ -53,7 +54,12 @@ class WorkspaceService:
         from services.credit_pool_service import CreditPoolService

         paid_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="paid")
-        if paid_pool:
+        # if the tenant is not on the sandbox plan and the paid pool is not full, use the paid pool
+        if (
+            feature.billing.subscription.plan != CloudPlan.SANDBOX
+            and paid_pool is not None
+            and (paid_pool.quota_limit == -1 or paid_pool.quota_limit > paid_pool.quota_used)
+        ):
             tenant_info["trial_credits"] = paid_pool.quota_limit
             tenant_info["trial_credits_used"] = paid_pool.quota_used
         else:
```
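The new guard short-circuits on `quota_limit == -1`, which evidently encodes an unlimited pool, before comparing usage. Distilled into a hedged sketch (function name is illustrative, not from the diff):

```python
def paid_pool_has_headroom(quota_limit: int, quota_used: int) -> bool:
    """Mirror of the paid-pool gate above; -1 appears to mean 'unlimited'."""
    return quota_limit == -1 or quota_limit > quota_used


assert paid_pool_has_headroom(-1, 10_000)    # unlimited pool
assert paid_pool_has_headroom(100, 99)       # credit remaining
assert not paid_pool_has_headroom(100, 100)  # pool exhausted
```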
```diff
@@ -698,6 +698,132 @@ class TestTenantService:

         self._assert_database_operations_called(mock_db_dependencies["db"])

+    # ==================== Member Removal Tests ====================
+
+    def test_remove_pending_member_deletes_orphaned_account(self):
+        """Test that removing a pending member with no other workspaces deletes the account."""
+        # Arrange
+        mock_tenant = MagicMock()
+        mock_tenant.id = "tenant-456"
+        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
+        mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
+            account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
+        )
+
+        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+            tenant_id="tenant-456", account_id="pending-user-789", role="normal"
+        )
+
+        with patch("services.account_service.db") as mock_db:
+            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+                tenant_id="tenant-456", account_id="operator-123", role="owner"
+            )
+
+            query_mock_permission = MagicMock()
+            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
+
+            query_mock_ta = MagicMock()
+            query_mock_ta.filter_by.return_value.first.return_value = mock_ta
+
+            query_mock_count = MagicMock()
+            query_mock_count.filter_by.return_value.count.return_value = 0
+
+            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
+
+            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
+                mock_sync.return_value = True
+
+                # Act
+                TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
+
+                # Assert: enterprise sync still receives the correct member ID
+                mock_sync.assert_called_once_with(
+                    workspace_id="tenant-456",
+                    member_id="pending-user-789",
+                    source="workspace_member_removed",
+                )
+
+                # Assert: both join record and account should be deleted
+                mock_db.session.delete.assert_any_call(mock_ta)
+                mock_db.session.delete.assert_any_call(mock_pending_member)
+                assert mock_db.session.delete.call_count == 2
+
+    def test_remove_pending_member_keeps_account_with_other_workspaces(self):
+        """Test that removing a pending member who belongs to other workspaces preserves the account."""
+        # Arrange
+        mock_tenant = MagicMock()
+        mock_tenant.id = "tenant-456"
+        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
+        mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
+            account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
+        )
+
+        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+            tenant_id="tenant-456", account_id="pending-user-789", role="normal"
+        )
+
+        with patch("services.account_service.db") as mock_db:
+            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+                tenant_id="tenant-456", account_id="operator-123", role="owner"
+            )
+
+            query_mock_permission = MagicMock()
+            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
+
+            query_mock_ta = MagicMock()
+            query_mock_ta.filter_by.return_value.first.return_value = mock_ta
+
+            # Remaining join count = 1 (still in another workspace)
+            query_mock_count = MagicMock()
+            query_mock_count.filter_by.return_value.count.return_value = 1
+
+            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
+
+            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
+                mock_sync.return_value = True
+
+                # Act
+                TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
+
+                # Assert: only the join record should be deleted, not the account
+                mock_db.session.delete.assert_called_once_with(mock_ta)
+
+    def test_remove_active_member_preserves_account(self):
+        """Test that removing an active member never deletes the account, even with no other workspaces."""
+        # Arrange
+        mock_tenant = MagicMock()
+        mock_tenant.id = "tenant-456"
+        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
+        mock_active_member = TestAccountAssociatedDataFactory.create_account_mock(
+            account_id="active-user-789", email="active@example.com", status=AccountStatus.ACTIVE
+        )
+
+        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+            tenant_id="tenant-456", account_id="active-user-789", role="normal"
+        )
+
+        with patch("services.account_service.db") as mock_db:
+            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
+                tenant_id="tenant-456", account_id="operator-123", role="owner"
+            )
+
+            query_mock_permission = MagicMock()
+            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
+
+            query_mock_ta = MagicMock()
+            query_mock_ta.filter_by.return_value.first.return_value = mock_ta
+
+            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta]
+
+            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
+                mock_sync.return_value = True
+
+                # Act
+                TenantService.remove_member_from_tenant(mock_tenant, mock_active_member, mock_operator)
+
+                # Assert: only the join record should be deleted
+                mock_db.session.delete.assert_called_once_with(mock_ta)
+
     # ==================== Tenant Switching Tests ====================

     def test_switch_tenant_success(self):
```
```diff
@@ -3,7 +3,6 @@ import type { CSSProperties, ReactNode } from 'react'
 import { cva } from 'class-variance-authority'
 import * as React from 'react'
 import { cn } from '@/utils/classnames'
 import './index.css'

 enum BadgeState {
   Warning = 'warning',
```
```diff
@@ -4,7 +4,6 @@ import { cva } from 'class-variance-authority'
 import * as React from 'react'
-import { Highlight } from '@/app/components/base/icons/src/public/common'
 import { cn } from '@/utils/classnames'
 import './index.css'

 const PremiumBadgeVariants = cva(
   'premium-badge',
```
```diff
@@ -18,25 +18,25 @@ const StatusContainer: FC<Props> = ({
   return (
     <div
       className={cn(
-        'system-xs-regular relative break-all rounded-lg border px-3 py-2.5',
+        'relative break-all rounded-lg border px-3 py-2.5 system-xs-regular',
         status === 'succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
         status === 'succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
         status === 'partial-succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
         status === 'partial-succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
         status === 'failed' && 'border-[rgba(240,68,56,0.8)] bg-workflow-display-error-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-error.svg)] text-text-warning',
         status === 'failed' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(240,68,56,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
         (status === 'stopped' || status === 'paused') && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
         (status === 'stopped' || status === 'paused') && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        (status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        (status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
         status === 'exception' && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
         status === 'exception' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
         status === 'running' && 'border-[rgba(11,165,236,0.8)] bg-workflow-display-normal-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-running.svg)] text-util-colors-blue-light-blue-light-600',
         status === 'running' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(11,165,236,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
-        status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
+        status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
       )}
     >
       <div className={cn(
```
(File diff suppressed because it is too large)
```diff
@@ -2,9 +2,7 @@ import consistentPlaceholders from './rules/consistent-placeholders.js'
 import noAsAnyInT from './rules/no-as-any-in-t.js'
 import noExtraKeys from './rules/no-extra-keys.js'
 import noLegacyNamespacePrefix from './rules/no-legacy-namespace-prefix.js'
-import noVersionPrefix from './rules/no-version-prefix.js'
 import requireNsOption from './rules/require-ns-option.js'
-import validI18nKeys from './rules/valid-i18n-keys.js'

 /** @type {import('eslint').ESLint.Plugin} */
 const plugin = {
@@ -17,9 +15,7 @@
     'no-as-any-in-t': noAsAnyInT,
     'no-extra-keys': noExtraKeys,
     'no-legacy-namespace-prefix': noLegacyNamespacePrefix,
-    'no-version-prefix': noVersionPrefix,
     'require-ns-option': requireNsOption,
-    'valid-i18n-keys': validI18nKeys,
   },
 }
```
```diff
@@ -1,45 +0,0 @@
-const DEPENDENCY_KEYS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']
-const VERSION_PREFIXES = ['^', '~']
-
-/** @type {import('eslint').Rule.RuleModule} */
-export default {
-  meta: {
-    type: 'problem',
-    docs: {
-      description: `Ensure package.json dependencies do not use version prefixes (${VERSION_PREFIXES.join(' or ')})`,
-    },
-    fixable: 'code',
-  },
-  create(context) {
-    const { filename } = context
-
-    if (!filename.endsWith('package.json'))
-      return {}
-
-    const selector = `JSONProperty:matches(${DEPENDENCY_KEYS.map(k => `[key.value="${k}"]`).join(', ')}) > JSONObjectExpression > JSONProperty`
-
-    return {
-      [selector](node) {
-        const versionNode = node.value
-
-        if (versionNode && versionNode.type === 'JSONLiteral' && typeof versionNode.value === 'string') {
-          const version = versionNode.value
-          const foundPrefix = VERSION_PREFIXES.find(prefix => version.startsWith(prefix))
-
-          if (foundPrefix) {
-            const packageName = node.key.value || node.key.name
-            const cleanVersion = version.substring(1)
-            const canAutoFix = /^\d+\.\d+\.\d+$/.test(cleanVersion)
-            context.report({
-              node: versionNode,
-              message: `Dependency "${packageName}" has version prefix "${foundPrefix}" that should be removed (found: "${version}", expected: "${cleanVersion}")`,
-              fix: canAutoFix
-                ? fixer => fixer.replaceText(versionNode, `"${cleanVersion}"`)
-                : undefined,
-            })
-          }
-        }
-      },
-    }
-  },
-}
```
```diff
@@ -1,61 +0,0 @@
-import { cleanJsonText } from '../utils.js'
-
-/** @type {import('eslint').Rule.RuleModule} */
-export default {
-  meta: {
-    type: 'problem',
-    docs: {
-      description: 'Ensure i18n JSON keys are flat and valid as object paths',
-    },
-  },
-  create(context) {
-    return {
-      Program(node) {
-        const { filename, sourceCode } = context
-
-        if (!filename.endsWith('.json'))
-          return
-
-        let json
-        try {
-          json = JSON.parse(cleanJsonText(sourceCode.text))
-        }
-        catch {
-          context.report({
-            node,
-            message: 'Invalid JSON format',
-          })
-          return
-        }
-
-        const keys = Object.keys(json)
-        const keyPrefixes = new Set()
-
-        for (const key of keys) {
-          if (key.includes('.')) {
-            const parts = key.split('.')
-            for (let i = 1; i < parts.length; i++) {
-              const prefix = parts.slice(0, i).join('.')
-              if (keys.includes(prefix)) {
-                context.report({
-                  node,
-                  message: `Invalid key structure: '${key}' conflicts with '${prefix}'`,
-                })
-              }
-              keyPrefixes.add(prefix)
-            }
-          }
-        }
-
-        for (const key of keys) {
-          if (keyPrefixes.has(key)) {
-            context.report({
-              node,
-              message: `Invalid key structure: '${key}' is a prefix of another key`,
-            })
-          }
-        }
-      },
-    }
-  },
-}
```
(File diff suppressed because it is too large)
```diff
@@ -1,11 +1,16 @@
 // @ts-check
-import antfu from '@antfu/eslint-config'
+import antfu, { GLOB_TESTS, GLOB_TS, GLOB_TSX } from '@antfu/eslint-config'
 import pluginQuery from '@tanstack/eslint-plugin-query'
 import tailwindcss from 'eslint-plugin-better-tailwindcss'
+import hyoban from 'eslint-plugin-hyoban'
 import sonar from 'eslint-plugin-sonarjs'
 import storybook from 'eslint-plugin-storybook'
 import dify from './eslint-rules/index.js'

+// Enable Tailwind CSS IntelliSense mode for ESLint runs
+// See: tailwind-css-plugin.ts
+process.env.TAILWIND_MODE ??= 'ESLINT'
+
 export default antfu(
   {
     react: {
@@ -67,7 +72,8 @@ export default antfu(
     },
   },
   {
-    files: ['**/*.{ts,tsx}'],
+    files: [GLOB_TS, GLOB_TSX],
+    ignores: GLOB_TESTS,
     plugins: {
       tailwindcss,
     },
@@ -79,7 +85,47 @@ export default antfu(
     },
   },
   {
-    plugins: { dify },
+    name: 'dify/custom/setup',
+    plugins: {
+      dify,
+      hyoban,
+    },
   },
+  {
+    files: ['**/*.tsx'],
+    rules: {
+      'hyoban/prefer-tailwind-icons': ['warn', {
+        prefix: 'i-',
+        propMappings: {
+          size: 'size',
+          width: 'w',
+          height: 'h',
+        },
+        libraries: [
+          {
+            prefix: 'i-custom-',
+            source: '^@/app/components/base/icons/src/(?<set>(?:public|vender)(?:/.*)?)$',
+            name: '^(?<name>.*)$',
+          },
+          {
+            source: '^@remixicon/react$',
+            name: '^(?<set>Ri)(?<name>.+)$',
+          },
+          {
+            source: '^@(?<set>heroicons)/react/24/outline$',
+            name: '^(?<name>.*)Icon$',
+          },
+          {
+            source: '^@(?<set>heroicons)/react/24/(?<variant>solid)$',
+            name: '^(?<name>.*)Icon$',
+          },
+          {
+            source: '^@(?<set>heroicons)/react/(?<variant>\\d+/(?:solid|outline))$',
+            name: '^(?<name>.*)Icon$',
+          },
+        ],
+      }],
+    },
+  },
   {
     files: ['i18n/**/*.json'],
@@ -88,7 +134,7 @@ export default antfu(
       'max-lines': 'off',
      'jsonc/sort-keys': 'error',

-      'dify/valid-i18n-keys': 'error',
+      'hyoban/i18n-flat-key': 'error',
       'dify/no-extra-keys': 'error',
       'dify/consistent-placeholders': 'error',
     },
@@ -96,7 +142,7 @@
   {
     files: ['**/package.json'],
     rules: {
-      'dify/no-version-prefix': 'error',
+      'hyoban/no-dependency-version-prefix': 'error',
     },
   },
 )
```
```diff
@@ -31,8 +31,8 @@
     "build": "next build",
     "build:docker": "next build && node scripts/optimize-standalone.js",
     "start": "node ./scripts/copy-and-start.mjs",
-    "lint": "eslint --cache --concurrency=\"auto\"",
-    "lint:ci": "eslint --cache --concurrency 3",
+    "lint": "eslint --cache --concurrency=auto",
+    "lint:ci": "eslint --cache --concurrency 2",
     "lint:fix": "pnpm lint --fix",
     "lint:quiet": "pnpm lint --quiet",
     "lint:complexity": "pnpm lint --rule 'complexity: [error, {max: 15}]' --quiet",
@@ -166,7 +166,10 @@
   "devDependencies": {
     "@antfu/eslint-config": "7.2.0",
     "@chromatic-com/storybook": "5.0.0",
+    "@egoist/tailwindcss-icons": "1.9.2",
     "@eslint-react/eslint-plugin": "2.9.4",
+    "@iconify-json/heroicons": "1.2.3",
+    "@iconify-json/ri": "1.2.9",
     "@mdx-js/loader": "3.1.1",
     "@mdx-js/react": "3.1.1",
     "@next/bundle-analyzer": "16.1.5",
@@ -194,7 +197,8 @@
     "@types/js-cookie": "3.0.6",
     "@types/js-yaml": "4.0.9",
     "@types/negotiator": "0.6.4",
-    "@types/node": "18.15.0",
+    "@types/node": "24.10.12",
+    "@types/postcss-js": "4.1.0",
     "@types/qs": "6.14.0",
     "@types/react": "19.2.9",
     "@types/react-dom": "19.2.3",
@@ -213,18 +217,21 @@
     "cross-env": "10.1.0",
     "esbuild": "0.27.2",
     "eslint": "9.39.2",
-    "eslint-plugin-better-tailwindcss": "4.1.1",
+    "eslint-plugin-better-tailwindcss": "https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@c0161c7",
+    "eslint-plugin-hyoban": "0.11.1",
     "eslint-plugin-react-hooks": "7.0.1",
     "eslint-plugin-react-refresh": "0.5.0",
     "eslint-plugin-sonarjs": "3.0.6",
     "eslint-plugin-storybook": "10.2.6",
     "husky": "9.1.7",
+    "iconify-import-svg": "0.1.1",
     "jsdom": "27.3.0",
     "jsdom-testing-mocks": "1.16.0",
     "knip": "5.78.0",
     "lint-staged": "15.5.2",
     "nock": "14.0.10",
     "postcss": "8.5.6",
+    "postcss-js": "5.0.3",
     "react-scan": "0.4.3",
     "sass": "1.93.2",
     "serwist": "9.5.4",
```
web/pnpm-lock.yaml: 560 changes (generated file; diff suppressed because it is too large)
```diff
@@ -1,8 +1,18 @@
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+import { getIconCollections, iconsPlugin } from '@egoist/tailwindcss-icons'
 import tailwindTypography from '@tailwindcss/typography'
+import { importSvgCollections } from 'iconify-import-svg'
+// @ts-expect-error workaround for turbopack issue
+import { cssAsPlugin } from './tailwind-css-plugin.ts'
 // @ts-expect-error workaround for turbopack issue
 import tailwindThemeVarDefine from './themes/tailwind-theme-var-define.ts'
 import typography from './typography.js'

+const _dirname = typeof __dirname !== 'undefined'
+  ? __dirname
+  : path.dirname(fileURLToPath(import.meta.url))
+
 const config = {
   theme: {
     typography,
@@ -148,7 +158,37 @@ const config = {
       },
     },
   },
-  plugins: [tailwindTypography],
+  plugins: [
+    tailwindTypography,
+    iconsPlugin({
+      collections: {
+        ...getIconCollections(['heroicons', 'ri']),
+        ...importSvgCollections({
+          source: path.resolve(_dirname, 'app/components/base/icons/assets/public'),
+          prefix: 'custom-public',
+          ignoreImportErrors: true,
+        }),
+        ...importSvgCollections({
+          source: path.resolve(_dirname, 'app/components/base/icons/assets/vender'),
+          prefix: 'custom-vender',
+          ignoreImportErrors: true,
+        }),
+      },
+      extraProperties: {
+        width: '1rem',
+        height: '1rem',
+        display: 'block',
+      },
+    }),
+    cssAsPlugin([
+      path.resolve(_dirname, './app/styles/globals.css'),
+      path.resolve(_dirname, './app/components/base/action-button/index.css'),
+      path.resolve(_dirname, './app/components/base/badge/index.css'),
+      path.resolve(_dirname, './app/components/base/button/index.css'),
+      path.resolve(_dirname, './app/components/base/modal/index.css'),
+      path.resolve(_dirname, './app/components/base/premium-badge/index.css'),
+    ]),
+  ],
   // https://github.com/tailwindlabs/tailwindcss/discussions/5969
   corePlugins: {
     preflight: false,
```
web/tailwind-css-plugin.ts (new file, 27 lines)
```diff
@@ -0,0 +1,27 @@
+// Credits:
+// https://github.com/tailwindlabs/tailwindcss-intellisense/issues/227
+
+import type { PluginCreator } from 'tailwindcss/types/config'
+import { readFileSync } from 'node:fs'
+import { parse } from 'postcss'
+import { objectify } from 'postcss-js'
+
+export const cssAsPlugin: (cssPath: string[]) => PluginCreator = (cssPath: string[]) => {
+  const isTailwindCSSIntelliSenseMode = 'TAILWIND_MODE' in process.env
+  if (!isTailwindCSSIntelliSenseMode) {
+    return () => {}
+  }
+
+  return ({ addUtilities, addComponents, addBase }) => {
+    const jssList = cssPath.map(p => objectify(parse(readFileSync(p, 'utf8'))))
+
+    for (const jss of jssList) {
+      if (jss['@layer utilities'])
+        addUtilities(jss['@layer utilities'])
+      if (jss['@layer components'])
+        addComponents(jss['@layer components'])
+      if (jss['@layer base'])
+        addBase(jss['@layer base'])
+    }
+  }
+}
```