Compare commits

...

2 Commits

Author SHA1 Message Date
Harry
d8f4eddc4c refactor(skill): transition from artifact set to bundle structure
- Replaced SkillArtifactSet with SkillBundle across various components, enhancing the organization of skill dependencies and references.
- Updated SkillManager methods to load and save bundles instead of artifacts, improving clarity in asset management.
- Refactored SkillCompiler to compile skills into bundles, streamlining the dependency resolution process.
- Adjusted DifyCli and SandboxBashSession to utilize ToolDependencies, ensuring consistent handling of tool references.
- Introduced AssetReferences for better management of file dependencies within skill bundles.
2026-01-22 20:25:28 +08:00
Harry
7b56f16255 chore: update binary files and refactor LLMNode skill compilation
- Updated binary files for Dify CLI on various platforms (darwin amd64, darwin arm64, linux amd64, linux arm64).
- Refactored skill compilation in LLMNode to improve clarity and maintainability by explicitly naming parameters and incorporating AppAssets for base path management.
- Minor fix in AppAssetFileTree to remove unnecessary leading slash in path construction.
2026-01-22 18:52:52 +08:00
23 changed files with 161 additions and 197 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -107,7 +107,7 @@ class AppAssetFileTree(BaseModel):
while current:
parts.append(current.name)
current = self.get(current.parent_id) if current.parent_id else None
return "/" + "/".join(reversed(parts))
return "/".join(reversed(parts))
def relative_path(self, a: AppAssetNode, b: AppAssetNode) -> str:
"""

View File

@@ -54,8 +54,7 @@ class SkillBuilder:
documents = [SkillDocument(skill_id=s.node.id, content=s.content, metadata=s.metadata) for s in loaded]
artifact_set = SkillCompiler().compile_all(documents, tree, ctx.build_id)
# 3. Save skill bundle
SkillManager.save_artifact(ctx.tenant_id, ctx.app_id, ctx.build_id, artifact_set)
SkillManager.save_bundle(ctx.tenant_id, ctx.app_id, ctx.build_id, artifact_set)
# 4. Prepare compiled skills for upload
to_upload: list[_CompiledSkill] = []

View File

@@ -8,7 +8,7 @@ from pydantic import BaseModel, Field
from core.app.entities.app_invoke_entities import InvokeFrom
from core.model_runtime.utils.encoders import jsonable_encoder
from core.session.cli_api import CliApiSession
from core.skill.entities import ToolArtifact, ToolReference
from core.skill.entities import ToolDependencies, ToolReference
from core.tools.entities.tool_entities import ToolParameter, ToolProviderType
from core.tools.tool_manager import ToolManager
from core.virtual_environment.__base.entities import Arch, OperatingSystem
@@ -131,14 +131,14 @@ class DifyCliConfig(BaseModel):
cls,
session: CliApiSession,
tenant_id: str,
artifact: ToolArtifact,
tool_deps: ToolDependencies,
) -> DifyCliConfig:
from configs import dify_config
cli_api_url = dify_config.CLI_API_URL
tools: list[Tool] = []
for dependency in artifact.dependencies:
for dependency in tool_deps.dependencies:
tool = ToolManager.get_tool_runtime(
tenant_id=tenant_id,
provider_type=dependency.type,
@@ -155,7 +155,7 @@ class DifyCliConfig(BaseModel):
cli_api_session_id=session.id,
cli_api_secret=session.secret,
),
tool_references=[DifyCliToolReference.create_from_tool_reference(ref) for ref in artifact.references],
tool_references=[DifyCliToolReference.create_from_tool_reference(ref) for ref in tool_deps.references],
tools=[DifyCliToolConfig.create_from_tool(tool) for tool in tools],
)

View File

@@ -7,7 +7,7 @@ from types import TracebackType
from core.sandbox.sandbox import Sandbox
from core.session.cli_api import CliApiSession, CliApiSessionManager
from core.skill.entities.tool_artifact import ToolArtifact
from core.skill.entities.tool_dependencies import ToolDependencies
from core.virtual_environment.__base.helpers import pipeline
from ..bash.dify_cli import DifyCliConfig
@@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
class SandboxBashSession:
def __init__(self, *, sandbox: Sandbox, node_id: str, tools: ToolArtifact | None) -> None:
def __init__(self, *, sandbox: Sandbox, node_id: str, tools: ToolDependencies | None) -> None:
self._sandbox = sandbox
self._node_id = node_id
self._tools = tools
@@ -49,7 +49,7 @@ class SandboxBashSession:
def _setup_node_tools_directory(
self,
node_id: str,
tools: ToolArtifact,
tools: ToolDependencies,
cli_api_session: CliApiSession,
) -> str | None:
node_tools_path = f"{DifyCli.TOOLS_ROOT}/{node_id}"
@@ -63,7 +63,7 @@ class SandboxBashSession:
)
config_json = json.dumps(
DifyCliConfig.create(session=cli_api_session, tenant_id=self._tenant_id, artifact=tools).model_dump(
DifyCliConfig.create(session=cli_api_session, tenant_id=self._tenant_id, tool_deps=tools).model_dump(
mode="json"
),
ensure_ascii=False,

View File

@@ -45,8 +45,6 @@ class DifyCliInitializer(SandboxInitializer):
vm.upload_file(DifyCli.PATH, BytesIO(binary.path.read_bytes()))
# Use 'cp' with mode preservation workaround: copy file to itself to claim ownership,
# then use 'install' to set executable permission
pipeline(vm).add(
[
"sh",
@@ -60,19 +58,18 @@ class DifyCliInitializer(SandboxInitializer):
logger.info("Dify CLI uploaded to sandbox, path=%s", DifyCli.PATH)
artifact = SkillManager.load_artifact(self._tenant_id, self._app_id, self._assets_id)
if artifact is None or not artifact.get_tool_artifact().is_empty:
logger.info("No tools found in artifact for assets_id=%s", self._assets_id)
bundle = SkillManager.load_bundle(self._tenant_id, self._app_id, self._assets_id)
if bundle is None or not bundle.get_tool_dependencies().is_empty():
logger.info("No tools found in bundle for assets_id=%s", self._assets_id)
return
# FIXME(Mairuis): store it in workflow context
self._cli_api_session = CliApiSessionManager().create(tenant_id=self._tenant_id, user_id=self._user_id)
pipeline(vm).add(
["mkdir", "-p", DifyCli.GLOBAL_TOOLS_PATH], error_message="Failed to create global tools dir"
).execute(raise_on_error=True)
config = DifyCliConfig.create(self._cli_api_session, self._tenant_id, artifact.get_tool_artifact())
config = DifyCliConfig.create(self._cli_api_session, self._tenant_id, bundle.get_tool_dependencies())
config_json = json.dumps(config.model_dump(mode="json"), ensure_ascii=False)
config_path = f"{DifyCli.GLOBAL_TOOLS_PATH}/{DifyCli.CONFIG_FILENAME}"
vm.upload_file(config_path, BytesIO(config_json.encode("utf-8")))

View File

@@ -25,19 +25,19 @@ class SkillInitializer(SandboxInitializer):
self._assets_id = assets_id
def initialize(self, sandbox: Sandbox) -> None:
artifact_set = SkillManager.load_artifact(
bundle = SkillManager.load_bundle(
self._tenant_id,
self._app_id,
self._assets_id,
)
if artifact_set is None:
if bundle is None:
raise ValueError(
f"No skill artifact set found for tenant_id={self._tenant_id},"
f"No skill bundle found for tenant_id={self._tenant_id},"
f"app_id={self._app_id}, "
f"assets_id={self._assets_id} "
)
sandbox.attrs.set(
SkillAttrs.ARTIFACT_SET,
artifact_set,
SkillAttrs.BUNDLE,
bundle,
)

View File

@@ -134,6 +134,7 @@ class SandboxManager:
@classmethod
def delete_storage(cls, tenant_id: str, user_id: str) -> None:
# FIXME(Mairuis): move to SandboxArtifactService
storage = ArchiveSandboxStorage(tenant_id, SandboxBuilder.draft_id(user_id))
storage.delete()

View File

@@ -1,11 +1,11 @@
from .constants import SkillAttrs
from .entities import ToolArtifact, ToolDependency, ToolReference
from .entities import ToolDependencies, ToolDependency, ToolReference
from .skill_manager import SkillManager
__all__ = [
"SkillAttrs",
"SkillManager",
"ToolArtifact",
"ToolDependencies",
"ToolDependency",
"ToolReference",
]

View File

@@ -1,7 +1,6 @@
from core.skill.entities.skill_artifact_set import SkillArtifactSet
from core.skill.entities.skill_bundle import SkillBundle
from libs.attr_map import AttrKey
class SkillAttrs:
# Skill bundle
ARTIFACT_SET = AttrKey("skill_artifact_set", SkillArtifactSet)
BUNDLE = AttrKey("skill_bundle", SkillBundle)

View File

@@ -1,6 +1,6 @@
from .file_artifact import FilesArtifact
from .skill_artifact import SkillArtifact, SkillSourceInfo
from .skill_artifact_set import SkillArtifactSet
from .asset_references import AssetReferences
from .skill_bundle import SkillBundle
from .skill_bundle_entry import SkillBundleEntry, SourceInfo
from .skill_document import SkillDocument
from .skill_metadata import (
FileReference,
@@ -9,18 +9,18 @@ from .skill_metadata import (
ToolFieldConfig,
ToolReference,
)
from .tool_artifact import ToolArtifact, ToolDependency
from .tool_dependencies import ToolDependencies, ToolDependency
__all__ = [
"AssetReferences",
"FileReference",
"FilesArtifact",
"SkillArtifact",
"SkillArtifactSet",
"SkillBundle",
"SkillBundleEntry",
"SkillDocument",
"SkillMetadata",
"SkillSourceInfo",
"ToolArtifact",
"SourceInfo",
"ToolConfiguration",
"ToolDependencies",
"ToolDependency",
"ToolFieldConfig",
"ToolReference",

View File

@@ -3,11 +3,7 @@ from pydantic import BaseModel, ConfigDict, Field
from core.skill.entities.skill_metadata import FileReference
class FilesArtifact(BaseModel):
"""
File artifact - contains all file references (transitive closure)
"""
class AssetReferences(BaseModel):
model_config = ConfigDict(extra="forbid")
references: list[FileReference] = Field(default_factory=list, description="All file references")
references: list[FileReference] = Field(default_factory=list)

View File

@@ -1,30 +0,0 @@
from pydantic import BaseModel, ConfigDict, Field
from core.skill.entities.file_artifact import FilesArtifact
from core.skill.entities.tool_artifact import ToolArtifact
class SkillSourceInfo(BaseModel):
"""Source file information for change detection."""
model_config = ConfigDict(extra="forbid")
asset_id: str = Field(description="Asset ID of the source skill file")
content_digest: str = Field(description="Hash of the original content for change detection")
class SkillArtifact(BaseModel):
"""
Compiled artifact for a single skill.
Contains the transitive closure of all tool and file dependencies,
plus the resolved content with all references replaced.
"""
model_config = ConfigDict(extra="forbid")
skill_id: str = Field(description="Unique identifier for this skill")
source: SkillSourceInfo = Field(description="Source file information")
tools: ToolArtifact = Field(description="All tool dependencies (transitive closure)")
files: FilesArtifact = Field(description="All file references (transitive closure)")
content: str = Field(description="Resolved content with all references replaced")

View File

@@ -3,27 +3,19 @@ from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field
from core.skill.entities.skill_artifact import SkillArtifact
from core.skill.entities.skill_bundle_entry import SkillBundleEntry
from core.skill.entities.skill_metadata import ToolReference
from core.skill.entities.tool_artifact import ToolArtifact, ToolDependency
from core.skill.entities.tool_dependencies import ToolDependencies, ToolDependency
class SkillArtifactSet(BaseModel):
"""
Compiled index for an entire skill project.
- Corresponds to a single JSON file in S3
- Load once, query multiple times
- All persistence operations handled by SkillManager
"""
class SkillBundle(BaseModel):
model_config = ConfigDict(extra="forbid")
assets_id: str = Field(description="Assets ID this artifact set belongs to")
assets_id: str = Field(description="Assets ID this bundle belongs to")
schema_version: int = Field(default=1, description="Schema version for forward compatibility")
built_at: datetime | None = Field(default=None, description="Build timestamp")
items: dict[str, SkillArtifact] = Field(default_factory=dict, description="skill_id -> SkillArtifact")
entries: dict[str, SkillBundleEntry] = Field(default_factory=dict, description="skill_id -> SkillBundleEntry")
dependency_graph: dict[str, list[str]] = Field(
default_factory=dict,
@@ -35,14 +27,14 @@ class SkillArtifactSet(BaseModel):
description="skill_id -> list of skill_ids that depend on it",
)
def get(self, skill_id: str) -> SkillArtifact | None:
return self.items.get(skill_id)
def get(self, skill_id: str) -> SkillBundleEntry | None:
return self.entries.get(skill_id)
def upsert(self, artifact: SkillArtifact) -> None:
self.items[artifact.skill_id] = artifact
def upsert(self, entry: SkillBundleEntry) -> None:
self.entries[entry.skill_id] = entry
def remove(self, skill_id: str) -> None:
self.items.pop(skill_id, None)
self.entries.pop(skill_id, None)
self.dependency_graph.pop(skill_id, None)
self.reverse_graph.pop(skill_id, None)
for deps in self.reverse_graph.values():
@@ -66,13 +58,13 @@ class SkillArtifactSet(BaseModel):
queue.append(dependent)
return result
def subset(self, skill_ids: Iterable[str]) -> "SkillArtifactSet":
def subset(self, skill_ids: Iterable[str]) -> "SkillBundle":
skill_id_set = set(skill_ids)
return SkillArtifactSet(
return SkillBundle(
assets_id=self.assets_id,
schema_version=self.schema_version,
built_at=self.built_at,
items={sid: self.items[sid] for sid in skill_id_set if sid in self.items},
entries={sid: self.entries[sid] for sid in skill_id_set if sid in self.entries},
dependency_graph={
sid: [dep for dep in deps if dep in skill_id_set]
for sid, deps in self.dependency_graph.items()
@@ -85,21 +77,21 @@ class SkillArtifactSet(BaseModel):
},
)
def get_tool_artifact(self) -> ToolArtifact:
def get_tool_dependencies(self) -> ToolDependencies:
dependencies: dict[str, ToolDependency] = {}
references: dict[str, ToolReference] = {}
for artifact in self.items.values():
for dep in artifact.tools.dependencies:
for entry in self.entries.values():
for dep in entry.tools.dependencies:
key = f"{dep.provider}.{dep.tool_name}"
if key not in dependencies:
dependencies[key] = dep
for ref in artifact.tools.references:
for ref in entry.tools.references:
if ref.uuid not in references:
references[ref.uuid] = ref
return ToolArtifact(
return ToolDependencies(
dependencies=list(dependencies.values()),
references=list(references.values()),
)

View File

@@ -0,0 +1,21 @@
from pydantic import BaseModel, ConfigDict, Field
from core.skill.entities.asset_references import AssetReferences
from core.skill.entities.tool_dependencies import ToolDependencies
class SourceInfo(BaseModel):
model_config = ConfigDict(extra="forbid")
asset_id: str = Field(description="Asset ID of the source skill file")
content_digest: str = Field(description="Hash of the original content for change detection")
class SkillBundleEntry(BaseModel):
model_config = ConfigDict(extra="forbid")
skill_id: str = Field(description="Unique identifier for this skill")
source: SourceInfo = Field(description="Source file information")
tools: ToolDependencies = Field(description="All tool dependencies (transitive closure)")
files: AssetReferences = Field(description="All file references (transitive closure)")
content: str = Field(description="Resolved content with all references replaced")

View File

@@ -12,7 +12,7 @@ class ToolDependency(BaseModel):
tool_name: str
class ToolArtifact(BaseModel):
class ToolDependencies(BaseModel):
model_config = ConfigDict(extra="forbid")
dependencies: list[ToolDependency] = Field(default_factory=list)
@@ -21,9 +21,9 @@ class ToolArtifact(BaseModel):
def is_empty(self) -> bool:
return not self.dependencies and not self.references
def filter(self, tools: list[tuple[str, str]]) -> "ToolArtifact":
def filter(self, tools: list[tuple[str, str]]) -> "ToolDependencies":
tool_names = {f"{provider}.{tool_name}" for provider, tool_name in tools}
return ToolArtifact(
return ToolDependencies(
dependencies=[
dependency
for dependency in self.dependencies
@@ -36,7 +36,7 @@ class ToolArtifact(BaseModel):
],
)
def merge(self, other: "ToolArtifact") -> "ToolArtifact":
def merge(self, other: "ToolDependencies") -> "ToolDependencies":
dep_map: dict[str, ToolDependency] = {}
for dep in self.dependencies:
key = f"{dep.provider}.{dep.tool_name}"
@@ -53,7 +53,7 @@ class ToolArtifact(BaseModel):
if ref.uuid not in ref_map:
ref_map[ref.uuid] = ref
return ToolArtifact(
return ToolDependencies(
dependencies=list(dep_map.values()),
references=list(ref_map.values()),
)
)

View File

@@ -6,9 +6,9 @@ from datetime import UTC, datetime
from typing import Any
from core.app.entities.app_asset_entities import AppAssetFileTree
from core.skill.entities.file_artifact import FilesArtifact
from core.skill.entities.skill_artifact import SkillArtifact, SkillSourceInfo
from core.skill.entities.skill_artifact_set import SkillArtifactSet
from core.skill.entities.asset_references import AssetReferences
from core.skill.entities.skill_bundle import SkillBundle
from core.skill.entities.skill_bundle_entry import SkillBundleEntry, SourceInfo
from core.skill.entities.skill_document import SkillDocument
from core.skill.entities.skill_metadata import (
FileReference,
@@ -16,7 +16,7 @@ from core.skill.entities.skill_metadata import (
ToolConfiguration,
ToolReference,
)
from core.skill.entities.tool_artifact import ToolArtifact, ToolDependency
from core.skill.entities.tool_dependencies import ToolDependencies, ToolDependency
from core.tools.entities.tool_entities import ToolProviderType
logger = logging.getLogger(__name__)
@@ -26,17 +26,6 @@ FILE_REFERENCE_PATTERN = re.compile(r"§\[file\]\.\[([^\]]+)\]\.\[([^\]]+)\]§")
class SkillCompiler:
"""
Stateless skill compiler.
Responsibilities:
- Parse raw metadata dict into SkillMetadata
- Parse direct dependencies from skill content
- Compute transitive closure based on existing artifact set
- Resolve content by replacing references
- Generate SkillArtifact
"""
def _parse_metadata(self, content: str, raw_metadata: Mapping[str, Any]) -> SkillMetadata:
tools_raw: dict[str, Any] = dict(raw_metadata.get("tools", {}))
tools: dict[str, ToolReference] = {}
@@ -76,8 +65,8 @@ class SkillCompiler:
documents: list[SkillDocument],
file_tree: AppAssetFileTree,
assets_id: str,
) -> SkillArtifactSet:
artifact_set = SkillArtifactSet(
) -> SkillBundle:
bundle = SkillBundle(
assets_id=assets_id,
built_at=datetime.now(UTC),
)
@@ -89,26 +78,26 @@ class SkillCompiler:
metadata = self._parse_metadata(doc.content, doc.metadata)
parsed_metadata[doc.skill_id] = metadata
direct_skill_refs = self._extract_skill_refs(metadata, doc_map)
artifact_set.dependency_graph[doc.skill_id] = list(direct_skill_refs)
bundle.dependency_graph[doc.skill_id] = list(direct_skill_refs)
for ref_id in direct_skill_refs:
if ref_id not in artifact_set.reverse_graph:
artifact_set.reverse_graph[ref_id] = []
artifact_set.reverse_graph[ref_id].append(doc.skill_id)
if ref_id not in bundle.reverse_graph:
bundle.reverse_graph[ref_id] = []
bundle.reverse_graph[ref_id].append(doc.skill_id)
for doc in documents:
metadata = parsed_metadata[doc.skill_id]
artifact = self._compile_single(doc, metadata, artifact_set, parsed_metadata, file_tree)
artifact_set.upsert(artifact)
entry = self._compile_single(doc, metadata, bundle, parsed_metadata, file_tree)
bundle.upsert(entry)
return artifact_set
return bundle
def compile_one(
self,
artifact_set: SkillArtifactSet,
bundle: SkillBundle,
document: SkillDocument,
file_tree: AppAssetFileTree,
all_documents: dict[str, SkillDocument] | None = None,
) -> SkillArtifact:
) -> SkillBundleEntry:
doc_map = all_documents or {}
if document.skill_id not in doc_map:
doc_map[document.skill_id] = document
@@ -119,25 +108,25 @@ class SkillCompiler:
metadata = parsed_metadata[document.skill_id]
direct_skill_refs = self._extract_skill_refs(metadata, doc_map)
artifact_set.dependency_graph[document.skill_id] = list(direct_skill_refs)
bundle.dependency_graph[document.skill_id] = list(direct_skill_refs)
for ref_id in direct_skill_refs:
if ref_id not in artifact_set.reverse_graph:
artifact_set.reverse_graph[ref_id] = []
if document.skill_id not in artifact_set.reverse_graph[ref_id]:
artifact_set.reverse_graph[ref_id].append(document.skill_id)
if ref_id not in bundle.reverse_graph:
bundle.reverse_graph[ref_id] = []
if document.skill_id not in bundle.reverse_graph[ref_id]:
bundle.reverse_graph[ref_id].append(document.skill_id)
return self._compile_single(document, metadata, artifact_set, parsed_metadata, file_tree)
return self._compile_single(document, metadata, bundle, parsed_metadata, file_tree)
def _compile_single(
self,
document: SkillDocument,
metadata: SkillMetadata,
artifact_set: SkillArtifactSet,
bundle: SkillBundle,
parsed_metadata: dict[str, SkillMetadata],
file_tree: AppAssetFileTree,
) -> SkillArtifact:
) -> SkillBundleEntry:
all_tools, all_files = self._compute_transitive_closure(
document.skill_id, artifact_set, parsed_metadata
document.skill_id, bundle, parsed_metadata
)
current_node = file_tree.get(document.skill_id)
@@ -148,17 +137,17 @@ class SkillCompiler:
content_digest = hashlib.sha256(document.content.encode("utf-8")).hexdigest()
return SkillArtifact(
return SkillBundleEntry(
skill_id=document.skill_id,
source=SkillSourceInfo(
source=SourceInfo(
asset_id=document.skill_id,
content_digest=content_digest,
),
tools=ToolArtifact(
tools=ToolDependencies(
dependencies=list(all_tools.values()),
references=list(metadata.tools.values()),
),
files=FilesArtifact(
files=AssetReferences(
references=list(all_files.values()),
),
content=resolved_content,
@@ -178,7 +167,7 @@ class SkillCompiler:
def _compute_transitive_closure(
self,
skill_id: str,
artifact_set: SkillArtifactSet,
bundle: SkillBundle,
parsed_metadata: dict[str, SkillMetadata],
) -> tuple[dict[str, ToolDependency], dict[str, FileReference]]:
all_tools: dict[str, ToolDependency] = {}
@@ -195,13 +184,13 @@ class SkillCompiler:
metadata = parsed_metadata.get(current_id)
if metadata is None:
existing_artifact = artifact_set.get(current_id)
if existing_artifact:
for dep in existing_artifact.tools.dependencies:
existing_entry = bundle.get(current_id)
if existing_entry:
for dep in existing_entry.tools.dependencies:
key = f"{dep.provider}.{dep.tool_name}"
if key not in all_tools:
all_tools[key] = dep
for file_ref in existing_artifact.files.references:
for file_ref in existing_entry.files.references:
if file_ref.asset_id not in all_files:
all_files[file_ref.asset_id] = file_ref
continue
@@ -219,7 +208,7 @@ class SkillCompiler:
if file_ref.asset_id not in all_files:
all_files[file_ref.asset_id] = file_ref
for dep_id in artifact_set.dependency_graph.get(current_id, []):
for dep_id in bundle.dependency_graph.get(current_id, []):
if dep_id not in visited:
queue.append(dep_id)

View File

@@ -1,28 +1,28 @@
from core.app_assets.paths import AssetPaths
from core.skill.entities.skill_artifact_set import SkillArtifactSet
from core.skill.entities.skill_bundle import SkillBundle
from extensions.ext_storage import storage
class SkillManager:
@staticmethod
def load_artifact(
def load_bundle(
tenant_id: str,
app_id: str,
assets_id: str,
) -> SkillArtifactSet | None:
) -> SkillBundle | None:
key = AssetPaths.build_skill_artifact_set(tenant_id, app_id, assets_id)
try:
data = storage.load_once(key)
return SkillArtifactSet.model_validate_json(data)
return SkillBundle.model_validate_json(data)
except Exception:
return None
@staticmethod
def save_artifact(
def save_bundle(
tenant_id: str,
app_id: str,
assets_id: str,
artifact_set: SkillArtifactSet,
bundle: SkillBundle,
) -> None:
key = AssetPaths.build_skill_artifact_set(tenant_id, app_id, assets_id)
storage.save(key, artifact_set.model_dump_json(indent=2).encode("utf-8"))
storage.save(key, bundle.model_dump_json(indent=2).encode("utf-8"))

View File

@@ -55,10 +55,11 @@ from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.sandbox import Sandbox
from core.sandbox.bash.session import SandboxBashSession
from core.sandbox.entities.config import AppAssets
from core.skill.constants import SkillAttrs
from core.skill.entities.skill_artifact_set import SkillArtifactSet
from core.skill.entities.skill_bundle import SkillBundle
from core.skill.entities.skill_document import SkillDocument
from core.skill.entities.tool_artifact import ToolArtifact
from core.skill.entities.tool_dependencies import ToolDependencies
from core.skill.skill_compiler import SkillCompiler
from core.tools.__base.tool import Tool
from core.tools.signature import sign_upload_file
@@ -298,14 +299,14 @@ class LLMNode(Node[LLMNodeData]):
sandbox = self.graph_runtime_state.sandbox
if sandbox:
tool_artifact = self._extract_tool_artifact()
tool_dependencies = self._extract_tool_dependencies()
generator = self._invoke_llm_with_sandbox(
sandbox=sandbox,
model_instance=model_instance,
prompt_messages=prompt_messages,
stop=stop,
variable_pool=variable_pool,
tool_artifact=tool_artifact,
tool_dependencies=tool_dependencies,
)
elif self.tool_call_enabled:
generator = self._invoke_llm_with_tools(
@@ -1491,11 +1492,10 @@ class LLMNode(Node[LLMNodeData]):
) -> Sequence[PromptMessage]:
prompt_messages: list[PromptMessage] = []
# Extract skill compilation context from sandbox if available
artifact_set: SkillArtifactSet | None = None
bundle: SkillBundle | None = None
file_tree: AppAssetFileTree | None = None
if sandbox:
artifact_set = sandbox.attrs.get(SkillAttrs.ARTIFACT_SET)
bundle = sandbox.attrs.get(SkillAttrs.BUNDLE)
file_tree = sandbox.attrs.get(AppAssetsAttrs.FILE_TREE)
for message in messages:
@@ -1506,28 +1506,26 @@ class LLMNode(Node[LLMNodeData]):
variable_pool=variable_pool,
)
# Compile skill references after jinja2 rendering
if artifact_set is not None and file_tree is not None:
skill_artifact = SkillCompiler().compile_one(
artifact_set,
SkillDocument(skill_id="anonymous", content=result_text, metadata={}),
file_tree,
if bundle is not None and file_tree is not None:
skill_entry = SkillCompiler().compile_one(
bundle=bundle,
document=SkillDocument(skill_id="anonymous", content=result_text, metadata={}),
file_tree=file_tree,
base_path=AppAssets.PATH,
)
result_text = skill_artifact.content
result_text = skill_entry.content
prompt_message = _combine_message_content_with_role(
contents=[TextPromptMessageContent(data=result_text)], role=message.role
)
prompt_messages.append(prompt_message)
else:
# Get segment group from basic message
if context:
template = message.text.replace("{#context#}", context)
else:
template = message.text
segment_group = variable_pool.convert_template(template)
# Process segments for images
file_contents = []
for segment in segment_group.value:
if isinstance(segment, ArrayFileSegment):
@@ -1545,17 +1543,16 @@ class LLMNode(Node[LLMNodeData]):
)
file_contents.append(file_content)
# Create message with text from all segments
plain_text = segment_group.text
# Compile skill references after context and variable substitution
if plain_text and artifact_set is not None and file_tree is not None:
skill_artifact = SkillCompiler().compile_one(
artifact_set,
SkillDocument(skill_id="anonymous", content=plain_text, metadata={}),
file_tree,
if plain_text and bundle is not None and file_tree is not None:
skill_entry = SkillCompiler().compile_one(
bundle=bundle,
document=SkillDocument(skill_id="anonymous", content=plain_text, metadata={}),
file_tree=file_tree,
base_path=AppAssets.PATH,
)
plain_text = skill_artifact.content
plain_text = skill_entry.content
if plain_text:
prompt_message = _combine_message_content_with_role(
@@ -1810,27 +1807,30 @@ class LLMNode(Node[LLMNodeData]):
generation_data,
)
def _extract_tool_artifact(self) -> ToolArtifact | None:
def _extract_tool_dependencies(self) -> ToolDependencies | None:
"""Extract tool artifact from prompt template."""
sandbox = self.graph_runtime_state.sandbox
if not sandbox:
raise LLMNodeError("Sandbox not found")
artifact_set = sandbox.attrs.get(SkillAttrs.ARTIFACT_SET)
bundle = sandbox.attrs.get(SkillAttrs.BUNDLE)
file_tree = sandbox.attrs.get(AppAssetsAttrs.FILE_TREE)
tool_artifacts: list[ToolArtifact] = []
tool_deps_list: list[ToolDependencies] = []
for prompt in self.node_data.prompt_template:
if isinstance(prompt, LLMNodeChatModelMessage):
skill_artifact = SkillCompiler().compile_one(
artifact_set, SkillDocument(skill_id="anonymous", content=prompt.text, metadata={}), file_tree
skill_entry = SkillCompiler().compile_one(
bundle=bundle,
document=SkillDocument(skill_id="anonymous", content=prompt.text, metadata={}),
file_tree=file_tree,
base_path=AppAssets.PATH,
)
tool_artifacts.append(skill_artifact.tools)
tool_deps_list.append(skill_entry.tools)
if len(tool_artifacts) == 0:
if len(tool_deps_list) == 0:
return None
return reduce(lambda x, y: x.merge(y), tool_artifacts)
return reduce(lambda x, y: x.merge(y), tool_deps_list)
def _invoke_llm_with_tools(
self,
@@ -1883,11 +1883,11 @@ class LLMNode(Node[LLMNodeData]):
prompt_messages: Sequence[PromptMessage],
stop: Sequence[str] | None,
variable_pool: VariablePool,
tool_artifact: ToolArtifact | None,
tool_dependencies: ToolDependencies | None,
) -> Generator[NodeEventBase, None, LLMGenerationData]:
result: LLMGenerationData | None = None
with SandboxBashSession(sandbox=sandbox, node_id=self.id, tools=tool_artifact) as session:
with SandboxBashSession(sandbox=sandbox, node_id=self.id, tools=tool_dependencies) as session:
prompt_files = self._extract_prompt_files(variable_pool)
model_features = self._get_model_features(model_instance)

View File

@@ -1,7 +1,7 @@
from typing import Any
from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode
from core.skill.entities.skill_artifact_set import SkillArtifactSet
from core.skill.entities.skill_bundle import SkillBundle
from core.skill.entities.skill_document import SkillDocument
from core.skill.entities.skill_metadata import FileReference, ToolConfiguration, ToolReference
from core.skill.skill_compiler import SkillCompiler
@@ -48,7 +48,7 @@ class TestSkillCompilerBasic:
# then
assert artifact_set.assets_id == "assets-1"
assert len(artifact_set.items) == 1
assert len(artifact_set.entries) == 1
artifact = artifact_set.get("skill-1")
assert artifact is not None
@@ -235,7 +235,7 @@ class TestSkillCompilerTransitiveDependencies:
assert tool_names_c == {"tool_c"}
class TestSkillArtifactSetQueries:
class TestSkillBundleQueries:
def test_recompile_group_ids(self):
# given
# skill-a -> skill-b -> skill-c
@@ -774,11 +774,11 @@ class TestSkillCompilerComplexScenarios:
# when - serialize and deserialize
json_str = original.model_dump_json()
restored = SkillArtifactSet.model_validate_json(json_str)
restored = SkillBundle.model_validate_json(json_str)
# then - all data preserved
assert restored.assets_id == original.assets_id
assert len(restored.items) == len(original.items)
assert len(restored.entries) == len(original.entries)
assert restored.dependency_graph == original.dependency_graph
assert restored.reverse_graph == original.reverse_graph
@@ -836,7 +836,7 @@ class TestSkillCompilerComplexScenarios:
subset = full_set.subset(["skill-b", "skill-c"])
# then
assert len(subset.items) == 2
assert len(subset.entries) == 2
assert subset.get("skill-b") is not None
assert subset.get("skill-c") is not None
assert subset.get("skill-a") is None