- New read_file skill: paginated file reading with line ranges, path restricted to /workspace, binary detection, directory listing - Session persistence via SQLite + FTS5: conversation history survives agent restarts, last N messages restored into deque on boot, auto-prune to 1000 messages - Update truncation hint to reference read_file instead of run_command - New scripts/update.sh for patching rootfs + rebuilding snapshot Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
176 lines
5.2 KiB
Python
176 lines
5.2 KiB
Python
"""Skill discovery, parsing, and execution."""
|
|
|
|
import os
|
|
import re
|
|
import json
|
|
import subprocess
|
|
import time
|
|
|
|
|
|
def log(msg):
    """Fallback logger used until agent.py injects its own via set_logger()."""
    line = f"[skills] {msg}"
    print(line, flush=True)
|
|
|
|
|
|
def set_logger(fn):
    """Rebind the module-level ``log`` to *fn* (called by agent.py at init)."""
    global log
    log = fn
|
|
|
|
|
|
# Skill outputs longer than this many characters are spilled to a file in
# the workspace (see execute_skill); only a preview is returned inline.
LARGE_OUTPUT_THRESHOLD = 2000

# Counter used to name spilled output files (output_1.txt, output_2.txt, ...).
# NOTE(review): resets to 0 on process restart, so files from a previous run
# may be overwritten — acceptable here since outputs are transient.
_output_counter = 0
|
|
|
|
|
|
def parse_skill_md(path):
    """Parse a SKILL.md frontmatter into a tool definition.

    The frontmatter is the block delimited by ``---`` lines at the very top
    of the file, containing top-level ``key: value`` pairs plus an optional
    ``parameters:`` mapping. Parameter entries are recognized purely by
    indentation: a parameter name is indented >= 2 spaces, and its
    properties (type / description / required) are indented >= 4 spaces.

    Returns tool definition dict (OpenAI function-calling schema shape)
    or None on failure."""
    try:
        with open(path) as f:
            content = f.read()
    except Exception as e:
        # Best-effort: an unreadable skill is skipped, not fatal.
        log(f"Cannot read {path}: {e}")
        return None

    # Normalize Windows line endings so the ---\n...\n--- regex matches.
    content = content.replace("\r\n", "\n")

    # Frontmatter must start at byte 0 (re.match anchors at the beginning).
    match = re.match(r"^---\n(.*?)\n---", content, re.DOTALL)
    if not match:
        log(f"No frontmatter in {path}")
        return None

    fm = {}                 # top-level key -> value
    current_key = None      # last top-level key seen (gates parameter parsing)
    current_param = None    # parameter currently collecting properties
    params = {}             # parameter name -> {type, description, required, ...}

    for line in match.group(1).split("\n"):
        stripped = line.strip()
        # Skip blank lines and full-line comments.
        if not stripped or stripped.startswith("#"):
            continue

        indent = len(line) - len(line.lstrip())

        # Indented content is only meaningful inside the parameters mapping.
        if indent >= 2 and current_key == "parameters":
            if indent >= 4 and current_param:
                # Property of the current parameter, e.g. "    type: string".
                k, _, v = stripped.partition(":")
                k = k.strip()
                # Strip one layer of surrounding quotes (double, then single).
                v = v.strip().strip('"').strip("'")
                if k == "required":
                    # Coerce common truthy spellings to bool; anything else is False.
                    v = v.lower() in ("true", "yes", "1")
                params[current_param][k] = v
            elif ":" in stripped:
                # Parameter name line at indent 2-3, e.g. "  query:".
                param_name = stripped.rstrip(":").strip()
                current_param = param_name
                params[param_name] = {}
        elif ":" in line and indent == 0:
            # Top-level key, e.g. "name: web_search".
            k, _, v = line.partition(":")
            k = k.strip()
            v = v.strip().strip('"').strip("'")
            fm[k] = v
            current_key = k
            if k == "parameters":
                # Entering the parameters mapping; no parameter selected yet.
                current_param = None

    # 'name' is mandatory — it becomes the tool's function name.
    if "name" not in fm:
        log(f"No 'name' field in {path}")
        return None

    # Missing description is tolerated (warn only); schema gets "".
    if "description" not in fm:
        log(f"Warning: no 'description' in {path}")

    properties = {}
    required = []
    for pname, pdata in params.items():
        # Default unspecified parameter types to string.
        ptype = pdata.get("type", "string")
        if ptype not in ("string", "integer", "number", "boolean", "array", "object"):
            # Unknown types are warned about but still emitted as-is.
            log(f"Warning: unknown type '{ptype}' for param '{pname}' in {path}")
        properties[pname] = {
            "type": ptype,
            "description": pdata.get("description", ""),
        }
        if pdata.get("required", False):
            required.append(pname)

    # OpenAI-style function tool definition.
    return {
        "type": "function",
        "function": {
            "name": fm["name"],
            "description": fm.get("description", ""),
            "parameters": {
                "type": "object",
                "properties": properties,
                "required": required,
            },
        },
    }
|
|
|
|
|
|
def discover_skills(skill_dirs):
    """Scan skill directories and return (tool definitions, script paths).

    A usable skill is a subdirectory containing a parseable SKILL.md plus
    a run.py or run.sh entry point; anything missing either piece is
    silently skipped. Returns (list of tool defs, {tool name: script path}).
    """
    tools = []
    scripts = {}

    for base in skill_dirs:
        if not os.path.isdir(base):
            # Configured directory may simply not exist; nothing to scan.
            continue

        for entry in sorted(os.listdir(base)):
            skill_root = os.path.join(base, entry)
            manifest = os.path.join(skill_root, "SKILL.md")
            if not os.path.isfile(manifest):
                continue

            tool_def = parse_skill_md(manifest)
            if not tool_def:
                continue

            tool_name = tool_def["function"]["name"]

            # run.py wins over run.sh when both are present.
            for runner in ("run.py", "run.sh"):
                candidate = os.path.join(skill_root, runner)
                if os.path.isfile(candidate):
                    scripts[tool_name] = candidate
                    break

            # Only advertise tools that have an executable script registered.
            if tool_name in scripts:
                tools.append(tool_def)

    return tools, scripts
|
|
|
|
|
|
def execute_skill(script_path, args, workspace, config):
    """Execute a skill script with args as JSON on stdin.

    The script is run with python3 (for .py) or bash (anything else),
    with WORKSPACE and SEARX_URL exported in its environment. stdout and
    stderr are combined into one string. Large outputs are saved to a
    file under <workspace>/tool_outputs with a preview returned.

    Returns the output string; errors and timeouts are returned as
    bracketed messages rather than raised, so the agent loop never dies.
    """
    global _output_counter

    env = os.environ.copy()
    env["WORKSPACE"] = workspace
    env["SEARX_URL"] = config.get("searx_url", "https://searx.mymx.me")

    # Single source of truth for the timeout — previously the literal 120
    # was duplicated in subprocess.run() and in the timeout message, which
    # could silently drift apart.
    timeout_s = 120
    interpreter = "python3" if script_path.endswith(".py") else "bash"

    try:
        result = subprocess.run(
            [interpreter, script_path],
            input=json.dumps(args),
            capture_output=True,
            text=True,
            timeout=timeout_s,
            env=env,
        )
        output = result.stdout
        if result.stderr:
            output += f"\n[stderr] {result.stderr}"
        output = output.strip() or "[no output]"

        if len(output) > LARGE_OUTPUT_THRESHOLD:
            # Spill the full output to disk and hand back a preview so the
            # model's context isn't flooded; read_file can fetch the rest.
            output_dir = os.path.join(workspace, "tool_outputs")
            os.makedirs(output_dir, exist_ok=True)
            _output_counter += 1
            filepath = os.path.join(output_dir, f"output_{_output_counter}.txt")
            with open(filepath, "w") as f:
                f.write(output)
            preview = output[:1500]
            return f"{preview}\n\n[output truncated — full result ({len(output)} chars) saved to {filepath}. Use read_file to view it.]"

        return output
    except subprocess.TimeoutExpired:
        return f"[skill timed out after {timeout_s}s]"
    except Exception as e:
        # Deliberate catch-all: a broken skill must never crash the agent.
        return f"[skill error: {e}]"
|