Initial commit

This commit is contained in:
Zzzz
2026-04-27 18:40:30 +08:00
commit 2120774b05
112 changed files with 12308 additions and 0 deletions
+247
View File
@@ -0,0 +1,247 @@
#!/usr/bin/env python3
"""Trellis UserPromptSubmit hook: inject per-turn workflow breadcrumb.
Runs on every user prompt. Reads the active task (.trellis/.current-task)
and emits a short <workflow-state> block reminding the main AI what task
is active and its expected flow. Breadcrumb text is pulled from
workflow.md [workflow-state:STATUS] tag blocks (single source of truth
for users who fork the Trellis workflow), with hardcoded fallbacks so
the hook never breaks when workflow.md is missing or malformed.
Shared across all hook-capable platforms (Claude, Cursor, Codex, Qoder,
CodeBuddy, Droid, Gemini, Copilot). Kiro is not wired (no per-turn
hook entry point). Written to each platform's hooks directory via
writeSharedHooks() at init time.
Silent exit 0 cases (no output):
- No .trellis/ directory found (not a Trellis project)
- No .current-task file, or it's empty
- task.json malformed or missing status
Unknown status (no tag + no hardcoded fallback) emits a generic
breadcrumb rather than silent-exiting, so custom statuses surface in
the UI instead of appearing as "randomly broken".
"""
from __future__ import annotations
import json
import os
import re
import sys
from pathlib import Path
from typing import Optional, Tuple
# ---------------------------------------------------------------------------
# CWD-robust Trellis root discovery (fixes hook-path-robustness for this hook)
# ---------------------------------------------------------------------------
def find_trellis_root(start: Path) -> Optional[Path]:
    """Walk up from *start* to find the directory containing ``.trellis/``.

    Handles CWD drift: subdirectory launches, monorepo packages, etc.

    Args:
        start: Directory to begin the upward search from.

    Returns:
        The project root (first ancestor containing ``.trellis/``), or
        ``None`` if no such directory exists (caller silently no-ops).
    """
    cur = start.resolve()
    while True:
        if (cur / ".trellis").is_dir():
            return cur
        # Check the filesystem root itself before giving up; the previous
        # `while cur != cur.parent` loop exited without testing it.
        if cur == cur.parent:
            return None
        cur = cur.parent
# ---------------------------------------------------------------------------
# Active task discovery
# ---------------------------------------------------------------------------
def _normalize_task_ref(task_ref: str) -> str:
"""Normalize .current-task path ref.
Accepts:
- Absolute paths (left as-is)
- Windows-style backslashes (converted to forward slash)
- Legacy relative refs like "tasks/foo" (prefixed with .trellis/)
"""
normalized = task_ref.strip()
if not normalized:
return ""
path_obj = Path(normalized)
if path_obj.is_absolute():
return str(path_obj)
normalized = normalized.replace("\\", "/")
while normalized.startswith("./"):
normalized = normalized[2:]
if normalized.startswith("tasks/"):
normalized = f".trellis/{normalized}"
return normalized
def get_active_task(root: Path) -> Optional[Tuple[str, str]]:
    """Return ``(task_id, status)`` for the active task, else ``None``.

    Reads ``.trellis/.current-task`` (a path relative to *root*, e.g.
    ``.trellis/tasks/04-17-foo``) and then that task's ``task.json``.
    Backslashes are normalized so Windows refs work on Unix and back.
    """
    pointer = root / ".trellis" / ".current-task"
    if not pointer.is_file():
        return None
    try:
        raw_ref = pointer.read_text(encoding="utf-8").strip()
    except OSError:
        return None
    ref = _normalize_task_ref(raw_ref)
    if not ref:
        return None
    ref_path = Path(ref)
    task_dir = ref_path if ref_path.is_absolute() else root / ref_path
    manifest = task_dir / "task.json"
    if not manifest.is_file():
        return None
    try:
        meta = json.loads(manifest.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError):
        return None
    status = meta.get("status", "")
    if not (isinstance(status, str) and status):
        return None
    return (meta.get("id") or task_dir.name), status
# ---------------------------------------------------------------------------
# Breadcrumb loading: parse workflow.md, fall back to hardcoded defaults
# ---------------------------------------------------------------------------
# Supports STATUS values with letters, digits, underscores, hyphens
# (so "in-review" / "blocked-by-team" work alongside "in_progress").
# The \1 backreference requires the closing tag to name the same STATUS as
# the opening tag; DOTALL lets the body span multiple lines.
_TAG_RE = re.compile(
    r"\[workflow-state:([A-Za-z0-9_-]+)\]\s*\n(.*?)\n\s*\[/workflow-state:\1\]",
    re.DOTALL,
)
# Hardcoded defaults for built-in Trellis statuses. Used when workflow.md is
# missing, malformed, or lacks the tag for this status.
#
# `no_task` is a pseudo-status emitted when .current-task is missing — it keeps
# the Next-Action reminder flowing per-turn even without an active task.
#
# NOTE: the bodies below are runtime strings injected verbatim into the AI's
# context (including the Chinese trigger-word list); do not reword them here
# without updating the matching [workflow-state:...] blocks in workflow.md.
_FALLBACK_BREADCRUMBS: dict[str, str] = {
    "no_task": (
        "No active task.\n"
        "Trigger words in the user message that REQUIRE creating a task "
        "(non-negotiable, do NOT self-exempt): 重构 / 抽成 / 独立 / 分发 / "
        "拆出来 / 搞一个 / 做成 / 接入 / 集成 / refactor / rewrite / extract / "
        "productize / publish / build X / design Y.\n"
        "Task is NOT required ONLY if ALL three hold: (a) zero file writes "
        "this turn, (b) answer fits in one reply with no multi-round plan, "
        "(c) no research beyond reading 1-2 repo files.\n"
        "When in doubt: create task. Over-tasking is cheap; under-tasking "
        "leaks plans and research into main context.\n"
        "Flow: load `trellis-brainstorm` skill → it creates the task via "
        "`python3 ./.trellis/scripts/task.py create` and drives requirements Q&A. "
        "For research-heavy work (tool comparison, docs, cross-platform survey), "
        "spawn `trellis-research` sub-agents via Task tool — NEVER do 3+ inline "
        "WebFetch/WebSearch/`gh api` calls in the main conversation."
    ),
    "planning": (
        "Complete prd.md via trellis-brainstorm skill; then run task.py start.\n"
        "Research belongs in `{task_dir}/research/*.md`, written by "
        "`trellis-research` sub-agents. Do NOT inline WebFetch/WebSearch in "
        "main session — PRD only links to research files."
    ),
    "in_progress": (
        "Flow: trellis-implement → trellis-check → trellis-update-spec → finish\n"
        "Next required action: inspect conversation history + git status, then "
        "execute the next uncompleted step in that sequence.\n"
        "For agent-capable platforms, do NOT edit code in the main session; "
        "dispatch `trellis-implement` for implementation and dispatch "
        "`trellis-check` before reporting completion."
    ),
    "completed": (
        "User commits changes; then run task.py archive."
    ),
}
def load_breadcrumbs(root: Path) -> dict[str, str]:
    """Parse workflow.md for ``[workflow-state:STATUS]`` blocks.

    Returns ``{status: body_text}``. The hardcoded defaults seed the map
    so built-in statuses always resolve even when workflow.md is absent
    or malformed; non-empty tag bodies found in the file override them.
    Custom statuses without tags fall to the generic breadcrumb
    downstream (see build_breadcrumb).
    """
    breadcrumbs = dict(_FALLBACK_BREADCRUMBS)
    workflow_file = root / ".trellis" / "workflow.md"
    if not workflow_file.is_file():
        return breadcrumbs
    try:
        text = workflow_file.read_text(encoding="utf-8")
    except OSError:
        return breadcrumbs
    for tag in _TAG_RE.finditer(text):
        tag_body = tag.group(2).strip()
        if tag_body:
            breadcrumbs[tag.group(1)] = tag_body
    return breadcrumbs
def build_breadcrumb(
    task_id: Optional[str], status: str, templates: dict[str, str]
) -> str:
    """Assemble the ``<workflow-state>...</workflow-state>`` block.

    - Known status (tag or fallback present) → detailed template body
    - Unknown status → generic "refer to workflow.md" pointer
    - ``no_task`` pseudo-status (*task_id* is None) → header omits task info
    """
    body = templates.get(status, "Refer to workflow.md for current step.")
    if task_id is None:
        header = f"Status: {status}"
    else:
        header = f"Task: {task_id} ({status})"
    return f"<workflow-state>\n{header}\n{body}\n</workflow-state>"
# ---------------------------------------------------------------------------
# Entry
# ---------------------------------------------------------------------------
def main() -> int:
    """Hook entry: read hook JSON from stdin, emit the breadcrumb JSON."""
    try:
        payload = json.load(sys.stdin)
    except (json.JSONDecodeError, ValueError):
        payload = {}
    root = find_trellis_root(Path(payload.get("cwd") or os.getcwd()))
    if root is None:
        # Not a Trellis project: silent no-op.
        return 0
    templates = load_breadcrumbs(root)
    active = get_active_task(root)
    if active is None:
        # No active task — still emit a breadcrumb nudging the AI toward
        # trellis-brainstorm + task.py create when the user describes real work.
        breadcrumb = build_breadcrumb(None, "no_task", templates)
    else:
        task_id, status = active
        breadcrumb = build_breadcrumb(task_id, status, templates=templates)
    print(
        json.dumps(
            {
                "hookSpecificOutput": {
                    "hookEventName": "UserPromptSubmit",
                    "additionalContext": breadcrumb,
                }
            }
        )
    )
    return 0


if __name__ == "__main__":
    sys.exit(main())
+332
View File
@@ -0,0 +1,332 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Codex Session Start Hook - Inject Trellis context into Codex sessions.
Output format follows Codex hook protocol:
stdout JSON → { hookSpecificOutput: { hookEventName: "SessionStart", additionalContext: "..." } }
"""
from __future__ import annotations
import json
import os
import subprocess
import sys
import warnings
from io import StringIO
from pathlib import Path
# Silence all library warnings: stdout must carry ONLY the hook's JSON payload.
warnings.filterwarnings("ignore")
# One-shot banner injected into the session context. The Chinese sentence is a
# runtime string the assistant must echo verbatim on its first reply — do not
# translate or reword it here.
FIRST_REPLY_NOTICE = """<first-reply-notice>
On the first visible assistant reply in this session, begin with exactly one short Chinese sentence:
Trellis SessionStart 已注入:workflow、当前任务状态、开发者身份、git 状态、active tasks、spec 索引已加载。
Then continue directly with the user's request. This notice is one-shot: do not repeat it after the first assistant reply in the same session.
</first-reply-notice>"""
def should_skip_injection() -> bool:
    """Return True when running non-interactively (CODEX_NON_INTERACTIVE=1)."""
    return os.environ.get("CODEX_NON_INTERACTIVE", "") == "1"
def configure_project_encoding(project_dir: Path) -> None:
    """Best-effort: apply Trellis' shared Windows stdio encoding fix.

    Puts ``.trellis/scripts`` on sys.path, then imports and calls its
    ``configure_encoding()``. Any failure (missing module, broken helper)
    is swallowed so JSON output is never blocked.
    """
    scripts = project_dir / ".trellis" / "scripts"
    if str(scripts) not in sys.path:
        sys.path.insert(0, str(scripts))
    try:
        from common import configure_encoding  # type: ignore[import-not-found]
    except Exception:
        return
    try:
        configure_encoding()
    except Exception:
        pass
def _has_curated_jsonl_entry(jsonl_path: Path) -> bool:
"""Return True iff jsonl has at least one row with a ``file`` field.
A freshly seeded jsonl only contains a ``{"_example": ...}`` row (no
``file`` key) — that is NOT "ready". Readiness requires at least one
curated entry. Matches the contract used by ``inject-subagent-context.py``.
"""
try:
for line in jsonl_path.read_text(encoding="utf-8").splitlines():
line = line.strip()
if not line:
continue
try:
row = json.loads(line)
except json.JSONDecodeError:
continue
if isinstance(row, dict) and row.get("file"):
return True
except (OSError, UnicodeDecodeError):
return False
return False
def read_file(path: Path, fallback: str = "") -> str:
    """Read *path* as UTF-8 text, returning *fallback* on any OS error.

    Catches OSError — the superset of the previously-handled
    FileNotFoundError/PermissionError — so directory targets,
    IsADirectoryError, stale handles, etc. degrade to the fallback
    instead of crashing the hook. Consistent with the statusline's
    ``_read_text`` helper.
    """
    try:
        return path.read_text(encoding="utf-8")
    except OSError:
        return fallback
def run_script(script_path: Path) -> str:
    """Run a Trellis helper script and return its stdout.

    Executes with the current interpreter (warnings silenced), UTF-8 I/O,
    a 5s timeout, and cwd set three levels above the script (the project
    root, i.e. the parent of ``.trellis/``). Any failure — nonzero exit,
    timeout, missing interpreter/script — yields the sentinel string.
    """
    env = dict(os.environ, PYTHONIOENCODING="utf-8")
    try:
        proc = subprocess.run(
            [sys.executable, "-W", "ignore", str(script_path)],
            capture_output=True,
            text=True,
            encoding="utf-8",
            errors="replace",
            timeout=5,
            cwd=str(script_path.parent.parent.parent),
            env=env,
        )
    except (subprocess.TimeoutExpired, FileNotFoundError, PermissionError):
        return "No context available"
    return proc.stdout if proc.returncode == 0 else "No context available"
def _normalize_task_ref(task_ref: str) -> str:
normalized = task_ref.strip()
if not normalized:
return ""
path_obj = Path(normalized)
if path_obj.is_absolute():
return str(path_obj)
normalized = normalized.replace("\\", "/")
while normalized.startswith("./"):
normalized = normalized[2:]
if normalized.startswith("tasks/"):
return f".trellis/{normalized}"
return normalized
def _resolve_task_dir(trellis_dir: Path, task_ref: str) -> Path:
    """Map a (possibly legacy) task ref to its task directory.

    Absolute refs are used as-is; ``.trellis/...`` refs resolve against
    the project root (parent of *trellis_dir*); bare names resolve under
    ``trellis_dir/tasks/``.
    """
    ref = _normalize_task_ref(task_ref)
    candidate = Path(ref)
    if candidate.is_absolute():
        return candidate
    base = trellis_dir.parent if ref.startswith(".trellis/") else trellis_dir / "tasks"
    return base / candidate
def _get_task_status(trellis_dir: Path) -> str:
current_task_file = trellis_dir / ".current-task"
if not current_task_file.is_file():
return "Status: NO ACTIVE TASK\nNext: Describe what you want to work on"
task_ref = _normalize_task_ref(current_task_file.read_text(encoding="utf-8").strip())
if not task_ref:
return "Status: NO ACTIVE TASK\nNext: Describe what you want to work on"
task_dir = _resolve_task_dir(trellis_dir, task_ref)
if not task_dir.is_dir():
return f"Status: STALE POINTER\nTask: {task_ref}\nNext: Task directory not found. Run: python3 ./.trellis/scripts/task.py finish"
task_json_path = task_dir / "task.json"
task_data: dict = {}
if task_json_path.is_file():
try:
task_data = json.loads(task_json_path.read_text(encoding="utf-8"))
except (json.JSONDecodeError, PermissionError):
pass
task_title = task_data.get("title", task_ref)
task_status = task_data.get("status", "unknown")
if task_status == "completed":
return f"Status: COMPLETED\nTask: {task_title}\nNext: Archive with `python3 ./.trellis/scripts/task.py archive {task_dir.name}` or start a new task"
has_context = False
for jsonl_name in ("implement.jsonl", "check.jsonl", "spec.jsonl"):
jsonl_path = task_dir / jsonl_name
if jsonl_path.is_file() and _has_curated_jsonl_entry(jsonl_path):
has_context = True
break
has_prd = (task_dir / "prd.md").is_file()
if not has_prd:
return f"Status: NOT READY\nTask: {task_title}\nMissing: prd.md not created\nNext: Write PRD (see workflow.md Phase 1.1) then curate implement.jsonl per Phase 1.3"
if not has_context:
return f"Status: NOT READY\nTask: {task_title}\nMissing: implement.jsonl / check.jsonl missing or empty\nNext: Curate entries per workflow.md Phase 1.3 (spec + research files only), then `task.py start`"
return (
f"Status: READY\nTask: {task_title}\n"
"Next required action: dispatch `trellis-implement` per Phase 2.1. "
"For agent-capable platforms, do NOT edit code in the main session. "
"After implementation, dispatch `trellis-check` per Phase 2.2 before reporting completion."
)
def _extract_range(content: str, start_header: str, end_header: str) -> str:
"""Extract lines starting at `## start_header` up to (but excluding) `## end_header`."""
lines = content.splitlines()
start: "int | None" = None
end: int = len(lines)
start_match = f"## {start_header}"
end_match = f"## {end_header}"
for i, line in enumerate(lines):
stripped = line.strip()
if start is None and stripped == start_match:
start = i
continue
if start is not None and stripped == end_match:
end = i
break
if start is None:
return ""
return "\n".join(lines[start:end]).rstrip()
def _build_workflow_toc(workflow_path: Path) -> str:
    """Build the injected workflow guide: TOC plus the Phase Index section."""
    content = read_file(workflow_path)
    if not content:
        return "No workflow.md found"
    toc = [line for line in content.splitlines() if line.startswith("## ")]
    parts = [
        "# Development Workflow — Section Index",
        "Full guide: .trellis/workflow.md (read on demand)",
        "",
        "## Table of Contents",
        *toc,
        "",
        "---",
        "",
    ]
    phase_details = _extract_range(content, "Phase Index", "Workflow State Breadcrumbs")
    if phase_details:
        parts.append(phase_details)
    return "\n".join(parts).rstrip()
def main() -> None:
    """Hook entry point: assemble the SessionStart context and print hook JSON.

    Section order in the injected context:
      <session-context> banner → first-reply notice → <current-state>
      (get_context.py output) → <workflow> (TOC + phase index) →
      <guidelines> (inlined guides index + spec index paths) →
      <task-status> → <ready>.
    """
    if should_skip_injection():
        sys.exit(0)
    # Read hook input from stdin
    try:
        hook_input = json.loads(sys.stdin.read())
        project_dir = Path(hook_input.get("cwd", ".")).resolve()
    except (json.JSONDecodeError, KeyError):
        # Malformed/absent hook payload: fall back to the process CWD.
        project_dir = Path(".").resolve()
    configure_project_encoding(project_dir)
    trellis_dir = project_dir / ".trellis"
    output = StringIO()
    output.write("""<session-context>
You are starting a new session in a Trellis-managed project.
Read and follow all instructions below carefully.
</session-context>
""")
    output.write(FIRST_REPLY_NOTICE)
    output.write("\n\n")
    # Live project snapshot produced by the shared get_context.py script.
    output.write("<current-state>\n")
    context_script = trellis_dir / "scripts" / "get_context.py"
    output.write(run_script(context_script))
    output.write("\n</current-state>\n\n")
    output.write("<workflow>\n")
    output.write(_build_workflow_toc(trellis_dir / "workflow.md"))
    output.write("\n</workflow>\n\n")
    output.write("<guidelines>\n")
    output.write(
        "Project spec indexes are listed by path below. Each index contains a "
        "**Pre-Development Checklist** listing the specific guideline files to "
        "read before coding.\n\n"
        "- If you're spawning an implement/check sub-agent, context is injected "
        "automatically via `{task}/implement.jsonl` / `check.jsonl`. You do NOT "
        "need to read these indexes yourself.\n"
        "- For agent-capable platforms, do NOT edit code directly in the main "
        "session; dispatch `trellis-implement` and `trellis-check` so JSONL "
        "context is loaded by the sub-agents.\n\n"
    )
    # guides/ inlined (cross-package thinking, broadly useful)
    guides_index = trellis_dir / "spec" / "guides" / "index.md"
    if guides_index.is_file():
        output.write("## guides (inlined — cross-package thinking guides)\n")
        output.write(read_file(guides_index))
        output.write("\n\n")
    # Other indexes — paths only
    paths: list[str] = []
    spec_dir = trellis_dir / "spec"
    if spec_dir.is_dir():
        for sub in sorted(spec_dir.iterdir()):
            # Skip files and hidden entries; guides/ was inlined above.
            if not sub.is_dir() or sub.name.startswith("."):
                continue
            if sub.name == "guides":
                continue
            index_file = sub / "index.md"
            if index_file.is_file():
                paths.append(f".trellis/spec/{sub.name}/index.md")
            else:
                # No top-level index.md: look one level deeper (grouped specs).
                for nested in sorted(sub.iterdir()):
                    if not nested.is_dir():
                        continue
                    nested_index = nested / "index.md"
                    if nested_index.is_file():
                        paths.append(
                            f".trellis/spec/{sub.name}/{nested.name}/index.md"
                        )
    if paths:
        output.write("## Available spec indexes (read on demand)\n")
        for p in paths:
            output.write(f"- {p}\n")
        output.write("\n")
    output.write(
        "Discover more via: "
        "`python3 ./.trellis/scripts/get_context.py --mode packages`\n"
    )
    output.write("</guidelines>\n\n")
    task_status = _get_task_status(trellis_dir)
    output.write(f"<task-status>\n{task_status}\n</task-status>\n\n")
    output.write("""<ready>
Context loaded. Workflow index, project state, and guidelines are already injected above — do NOT re-read them.
When the user sends the first message, follow <task-status> and the workflow guide.
If a task is READY, execute its Next required action without asking whether to continue.
</ready>""")
    context = output.getvalue()
    # Codex hook protocol: suppress raw stdout echo, surface a short system
    # message, and hand the assembled context to the session via
    # hookSpecificOutput.additionalContext.
    result = {
        "suppressOutput": True,
        "systemMessage": f"Trellis context injected ({len(context)} chars)",
        "hookSpecificOutput": {
            "hookEventName": "SessionStart",
            "additionalContext": context,
        },
    }
    print(json.dumps(result, ensure_ascii=False), flush=True)


if __name__ == "__main__":
    main()
+219
View File
@@ -0,0 +1,219 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Trellis StatusLine — project-level status display for Claude Code.
Reads Claude Code session JSON from stdin + Trellis task data from filesystem.
Outputs 1-2 lines:
With active task: [P1] Task title (status) + info line
Without task: info line only
Info line: model · ctx% · branch · duration · developer · tasks · rate limits
"""
from __future__ import annotations
import json
import re
import subprocess
import sys
from pathlib import Path
# Fix: Windows Python defaults to GBK encoding, which corrupts UTF-8
# characters like the middle dot (·). Wrap stdout/stderr with UTF-8.
# `reconfigure` exists on TextIOWrapper from Python 3.7+; the getattr guard
# skips redirected/captured streams that don't provide it.
if sys.platform == "win32":
    for stream in (sys.stdout, sys.stderr):
        reconfigure = getattr(stream, "reconfigure", None)
        if callable(reconfigure):
            reconfigure(encoding="utf-8", errors="replace")
def _read_text(path: Path) -> str:
try:
return path.read_text(encoding="utf-8").strip()
except (FileNotFoundError, PermissionError, OSError):
return ""
def _read_json(path: Path) -> dict:
    """Parse *path* as JSON; {} when missing, unreadable, or malformed."""
    raw = _read_text(path)
    try:
        return json.loads(raw) if raw else {}
    except (json.JSONDecodeError, ValueError):
        return {}
def _normalize_task_ref(task_ref: str) -> str:
normalized = task_ref.strip()
if not normalized:
return ""
path_obj = Path(normalized)
if path_obj.is_absolute():
return str(path_obj)
normalized = normalized.replace("\\", "/")
while normalized.startswith("./"):
normalized = normalized[2:]
if normalized.startswith("tasks/"):
return f".trellis/{normalized}"
return normalized
def _resolve_task_dir(trellis_dir: Path, task_ref: str) -> Path:
    """Resolve a task ref to its directory.

    Absolute refs are returned as-is; ``.trellis/...`` refs resolve
    against the project root; bare names land under ``tasks/``.
    """
    ref = _normalize_task_ref(task_ref)
    as_path = Path(ref)
    if as_path.is_absolute():
        return as_path
    if ref.startswith(".trellis/"):
        return trellis_dir.parent / as_path
    return trellis_dir / "tasks" / as_path
def _find_trellis_dir() -> Path | None:
"""Walk up from cwd to find .trellis/ directory."""
current = Path.cwd()
for parent in [current, *current.parents]:
candidate = parent / ".trellis"
if candidate.is_dir():
return candidate
return None
def _get_current_task(trellis_dir: Path) -> dict | None:
    """Load the active task's display fields.

    Returns ``{"title", "status", "priority"}`` or None when there is no
    pointer, the pointer is blank, or task.json is missing/unparseable.
    """
    ref = _normalize_task_ref(_read_text(trellis_dir / ".current-task"))
    if not ref:
        return None
    data = _read_json(_resolve_task_dir(trellis_dir, ref) / "task.json")
    if not data:
        return None
    return {
        "title": data.get("title") or data.get("name") or "unknown",
        "status": data.get("status", "unknown"),
        "priority": data.get("priority", "P2"),
    }
def _count_active_tasks(trellis_dir: Path) -> int:
"""Count non-archived task directories with valid task.json."""
tasks_dir = trellis_dir / "tasks"
if not tasks_dir.is_dir():
return 0
count = 0
for d in tasks_dir.iterdir():
if d.is_dir() and d.name != "archive" and (d / "task.json").is_file():
count += 1
return count
def _get_developer(trellis_dir: Path) -> str:
    """Resolve the developer name from .trellis/.developer.

    Prefers a ``name=...`` line; otherwise the first line; "unknown"
    when the file is missing or effectively empty.
    """
    content = _read_text(trellis_dir / ".developer")
    if not content:
        return "unknown"
    lines = content.splitlines()
    for entry in lines:
        if entry.startswith("name="):
            return entry[len("name="):].strip()
    return lines[0].strip() or "unknown"
def _get_git_branch() -> str:
try:
result = subprocess.run(
["git", "branch", "--show-current"],
capture_output=True, text=True, timeout=3,
)
return result.stdout.strip() if result.returncode == 0 else ""
except (FileNotFoundError, subprocess.TimeoutExpired):
return ""
def _format_ctx_size(size: int) -> str:
if size >= 1_000_000:
return f"{size // 1_000_000}M"
if size >= 1_000:
return f"{size // 1_000}K"
return str(size)
def _format_duration(ms: int) -> str:
secs = ms // 1000
hours, remainder = divmod(secs, 3600)
mins = remainder // 60
if hours > 0:
return f"{hours}h{mins}m"
return f"{mins}m"
def main() -> None:
    """Render the statusline: optional task line plus a `·`-joined info line."""
    # Read Claude Code session JSON from stdin
    try:
        cc_data = json.loads(sys.stdin.read())
    except (json.JSONDecodeError, ValueError):
        cc_data = {}
    trellis_dir = _find_trellis_dir()
    # ANSI: dim-gray middle dot used as field separator.
    SEP = " \033[90m·\033[0m "
    # --- Trellis data ---
    task = _get_current_task(trellis_dir) if trellis_dir else None
    dev = _get_developer(trellis_dir) if trellis_dir else ""
    task_count = _count_active_tasks(trellis_dir) if trellis_dir else 0
    # --- CC session data ---
    # `or 0` guards against explicit null values in the session payload.
    model = cc_data.get("model", {}).get("display_name", "?")
    ctx_pct = int(cc_data.get("context_window", {}).get("used_percentage") or 0)
    ctx_size = _format_ctx_size(cc_data.get("context_window", {}).get("context_window_size") or 0)
    duration = _format_duration(cc_data.get("cost", {}).get("total_duration_ms") or 0)
    branch = _get_git_branch()
    # Avoid "Opus 4.6 (1M context) (1M)"
    # Only append "(size)" when the display name doesn't already embed one.
    if re.search(r"\d+[KMG]\b", model, re.IGNORECASE):
        model_label = model
    else:
        model_label = f"{model} ({ctx_size})"
    # Context % with color
    # Thresholds: red ≥90, yellow ≥70, green otherwise.
    if ctx_pct >= 90:
        ctx_color = "\033[31m"
    elif ctx_pct >= 70:
        ctx_color = "\033[33m"
    else:
        ctx_color = "\033[32m"
    # Build info line: model · ctx · branch · duration · dev · tasks [· rate limits]
    parts = [
        model_label,
        f"ctx {ctx_color}{ctx_pct}%\033[0m",
    ]
    if branch:
        parts.append(f"\033[35m{branch}\033[0m")
    parts.append(duration)
    if dev:
        parts.append(f"\033[32m{dev}\033[0m")
    if task_count:
        parts.append(f"{task_count} task(s)")
    # Rate-limit percentages are optional fields; only shown when present.
    five_hr = cc_data.get("rate_limits", {}).get("five_hour", {}).get("used_percentage")
    if five_hr is not None:
        parts.append(f"5h {int(five_hr)}%")
    seven_day = cc_data.get("rate_limits", {}).get("seven_day", {}).get("used_percentage")
    if seven_day is not None:
        parts.append(f"7d {int(seven_day)}%")
    info_line = SEP.join(parts)
    # Output: task line (only if active) + info line
    if task:
        print(f"\033[36m[{task['priority']}]\033[0m {task['title']} \033[33m({task['status']})\033[0m")
    print(info_line)


if __name__ == "__main__":
    main()