Compare commits

...

7 Commits

Author SHA1 Message Date
Yeachan-Heo
4fb8a319ed feat(api): enable Anthropic Messages streaming in Rust
Add a standalone api crate for Anthropic's Messages API with typed requests,
responses, and SSE event parsing. The crate can send standard message
requests or consume streaming responses incrementally, and the workspace
now passes the stricter clippy invocation required by this lane.

Constraint: Must fit the existing Rust workspace and pass cargo fmt/clippy/test
Constraint: Live Anthropic verification depends on a valid ANTHROPIC_API_KEY at runtime
Rejected: Add a Stream-trait wrapper with extra dependencies | custom next_event() keeps the crate smaller
Rejected: Skip workspace lint cleanup | user explicitly required cargo clippy to pass
Confidence: medium
Scope-risk: moderate
Directive: Extend StreamEvent deliberately if future lanes need tool-use or thinking deltas
Tested: cargo fmt
Tested: cargo clippy --workspace --all-targets -- -D warnings
Tested: cargo test --workspace
Not-tested: Live Anthropic stream smoke test with a valid API key (current ANTHROPIC_API_KEY returned 401 authentication_error)
2026-03-31 15:45:30 +00:00
Yeachan-Heo
d621f5d5d8 Establish a harness-first Rust port foundation
This creates a new rust/ workspace for the compatibility-first port effort. It adds a README that explains the milestone and verification commands, links the new Rust docs from the repository root, and seeds a minimal CLI/runtime/registry/harness skeleton that can read the upstream TypeScript sources.

Constraint: Initial delivery had to stay inside rust/ except for the root README pointer
Constraint: The workspace started empty, so the first milestone needed proof-oriented scaffolding before broader porting
Rejected: Wait for tmux worker integration output | local verified implementation finished first and worker merges conflicted with local files
Rejected: Start with a feature-complete CLI rewrite | too risky without manifest and bootstrap harness scaffolding
Confidence: medium
Scope-risk: moderate
Reversibility: clean
Directive: Grow compatibility from extracted manifests and tests before claiming drop-in parity
Tested: cargo fmt --all; cargo check --workspace; cargo test --workspace; cargo run -p rusty-claude-cli -- --help; cargo run -p rusty-claude-cli -- dump-manifests; cargo run -p rusty-claude-cli -- bootstrap-plan
Not-tested: Real command execution parity; state/config fixture round-trips; bridge/MCP/plugin protocol replay
Related: /home/bellman/Workspace/rusty-claude-code/.omx/plans/prd-rust-port-claude-code-dropin-cli.md
Related: /home/bellman/Workspace/rusty-claude-code/.omx/plans/test-spec-rust-port-claude-code-dropin-cli.md
2026-03-31 15:11:08 +00:00
sigridjineth
c941e95fc7 docs: emphasize leaked source and include original tweet 2026-03-31 15:11:08 +00:00
sigridjineth
c9f7b96e7d docs: rewrite README in English 2026-03-31 15:11:08 +00:00
sigridjineth
66d9c1e420 docs: add comprehensive README with architecture analysis 2026-03-31 15:11:08 +00:00
sigridjineth
caad05016e init: add source code from src.zip 2026-03-31 15:10:58 +00:00
instructkr
2394140007 Rewriting Project Claw Code - Python port with Rust on the way 2026-03-31 08:04:29 -07:00
2028 changed files with 518187 additions and 105 deletions

1
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1 @@
github: instructkr

View File

@@ -0,0 +1,9 @@
{
"session_id": "00083e63395f4f3bb24fd6e7ed26439d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "053db140f7694d1abfb52c96e62fdede",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "05a9bc9a33c24f80b7e6540d4306d59d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "08dcdbf3d76b4ed7898c27c1a45dde73",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "0ca1cdd3176041a1916729d76effe10d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "11fb4a2aa69b460ba53031ea6643969f",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "156dd69bbb3142e687d1a56cb97633da",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "1a9711e7305f42a8bc30d8ade03a221d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "1b2fe9ba9b804b2896e33ddc7c7d91ae",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "1d07428c6df44c859e6056ef13aa422d",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "1e068ad0e9234d8b9e85735556ad436c",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "2763324a1d7e4832a12f14e7108ce552",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "27d5203bdda3480681732c1f94291599",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "28510de9575e40919ddb607d82e95cc1",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "2e574cba82bb43eda5aafe7ae364210b",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "317de978222e4e6c93f7e5430d44d130",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "338275156d1d405f8c627be621b3191e",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "3b93d8e47dff4a0d90782d19e33a81b6",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "3c60859387044f7b8111557a4b252745",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "439e9f7d2bfa41cbbfed6edf44074311",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "4c75b6e3a46b4f599e32eaf147d92c4d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "4e4aae9acdf945839798f37e01c04f1b",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "5230052e7d2749be9cf318de519d9c42",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "575c96ef2c1f45cf9deb85b06c552bde",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "577b74a297fd4fed956a78b5debd8025",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "59d15a4390b648b9b19c6ebae1c5d2d8",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "5cb2ee43a1c547f79ffbc74cb311247c",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "609a20c55c6d47c282ddc00d0c3b6dfb",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "663f870db32944b3a0d2b863c19a5708",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "6d0759c31bd942b6b86f3969ba8bbf28",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "6dd0b48373c64449b756fd53425b4031",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "6f8b4a9d5a0e47f79bd732f61a6eb543",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "6f9f4e28691f46dab19d99d78127c94e",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "72b9dcbd91054fd78d7cfdcf09de3c0b",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "76411759fe1e42e5bc5ac4279a68f700",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "776aeb73811d4868ad20b6562eba6502",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "7afd286cb9d14d62b56ba7ff86bcc08b",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "7e96b6c76ef64983a4ba3be162bd7cdd",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "800a5672a570499883260b597ed174bd",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "834e4208013d4505a6957901a0ff151c",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "884dc57a7bba40d59a1bcc1dd80cb28b",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "8d517af648b840928016d4cc613f1133",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "8d84e1795c9d414190bc1a9c719bc63e",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "90d94293814443edb376ba3b68568c46",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "9155bcf5da0348dbac0c2374ef60c9e5",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "91d1f071ab8148b28b7e29513cc1804f",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "933e1cd8e24042d1968959aa2c67f3d6",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "94d871d03ed6410a81a8339ecba4f3dd",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "994767f1199c44d8b2052e758b4d4d3c",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "9bd50e453f24444d970dedddf3c85027",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "9d8eecbecd7d452697d2e8b5be415bd4",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "9f87d3fe67bb4aebbe0effd034382685",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "a0e9fbca75ab44e7b89022156f18f60a",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "a1d0491f8c92424fb2a2c3e2333f39d0",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "a277471dc4cf4d7f86ff065fd77ccb88",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "a3b47cec34694898919020369ab6af13",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "a6e9e4b6daac4f5a82e366cb12313eee",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "ad7f38d318df458bb1f0f0492fa71f02",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "b3b5eb702163461bb0f0dd43a82528d8",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "b4829661195449c1af0c37e8c87ce0a5",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "b4f0e18929d54d948ad29bd1dd43a85d",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "bd2f9891b77d4ab29b022f6d830c6234",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "cbe249de5950459da16440988e2ddc54",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "db0aea45e8984d57898e91803bdcdc7c",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "dbd0d1c8f734444a8ed02f77f3d9fc1a",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "e8fff9feb2ae4d8ca94575ed8fa5a03f",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "e9433328bba545d8ac94e2bbac488aec",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "f074c37ae3b84a959da394aec7ec3c68",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "f2efb0de41f949859b57854e58efad70",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "f48004afd90f41d081a6129e96ad7651",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "f5def6f008f64a918bb0f5b0d6ec3db6",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "f5efe1bbcb124e3191e4417becbe1f81",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "f689195c4cf94eb998a461da808075a3",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,8 @@
{
"session_id": "f77644ede86245eba5198322de05e4a4",
"messages": [
"review MCP tool"
],
"input_tokens": 3,
"output_tokens": 13
}

View File

@@ -0,0 +1,9 @@
{
"session_id": "ffbf6be4ed074ead98bcf18e7710e6a1",
"messages": [
"review MCP tool",
"review MCP tool"
],
"input_tokens": 6,
"output_tokens": 32
}

316
README.md
View File

@@ -1,123 +1,261 @@
# Claude Code Python Porting Workspace
# Claude Code — Leaked Source (2026-03-31)
> The primary `src/` tree in this repository is now dedicated to **Python porting work**. The March 31, 2026 Claude Code source exposure is part of the project's background, but the tracked repository is now centered on Python source rather than the exposed TypeScript snapshot.
> **On March 31, 2026, the full source code of Anthropic's Claude Code CLI was leaked** via a `.map` file exposed in their npm registry.
---
## Porting Status
## How It Leaked
The main source tree is now Python-first.
[Chaofan Shou (@Fried_rice)](https://x.com/Fried_rice) discovered the leak and posted it publicly:
- `src/` contains the active Python porting workspace
- `tests/` verifies the current Python workspace
- the exposed snapshot is no longer part of the tracked repository state
> **"Claude code source code has been leaked via a map file in their npm registry!"**
>
> — [@Fried_rice, March 31, 2026](https://x.com/Fried_rice/status/2038894956459290963)
The current Python workspace is not yet a complete one-to-one replacement for the original system, but the primary implementation surface is now Python.
The source map file in the published npm package contained a reference to the full, unobfuscated TypeScript source, which was downloadable as a zip archive from Anthropic's R2 storage bucket.
## Why this rewrite exists
---
I originally studied the exposed codebase to understand its harness, tool wiring, and agent workflow. After spending more time with the legal and ethical questions—and after reading the essay linked below—I did not want the exposed snapshot itself to remain the main tracked source tree.
## Overview
This repository now focuses on Python porting work instead.
Claude Code is Anthropic's official CLI tool that lets you interact with Claude directly from the terminal to perform software engineering tasks — editing files, running commands, searching codebases, managing git workflows, and more.
## Repository Layout
This repository contains the leaked `src/` directory.
```text
.
├── src/ # Python porting workspace
│ ├── __init__.py
│ ├── commands.py
│ ├── main.py
│ ├── models.py
│ ├── port_manifest.py
│ ├── query_engine.py
│ ├── task.py
│ └── tools.py
├── tests/ # Python verification
├── assets/omx/ # OmX workflow screenshots
├── 2026-03-09-is-legal-the-same-as-legitimate-ai-reimplementation-and-the-erosion-of-copyleft.md
└── README.md
- **Leaked on**: 2026-03-31
- **Language**: TypeScript
- **Runtime**: Bun
- **Terminal UI**: React + [Ink](https://github.com/vadimdemedes/ink) (React for CLI)
- **Scale**: ~1,900 files, 512,000+ lines of code
---
## Rust port foundation
A compatibility-first Rust port workspace now lives in [`rust/`](rust/). Start with [`rust/README.md`](rust/README.md) for the current milestone scope, workspace layout, and verification commands.
## Directory Structure
```
src/
├── main.tsx # Entrypoint (Commander.js-based CLI parser)
├── commands.ts # Command registry
├── tools.ts # Tool registry
├── Tool.ts # Tool type definitions
├── QueryEngine.ts # LLM query engine (core Anthropic API caller)
├── context.ts # System/user context collection
├── cost-tracker.ts # Token cost tracking
├── commands/ # Slash command implementations (~50)
├── tools/ # Agent tool implementations (~40)
├── components/ # Ink UI components (~140)
├── hooks/ # React hooks
├── services/ # External service integrations
├── screens/ # Full-screen UIs (Doctor, REPL, Resume)
├── types/ # TypeScript type definitions
├── utils/ # Utility functions
├── bridge/ # IDE integration bridge (VS Code, JetBrains)
├── coordinator/ # Multi-agent coordinator
├── plugins/ # Plugin system
├── skills/ # Skill system
├── keybindings/ # Keybinding configuration
├── vim/ # Vim mode
├── voice/ # Voice input
├── remote/ # Remote sessions
├── server/ # Server mode
├── memdir/ # Memory directory (persistent memory)
├── tasks/ # Task management
├── state/ # State management
├── migrations/ # Config migrations
├── schemas/ # Config schemas (Zod)
├── entrypoints/ # Initialization logic
├── ink/ # Ink renderer wrapper
├── buddy/ # Companion sprite (Easter egg)
├── native-ts/ # Native TypeScript utils
├── outputStyles/ # Output styling
├── query/ # Query pipeline
└── upstreamproxy/ # Proxy configuration
```
## Python Workspace Overview
---
The new Python `src/` tree currently provides:
## Core Architecture
- **`port_manifest.py`** — summarizes the current Python workspace structure
- **`models.py`** — dataclasses for subsystems, modules, and backlog state
- **`commands.py`** — Python-side command port metadata
- **`tools.py`** — Python-side tool port metadata
- **`query_engine.py`** — renders a Python porting summary from the active workspace
- **`main.py`** — a CLI entrypoint for manifest and summary output
### 1. Tool System (`src/tools/`)
## Quickstart
Every tool Claude Code can invoke is implemented as a self-contained module. Each tool defines its input schema, permission model, and execution logic.
Render the Python porting summary:
| Tool | Description |
|---|---|
| `BashTool` | Shell command execution |
| `FileReadTool` | File reading (images, PDFs, notebooks) |
| `FileWriteTool` | File creation / overwrite |
| `FileEditTool` | Partial file modification (string replacement) |
| `GlobTool` | File pattern matching search |
| `GrepTool` | ripgrep-based content search |
| `WebFetchTool` | Fetch URL content |
| `WebSearchTool` | Web search |
| `AgentTool` | Sub-agent spawning |
| `SkillTool` | Skill execution |
| `MCPTool` | MCP server tool invocation |
| `LSPTool` | Language Server Protocol integration |
| `NotebookEditTool` | Jupyter notebook editing |
| `TaskCreateTool` / `TaskUpdateTool` | Task creation and management |
| `SendMessageTool` | Inter-agent messaging |
| `TeamCreateTool` / `TeamDeleteTool` | Team agent management |
| `EnterPlanModeTool` / `ExitPlanModeTool` | Plan mode toggle |
| `EnterWorktreeTool` / `ExitWorktreeTool` | Git worktree isolation |
| `ToolSearchTool` | Deferred tool discovery |
| `CronCreateTool` | Scheduled trigger creation |
| `RemoteTriggerTool` | Remote trigger |
| `SleepTool` | Proactive mode wait |
| `SyntheticOutputTool` | Structured output generation |
```bash
python3 -m src.main summary
### 2. Command System (`src/commands/`)
User-facing slash commands invoked with `/` prefix.
| Command | Description |
|---|---|
| `/commit` | Create a git commit |
| `/review` | Code review |
| `/compact` | Context compression |
| `/mcp` | MCP server management |
| `/config` | Settings management |
| `/doctor` | Environment diagnostics |
| `/login` / `/logout` | Authentication |
| `/memory` | Persistent memory management |
| `/skills` | Skill management |
| `/tasks` | Task management |
| `/vim` | Vim mode toggle |
| `/diff` | View changes |
| `/cost` | Check usage cost |
| `/theme` | Change theme |
| `/context` | Context visualization |
| `/pr_comments` | View PR comments |
| `/resume` | Restore previous session |
| `/share` | Share session |
| `/desktop` | Desktop app handoff |
| `/mobile` | Mobile app handoff |
### 3. Service Layer (`src/services/`)
| Service | Description |
|---|---|
| `api/` | Anthropic API client, file API, bootstrap |
| `mcp/` | Model Context Protocol server connection and management |
| `oauth/` | OAuth 2.0 authentication flow |
| `lsp/` | Language Server Protocol manager |
| `analytics/` | GrowthBook-based feature flags and analytics |
| `plugins/` | Plugin loader |
| `compact/` | Conversation context compression |
| `policyLimits/` | Organization policy limits |
| `remoteManagedSettings/` | Remote managed settings |
| `extractMemories/` | Automatic memory extraction |
| `tokenEstimation.ts` | Token count estimation |
| `teamMemorySync/` | Team memory synchronization |
### 4. Bridge System (`src/bridge/`)
A bidirectional communication layer connecting IDE extensions (VS Code, JetBrains) with the Claude Code CLI.
- `bridgeMain.ts` — Bridge main loop
- `bridgeMessaging.ts` — Message protocol
- `bridgePermissionCallbacks.ts` — Permission callbacks
- `replBridge.ts` — REPL session bridge
- `jwtUtils.ts` — JWT-based authentication
- `sessionRunner.ts` — Session execution management
### 5. Permission System (`src/hooks/toolPermission/`)
Checks permissions on every tool invocation. Either prompts the user for approval/denial or automatically resolves based on the configured permission mode (`default`, `plan`, `bypassPermissions`, `auto`, etc.).
### 6. Feature Flags
Dead code elimination via Bun's `bun:bundle` feature flags:
```typescript
import { feature } from 'bun:bundle'
// Inactive code is completely stripped at build time
const voiceCommand = feature('VOICE_MODE')
? require('./commands/voice/index.js').default
: null
```
Print the current Python workspace manifest:
Notable flags: `PROACTIVE`, `KAIROS`, `BRIDGE_MODE`, `DAEMON`, `VOICE_MODE`, `AGENT_TRIGGERS`, `MONITOR_TOOL`
```bash
python3 -m src.main manifest
---
## Key Files in Detail
### `QueryEngine.ts` (~46K lines)
The core engine for LLM API calls. Handles streaming responses, tool-call loops, thinking mode, retry logic, and token counting.
### `Tool.ts` (~29K lines)
Defines base types and interfaces for all tools — input schemas, permission models, and progress state types.
### `commands.ts` (~25K lines)
Manages registration and execution of all slash commands. Uses conditional imports to load different command sets per environment.
### `main.tsx`
Commander.js-based CLI parser + React/Ink renderer initialization. At startup, parallelizes MDM settings, keychain prefetch, and GrowthBook initialization for faster boot.
---
## Tech Stack
| Category | Technology |
|---|---|
| Runtime | [Bun](https://bun.sh) |
| Language | TypeScript (strict) |
| Terminal UI | [React](https://react.dev) + [Ink](https://github.com/vadimdemedes/ink) |
| CLI Parsing | [Commander.js](https://github.com/tj/commander.js) (extra-typings) |
| Schema Validation | [Zod v4](https://zod.dev) |
| Code Search | [ripgrep](https://github.com/BurntSushi/ripgrep) (via GrepTool) |
| Protocols | [MCP SDK](https://modelcontextprotocol.io), LSP |
| API | [Anthropic SDK](https://docs.anthropic.com) |
| Telemetry | OpenTelemetry + gRPC |
| Feature Flags | GrowthBook |
| Auth | OAuth 2.0, JWT, macOS Keychain |
---
## Notable Design Patterns
### Parallel Prefetch
Startup time is optimized by prefetching MDM settings, keychain reads, and API preconnect in parallel — before heavy module evaluation begins.
```typescript
// main.tsx — fired as side-effects before other imports
startMdmRawRead()
startKeychainPrefetch()
```
List the current Python modules:
### Lazy Loading
```bash
python3 -m src.main subsystems --limit 16
```
Heavy modules (OpenTelemetry ~400KB, gRPC ~700KB) are deferred via dynamic `import()` until actually needed.
Run verification:
### Agent Swarms
```bash
python3 -m unittest discover -s tests -v
```
Sub-agents are spawned via `AgentTool`, with `coordinator/` handling multi-agent orchestration. `TeamCreateTool` enables team-level parallel work.
Run the parity audit against the local ignored archive (when present):
### Skill System
```bash
python3 -m src.main parity-audit
```
Reusable workflows defined in `skills/` and executed through `SkillTool`. Users can add custom skills.
Inspect mirrored command/tool inventories:
### Plugin Architecture
```bash
python3 -m src.main commands --limit 10
python3 -m src.main tools --limit 10
```
Built-in and third-party plugins are loaded through the `plugins/` subsystem.
## Current Parity Checkpoint
---
The port now mirrors the archived root-entry file surface, top-level subsystem names, and command/tool inventories much more closely than before. However, it is **not yet** a full runtime-equivalent replacement for the original TypeScript system; the Python tree still contains fewer executable runtime slices than the archived source.
## Disclaimer
## Related Essay
- [*Is legal the same as legitimate: AI reimplementation and the erosion of copyleft*](https://writings.hongminhee.org/2026/03/legal-vs-legitimate/)
The essay is dated **March 9, 2026**, so it should be read as companion analysis that predates the **March 31, 2026** source exposure that motivated this rewrite direction.
## Built with `oh-my-codex`
The restructuring and documentation work on this repository was AI-assisted and orchestrated with Yeachan Heo's [oh-my-codex (OmX)](https://github.com/Yeachan-Heo/oh-my-codex), layered on top of Codex.
- **`$team` mode:** used for coordinated parallel review and architectural feedback
- **`$ralph` mode:** used for persistent execution, verification, and completion discipline
- **Codex-driven workflow:** used to turn the main `src/` tree into a Python-first porting workspace
### OmX workflow screenshots
![OmX workflow screenshot 1](assets/omx/omx-readme-review-1.png)
*Ralph/team orchestration view while the README and essay context were being reviewed in terminal panes.*
![OmX workflow screenshot 2](assets/omx/omx-readme-review-2.png)
*Split-pane review and verification flow during the final README wording pass.*
## Ownership / Affiliation Disclaimer
- This repository does **not** claim ownership of the original Claude Code source material.
- This repository is **not affiliated with, endorsed by, or maintained by Anthropic**.
This repository archives source code that was leaked from Anthropic's npm registry on **2026-03-31**. All original source code is the property of [Anthropic](https://www.anthropic.com).

BIN
assets/clawd-hero.jpeg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 233 KiB

BIN
assets/instructkr.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.8 KiB

BIN
assets/star-history.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 312 KiB

BIN
assets/tweet-screenshot.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 812 KiB

BIN
assets/wsj-feature.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 873 KiB

1
rust/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
target/

1410
rust/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

19
rust/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[workspace]
members = ["crates/*"]
resolver = "2"
[workspace.package]
version = "0.1.0"
edition = "2021"
license = "MIT"
publish = false
[workspace.lints.rust]
# Forbid unsafe code across every workspace crate.
unsafe_code = "forbid"
[workspace.lints.clippy]
# Enable the all/pedantic groups at low priority so the specific
# `allow` entries below can override individual lints.
all = { level = "warn", priority = -1 }
pedantic = { level = "warn", priority = -1 }
module_name_repetitions = "allow"
missing_panics_doc = "allow"
missing_errors_doc = "allow"

54
rust/README.md Normal file
View File

@@ -0,0 +1,54 @@
# Rust port foundation
This directory contains the first compatibility-first Rust foundation for a drop-in Claude Code CLI replacement.
## Current milestone
This initial milestone focuses on **harness-first scaffolding**, not full feature parity:
- a Cargo workspace aligned to major upstream seams
- a placeholder CLI crate (`rusty-claude-cli`)
- runtime, command, and tool registry skeleton crates
- a `compat-harness` crate that reads the upstream TypeScript sources in `../src/`
- tests that prove upstream manifests/bootstrap hints can be extracted from the leaked TypeScript codebase
## Workspace layout
```text
rust/
├── Cargo.toml
├── README.md
├── crates/
│ ├── rusty-claude-cli/
│ ├── runtime/
│ ├── commands/
│ ├── tools/
│ └── compat-harness/
└── tests/
```
## How to use
From this directory:
```bash
cargo fmt --all
cargo check --workspace
cargo test --workspace
cargo run -p rusty-claude-cli -- --help
cargo run -p rusty-claude-cli -- dump-manifests
cargo run -p rusty-claude-cli -- bootstrap-plan
```
## Design notes
The shape follows the PRD's harness-first recommendation:
1. Extract observable upstream command/tool/bootstrap facts first.
2. Keep Rust module boundaries recognizable.
3. Grow runtime compatibility behind proof artifacts.
4. Document explicit gaps instead of implying drop-in parity too early.
## Relationship to the root README
The repository root README explains the leaked TypeScript codebase. This document tracks the Rust replacement effort that lives in `rust/`.

View File

@@ -0,0 +1,15 @@
[package]
name = "api"
version.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
[dependencies]
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["io-util", "macros", "net", "rt-multi-thread", "time"] }
[lints]
workspace = true

View File

@@ -0,0 +1,202 @@
use crate::error::ApiError;
use crate::sse::SseParser;
use crate::types::{MessageRequest, MessageResponse, StreamEvent};
// Default API origin; overridable at runtime via ANTHROPIC_BASE_URL or
// CLAUDE_CODE_API_BASE_URL (see `AnthropicClient::from_env`).
const DEFAULT_BASE_URL: &str = "https://api.anthropic.com";
// Pinned `anthropic-version` header value sent with every request.
const ANTHROPIC_VERSION: &str = "2023-06-01";
/// Minimal client for the Anthropic Messages API built on `reqwest`.
#[derive(Debug, Clone)]
pub struct AnthropicClient {
    // Shared connection pool; cheap to clone.
    http: reqwest::Client,
    // Sent as the `x-api-key` header on every request.
    api_key: String,
    // Optional bearer token forwarded via `Authorization` (e.g. for proxies).
    auth_token: Option<String>,
    // API origin; trailing slashes are trimmed when the URL is built.
    base_url: String,
}
impl AnthropicClient {
    /// Creates a client for the default Anthropic endpoint with the given key.
    #[must_use]
    pub fn new(api_key: impl Into<String>) -> Self {
        Self {
            http: reqwest::Client::new(),
            api_key: api_key.into(),
            auth_token: None,
            base_url: DEFAULT_BASE_URL.to_string(),
        }
    }

    /// Builds a client from the environment: `ANTHROPIC_API_KEY` (required),
    /// `ANTHROPIC_AUTH_TOKEN` (optional), and `ANTHROPIC_BASE_URL` or
    /// `CLAUDE_CODE_API_BASE_URL` (optional endpoint override, in that order).
    pub fn from_env() -> Result<Self, ApiError> {
        let api_key = read_api_key(|key| std::env::var(key))?;
        let auth_token = std::env::var("ANTHROPIC_AUTH_TOKEN").ok();
        let base_url = std::env::var("ANTHROPIC_BASE_URL")
            .ok()
            .or_else(|| std::env::var("CLAUDE_CODE_API_BASE_URL").ok())
            .unwrap_or_else(|| DEFAULT_BASE_URL.to_string());
        Ok(Self::new(api_key)
            .with_auth_token(auth_token)
            .with_base_url(base_url))
    }

    /// Sets (or clears) the bearer token; empty strings are treated as absent.
    #[must_use]
    pub fn with_auth_token(mut self, auth_token: Option<String>) -> Self {
        self.auth_token = match auth_token {
            Some(token) if !token.is_empty() => Some(token),
            _ => None,
        };
        self
    }

    /// Overrides the API origin (trailing slashes are trimmed at request time).
    #[must_use]
    pub fn with_base_url(mut self, base_url: impl Into<String>) -> Self {
        self.base_url = base_url.into();
        self
    }

    /// Sends a non-streaming message request and decodes the JSON response.
    /// Any `stream: true` on the input is overridden to `false`.
    pub async fn send_message(
        &self,
        request: &MessageRequest,
    ) -> Result<MessageResponse, ApiError> {
        let non_streaming = MessageRequest {
            stream: false,
            ..request.clone()
        };
        let response = expect_success(self.send_raw_request(&non_streaming).await?).await?;
        response
            .json::<MessageResponse>()
            .await
            .map_err(ApiError::from)
    }

    /// Starts a streaming request and returns an incremental event reader.
    /// `stream: true` is forced regardless of the input's flag.
    pub async fn stream_message(
        &self,
        request: &MessageRequest,
    ) -> Result<MessageStream, ApiError> {
        let streaming = request.clone().with_streaming();
        let response = expect_success(self.send_raw_request(&streaming).await?).await?;
        Ok(MessageStream {
            response,
            parser: SseParser::new(),
            pending: std::collections::VecDeque::new(),
            done: false,
        })
    }

    /// POSTs `request` to `{base_url}/v1/messages` with the standard headers.
    async fn send_raw_request(
        &self,
        request: &MessageRequest,
    ) -> Result<reqwest::Response, ApiError> {
        let endpoint = format!("{}/v1/messages", self.base_url.trim_end_matches('/'));
        let mut builder = self
            .http
            .post(endpoint)
            .header("x-api-key", &self.api_key)
            .header("anthropic-version", ANTHROPIC_VERSION)
            .header("content-type", "application/json");
        if let Some(token) = &self.auth_token {
            builder = builder.bearer_auth(token);
        }
        builder.json(request).send().await.map_err(ApiError::from)
    }
}
/// Validates `ANTHROPIC_API_KEY` via the supplied lookup function.
///
/// Absent and empty values both map to `ApiError::MissingApiKey`; any other
/// lookup failure (e.g. non-Unicode content) surfaces as `InvalidApiKeyEnv`.
fn read_api_key(
    getter: impl FnOnce(&str) -> Result<String, std::env::VarError>,
) -> Result<String, ApiError> {
    match getter("ANTHROPIC_API_KEY") {
        Err(std::env::VarError::NotPresent) => Err(ApiError::MissingApiKey),
        Err(other) => Err(ApiError::from(other)),
        Ok(api_key) => {
            if api_key.is_empty() {
                Err(ApiError::MissingApiKey)
            } else {
                Ok(api_key)
            }
        }
    }
}
/// Incremental SSE reader over a streaming `/v1/messages` response.
#[derive(Debug)]
pub struct MessageStream {
    // HTTP response whose body is consumed chunk by chunk.
    response: reqwest::Response,
    // Reassembles SSE frames across arbitrary chunk boundaries.
    parser: SseParser,
    // Events already parsed but not yet handed to the caller.
    pending: std::collections::VecDeque<StreamEvent>,
    // Set once the response body is exhausted.
    done: bool,
}
impl MessageStream {
    /// Returns the next parsed stream event, or `Ok(None)` once the response
    /// body is fully drained and the parser holds no buffered frames.
    pub async fn next_event(&mut self) -> Result<Option<StreamEvent>, ApiError> {
        loop {
            // Serve already-parsed events before touching the network again.
            if let Some(event) = self.pending.pop_front() {
                return Ok(Some(event));
            }
            if self.done {
                // After EOF, flush any partial frame the parser still buffers.
                let remaining = self.parser.finish()?;
                self.pending.extend(remaining);
                if let Some(event) = self.pending.pop_front() {
                    return Ok(Some(event));
                }
                return Ok(None);
            }
            match self.response.chunk().await? {
                Some(chunk) => {
                    // One chunk may complete zero or many frames.
                    self.pending.extend(self.parser.push(&chunk)?);
                }
                None => {
                    self.done = true;
                }
            }
        }
    }
}
/// Passes 2xx responses through; otherwise captures the body text (best
/// effort) into an `UnexpectedStatus` error.
async fn expect_success(response: reqwest::Response) -> Result<reqwest::Response, ApiError> {
    let status = response.status();
    if !status.is_success() {
        let body = response.text().await.unwrap_or_default();
        return Err(ApiError::UnexpectedStatus { status, body });
    }
    Ok(response)
}
#[cfg(test)]
mod tests {
    use std::env::VarError;

    use crate::types::MessageRequest;

    // A missing env var maps to the dedicated MissingApiKey error.
    #[test]
    fn read_api_key_requires_presence() {
        let error = super::read_api_key(|_| Err(VarError::NotPresent))
            .expect_err("missing key should error");
        assert!(matches!(error, crate::error::ApiError::MissingApiKey));
    }

    // An empty string is treated the same as an unset key.
    #[test]
    fn read_api_key_requires_non_empty_value() {
        let error = super::read_api_key(|_| Ok(String::new())).expect_err("empty key should error");
        assert!(matches!(error, crate::error::ApiError::MissingApiKey));
    }

    // with_auth_token(Some("")) must behave like None.
    #[test]
    fn with_auth_token_drops_empty_values() {
        let client = super::AnthropicClient::new("test-key").with_auth_token(Some(String::new()));
        assert!(client.auth_token.is_none());
    }

    // The helper stream_message relies on must flip the flag.
    #[test]
    fn message_request_stream_helper_sets_stream_true() {
        let request = MessageRequest {
            model: "claude-3-7-sonnet-latest".to_string(),
            max_tokens: 64,
            messages: vec![],
            system: None,
            stream: false,
        };
        assert!(request.with_streaming().stream);
    }
}

View File

@@ -0,0 +1,65 @@
use std::env::VarError;
use std::fmt::{Display, Formatter};
/// Error type shared by all Anthropic API client operations.
#[derive(Debug)]
pub enum ApiError {
    /// `ANTHROPIC_API_KEY` was absent or empty.
    MissingApiKey,
    /// `ANTHROPIC_API_KEY` existed but could not be read (e.g. not Unicode).
    InvalidApiKeyEnv(VarError),
    /// Transport-level failure reported by `reqwest`.
    Http(reqwest::Error),
    /// Underlying I/O failure.
    Io(std::io::Error),
    /// A payload failed JSON (de)serialization.
    Json(serde_json::Error),
    /// The API answered with a non-2xx status; `body` is the raw response text.
    UnexpectedStatus {
        status: reqwest::StatusCode,
        body: String,
    },
    /// A server-sent-events frame violated the expected format.
    InvalidSseFrame(&'static str),
}
impl Display for ApiError {
    /// Renders a human-readable description of the failure.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::MissingApiKey => f.write_str(
                "ANTHROPIC_API_KEY is not set; export it before calling the Anthropic API",
            ),
            Self::InvalidApiKeyEnv(error) => {
                write!(f, "failed to read ANTHROPIC_API_KEY: {error}")
            }
            Self::Http(error) => write!(f, "http error: {error}"),
            Self::Io(error) => write!(f, "io error: {error}"),
            Self::Json(error) => write!(f, "json error: {error}"),
            Self::UnexpectedStatus { status, body } => {
                write!(f, "anthropic api returned {status}: {body}")
            }
            Self::InvalidSseFrame(message) => write!(f, "invalid sse frame: {message}"),
        }
    }
}
// Marker impl: the Debug + Display impls above satisfy the Error contract.
impl std::error::Error for ApiError {}

// The From impls below let callers use `?` on the common failure sources.
impl From<reqwest::Error> for ApiError {
    fn from(value: reqwest::Error) -> Self {
        Self::Http(value)
    }
}

impl From<std::io::Error> for ApiError {
    fn from(value: std::io::Error) -> Self {
        Self::Io(value)
    }
}

impl From<serde_json::Error> for ApiError {
    fn from(value: serde_json::Error) -> Self {
        Self::Json(value)
    }
}

impl From<VarError> for ApiError {
    fn from(value: VarError) -> Self {
        Self::InvalidApiKeyEnv(value)
    }
}

View File

@@ -0,0 +1,13 @@
mod client;
mod error;
mod sse;
mod types;
pub use client::{AnthropicClient, MessageStream};
pub use error::ApiError;
pub use sse::{parse_frame, SseParser};
pub use types::{
ContentBlockDelta, ContentBlockDeltaEvent, ContentBlockStartEvent, ContentBlockStopEvent,
InputContentBlock, InputMessage, MessageRequest, MessageResponse, MessageStartEvent,
MessageStopEvent, OutputContentBlock, StreamEvent, Usage,
};

203
rust/crates/api/src/sse.rs Normal file
View File

@@ -0,0 +1,203 @@
use crate::error::ApiError;
use crate::types::StreamEvent;
/// Incremental server-sent-events parser that tolerates arbitrary chunk splits.
#[derive(Debug, Default)]
pub struct SseParser {
    // Bytes received but not yet terminated by a blank-line frame separator.
    buffer: Vec<u8>,
}
impl SseParser {
    /// Creates an empty parser.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Appends `chunk` to the internal buffer and returns every event whose
    /// frame is now complete. Non-event frames (comments, pings, `[DONE]`)
    /// are silently skipped by `parse_frame`.
    pub fn push(&mut self, chunk: &[u8]) -> Result<Vec<StreamEvent>, ApiError> {
        self.buffer.extend_from_slice(chunk);
        let mut events = Vec::new();
        while let Some(frame) = self.next_frame() {
            if let Some(event) = parse_frame(&frame)? {
                events.push(event);
            }
        }
        Ok(events)
    }

    /// Parses whatever partial frame is still buffered after the stream ended.
    pub fn finish(&mut self) -> Result<Vec<StreamEvent>, ApiError> {
        if self.buffer.is_empty() {
            return Ok(Vec::new());
        }
        let trailing = std::mem::take(&mut self.buffer);
        match parse_frame(&String::from_utf8_lossy(&trailing))? {
            Some(event) => Ok(vec![event]),
            None => Ok(Vec::new()),
        }
    }

    /// Removes and returns the earliest complete frame, without its separator.
    fn next_frame(&mut self) -> Option<String> {
        // Fix: pick whichever separator occurs FIRST in the buffer. The
        // previous code preferred "\n\n" unconditionally, so in a mixed
        // LF/CRLF stream an earlier "\r\n\r\n" boundary was skipped and two
        // adjacent frames were merged into one, corrupting the joined JSON.
        let lf = self
            .buffer
            .windows(2)
            .position(|window| window == b"\n\n")
            .map(|position| (position, 2));
        let crlf = self
            .buffer
            .windows(4)
            .position(|window| window == b"\r\n\r\n")
            .map(|position| (position, 4));
        let (position, separator_len) = match (lf, crlf) {
            (Some(a), Some(b)) => Some(if a.0 <= b.0 { a } else { b }),
            (Some(a), None) => Some(a),
            (None, other) => other,
        }?;
        let frame = self
            .buffer
            .drain(..position + separator_len)
            .collect::<Vec<_>>();
        // Everything before the separator is the frame body.
        Some(String::from_utf8_lossy(&frame[..position]).into_owned())
    }
}
/// Parses one SSE frame into a `StreamEvent`.
///
/// Returns `Ok(None)` for frames that carry no event: comment-only frames,
/// `ping` events, frames without `data:` lines, and the `[DONE]` sentinel.
pub fn parse_frame(frame: &str) -> Result<Option<StreamEvent>, ApiError> {
    let body = frame.trim();
    if body.is_empty() {
        return Ok(None);
    }
    let mut event_name: Option<&str> = None;
    let mut data_lines = Vec::new();
    for line in body.lines() {
        if line.starts_with(':') {
            // SSE comment / keepalive line.
            continue;
        } else if let Some(name) = line.strip_prefix("event:") {
            event_name = Some(name.trim());
        } else if let Some(data) = line.strip_prefix("data:") {
            data_lines.push(data.trim_start());
        }
    }
    if event_name == Some("ping") || data_lines.is_empty() {
        return Ok(None);
    }
    // Multiple data: lines in one frame are joined with '\n' per the SSE spec.
    let payload = data_lines.join("\n");
    if payload == "[DONE]" {
        return Ok(None);
    }
    serde_json::from_str::<StreamEvent>(&payload)
        .map(Some)
        .map_err(ApiError::from)
}
#[cfg(test)]
mod tests {
    use super::{parse_frame, SseParser};
    use crate::types::{ContentBlockDelta, OutputContentBlock, StreamEvent};

    // A complete event+data frame decodes into the matching typed event.
    #[test]
    fn parses_single_frame() {
        let frame = concat!(
            "event: content_block_start\n",
            "data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"Hi\"}}\n\n"
        );
        let event = parse_frame(frame).expect("frame should parse");
        assert_eq!(
            event,
            Some(StreamEvent::ContentBlockStart(
                crate::types::ContentBlockStartEvent {
                    index: 0,
                    content_block: OutputContentBlock::Text {
                        text: "Hi".to_string(),
                    },
                },
            ))
        );
    }

    // JSON split across two network chunks is buffered until the frame ends.
    #[test]
    fn parses_chunked_stream() {
        let mut parser = SseParser::new();
        let first = b"event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Hel";
        let second = b"lo\"}}\n\n";
        assert!(parser
            .push(first)
            .expect("first chunk should buffer")
            .is_empty());
        let events = parser.push(second).expect("second chunk should parse");
        assert_eq!(
            events,
            vec![StreamEvent::ContentBlockDelta(
                crate::types::ContentBlockDeltaEvent {
                    index: 0,
                    delta: ContentBlockDelta::TextDelta {
                        text: "Hello".to_string(),
                    },
                }
            )]
        );
    }

    // Comments, ping events, and the [DONE] sentinel all yield no events.
    #[test]
    fn ignores_ping_and_done() {
        let mut parser = SseParser::new();
        let payload = concat!(
            ": keepalive\n",
            "event: ping\n",
            "data: {\"type\":\"ping\"}\n\n",
            "event: message_stop\n",
            "data: {\"type\":\"message_stop\"}\n\n",
            "data: [DONE]\n\n"
        );
        let events = parser
            .push(payload.as_bytes())
            .expect("parser should succeed");
        assert_eq!(
            events,
            vec![StreamEvent::MessageStop(crate::types::MessageStopEvent {})]
        );
    }

    // An event line with no accompanying data: lines is dropped.
    #[test]
    fn ignores_data_less_event_frames() {
        let frame = "event: ping\n\n";
        let event = parse_frame(frame).expect("frame without data should be ignored");
        assert_eq!(event, None);
    }

    // Multiple data: lines in one frame are joined with '\n' before JSON parse.
    #[test]
    fn parses_split_json_across_data_lines() {
        let frame = concat!(
            "event: content_block_delta\n",
            "data: {\"type\":\"content_block_delta\",\"index\":0,\n",
            "data: \"delta\":{\"type\":\"text_delta\",\"text\":\"Hello\"}}\n\n"
        );
        let event = parse_frame(frame).expect("frame should parse");
        assert_eq!(
            event,
            Some(StreamEvent::ContentBlockDelta(
                crate::types::ContentBlockDeltaEvent {
                    index: 0,
                    delta: ContentBlockDelta::TextDelta {
                        text: "Hello".to_string(),
                    },
                }
            ))
        );
    }
}

View File

@@ -0,0 +1,110 @@
use serde::{Deserialize, Serialize};
/// Request body for `POST /v1/messages`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MessageRequest {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<InputMessage>,
    /// Optional system prompt; omitted from the JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system: Option<String>,
    /// Serialized only when `true`, so non-streaming requests omit the field.
    #[serde(default, skip_serializing_if = "std::ops::Not::not")]
    pub stream: bool,
}
impl MessageRequest {
    /// Returns the same request with `stream` forced on.
    #[must_use]
    pub fn with_streaming(self) -> Self {
        Self {
            stream: true,
            ..self
        }
    }
}
/// A single conversational turn sent to the API.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct InputMessage {
    // Role string; `user_text` always sets "user".
    pub role: String,
    pub content: Vec<InputContentBlock>,
}

impl InputMessage {
    /// Convenience constructor for a user message with a single text block.
    #[must_use]
    pub fn user_text(text: impl Into<String>) -> Self {
        Self {
            role: "user".to_string(),
            content: vec![InputContentBlock::Text { text: text.into() }],
        }
    }
}
/// Content block accepted in requests; tagged by `type` in JSON.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum InputContentBlock {
    Text { text: String },
}

/// Response body of a `POST /v1/messages` call (also embedded in the
/// streaming `message_start` event).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MessageResponse {
    pub id: String,
    /// Upstream field is named `type`; renamed because `type` is a keyword.
    #[serde(rename = "type")]
    pub kind: String,
    pub role: String,
    pub content: Vec<OutputContentBlock>,
    pub model: String,
    // Defaults to None when the field is absent from the payload.
    #[serde(default)]
    pub stop_reason: Option<String>,
    #[serde(default)]
    pub stop_sequence: Option<String>,
    pub usage: Usage,
}

/// Content block produced by the model; tagged by `type` in JSON.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OutputContentBlock {
    Text { text: String },
}

/// Token accounting reported by the API.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Usage {
    pub input_tokens: u32,
    pub output_tokens: u32,
}
/// Payload of the `message_start` stream event.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MessageStartEvent {
    pub message: MessageResponse,
}

/// Payload of the `content_block_start` stream event.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ContentBlockStartEvent {
    pub index: u32,
    pub content_block: OutputContentBlock,
}

/// Payload of the `content_block_delta` stream event.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ContentBlockDeltaEvent {
    pub index: u32,
    pub delta: ContentBlockDelta,
}

/// Delta kinds carried by `content_block_delta`; tagged by `type` in JSON.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlockDelta {
    TextDelta { text: String },
}

/// Payload of the `content_block_stop` stream event.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ContentBlockStopEvent {
    pub index: u32,
}

/// Payload of the `message_stop` stream event (carries no fields).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct MessageStopEvent {}

/// All stream event kinds this client understands; tagged by `type` in JSON.
/// Unrecognized tags fail deserialization and surface as `ApiError::Json`.
// NOTE(review): extend deliberately if tool-use or thinking deltas are needed.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum StreamEvent {
    MessageStart(MessageStartEvent),
    ContentBlockStart(ContentBlockStartEvent),
    ContentBlockDelta(ContentBlockDeltaEvent),
    ContentBlockStop(ContentBlockStopEvent),
    MessageStop(MessageStopEvent),
}

View File

@@ -0,0 +1,303 @@
use std::collections::HashMap;
use std::sync::Arc;
use api::{AnthropicClient, InputMessage, MessageRequest, OutputContentBlock, StreamEvent};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpListener;
use tokio::sync::Mutex;
// End-to-end check against a local stub server: request method, path,
// headers, and JSON body shape, plus response decoding (non-streaming path).
#[tokio::test]
async fn send_message_posts_json_and_parses_response() {
    let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
    let body = concat!(
        "{",
        "\"id\":\"msg_test\",",
        "\"type\":\"message\",",
        "\"role\":\"assistant\",",
        "\"content\":[{\"type\":\"text\",\"text\":\"Hello from Claude\"}],",
        "\"model\":\"claude-3-7-sonnet-latest\",",
        "\"stop_reason\":\"end_turn\",",
        "\"stop_sequence\":null,",
        "\"usage\":{\"input_tokens\":12,\"output_tokens\":4}",
        "}"
    );
    let server = spawn_server(state.clone(), http_response("application/json", body)).await;
    let client = AnthropicClient::new("test-key")
        .with_auth_token(Some("proxy-token".to_string()))
        .with_base_url(server.base_url());
    let response = client
        .send_message(&sample_request(false))
        .await
        .expect("request should succeed");
    assert_eq!(response.id, "msg_test");
    assert_eq!(
        response.content,
        vec![OutputContentBlock::Text {
            text: "Hello from Claude".to_string(),
        }]
    );
    let captured = state.lock().await;
    let request = captured.first().expect("server should capture request");
    assert_eq!(request.method, "POST");
    assert_eq!(request.path, "/v1/messages");
    assert_eq!(
        request.headers.get("x-api-key").map(String::as_str),
        Some("test-key")
    );
    assert_eq!(
        request.headers.get("authorization").map(String::as_str),
        Some("Bearer proxy-token")
    );
    assert_eq!(
        request.headers.get("anthropic-version").map(String::as_str),
        Some("2023-06-01")
    );
    let body: serde_json::Value =
        serde_json::from_str(&request.body).expect("request body should be json");
    assert_eq!(
        body.get("model").and_then(serde_json::Value::as_str),
        Some("claude-3-7-sonnet-latest")
    );
    // `stream: false` must serialize to an absent field, not `"stream":false`.
    assert!(
        body.get("stream").is_none(),
        "non-stream request should omit stream=false"
    );
}
// Streaming path against the stub: the SSE frames decode into the five
// event kinds in order, and the request body carries `"stream":true` even
// though `sample_request(false)` was passed (stream_message forces it).
#[tokio::test]
async fn stream_message_parses_sse_events() {
    let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
    let sse = concat!(
        "event: message_start\n",
        "data: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream\",\"type\":\"message\",\"role\":\"assistant\",\"content\":[],\"model\":\"claude-3-7-sonnet-latest\",\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":8,\"output_tokens\":0}}}\n\n",
        "event: content_block_start\n",
        "data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"}}\n\n",
        "event: content_block_delta\n",
        "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Hello\"}}\n\n",
        "event: content_block_stop\n",
        "data: {\"type\":\"content_block_stop\",\"index\":0}\n\n",
        "event: message_stop\n",
        "data: {\"type\":\"message_stop\"}\n\n",
        "data: [DONE]\n\n"
    );
    let server = spawn_server(state.clone(), http_response("text/event-stream", sse)).await;
    let client = AnthropicClient::new("test-key")
        .with_auth_token(Some("proxy-token".to_string()))
        .with_base_url(server.base_url());
    let mut stream = client
        .stream_message(&sample_request(false))
        .await
        .expect("stream should start");
    let mut events = Vec::new();
    while let Some(event) = stream
        .next_event()
        .await
        .expect("stream event should parse")
    {
        events.push(event);
    }
    assert_eq!(events.len(), 5);
    assert!(matches!(events[0], StreamEvent::MessageStart(_)));
    assert!(matches!(events[1], StreamEvent::ContentBlockStart(_)));
    assert!(matches!(events[2], StreamEvent::ContentBlockDelta(_)));
    assert!(matches!(events[3], StreamEvent::ContentBlockStop(_)));
    assert!(matches!(events[4], StreamEvent::MessageStop(_)));
    let captured = state.lock().await;
    let request = captured.first().expect("server should capture request");
    assert!(request.body.contains("\"stream\":true"));
}
// Live smoke test; opt-in (ignored) because it needs a valid key and
// network access. Asserts only event-kind structure, not model output text.
#[tokio::test]
#[ignore = "requires ANTHROPIC_API_KEY and network access"]
async fn live_stream_smoke_test() {
    let client = AnthropicClient::from_env().expect("ANTHROPIC_API_KEY must be set");
    let mut stream = client
        .stream_message(&MessageRequest {
            model: std::env::var("ANTHROPIC_MODEL")
                .unwrap_or_else(|_| "claude-3-7-sonnet-latest".to_string()),
            max_tokens: 32,
            messages: vec![InputMessage::user_text(
                "Reply with exactly: hello from rust",
            )],
            system: None,
            // stream_message forces streaming regardless of this flag.
            stream: false,
        })
        .await
        .expect("live stream should start");
    let mut saw_start = false;
    let mut saw_follow_up = false;
    // Collected for a readable failure message below.
    let mut event_kinds = Vec::new();
    while let Some(event) = stream
        .next_event()
        .await
        .expect("live stream should yield events")
    {
        match event {
            StreamEvent::MessageStart(_) => {
                saw_start = true;
                event_kinds.push("message_start");
            }
            StreamEvent::ContentBlockStart(_) => {
                saw_follow_up = true;
                event_kinds.push("content_block_start");
            }
            StreamEvent::ContentBlockDelta(_) => {
                saw_follow_up = true;
                event_kinds.push("content_block_delta");
            }
            StreamEvent::ContentBlockStop(_) => {
                saw_follow_up = true;
                event_kinds.push("content_block_stop");
            }
            StreamEvent::MessageStop(_) => {
                saw_follow_up = true;
                event_kinds.push("message_stop");
            }
        }
    }
    assert!(
        saw_start,
        "expected a message_start event; got {event_kinds:?}"
    );
    assert!(
        saw_follow_up,
        "expected at least one follow-up stream event; got {event_kinds:?}"
    );
}
/// Request facts captured by the stub HTTP server for later assertions.
#[derive(Debug, Clone, PartialEq, Eq)]
struct CapturedRequest {
    method: String,
    path: String,
    // Header names are lowercased on capture.
    headers: HashMap<String, String>,
    body: String,
}
/// Handle to the one-shot stub server; aborts the accept task on drop.
struct TestServer {
    base_url: String,
    join_handle: tokio::task::JoinHandle<()>,
}

impl TestServer {
    // Base URL (http://127.0.0.1:port) for pointing the client at the stub.
    fn base_url(&self) -> String {
        self.base_url.clone()
    }
}

impl Drop for TestServer {
    // Ensure the background accept task does not outlive the test.
    fn drop(&mut self) {
        self.join_handle.abort();
    }
}
/// Spawns a one-shot HTTP/1.1 stub that captures the first request into
/// `state` and replies with the prebuilt `response` bytes.
async fn spawn_server(state: Arc<Mutex<Vec<CapturedRequest>>>, response: String) -> TestServer {
    let listener = TcpListener::bind("127.0.0.1:0")
        .await
        .expect("listener should bind");
    let address = listener
        .local_addr()
        .expect("listener should have local addr");
    let join_handle = tokio::spawn(async move {
        let (mut socket, _) = listener.accept().await.expect("server should accept");
        let mut buffer = Vec::new();
        let mut header_end = None;
        // Read until the full header section (terminated by CRLFCRLF) arrives.
        loop {
            let mut chunk = [0_u8; 1024];
            let read = socket
                .read(&mut chunk)
                .await
                .expect("request read should succeed");
            if read == 0 {
                break;
            }
            buffer.extend_from_slice(&chunk[..read]);
            if let Some(position) = find_header_end(&buffer) {
                header_end = Some(position);
                break;
            }
        }
        let header_end = header_end.expect("request should include headers");
        let (header_bytes, remaining) = buffer.split_at(header_end);
        let header_text = String::from_utf8(header_bytes.to_vec()).expect("headers should be utf8");
        let mut lines = header_text.split("\r\n");
        let request_line = lines.next().expect("request line should exist");
        let mut parts = request_line.split_whitespace();
        let method = parts.next().expect("method should exist").to_string();
        let path = parts.next().expect("path should exist").to_string();
        let mut headers = HashMap::new();
        let mut content_length = 0_usize;
        for line in lines {
            if line.is_empty() {
                continue;
            }
            let (name, value) = line.split_once(':').expect("header should have colon");
            let value = value.trim().to_string();
            if name.eq_ignore_ascii_case("content-length") {
                content_length = value.parse().expect("content length should parse");
            }
            // Lowercased so assertions can use canonical names.
            headers.insert(name.to_ascii_lowercase(), value);
        }
        // Skip the 4-byte CRLFCRLF separator; the rest is the body prefix
        // already read, then keep reading until content-length is satisfied.
        let mut body = remaining[4..].to_vec();
        while body.len() < content_length {
            let mut chunk = vec![0_u8; content_length - body.len()];
            let read = socket
                .read(&mut chunk)
                .await
                .expect("body read should succeed");
            if read == 0 {
                break;
            }
            body.extend_from_slice(&chunk[..read]);
        }
        state.lock().await.push(CapturedRequest {
            method,
            path,
            headers,
            body: String::from_utf8(body).expect("body should be utf8"),
        });
        socket
            .write_all(response.as_bytes())
            .await
            .expect("response write should succeed");
    });
    TestServer {
        base_url: format!("http://{address}"),
        join_handle,
    }
}
/// Byte offset of the CRLFCRLF that terminates the HTTP header section.
fn find_header_end(bytes: &[u8]) -> Option<usize> {
    (0..bytes.len().saturating_sub(3)).find(|&i| &bytes[i..i + 4] == b"\r\n\r\n")
}
/// Builds a canned HTTP/1.1 200 response with the given content type/body.
fn http_response(content_type: &str, body: &str) -> String {
    let length = body.len();
    format!(
        "HTTP/1.1 200 OK\r\ncontent-type: {content_type}\r\ncontent-length: {length}\r\nconnection: close\r\n\r\n{body}"
    )
}
/// Small fixed request shared by the stub-server tests in this file.
fn sample_request(stream: bool) -> MessageRequest {
    MessageRequest {
        model: String::from("claude-3-7-sonnet-latest"),
        max_tokens: 64,
        messages: vec![InputMessage::user_text("Say hello")],
        system: None,
        stream,
    }
}

View File

@@ -0,0 +1,9 @@
[package]
name = "commands"
version.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
[lints]
workspace = true

View File

@@ -0,0 +1,29 @@
/// One command symbol discovered in the upstream manifest.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CommandManifestEntry {
    pub name: String,
    pub source: CommandSource,
}

/// How the extractor classified the command's wiring (see compat-harness).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CommandSource {
    /// Imported unconditionally.
    Builtin,
    /// Listed in the upstream `INTERNAL_ONLY_COMMANDS` array.
    InternalOnly,
    /// Loaded behind a `feature('…')` gate.
    FeatureGated,
}
/// Ordered collection of discovered command manifest entries.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct CommandRegistry {
    entries: Vec<CommandManifestEntry>,
}

impl CommandRegistry {
    /// Wraps an already-collected list of entries, preserving order.
    #[must_use]
    pub fn new(entries: Vec<CommandManifestEntry>) -> Self {
        Self { entries }
    }

    /// Borrows the entries in discovery order.
    #[must_use]
    pub fn entries(&self) -> &[CommandManifestEntry] {
        self.entries.as_slice()
    }
}

View File

@@ -0,0 +1,14 @@
[package]
name = "compat-harness"
version.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
[dependencies]
commands = { path = "../commands" }
tools = { path = "../tools" }
runtime = { path = "../runtime" }
[lints]
workspace = true

View File

@@ -0,0 +1,308 @@
use std::fs;
use std::path::{Path, PathBuf};
use commands::{CommandManifestEntry, CommandRegistry, CommandSource};
use runtime::{BootstrapPhase, BootstrapPlan};
use tools::{ToolManifestEntry, ToolRegistry, ToolSource};
/// Locations of the upstream TypeScript sources, anchored at the repo root.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct UpstreamPaths {
    repo_root: PathBuf,
}

impl UpstreamPaths {
    /// Uses `repo_root` directly as the upstream repository root.
    #[must_use]
    pub fn from_repo_root(repo_root: impl Into<PathBuf>) -> Self {
        Self {
            repo_root: repo_root.into(),
        }
    }

    /// Derives the repo root as the parent of the Rust workspace directory.
    /// Canonicalization is best-effort; on failure the path is used as given.
    #[must_use]
    pub fn from_workspace_dir(workspace_dir: impl AsRef<Path>) -> Self {
        let raw = workspace_dir.as_ref();
        let resolved = raw.canonicalize().unwrap_or_else(|_| raw.to_path_buf());
        let repo_root = match resolved.parent() {
            Some(parent) => parent.to_path_buf(),
            None => PathBuf::from(".."),
        };
        Self { repo_root }
    }

    /// Path to the upstream command manifest source.
    #[must_use]
    pub fn commands_path(&self) -> PathBuf {
        self.repo_root.join("src/commands.ts")
    }

    /// Path to the upstream tool manifest source.
    #[must_use]
    pub fn tools_path(&self) -> PathBuf {
        self.repo_root.join("src/tools.ts")
    }

    /// Path to the upstream CLI entrypoint source.
    #[must_use]
    pub fn cli_path(&self) -> PathBuf {
        self.repo_root.join("src/entrypoints/cli.tsx")
    }
}
/// Everything the harness can currently extract from the upstream sources.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExtractedManifest {
    pub commands: CommandRegistry,
    pub tools: ToolRegistry,
    pub bootstrap: BootstrapPlan,
}
/// Reads the three upstream sources and extracts all manifests in one pass.
/// Errors are surfaced in read order: commands, then tools, then the CLI.
pub fn extract_manifest(paths: &UpstreamPaths) -> std::io::Result<ExtractedManifest> {
    let commands = extract_commands(&fs::read_to_string(paths.commands_path())?);
    let tools = extract_tools(&fs::read_to_string(paths.tools_path())?);
    let bootstrap = extract_bootstrap_plan(&fs::read_to_string(paths.cli_path())?);
    Ok(ExtractedManifest {
        commands,
        tools,
        bootstrap,
    })
}
/// Scans `src/commands.ts` line-by-line for command symbols.
///
/// Heuristics, in order:
/// - names inside the `export const INTERNAL_ONLY_COMMANDS = [` … `]` array
///   are recorded as `InternalOnly`;
/// - symbols on `import ` lines are recorded as `Builtin`;
/// - assignments mentioning both `feature('` and `./commands/` are
///   recorded as `FeatureGated`.
#[must_use]
pub fn extract_commands(source: &str) -> CommandRegistry {
    let mut entries = Vec::new();
    // True while inside the INTERNAL_ONLY_COMMANDS array literal.
    let mut in_internal_block = false;
    for raw_line in source.lines() {
        let line = raw_line.trim();
        if line.starts_with("export const INTERNAL_ONLY_COMMANDS = [") {
            in_internal_block = true;
            continue;
        }
        if in_internal_block {
            if line.starts_with(']') {
                in_internal_block = false;
                continue;
            }
            if let Some(name) = first_identifier(line) {
                entries.push(CommandManifestEntry {
                    name,
                    source: CommandSource::InternalOnly,
                });
            }
            continue;
        }
        if line.starts_with("import ") {
            // NOTE(review): records every imported symbol, not only commands —
            // confirm this matches the upstream file's import discipline.
            for imported in imported_symbols(line) {
                entries.push(CommandManifestEntry {
                    name: imported,
                    source: CommandSource::Builtin,
                });
            }
        }
        if line.contains("feature('") && line.contains("./commands/") {
            if let Some(name) = first_assignment_identifier(line) {
                entries.push(CommandManifestEntry {
                    name,
                    source: CommandSource::FeatureGated,
                });
            }
        }
    }
    dedupe_commands(entries)
}
/// Scans `src/tools.ts` line-by-line for tool symbols.
///
/// Symbols ending in `Tool` imported from `./tools/` are `Base`; assignments
/// that mention `feature('` and name something ending in `Tool`/`Tools`
/// are `Conditional`.
#[must_use]
pub fn extract_tools(source: &str) -> ToolRegistry {
    let mut entries = Vec::new();
    for raw_line in source.lines() {
        let line = raw_line.trim();
        if line.starts_with("import ") && line.contains("./tools/") {
            for imported in imported_symbols(line) {
                // The suffix filter drops non-tool helper imports.
                if imported.ends_with("Tool") {
                    entries.push(ToolManifestEntry {
                        name: imported,
                        source: ToolSource::Base,
                    });
                }
            }
        }
        if line.contains("feature('") && line.contains("Tool") {
            if let Some(name) = first_assignment_identifier(line) {
                if name.ends_with("Tool") || name.ends_with("Tools") {
                    entries.push(ToolManifestEntry {
                        name,
                        source: ToolSource::Conditional,
                    });
                }
            }
        }
    }
    dedupe_tools(entries)
}
/// Derives the upstream bootstrap phase sequence from markers found in the
/// CLI entrypoint source. `CliEntry` and `MainRuntime` always bracket the
/// plan; every other phase is included only when its marker appears.
#[must_use]
pub fn extract_bootstrap_plan(source: &str) -> BootstrapPlan {
    // Each probe pairs a source-text marker with the phase it proves exists.
    let probes = [
        (source.contains("--version"), BootstrapPhase::FastPathVersion),
        (
            source.contains("startupProfiler"),
            BootstrapPhase::StartupProfiler,
        ),
        (
            source.contains("--dump-system-prompt"),
            BootstrapPhase::SystemPromptFastPath,
        ),
        (
            source.contains("--claude-in-chrome-mcp"),
            BootstrapPhase::ChromeMcpFastPath,
        ),
        (
            source.contains("--daemon-worker"),
            BootstrapPhase::DaemonWorkerFastPath,
        ),
        (
            source.contains("remote-control"),
            BootstrapPhase::BridgeFastPath,
        ),
        (
            source.contains("args[0] === 'daemon'"),
            BootstrapPhase::DaemonFastPath,
        ),
        (
            source.contains("args[0] === 'ps'") || source.contains("args.includes('--bg')"),
            BootstrapPhase::BackgroundSessionFastPath,
        ),
        (
            source.contains("args[0] === 'new' || args[0] === 'list' || args[0] === 'reply'"),
            BootstrapPhase::TemplateFastPath,
        ),
        (
            source.contains("environment-runner"),
            BootstrapPhase::EnvironmentRunnerFastPath,
        ),
    ];
    let mut phases = vec![BootstrapPhase::CliEntry];
    phases.extend(
        probes
            .into_iter()
            .filter(|(found, _)| *found)
            .map(|(_, phase)| phase),
    );
    phases.push(BootstrapPhase::MainRuntime);
    BootstrapPlan::from_phases(phases)
}
/// Extracts the symbols bound by a single-line TypeScript `import` statement.
/// Named imports (`import { a, b } from …`) yield each name (the token before
/// any `as` alias); a default import yields its single name; non-import lines
/// yield nothing.
fn imported_symbols(line: &str) -> Vec<String> {
    let Some(rest) = line.strip_prefix("import ") else {
        return Vec::new();
    };
    let clause = rest.split(" from ").next().unwrap_or_default().trim();
    if clause.starts_with('{') {
        clause
            .trim_matches(|c| c == '{' || c == '}')
            .split(',')
            .map(str::trim)
            .filter(|part| !part.is_empty())
            .filter_map(|part| part.split_whitespace().next().map(str::to_string))
            .collect()
    } else {
        let default_import = clause.split(',').next().unwrap_or_default().trim();
        if default_import.is_empty() {
            Vec::new()
        } else {
            vec![default_import.to_string()]
        }
    }
}
/// First identifier on the left-hand side of the first `=` on the line.
fn first_assignment_identifier(line: &str) -> Option<String> {
    let lhs = line.trim_start().split('=').next()?.trim();
    first_identifier(lhs)
}
/// First maximal run of `[A-Za-z0-9_-]` characters on the line, if any.
fn first_identifier(line: &str) -> Option<String> {
    let is_ident_char = |c: char| c.is_ascii_alphanumeric() || c == '_' || c == '-';
    let start = line.find(is_ident_char)?;
    let tail = &line[start..];
    let end = tail.find(|c: char| !is_ident_char(c)).unwrap_or(tail.len());
    Some(tail[..end].to_string())
}
/// Drops duplicate (name, source) pairs while preserving first-seen order.
fn dedupe_commands(entries: Vec<CommandManifestEntry>) -> CommandRegistry {
    let mut unique: Vec<CommandManifestEntry> = Vec::with_capacity(entries.len());
    for candidate in entries {
        let already_seen = unique
            .iter()
            .any(|seen| seen.name == candidate.name && seen.source == candidate.source);
        if !already_seen {
            unique.push(candidate);
        }
    }
    CommandRegistry::new(unique)
}
/// Drops duplicate (name, source) pairs while preserving first-seen order.
fn dedupe_tools(entries: Vec<ToolManifestEntry>) -> ToolRegistry {
    let mut unique: Vec<ToolManifestEntry> = Vec::with_capacity(entries.len());
    for candidate in entries {
        let already_seen = unique
            .iter()
            .any(|seen| seen.name == candidate.name && seen.source == candidate.source);
        if !already_seen {
            unique.push(candidate);
        }
    }
    ToolRegistry::new(unique)
}
#[cfg(test)]
mod tests {
    use super::*;

    // Resolves crates/compat-harness/../.. == the rust/ workspace dir, whose
    // parent is the repo root holding the upstream src/ tree.
    fn fixture_paths() -> UpstreamPaths {
        let workspace_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../..");
        UpstreamPaths::from_workspace_dir(workspace_dir)
    }

    // Proof-oriented smoke test: all three manifests extract non-empty.
    #[test]
    fn extracts_non_empty_manifests_from_upstream_repo() {
        let manifest = extract_manifest(&fixture_paths()).expect("manifest should load");
        assert!(!manifest.commands.entries().is_empty());
        assert!(!manifest.tools.entries().is_empty());
        assert!(!manifest.bootstrap.phases().is_empty());
    }

    // Spot-checks known upstream command symbols and that the array's own
    // name is not mistaken for a command.
    #[test]
    fn detects_known_upstream_command_symbols() {
        let commands = extract_commands(
            &fs::read_to_string(fixture_paths().commands_path()).expect("commands.ts"),
        );
        let names: Vec<_> = commands
            .entries()
            .iter()
            .map(|entry| entry.name.as_str())
            .collect();
        assert!(names.contains(&"addDir"));
        assert!(names.contains(&"review"));
        assert!(!names.contains(&"INTERNAL_ONLY_COMMANDS"));
    }

    // Spot-checks known upstream tool symbols.
    #[test]
    fn detects_known_upstream_tool_symbols() {
        let tools =
            extract_tools(&fs::read_to_string(fixture_paths().tools_path()).expect("tools.ts"));
        let names: Vec<_> = tools
            .entries()
            .iter()
            .map(|entry| entry.name.as_str())
            .collect();
        assert!(names.contains(&"AgentTool"));
        assert!(names.contains(&"BashTool"));
    }
}

View File

@@ -0,0 +1,9 @@
[package]
name = "runtime"
version.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
[lints]
workspace = true

View File

@@ -0,0 +1,56 @@
/// Startup phases recognized in the upstream CLI bootstrap sequence.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BootstrapPhase {
    CliEntry,
    FastPathVersion,
    StartupProfiler,
    SystemPromptFastPath,
    ChromeMcpFastPath,
    DaemonWorkerFastPath,
    BridgeFastPath,
    DaemonFastPath,
    BackgroundSessionFastPath,
    TemplateFastPath,
    EnvironmentRunnerFastPath,
    MainRuntime,
}

/// Ordered, duplicate-free sequence of bootstrap phases.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BootstrapPlan {
    phases: Vec<BootstrapPhase>,
}

impl BootstrapPlan {
    /// The full phase sequence of the upstream Claude Code CLI.
    #[must_use]
    pub fn claude_code_default() -> Self {
        Self::from_phases(vec![
            BootstrapPhase::CliEntry,
            BootstrapPhase::FastPathVersion,
            BootstrapPhase::StartupProfiler,
            BootstrapPhase::SystemPromptFastPath,
            BootstrapPhase::ChromeMcpFastPath,
            BootstrapPhase::DaemonWorkerFastPath,
            BootstrapPhase::BridgeFastPath,
            BootstrapPhase::DaemonFastPath,
            BootstrapPhase::BackgroundSessionFastPath,
            BootstrapPhase::TemplateFastPath,
            BootstrapPhase::EnvironmentRunnerFastPath,
            BootstrapPhase::MainRuntime,
        ])
    }

    /// Builds a plan, keeping only the first occurrence of each phase.
    #[must_use]
    pub fn from_phases(phases: Vec<BootstrapPhase>) -> Self {
        let mut unique: Vec<BootstrapPhase> = Vec::with_capacity(phases.len());
        for phase in phases {
            if !unique.contains(&phase) {
                unique.push(phase);
            }
        }
        Self { phases: unique }
    }

    /// Phases in execution order.
    #[must_use]
    pub fn phases(&self) -> &[BootstrapPhase] {
        self.phases.as_slice()
    }
}

View File

@@ -0,0 +1,13 @@
[package]
name = "rusty-claude-cli"
version.workspace = true
edition.workspace = true
license.workspace = true
publish.workspace = true
[dependencies]
compat-harness = { path = "../compat-harness" }
runtime = { path = "../runtime" }
[lints]
workspace = true

Some files were not shown because too many files have changed in this diff Show More