Merge remote-tracking branch 'origin/main' into fix-opencode-mcp-usage

This commit is contained in:
iamtoruk 2026-05-11 21:29:08 -07:00
commit 9f0067bf9d
33 changed files with 769 additions and 114 deletions

18
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View file

@ -0,0 +1,18 @@
## Summary
<!-- What does this PR do? 1-3 bullet points. -->
## Testing
- [ ] I have tested this locally against real data (not just unit tests)
- [ ] `npm test` passes
- [ ] `npm run build` succeeds
### For new providers only:
- [ ] I installed the tool and generated real sessions by using it
- [ ] `npm run dev -- today` shows correct costs and session counts for this provider
- [ ] `npm run dev -- models --provider <name>` shows correct model names and pricing
- [ ] Screenshot or terminal output attached below proving it works with real data
<!-- Paste screenshot / terminal output here -->

View file

@ -45,7 +45,9 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: CodeBurnMenubar-${{ steps.version.outputs.value }}
path: mac/.build/dist/CodeBurnMenubar-*.zip
path: |
mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip
mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip.sha256
if-no-files-found: error
- name: Create / update GitHub Release
@ -66,6 +68,6 @@ jobs:
and macOS shows "cannot verify developer", right-click the app in Finder and
pick Open to whitelist it once.
files: |
mac/.build/dist/CodeBurnMenubar-*.zip
mac/.build/dist/CodeBurnMenubar-*.zip.sha256
mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip
mac/.build/dist/CodeBurnMenubar-${{ steps.version.outputs.value }}.zip.sha256
fail_on_unmatched_files: true

View file

@ -1,5 +1,17 @@
# Changelog
## Unreleased
### Added (CLI)
- **IBM Bob provider.** Discovers IBM Bob IDE task history, reuses the
Cline-family parser for token/cost records, extracts model tags and
workspace-based project names from session data. Closes #248.
### Fixed (CLI)
- **Claude 1-hour cache write pricing.** 1-hour cache writes are now priced
at 2x base input (previously used the 5-minute 1.25x rate for all writes).
Daily cache bumped to v6 so stale totals are recomputed. Closes #276.
## 0.9.8 - 2026-05-10
### Added (CLI)

View file

@ -84,6 +84,23 @@ The `.github/workflows/block-claude-coauthor.yml` workflow rejects any PR whose
If a flagged PR is rejected by this check, the workflow prints the exact rebase command to fix it.
## Before You Start
**Comment on the issue first.** Before writing code for a feature or new provider, leave a comment on the relevant issue saying what you plan to do. Wait for a maintainer to confirm the approach. Unsolicited PRs that duplicate work already in progress or take an incompatible approach will be closed.
**One PR at a time.** We will not review a second PR from you until the first is merged or closed. This keeps the review queue manageable and ensures each contribution gets proper attention.
## Adding a New Provider
New providers have the highest bar because broken parsing silently produces wrong data for users. Before opening a PR:
1. **Install the tool and use it.** Generate real sessions by actually coding with the provider. We do this ourselves for every provider we ship.
2. **Test against real data.** Run `npm run dev -- today` and `npm run dev -- models` with your real sessions and confirm the output looks correct — costs are non-zero, model names resolve, session counts match what you see in the tool.
3. **Include proof in the PR.** Attach a screenshot or terminal output showing codeburn correctly parsing your real sessions. PRs for new providers without evidence of local testing will not be reviewed.
4. **Do not rely on AI-generated guesses about storage paths or schemas.** Tools change their data formats between versions. The only way to know the current schema is to install the tool and inspect the actual files on disk.
PRs that add a provider based solely on online documentation or AI-generated code, without evidence of testing against real data, will be closed.
## Pull Requests
1. Fork or branch from `main`.

View file

@ -13,7 +13,7 @@
<a href="https://github.com/sponsors/iamtoruk"><img src="https://img.shields.io/badge/sponsor-♥-ea4aaa?logo=github" alt="Sponsor" /></a>
</p>
CodeBurn tracks token usage, cost, and performance across **18 AI coding tools**. It breaks down spending by task type, model, tool, project, and provider so you can see exactly where your budget goes.
CodeBurn tracks token usage, cost, and performance across **19 AI coding tools**. It breaks down spending by task type, model, tool, project, and provider so you can see exactly where your budget goes.
Everything runs locally. No wrapper, no proxy, no API keys. CodeBurn reads session data directly from disk and prices every call using [LiteLLM](https://github.com/BerriAI/litellm).
@ -104,6 +104,7 @@ Arrow keys switch between Today, 7 Days, 30 Days, Month, and 6 Months (use `--fr
| <img src="assets/providers/cursor-agent.jpg" width="28" /> | cursor-agent | Yes | [cursor-agent.md](docs/providers/cursor-agent.md) |
| <img src="assets/providers/gemini.png" width="28" /> | Gemini CLI | Yes | [gemini.md](docs/providers/gemini.md) |
| <img src="assets/providers/copilot.jpg" width="28" /> | GitHub Copilot | Yes | [copilot.md](docs/providers/copilot.md) |
| <img src="assets/providers/ibm-bob.svg" width="28" /> | IBM Bob | Yes | [ibm-bob.md](docs/providers/ibm-bob.md) |
| <img src="assets/providers/kiro.png" width="28" /> | Kiro | Yes | [kiro.md](docs/providers/kiro.md) |
| <img src="assets/providers/opencode.png" width="28" /> | OpenCode | Yes | [opencode.md](docs/providers/opencode.md) |
| <img src="assets/providers/openclaw.jpg" width="28" /> | OpenClaw | Yes | [openclaw.md](docs/providers/openclaw.md) |
@ -119,7 +120,7 @@ Arrow keys switch between Today, 7 Days, 30 Days, Month, and 6 Months (use `--fr
Each provider doc lists the exact data location, storage format, and known quirks. Linux and Windows paths are detected automatically. If a path has changed or is wrong, please [open an issue](https://github.com/getagentseal/codeburn/issues).
Provider logos are trademarks of their respective owners. The icon set was sourced from [tokscale](https://github.com/junhoyeo/tokscale) (MIT) plus official vendor assets, used under nominative fair use for the purpose of identifying supported tools.
Provider logos are trademarks of their respective owners. The icon set was sourced from [tokscale](https://github.com/junhoyeo/tokscale) (MIT), official vendor assets, and simple provider identifiers, used under nominative fair use for the purpose of identifying supported tools.
CodeBurn auto-detects which AI coding tools you use. If multiple providers have session data on disk, press `p` in the dashboard to toggle between them.
@ -378,6 +379,8 @@ These are starting points, not verdicts. A 60% cache hit on a single experimenta
**OpenClaw** stores agent sessions as JSONL at `~/.openclaw/agents/*.jsonl`. Also checks legacy paths `.clawdbot`, `.moltbot`, `.moldbot`. Token usage comes from assistant message `usage` blocks; model from `modelId` or `message.model` fields.
**IBM Bob** stores IDE task history in `User/globalStorage/ibm.bob-code/tasks/<task-id>/` under the IBM Bob application data directory. CodeBurn reads `ui_messages.json` for API request token/cost records and `api_conversation_history.json` for the selected model, with support for both GA (`IBM Bob`) and preview (`Bob-IDE`) app data folders.
**Roo Code / KiloCode** are Cline-family VS Code extensions. CodeBurn reads `ui_messages.json` from each task directory in VS Code's `globalStorage`, filtering `type: "say"` entries with `say: "api_req_started"` to extract token counts.
CodeBurn deduplicates messages (by API message ID for Claude, by cumulative token cross-check for Codex, by conversation/timestamp for Cursor, by session ID for Gemini, by session+message ID for OpenCode, by responseId for Pi/OMP), filters by date range per entry, and classifies each turn.

View file

@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64" role="img" aria-label="IBM Bob">
<rect width="64" height="64" rx="12" fill="#0F62FE"/>
<path d="M14 19h36v5H14zm0 10h36v5H14zm0 10h36v5H14z" fill="#fff" opacity=".9"/>
<circle cx="24" cy="32" r="4" fill="#0F62FE"/>
<circle cx="40" cy="32" r="4" fill="#0F62FE"/>
</svg>

After

Width:  |  Height:  |  Size: 337 B

View file

@ -128,14 +128,14 @@ type Provider = {
}
```
`src/providers/index.ts` registers eighteen providers across two tiers:
`src/providers/index.ts` registers nineteen providers across two tiers:
- **Eager**: `claude`, `codex`, `copilot`, `droid`, `gemini`, `kilo-code`, `kiro`, `openclaw`, `pi`, `omp`, `qwen`, `roo-code`. Imported at module load.
- **Eager**: `claude`, `codex`, `copilot`, `droid`, `gemini`, `ibm-bob`, `kilo-code`, `kiro`, `openclaw`, `pi`, `omp`, `qwen`, `roo-code`. Imported at module load.
- **Lazy**: `antigravity`, `goose`, `cursor`, `opencode`, `cursor-agent`, `crush`. Imported via dynamic `import()` so the heavy dependencies (SQLite, protobuf) do not touch users who do not have those tools installed.
Both lists hit the same `getAllProviders()` aggregator. A failed lazy import is silent and excludes that provider from the run.
`src/providers/vscode-cline-parser.ts` is a shared helper consumed by `kilo-code` and `roo-code`. It is not registered as a provider on its own.
`src/providers/vscode-cline-parser.ts` is a shared helper consumed by `ibm-bob`, `kilo-code`, and `roo-code`. It is not registered as a provider on its own.
For the per-provider data location, storage format, parser quirks, and test coverage, see `docs/providers/`.

View file

@ -15,6 +15,7 @@ For the architectural picture, see `../architecture.md`.
| [Copilot](copilot.md) | JSONL | `src/providers/copilot.ts` | `tests/providers/copilot.test.ts` |
| [Droid](droid.md) | JSONL | `src/providers/droid.ts` | `tests/providers/droid.test.ts` |
| [Gemini](gemini.md) | JSON / JSONL | `src/providers/gemini.ts` | none |
| [IBM Bob](ibm-bob.md) | JSON | `src/providers/ibm-bob.ts` | `tests/providers/ibm-bob.test.ts` |
| [KiloCode](kilo-code.md) | JSON | `src/providers/kilo-code.ts` | `tests/providers/kilo-code.test.ts` |
| [Kiro](kiro.md) | JSON | `src/providers/kiro.ts` | `tests/providers/kiro.test.ts` |
| [OpenClaw](openclaw.md) | JSONL | `src/providers/openclaw.ts` | `tests/providers/openclaw.test.ts` |
@ -38,7 +39,7 @@ For the architectural picture, see `../architecture.md`.
| Helper | Used by | Source |
|---|---|---|
| [vscode-cline-parser](vscode-cline-parser.md) | `kilo-code`, `roo-code` | `src/providers/vscode-cline-parser.ts` |
| [vscode-cline-parser](vscode-cline-parser.md) | `ibm-bob`, `kilo-code`, `roo-code` | `src/providers/vscode-cline-parser.ts` |
## File Format

View file

@ -25,6 +25,17 @@ JSONL, one event per line, per session file. Sessions live under `<project>/<ses
`createSessionParser` returns an empty async generator (`claude.ts:101-105`). Claude is a special case: `src/parser.ts` reads Claude JSONL files directly with full turn grouping, dedup of streaming message IDs, and MCP tool inventory extraction. The provider object exists only so `discoverSessions` can return Claude session sources alongside the others.
## Pricing
Claude Code reports total cache-write tokens in `usage.cache_creation_input_tokens`.
When available, it also splits those writes by duration in
`usage.cache_creation.ephemeral_5m_input_tokens` and
`usage.cache_creation.ephemeral_1h_input_tokens`. CodeBurn keeps the existing
aggregate cache-write token total for reports, but prices the 1-hour portion at
2x base input cost (1.6x the 5-minute cache-write rate exposed by LiteLLM).
If the split fields are missing, the parser falls back to the legacy behavior
and prices every cache write at the 5-minute rate.
## Caching
None at the provider level. The daily aggregation cache (`src/daily-cache.ts`) reuses prior computed days.

55
docs/providers/ibm-bob.md Normal file
View file

@ -0,0 +1,55 @@
# IBM Bob
IBM Bob IDE task history.
- **Source:** `src/providers/ibm-bob.ts`
- **Loading:** eager (`src/providers/index.ts`)
- **Test:** `tests/providers/ibm-bob.test.ts`
## Where It Reads From
IBM Bob stores IDE task history below `User/globalStorage/ibm.bob-code/tasks/` in the application data directory.
Default paths checked:
| Platform | Paths |
|---|---|
| macOS | `~/Library/Application Support/IBM Bob/User/globalStorage/ibm.bob-code/`, `~/Library/Application Support/Bob-IDE/User/globalStorage/ibm.bob-code/` |
| Windows | `%APPDATA%/IBM Bob/User/globalStorage/ibm.bob-code/`, `%APPDATA%/Bob-IDE/User/globalStorage/ibm.bob-code/` |
| Linux | `$XDG_CONFIG_HOME/IBM Bob/User/globalStorage/ibm.bob-code/`, `$XDG_CONFIG_HOME/Bob-IDE/User/globalStorage/ibm.bob-code/` with `~/.config` fallback |
The `Bob-IDE` paths cover the preview-era application name that some installs used before the GA `IBM Bob` directory name was adopted.
## Storage Format
Each task is a directory under `tasks/<task-id>/` and must contain `ui_messages.json`.
CodeBurn parses the same Cline-family UI event format used by Roo Code and KiloCode:
- `ui_messages.json` entries with `type: "say"` and `say: "api_req_started"` contain serialized token/cost metrics.
- `ui_messages.json` user text entries seed the turn's first user message.
- `api_conversation_history.json` is optional and is used to extract the selected model from `<model>...</model>` environment details when present.
- `task_metadata.json` may exist upstream, but CodeBurn does not need it for usage math today.
If no model tag is present, the parser uses `ibm-bob-auto`, which is priced through the same conservative Sonnet fallback used for Cline-family auto modes.
## Caching
None at the provider level.
## Deduplication
Per `<providerName>:<taskId>:<apiRequestIndex>` via `vscode-cline-parser.ts`.
## Quirks
- IBM Bob has shipped under both `IBM Bob` and `Bob-IDE` application data folder names.
- This provider intentionally covers the IDE task-history format. Bob Shell's `~/.bob` checkpoint data is a separate storage surface and is not parsed until we have a stable usage schema fixture.
- The shared Cline parser does not currently extract individual tool names from UI messages, so tool breakdowns are empty for IBM Bob just like Roo Code and KiloCode.
## When Fixing A Bug Here
1. Check whether the install uses `IBM Bob` or `Bob-IDE` as the application data directory.
2. Confirm the task folder still contains `ui_messages.json` and `api_conversation_history.json`.
3. If the UI message schema changed, add a focused fixture to `tests/providers/ibm-bob.test.ts`.
4. If the change also affects Roo Code or KiloCode, update `src/providers/vscode-cline-parser.ts` and run all three provider test files.

View file

@ -1,17 +1,18 @@
# vscode-cline-parser (Shared Helper)
Shared discovery and parsing for VS Code extensions descended from Cline.
Shared discovery and parsing for Cline-family task folders.
- **Source:** `src/providers/vscode-cline-parser.ts`
- **Loading:** not a provider; imported by `kilo-code.ts` and `roo-code.ts`.
- **Test:** none directly. Coverage comes from `tests/providers/kilo-code.test.ts` and `tests/providers/roo-code.test.ts`.
- **Loading:** not a provider; imported by `ibm-bob.ts`, `kilo-code.ts`, and `roo-code.ts`.
- **Test:** none directly. Coverage comes from `tests/providers/ibm-bob.test.ts`, `tests/providers/kilo-code.test.ts`, and `tests/providers/roo-code.test.ts`.
## What it does
Two responsibilities:
1. `discoverClineTasks(extensionId)` walks VS Code's `globalStorage/<extensionId>/tasks/` directories and returns one source per task that has a `ui_messages.json` file (`vscode-cline-parser.ts:25-50`).
2. `createClineParser` reads each task's `ui_messages.json` and `api_conversation_history.json`, extracts model, tools, and token counts, and yields `ParsedProviderCall` objects.
1. `discoverClineTasks(extensionId)` walks VS Code's `globalStorage/<extensionId>/tasks/` directories and returns one source per task that has a `ui_messages.json` file.
2. `discoverClineTasksInBaseDirs(baseDirs)` does the same for non-VS Code apps with compatible task storage, such as IBM Bob.
3. `createClineParser` reads each task's `ui_messages.json` and `api_conversation_history.json`, extracts model and token counts, and yields `ParsedProviderCall` objects.
## Storage layout
@ -25,25 +26,25 @@ Per task directory:
## Model resolution
The model is extracted from `api_conversation_history.json` by searching user message content blocks for a `<model>...</model>` tag (`vscode-cline-parser.ts:54-72`). Falls back to `cline-auto` if no tag is found.
The model is extracted from `api_conversation_history.json` by searching user message content blocks for a `<model>...</model>` tag. Falls back to the provider-supplied auto model (`cline-auto` by default) if no tag is found.
## Token extraction
From `api_req_started` entries inside `ui_messages.json`. Each such entry's `text` field is JSON-parsed; the parsed object holds `tokensIn`, `tokensOut`, `cacheReads`, `cacheWrites`, and (optionally) `cost` (`vscode-cline-parser.ts:119-134`).
From `api_req_started` entries inside `ui_messages.json`. Each such entry's `text` field is JSON-parsed; the parsed object holds `tokensIn`, `tokensOut`, `cacheReads`, `cacheWrites`, and (optionally) `cost`.
If `cost` is present, it is used directly. If not, `calculateCost` from `src/models.ts` computes it from tokens (`vscode-cline-parser.ts:139`).
If `cost` is present, it is used directly. If not, `calculateCost` from `src/models.ts` computes it from tokens.
## Deduplication
Per `<providerName>:<taskId>:<index>` where `index` is the position of the `api_req_started` entry within `ui_messages.json` (`vscode-cline-parser.ts:109`).
Per `<providerName>:<taskId>:<index>` where `index` is the position of the `api_req_started` entry within `ui_messages.json`.
## Quirks
- Only the **first** user message is emitted as `userMessage` in the `ParsedProviderCall` (`vscode-cline-parser.ts:157`). Subsequent user turns are accounted but not surfaced.
- Only the **first** user message is emitted as `userMessage` in the `ParsedProviderCall`. Subsequent user turns are accounted but not surfaced.
- The model regex looks inside content blocks, not at top-level fields. Some Cline-derivative extensions emit the model elsewhere; if you add support for one, branch on extension ID rather than rewriting the regex.
## When fixing a bug here
1. A change here ripples to **both** KiloCode and Roo Code. Run both test files (`tests/providers/kilo-code.test.ts` and `tests/providers/roo-code.test.ts`) before opening a PR.
1. A change here ripples to IBM Bob, KiloCode, and Roo Code. Run all three provider test files before opening a PR.
2. If you find that one of the consuming providers emits a different shape, branch on the parameter the discovery functions already take (the extension ID, or the base directories for non-VS Code stores); do not duplicate the parser.
3. If you add support for a third Cline-derivative extension, register it as a thin wrapper file in the same shape as `kilo-code.ts` and `roo-code.ts`.
3. If you add support for another Cline-family task store, register it as a thin wrapper file in the same shape as `ibm-bob.ts`, `kilo-code.ts`, and `roo-code.ts`.

View file

@ -96,7 +96,7 @@ codesign --verify --deep --strict "${BUNDLE}" 2>/dev/null || echo " (signature
ZIP_NAME="CodeBurnMenubar-${VERSION}.zip"
ZIP_PATH="${DIST_DIR}/${ZIP_NAME}"
echo "▸ Packaging ${ZIP_NAME}..."
(cd "${DIST_DIR}" && /usr/bin/ditto -c -k --keepParent "${BUNDLE_NAME}" "${ZIP_NAME}")
(cd "${DIST_DIR}" && COPYFILE_DISABLE=1 /usr/bin/ditto -c -k --norsrc --keepParent "${BUNDLE_NAME}" "${ZIP_NAME}")
CHECKSUM_NAME="${ZIP_NAME}.sha256"
CHECKSUM_PATH="${DIST_DIR}/${CHECKSUM_NAME}"

View file

@ -140,6 +140,17 @@ final class AppStore {
inFlightKeys.removeAll()
}
func resetRefreshState(clearCache: Bool = false) {
switchTask?.cancel()
switchTask = nil
resetLoadingState()
attemptedKeys.removeAll()
lastErrorByKey.removeAll()
if clearCache {
cache.removeAll()
}
}
private let loadingWatchdogSeconds: TimeInterval = 60
@discardableResult
@ -725,6 +736,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable {
case copilot = "Copilot"
case droid = "Droid"
case gemini = "Gemini"
case ibmBob = "IBM Bob"
case kiro = "Kiro"
case kiloCode = "KiloCode"
case openclaw = "OpenClaw"
@ -742,6 +754,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable {
case .cursor: ["cursor", "cursor agent"]
case .rooCode: ["roo-code", "roo code"]
case .kiloCode: ["kilo-code", "kilocode"]
case .ibmBob: ["ibm-bob", "ibm bob"]
case .openclaw: ["openclaw"]
default: [rawValue.lowercased()]
}
@ -756,6 +769,7 @@ enum ProviderFilter: String, CaseIterable, Identifiable {
case .copilot: "copilot"
case .droid: "droid"
case .gemini: "gemini"
case .ibmBob: "ibm-bob"
case .kiloCode: "kilo-code"
case .kiro: "kiro"
case .openclaw: "openclaw"

View file

@ -6,6 +6,7 @@ private let refreshIntervalSeconds: UInt64 = 30
private let nanosPerSecond: UInt64 = 1_000_000_000
private let refreshIntervalNanos: UInt64 = refreshIntervalSeconds * nanosPerSecond
private let forceRefreshWatchdogSeconds: TimeInterval = 90
private let interactiveQuotaRefreshFloorSeconds: TimeInterval = 30
private let statusItemWidth: CGFloat = NSStatusItem.variableLength
private let popoverWidth: CGFloat = 360
private let popoverHeight: CGFloat = 660
@ -39,6 +40,8 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
private var forceRefreshTask: Task<Void, Never>?
private var forceRefreshStartedAt: Date?
private var forceRefreshGeneration: UInt64 = 0
private var manualRefreshTask: Task<Void, Never>?
private var manualRefreshGeneration: UInt64 = 0
func applicationWillFinishLaunching(_ notification: Notification) {
// Set accessory policy before the app's focus chain forms. On macOS Tahoe
@ -95,6 +98,10 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
self?.forceRefreshTask = nil
self?.forceRefreshStartedAt = nil
self?.forceRefreshGeneration &+= 1
self?.manualRefreshTask?.cancel()
self?.manualRefreshTask = nil
self?.manualRefreshGeneration &+= 1
self?.store.resetLoadingState()
self?.refreshLoopTask?.cancel()
self?.refreshLoopTask = nil
}
@ -110,9 +117,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
queue: .main
) { [weak self] _ in
Task { @MainActor in
self?.store.resetLoadingState()
self?.forceRefresh()
if self?.refreshLoopTask == nil { self?.startRefreshLoop() }
self?.recoverRefreshPipelineAfterInterruption(resetLoading: true)
}
}
@ -121,7 +126,9 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
object: nil,
queue: .main
) { [weak self] _ in
Task { @MainActor in self?.forceRefresh() }
Task { @MainActor in
self?.recoverRefreshPipelineAfterInterruption(resetLoading: true)
}
}
}
@ -131,10 +138,24 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
object: nil,
queue: .main
) { [weak self] _ in
Task { @MainActor in self?.forceRefresh() }
Task { @MainActor in
self?.recoverRefreshPipelineAfterInterruption(resetLoading: false)
}
}
}
private func recoverRefreshPipelineAfterInterruption(resetLoading: Bool) {
if resetLoading {
store.resetLoadingState()
} else {
_ = store.clearStaleLoadingIfNeeded()
}
if refreshLoopTask == nil {
startRefreshLoop()
}
forceRefresh()
}
private func installLaunchAgentIfNeeded() {
let fm = FileManager.default
let agentName = "com.codeburn.refresh.plist"
@ -232,6 +253,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
private func forceRefresh() {
let now = Date()
_ = clearStaleForceRefreshIfNeeded(now: now)
guard forceRefreshTask == nil else { return }
guard now.timeIntervalSince(lastRefreshTime) > 5 else { return }
lastRefreshTime = now
forceRefreshStartedAt = now
@ -241,7 +263,8 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
forceRefreshTask = Task {
async let main: Void = store.refresh(includeOptimize: false, force: true, showLoading: true)
async let today: Void = store.refreshQuietly(period: .today)
_ = await (main, today)
async let quotas: Bool = refreshLiveQuotaProgressIfDue()
_ = await (main, today, quotas)
refreshStatusButton()
await MainActor.run { [weak self] in
guard let self, self.forceRefreshGeneration == generation else { return }
@ -275,6 +298,51 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
}
fileprivate var lastSubscriptionRefreshAt: Date?
fileprivate var lastCodexRefreshAt: Date?
@discardableResult
private func refreshLiveQuotaProgressIfDue(force: Bool = false) async -> Bool {
let cadence = SubscriptionRefreshCadence.current
if !force && cadence == .manual { return false }
let now = Date()
let threshold = force ? 0 : TimeInterval(cadence.rawValue)
let shouldRefreshClaude = force || now.timeIntervalSince(lastSubscriptionRefreshAt ?? .distantPast) >= threshold
let shouldRefreshCodex = force || now.timeIntervalSince(lastCodexRefreshAt ?? .distantPast) >= threshold
guard shouldRefreshClaude || shouldRefreshCodex else { return false }
switch (shouldRefreshClaude, shouldRefreshCodex) {
case (true, true):
async let claude = store.refreshSubscriptionReportingSuccess()
async let codex = store.refreshCodexReportingSuccess()
if await claude { lastSubscriptionRefreshAt = Date() }
if await codex { lastCodexRefreshAt = Date() }
case (true, false):
if await store.refreshSubscriptionReportingSuccess() {
lastSubscriptionRefreshAt = Date()
}
case (false, true):
if await store.refreshCodexReportingSuccess() {
lastCodexRefreshAt = Date()
}
case (false, false):
break
}
return true
}
private func refreshLiveQuotaProgressForPopoverOpen() {
let now = Date()
let claudeElapsed = now.timeIntervalSince(lastSubscriptionRefreshAt ?? .distantPast)
let codexElapsed = now.timeIntervalSince(lastCodexRefreshAt ?? .distantPast)
guard claudeElapsed >= interactiveQuotaRefreshFloorSeconds ||
codexElapsed >= interactiveQuotaRefreshFloorSeconds else { return }
Task { [weak self] in
guard let self else { return }
_ = await self.refreshLiveQuotaProgressIfDue(force: true)
}
}
private func startRefreshLoop() {
refreshLoopTask?.cancel()
@ -282,10 +350,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
// Provider refreshes only run when the user has explicitly connected.
// Each refresh is a no-op until its corresponding bootstrap flag is set.
if let self {
async let claude = self.store.refreshSubscriptionReportingSuccess()
async let codex = self.store.refreshCodexReportingSuccess()
if await claude { self.lastSubscriptionRefreshAt = Date() }
if await codex { self.lastCodexRefreshAt = Date() }
await self.refreshLiveQuotaProgressIfDue(force: true)
}
while !Task.isCancelled {
guard let self else { return }
@ -311,39 +376,50 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
// (not last attempt) so an intermittent failure doesn't reset
// the timer. Each provider has its own anchor so a Codex 429
// doesn't delay a due Claude refresh.
let cadence = SubscriptionRefreshCadence.current
if cadence != .manual {
let claudeElapsed = Date().timeIntervalSince(self.lastSubscriptionRefreshAt ?? .distantPast)
if claudeElapsed >= TimeInterval(cadence.rawValue) {
let succeeded = await self.store.refreshSubscriptionReportingSuccess()
if succeeded { self.lastSubscriptionRefreshAt = Date() }
}
let codexElapsed = Date().timeIntervalSince(self.lastCodexRefreshAt ?? .distantPast)
if codexElapsed >= TimeInterval(cadence.rawValue) {
let succeeded = await self.store.refreshCodexReportingSuccess()
if succeeded { self.lastCodexRefreshAt = Date() }
}
}
await self.refreshLiveQuotaProgressIfDue()
try? await Task.sleep(nanoseconds: refreshIntervalNanos)
}
}
}
fileprivate var lastCodexRefreshAt: Date?
@MainActor
func refreshSubscriptionNow() {
Task { [weak self] in
manualRefreshTask?.cancel()
manualRefreshGeneration &+= 1
let generation = manualRefreshGeneration
forceRefreshTask?.cancel()
forceRefreshTask = nil
forceRefreshStartedAt = nil
forceRefreshGeneration &+= 1
pendingRefreshWork?.cancel()
pendingRefreshWork = nil
refreshLoopTask?.cancel()
refreshLoopTask = nil
store.resetRefreshState(clearCache: true)
lastRefreshTime = .distantPast
refreshStatusButton()
manualRefreshTask = Task { [weak self] in
guard let self else { return }
// "Refresh Now" should refresh the menubar payload AND every
// connected provider's live quota the user's intent is "make
// connected provider's live quota. The user's intent is "make
// this match reality right now."
let needsTodayTotal = self.store.selectedPeriod != .today || self.store.selectedProvider != .all
async let payload: Void = self.store.refresh(includeOptimize: false, force: true, showLoading: true)
async let claude: Bool = self.store.refreshSubscriptionReportingSuccess()
async let codex: Bool = self.store.refreshCodexReportingSuccess()
async let quotas: Bool = self.refreshLiveQuotaProgressIfDue(force: true)
if needsTodayTotal {
await self.store.refreshQuietly(period: .today)
}
_ = await payload
if await claude { self.lastSubscriptionRefreshAt = Date() }
if await codex { self.lastCodexRefreshAt = Date() }
guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return }
self.lastRefreshTime = Date()
self.refreshStatusButton()
_ = await quotas
guard self.manualRefreshGeneration == generation, !Task.isCancelled else { return }
self.manualRefreshTask = nil
if self.refreshLoopTask == nil {
self.startRefreshLoop()
}
}
}
@ -541,6 +617,7 @@ final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
window.collectionBehavior.insert(.canJoinAllSpaces)
window.makeKeyAndOrderFront(nil)
}
refreshLiveQuotaProgressForPopoverOpen()
}
}

View file

@ -46,7 +46,7 @@ final class UpdateChecker {
let (data, _) = try await URLSession.shared.data(for: request)
let release = try JSONDecoder().decode(GitHubRelease.self, from: data)
guard let asset = release.assets.first(where: {
$0.name.hasPrefix("CodeBurnMenubar-") && $0.name.hasSuffix(".zip")
$0.name.hasPrefix("CodeBurnMenubar-v") && $0.name.hasSuffix(".zip")
}) else { return }
let version = asset.name

View file

@ -345,6 +345,7 @@ extension ProviderFilter {
case .copilot: return Color(red: 0x6D/255.0, green: 0x8F/255.0, blue: 0xA6/255.0)
case .droid: return Color(red: 0x7C/255.0, green: 0x3A/255.0, blue: 0xED/255.0)
case .gemini: return Color(red: 0x44/255.0, green: 0x85/255.0, blue: 0xF4/255.0)
case .ibmBob: return Color(red: 0x0F/255.0, green: 0x62/255.0, blue: 0xFE/255.0)
case .kiloCode: return Color(red: 0x00/255.0, green: 0x96/255.0, blue: 0x88/255.0)
case .kiro: return Color(red: 0x4A/255.0, green: 0x9E/255.0, blue: 0xC4/255.0)
case .openclaw: return Color(red: 0xDA/255.0, green: 0x70/255.0, blue: 0x56/255.0)

View file

@ -21,6 +21,7 @@
"claude-code",
"cursor",
"codex",
"ibm-bob",
"opencode",
"pi",
"ai-coding",

View file

@ -5,24 +5,19 @@ import { homedir } from 'os'
import { join } from 'path'
import type { DateRange, ProjectSummary } from './types.js'
// Bumped to 5 alongside the Cursor per-project breakdown: prior daily
// entries recorded every Cursor session under a single 'cursor' project
// label. After the upgrade, the breakdown produces per-workspace project
// labels for new days; without invalidation the dashboard would show
// 'cursor' for historical days and `-Users-you-myproject` for new ones
// in the same window, producing a confusing mixed projection.
export const DAILY_CACHE_VERSION = 5
// MIN_SUPPORTED_VERSION bumped to 5 too. The migration path
// Bumped to 6 alongside the Claude 1-hour cache-write pricing fix: prior
// daily entries priced all Claude cache writes at the 5-minute rate, so
// cached historical cost/model/provider/category totals would remain
// under-reported unless discarded and recomputed from raw sessions.
export const DAILY_CACHE_VERSION = 6
// MIN_SUPPORTED_VERSION bumped to 6 too. The migration path
// (isMigratableCache + migrateDays) only fills in missing default fields;
// it does NOT recompute the providers / categories / models rollups from
// session data, because those raw sessions are not stored in the cache.
// So a migrated v2/v3/v4 cache would carry forward stale provider totals
// (single 'cursor' bucket instead of per-workspace) for the full cache
// retention window. Setting the floor to 5 forces those older caches to
// be discarded and recomputed cleanly. Confirmed by live test:
// menubar-json --period all reported cursor=$3.78 against a migrated
// v4 cache but $4.08 (correct) after the cache was discarded.
const MIN_SUPPORTED_VERSION = 5
// So a migrated v5 cache would carry forward stale pricing totals for
// the full cache retention window. Setting the floor to 6 forces older
// caches to be discarded and recomputed cleanly.
const MIN_SUPPORTED_VERSION = 6
const DAILY_CACHE_FILENAME = 'daily-cache.json'
export type DailyEntry = {

View file

@ -52,6 +52,7 @@ const PROVIDER_COLORS: Record<string, string> = {
claude: '#FF8C42',
codex: '#5BF5A0',
cursor: '#00B4D8',
'ibm-bob': '#0F62FE',
opencode: '#A78BFA',
pi: '#F472B6',
all: '#FF8C42',
@ -513,6 +514,7 @@ const PROVIDER_DISPLAY_NAMES: Record<string, string> = {
claude: 'Claude',
codex: 'Codex',
cursor: 'Cursor',
'ibm-bob': 'IBM Bob',
opencode: 'OpenCode',
pi: 'Pi',
}

View file

@ -11,17 +11,28 @@ import { Readable } from 'node:stream'
/// newest tagged release; we filter its assets list for our zipped .app bundle.
const RELEASE_API = 'https://api.github.com/repos/getagentseal/codeburn/releases/latest'
const APP_BUNDLE_NAME = 'CodeBurnMenubar.app'
const ASSET_PATTERN = /^CodeBurnMenubar-.*\.zip$/
const CHECKSUM_PATTERN = /^CodeBurnMenubar-.*\.zip\.sha256$/
const VERSIONED_ASSET_PATTERN = /^CodeBurnMenubar-v.+\.zip$/
const APP_PROCESS_NAME = 'CodeBurnMenubar'
const SUPPORTED_OS = 'darwin'
const MIN_MACOS_MAJOR = 14
export type InstallResult = { installedPath: string; launched: boolean }
type ReleaseAsset = { name: string; browser_download_url: string }
type ReleaseResponse = { tag_name: string; assets: ReleaseAsset[] }
type ResolvedAssets = { zip: ReleaseAsset; checksum: ReleaseAsset | null }
export type ReleaseAsset = { name: string; browser_download_url: string }
export type ReleaseResponse = { tag_name: string; assets: ReleaseAsset[] }
export type ResolvedAssets = { zip: ReleaseAsset; checksum: ReleaseAsset | null }
export function resolveMenubarReleaseAssets(release: ReleaseResponse): ResolvedAssets {
const zip = release.assets.find(a => VERSIONED_ASSET_PATTERN.test(a.name))
if (!zip) {
throw new Error(
`No ${APP_BUNDLE_NAME} versioned zip found in release ${release.tag_name}. ` +
`Check https://github.com/getagentseal/codeburn/releases.`
)
}
const checksum = release.assets.find(a => a.name === `${zip.name}.sha256`) ?? null
return { zip, checksum }
}
function userApplicationsDir(): string {
return join(homedir(), 'Applications')
@ -71,15 +82,7 @@ async function fetchLatestReleaseAssets(): Promise<ResolvedAssets> {
throw new Error(`GitHub release lookup failed: HTTP ${response.status}`)
}
const body = await response.json() as ReleaseResponse
const zip = body.assets.find(a => ASSET_PATTERN.test(a.name))
if (!zip) {
throw new Error(
`No ${APP_BUNDLE_NAME} zip found in release ${body.tag_name}. ` +
`Check https://github.com/getagentseal/codeburn/releases.`
)
}
const checksum = body.assets.find(a => CHECKSUM_PATTERN.test(a.name)) ?? null
return { zip, checksum }
return resolveMenubarReleaseAssets(body)
}
async function verifyChecksum(archivePath: string, checksumUrl: string): Promise<void> {
@ -179,7 +182,7 @@ export async function installMenubarApp(options: { force?: boolean } = {}): Prom
}
console.log('Unpacking...')
await runCommand('/usr/bin/unzip', ['-q', archivePath, '-d', stagingDir])
await runCommand('/usr/bin/ditto', ['-x', '-k', archivePath, stagingDir])
const unpackedApp = join(stagingDir, APP_BUNDLE_NAME)
if (!(await exists(unpackedApp))) {

View file

@ -25,6 +25,7 @@ type SnapshotEntry = [number, number, number | null, number | null]
const LITELLM_URL = 'https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json'
const CACHE_TTL_MS = 24 * 60 * 60 * 1000
const WEB_SEARCH_COST = 0.01
const ONE_HOUR_CACHE_WRITE_MULTIPLIER_FROM_FIVE_MINUTE_RATE = 1.6
const FAST_MULTIPLIERS: Record<string, number> = {
'claude-opus-4-7': 6,
@ -166,6 +167,7 @@ const BUILTIN_ALIASES: Record<string, string> = {
'copilot-auto': 'claude-sonnet-4-5',
'copilot-openai-auto': 'gpt-5.3-codex',
'copilot-anthropic-auto': 'claude-sonnet-4-5',
'ibm-bob-auto': 'claude-sonnet-4-5',
'kiro-auto': 'claude-sonnet-4-5',
'cline-auto': 'claude-sonnet-4-5',
'openclaw-auto': 'claude-sonnet-4-5',
@ -310,6 +312,7 @@ export function calculateCost(
cacheReadTokens: number,
webSearchRequests: number,
speed: 'standard' | 'fast' = 'standard',
oneHourCacheCreationTokens = 0,
): number {
const costs = getModelCosts(model)
if (!costs) {
@ -335,11 +338,15 @@ export function calculateCost(
// from real spend in aggregate totals. NaN is also handled here; the
// arithmetic below short-circuits to 0 when any operand is non-finite.
const safe = (n: number) => (Number.isFinite(n) && n > 0 ? n : 0)
const safeOneHourCacheCreation = safe(oneHourCacheCreationTokens)
const safeCacheCreation = Math.max(safe(cacheCreationTokens), safeOneHourCacheCreation)
const safeFiveMinuteCacheCreation = Math.max(0, safeCacheCreation - safeOneHourCacheCreation)
return multiplier * (
safe(inputTokens) * costs.inputCostPerToken +
safe(outputTokens) * costs.outputCostPerToken +
safe(cacheCreationTokens) * costs.cacheWriteCostPerToken +
safeFiveMinuteCacheCreation * costs.cacheWriteCostPerToken +
safeOneHourCacheCreation * costs.cacheWriteCostPerToken * ONE_HOUR_CACHE_WRITE_MULTIPLIER_FROM_FIVE_MINUTE_RATE +
safe(cacheReadTokens) * costs.cacheReadCostPerToken +
safe(webSearchRequests) * costs.webSearchCostPerRequest
)
@ -351,6 +358,7 @@ const autoModelNames: Record<string, string> = {
'copilot-auto': 'Copilot (auto)',
'copilot-openai-auto': 'Copilot (OpenAI)',
'copilot-anthropic-auto': 'Copilot (Anthropic)',
'ibm-bob-auto': 'IBM Bob (auto)',
'kiro-auto': 'Kiro (auto)',
'cline-auto': 'Cline (auto)',
'openclaw-auto': 'OpenClaw (auto)',

View file

@ -92,16 +92,39 @@ function getMessageId(entry: JournalEntry): string | null {
return msg?.id ?? null
}
// Clamps an optional numeric usage field to a safe non-negative count:
// undefined, NaN, ±Infinity, zero, and negatives all map to 0.
function positiveNumber(n: number | undefined): number {
  if (n === undefined || !Number.isFinite(n)) return 0
  return n > 0 ? n : 0
}
// Normalizes Claude cache-write accounting. Newer Claude payloads split
// usage.cache_creation into ephemeral 5-minute and 1-hour buckets alongside
// the legacy cache_creation_input_tokens total; older payloads carry only
// the legacy field.
function extractClaudeCacheCreation(usage: AssistantMessageContent['usage']): { totalTokens: number; oneHourTokens: number } {
  const legacyTotal = positiveNumber(usage.cache_creation_input_tokens)
  const split = usage.cache_creation
  const fiveMinute = positiveNumber(split?.ephemeral_5m_input_tokens)
  const oneHour = positiveNumber(split?.ephemeral_1h_input_tokens)
  const splitTotal = fiveMinute + oneHour
  // No split fields present: every write is billed as a legacy (5-minute) write.
  if (splitTotal === 0) {
    return { totalTokens: legacyTotal, oneHourTokens: 0 }
  }
  // Valid Claude usage reports the legacy total and split total as equal.
  // On a malformed partial split keep the larger figure so no tokens are
  // dropped, and never report more 1-hour tokens than the total.
  const totalTokens = legacyTotal > splitTotal ? legacyTotal : splitTotal
  return {
    totalTokens,
    oneHourTokens: oneHour > totalTokens ? totalTokens : oneHour,
  }
}
function parseApiCall(entry: JournalEntry): ParsedApiCall | null {
if (entry.type !== 'assistant') return null
const msg = entry.message as AssistantMessageContent | undefined
if (!msg?.usage || !msg?.model) return null
const usage = msg.usage
const cacheCreation = extractClaudeCacheCreation(usage)
const tokens: TokenUsage = {
inputTokens: usage.input_tokens ?? 0,
outputTokens: usage.output_tokens ?? 0,
cacheCreationInputTokens: usage.cache_creation_input_tokens ?? 0,
cacheCreationInputTokens: cacheCreation.totalTokens,
cacheReadInputTokens: usage.cache_read_input_tokens ?? 0,
cachedInputTokens: 0,
reasoningTokens: 0,
@ -118,6 +141,7 @@ function parseApiCall(entry: JournalEntry): ParsedApiCall | null {
tokens.cacheReadInputTokens,
tokens.webSearchRequests,
usage.speed ?? 'standard',
cacheCreation.oneHourTokens,
)
const bashCmds = extractBashCommandsFromContent(msg.content ?? [])
@ -550,7 +574,7 @@ async function parseProviderSources(
const provider = await getProvider(providerName)
if (!provider) return []
const sessionMap = new Map<string, { project: string; turns: ClassifiedTurn[] }>()
const sessionMap = new Map<string, { project: string; projectPath?: string; turns: ClassifiedTurn[] }>()
try {
for (const source of sources) {
@ -574,13 +598,15 @@ async function parseProviderSources(
const turn = providerCallToTurn(call)
const classified = classifyTurn(turn)
const key = `${providerName}:${call.sessionId}:${source.project}`
const project = call.project ?? source.project
const key = `${providerName}:${call.sessionId}:${project}`
const existing = sessionMap.get(key)
if (existing) {
existing.turns.push(classified)
if (!existing.projectPath && call.projectPath) existing.projectPath = call.projectPath
} else {
sessionMap.set(key, { project: source.project, turns: [classified] })
sessionMap.set(key, { project, projectPath: call.projectPath, turns: [classified] })
}
}
}
@ -592,22 +618,26 @@ async function parseProviderSources(
}
}
const projectMap = new Map<string, SessionSummary[]>()
for (const [key, { project, turns }] of sessionMap) {
const projectMap = new Map<string, { projectPath?: string; sessions: SessionSummary[] }>()
for (const [key, { project, projectPath, turns }] of sessionMap) {
const sessionId = key.split(':')[1] ?? key
const session = buildSessionSummary(sessionId, project, turns)
if (session.apiCalls > 0) {
const existing = projectMap.get(project) ?? []
existing.push(session)
projectMap.set(project, existing)
const existing = projectMap.get(project)
if (existing) {
existing.sessions.push(session)
if (!existing.projectPath && projectPath) existing.projectPath = projectPath
} else {
projectMap.set(project, { projectPath, sessions: [session] })
}
}
}
const projects: ProjectSummary[] = []
for (const [dirName, sessions] of projectMap) {
for (const [dirName, { projectPath, sessions }] of projectMap) {
projects.push({
project: dirName,
projectPath: unsanitizePath(dirName),
projectPath: projectPath ?? unsanitizePath(dirName),
sessions,
totalCostUSD: sessions.reduce((s, sess) => s + sess.totalCostUSD, 0),
totalApiCalls: sessions.reduce((s, sess) => s + sess.apiCalls, 0),

59
src/providers/ibm-bob.ts Normal file
View file

@ -0,0 +1,59 @@
import { join } from 'path'
import { homedir } from 'os'
import { getShortModelName } from '../models.js'
import { discoverClineTasksInBaseDirs, createClineParser } from './vscode-cline-parser.js'
import type { Provider, SessionSource, SessionParser } from './types.js'
const PROVIDER_NAME = 'ibm-bob'
const DISPLAY_NAME = 'IBM Bob'
const EXTENSION_ID = 'ibm.bob-code'
const FALLBACK_MODEL = 'ibm-bob-auto'

// Candidate globalStorage roots for the IBM Bob extension. Both the
// "IBM Bob" and "Bob-IDE" product directories are probed; the original
// listing carried both names, presumably because the IDE has shipped
// under both (TODO confirm against current installer layout).
export function getIBMBobGlobalStorageDirs(): string[] {
  const home = homedir()
  let roots: string[]
  if (process.platform === 'darwin') {
    const support = join(home, 'Library', 'Application Support')
    roots = [join(support, 'IBM Bob'), join(support, 'Bob-IDE')]
  } else if (process.platform === 'win32') {
    // %APPDATA% with the conventional Roaming fallback.
    const appData = process.env['APPDATA'] ?? join(home, 'AppData', 'Roaming')
    roots = [join(appData, 'IBM Bob'), join(appData, 'Bob-IDE')]
  } else {
    // Linux and everything else: XDG config home.
    const configHome = process.env['XDG_CONFIG_HOME'] ?? join(home, '.config')
    roots = [join(configHome, 'IBM Bob'), join(configHome, 'Bob-IDE')]
  }
  return roots.map(root => join(root, 'User', 'globalStorage', EXTENSION_ID))
}
// Builds the IBM Bob provider. `overrideDir` replaces the platform
// globalStorage lookup (used by tests to point at a fixture directory).
export function createIBMBobProvider(overrideDir?: string): Provider {
  const taskBaseDirs = (): string[] =>
    overrideDir ? [overrideDir] : getIBMBobGlobalStorageDirs()
  return {
    name: PROVIDER_NAME,
    displayName: DISPLAY_NAME,
    // Model labels reuse the shared short-name table.
    modelDisplayName: (model: string): string => getShortModelName(model),
    // Tool identifiers are shown verbatim.
    toolDisplayName: (rawTool: string): string => rawTool,
    async discoverSessions(): Promise<SessionSource[]> {
      return discoverClineTasksInBaseDirs(taskBaseDirs(), PROVIDER_NAME, DISPLAY_NAME)
    },
    // IBM Bob stores task history in the Cline on-disk format, so the
    // shared Cline parser is reused with Bob's own fallback model tag.
    createSessionParser: (source: SessionSource, seenKeys: Set<string>): SessionParser =>
      createClineParser(source, seenKeys, PROVIDER_NAME, FALLBACK_MODEL),
  }
}

// Default instance registered in the provider index.
export const ibmBob = createIBMBobProvider()

View file

@ -3,6 +3,7 @@ import { codex } from './codex.js'
import { copilot } from './copilot.js'
import { droid } from './droid.js'
import { gemini } from './gemini.js'
import { ibmBob } from './ibm-bob.js'
import { kiloCode } from './kilo-code.js'
import { kiro } from './kiro.js'
import { openclaw } from './openclaw.js'
@ -101,7 +102,7 @@ async function loadCrush(): Promise<Provider | null> {
}
}
const coreProviders: Provider[] = [claude, codex, copilot, droid, gemini, kiloCode, kiro, openclaw, pi, omp, qwen, rooCode]
const coreProviders: Provider[] = [claude, codex, copilot, droid, gemini, ibmBob, kiloCode, kiro, openclaw, pi, omp, qwen, rooCode]
export async function getAllProviders(): Promise<Provider[]> {
const [ag, gs, cursor, opencode, cursorAgent, crush] = await Promise.all([loadAntigravity(), loadGoose(), loadCursor(), loadOpenCode(), loadCursorAgent(), loadCrush()])

View file

@ -27,6 +27,8 @@ export type ParsedProviderCall = {
deduplicationKey: string
userMessage: string
sessionId: string
project?: string
projectPath?: string
}
export type Provider = {

View file

@ -24,6 +24,23 @@ export function getVSCodeGlobalStoragePath(extensionId: string): string {
// Discovers Cline-format task sessions for a single VS Code extension.
// `overrideDir` (used by tests) bypasses the platform globalStorage lookup.
export async function discoverClineTasks(extensionId: string, providerName: string, displayName: string, overrideDir?: string): Promise<SessionSource[]> {
  const root = overrideDir ?? getVSCodeGlobalStoragePath(extensionId)
  return discoverClineTasksInBaseDirs([root], providerName, displayName)
}
// Discovers Cline-format tasks across several candidate base directories,
// de-duplicating by task path (the same task dir may be reachable from more
// than one candidate root). First occurrence wins; order is preserved.
export async function discoverClineTasksInBaseDirs(baseDirs: string[], providerName: string, displayName: string): Promise<SessionSource[]> {
  const byPath = new Map<string, SessionSource>()
  for (const baseDir of baseDirs) {
    const discovered = await discoverClineTasksInBaseDir(baseDir, providerName, displayName)
    for (const source of discovered) {
      if (!byPath.has(source.path)) byPath.set(source.path, source)
    }
  }
  return [...byPath.values()]
}
async function discoverClineTasksInBaseDir(baseDir: string, providerName: string, displayName: string): Promise<SessionSource[]> {
const tasksDir = join(baseDir, 'tasks')
const sources: SessionSource[] = []
@ -50,28 +67,43 @@ export async function discoverClineTasks(extensionId: string, providerName: stri
}
const MODEL_TAG_RE = /<model>([^<]+)<\/model>/
const WORKSPACE_DIR_RE = /Current Workspace Directory \(([^)]+)\)/
function extractModelFromHistory(taskDir: string): Promise<string> {
type HistoryMeta = { model: string; workspace: string | null }
function extractHistoryMeta(taskDir: string, fallbackModel: string): Promise<HistoryMeta> {
return readFile(join(taskDir, 'api_conversation_history.json'), 'utf-8')
.then(raw => {
const msgs = JSON.parse(raw) as Array<{ role?: string; content?: Array<{ text?: string }> }>
if (!Array.isArray(msgs)) return 'cline-auto'
if (!Array.isArray(msgs)) return { model: fallbackModel, workspace: null }
let model: string | null = null
let workspace: string | null = null
for (const msg of msgs) {
if (msg.role !== 'user' || !Array.isArray(msg.content)) continue
for (const block of msg.content) {
const match = typeof block.text === 'string' && MODEL_TAG_RE.exec(block.text)
if (match) {
const raw = match[1]
return raw.includes('/') ? raw.split('/').pop()! : raw
if (typeof block.text !== 'string') continue
if (!model) {
const mm = MODEL_TAG_RE.exec(block.text)
if (mm) model = mm[1].includes('/') ? mm[1].split('/').pop()! : mm[1]
}
if (!workspace) {
const wm = WORKSPACE_DIR_RE.exec(block.text)
if (wm) workspace = wm[1]
}
if (model && workspace) break
}
if (model && workspace) break
}
return 'cline-auto'
return { model: model ?? fallbackModel, workspace }
})
.catch(() => 'cline-auto')
.catch(() => ({ model: fallbackModel, workspace: null }))
}
export function createClineParser(source: SessionSource, seenKeys: Set<string>, providerName: string): SessionParser {
// Turns a workspace directory path into a short project label (its final
// path segment), falling back to the raw path when basename() is empty.
function workspaceToProject(workspace: string): string {
  const leaf = basename(workspace)
  return leaf !== '' ? leaf : workspace
}
export function createClineParser(source: SessionSource, seenKeys: Set<string>, providerName: string, fallbackModel = 'cline-auto'): SessionParser {
return {
async *parse(): AsyncGenerator<ParsedProviderCall> {
const taskDir = source.path
@ -93,7 +125,10 @@ export function createClineParser(source: SessionSource, seenKeys: Set<string>,
if (!Array.isArray(uiMessages)) return
const model = await extractModelFromHistory(taskDir)
const meta = await extractHistoryMeta(taskDir, fallbackModel)
const model = meta.model
const project = meta.workspace ? workspaceToProject(meta.workspace) : undefined
const projectPath = meta.workspace ?? undefined
let userMessage = ''
for (const msg of uiMessages) {
@ -156,6 +191,8 @@ export function createClineParser(source: SessionSource, seenKeys: Set<string>,
deduplicationKey: dedupKey,
userMessage: index === 0 ? userMessage : '',
sessionId: taskId,
project,
projectPath,
}
}
},

View file

@ -25,6 +25,10 @@ export type ApiUsage = {
input_tokens: number
output_tokens: number
cache_creation_input_tokens?: number
cache_creation?: {
ephemeral_5m_input_tokens?: number
ephemeral_1h_input_tokens?: number
}
cache_read_input_tokens?: number
server_tool_use?: {
web_search_requests?: number

View file

@ -104,6 +104,36 @@ describe('loadDailyCache', () => {
expect(existsSync(join(TMP_CACHE_ROOT, 'daily-cache.json.v2.bak'))).toBe(true)
})
// Version 5 predates the 1-hour cache-write pricing fix, and cached days
// cannot be repriced without the raw sessions, so loading must discard the
// cache (preserving it as a .v5.bak backup) instead of migrating it.
it('discards a v5 cache because cached Claude costs predate 1-hour cache pricing', async () => {
  const saved = {
    version: 5,
    lastComputedDate: '2026-05-01',
    days: [{
      date: '2026-05-01',
      // 60,120 cache-write tokens priced at the legacy 5-minute rate
      // ($6.25/MTok) = $0.37575 — the stale figure that must be dropped.
      cost: 0.37575,
      calls: 1,
      sessions: 1,
      inputTokens: 0,
      outputTokens: 0,
      cacheReadTokens: 0,
      cacheWriteTokens: 60_120,
      editTurns: 0,
      oneShotTurns: 0,
      models: { 'Opus 4.7': { calls: 1, cost: 0.37575, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 60_120 } },
      categories: {},
      providers: { claude: { calls: 1, cost: 0.37575 } },
    }],
  }
  const { writeFile, mkdir } = await import('fs/promises')
  await mkdir(TMP_CACHE_ROOT, { recursive: true })
  await writeFile(join(TMP_CACHE_ROOT, 'daily-cache.json'), JSON.stringify(saved), 'utf-8')
  const cache = await loadDailyCache()
  // Discard yields a fresh empty cache at the current version, with the
  // old file kept next to it as a versioned backup.
  expect(cache.version).toBe(DAILY_CACHE_VERSION)
  expect(cache.days).toEqual([])
  expect(cache.lastComputedDate).toBeNull()
  expect(existsSync(join(TMP_CACHE_ROOT, 'daily-cache.json.v5.bak'))).toBe(true)
})
it('round-trips a valid cache through save and load', async () => {
const saved: DailyCache = {
version: DAILY_CACHE_VERSION,

View file

@ -0,0 +1,37 @@
import { describe, expect, it } from 'vitest'
import { resolveMenubarReleaseAssets, type ReleaseResponse } from '../src/menubar-installer.js'
// Minimal release-asset stub whose download URL is derived from its name.
function asset(name: string) {
  const browser_download_url = `https://example.test/${name}`
  return { name, browser_download_url }
}
describe('resolveMenubarReleaseAssets', () => {
  // A release can carry both unversioned "-dev" builds and the real
  // "-vX.Y.Z" build; only the versioned zip and its matching .sha256
  // may be selected.
  it('ignores dev zips and pairs the checksum with the versioned zip', () => {
    const release: ReleaseResponse = {
      tag_name: 'mac-v0.9.8',
      assets: [
        asset('CodeBurnMenubar-dev.zip'),
        asset('CodeBurnMenubar-dev.zip.sha256'),
        asset('CodeBurnMenubar-v0.9.8.zip'),
        asset('CodeBurnMenubar-v0.9.8.zip.sha256'),
      ],
    }
    const resolved = resolveMenubarReleaseAssets(release)
    expect(resolved.zip.name).toBe('CodeBurnMenubar-v0.9.8.zip')
    expect(resolved.checksum?.name).toBe('CodeBurnMenubar-v0.9.8.zip.sha256')
  })

  // With no versioned zip present the resolver must throw rather than
  // silently fall back to a dev artifact.
  it('fails when a release only contains dev assets', () => {
    const release: ReleaseResponse = {
      tag_name: 'mac-v0.9.8',
      assets: [
        asset('CodeBurnMenubar-dev.zip'),
        asset('CodeBurnMenubar-dev.zip.sha256'),
      ],
    }
    expect(() => resolveMenubarReleaseAssets(release)).toThrow(/versioned zip/)
  })
})

View file

@ -158,6 +158,18 @@ describe('calculateCost - OMP names produce non-zero cost', () => {
})
})
describe('calculateCost - Claude cache write durations', () => {
  it('prices 1-hour cache writes at 1.6x the 5-minute cache write rate', () => {
    // 1M tokens written to the 5-minute cache on claude-opus-4-7.
    const fiveMinute = calculateCost('claude-opus-4-7', 0, 0, 1_000_000, 0, 0)
    // Same 1M cache-write tokens, all flagged as 1-hour writes.
    const oneHour = calculateCost('claude-opus-4-7', 0, 0, 1_000_000, 0, 0, 'standard', 1_000_000)
    // 100k total cache writes of which 60k are 1-hour (40k stay 5-minute).
    const mixed = calculateCost('claude-opus-4-7', 0, 0, 100_000, 0, 0, 'standard', 60_000)
    // $6.25 per 1M five-minute writes; 1.6x gives $10 per 1M one-hour
    // writes; mixed = 40k * 6.25/M + 60k * 10/M = 0.25 + 0.60 = 0.85.
    expect(fiveMinute).toBeCloseTo(6.25, 6)
    expect(oneHour).toBeCloseTo(10, 6)
    expect(mixed).toBeCloseTo(0.85, 6)
  })
})
describe('existing model names still resolve', () => {
it('canonical claude-opus-4-6', () => {
expect(getModelCosts('claude-opus-4-6')).not.toBeNull()

View file

@ -31,7 +31,14 @@ function dayRange(day: string): DateRange {
}
}
async function writeClaudeSession(projectSlug: string, sessionId: string, cwd: string, timestamp: string): Promise<void> {
async function writeClaudeSession(
projectSlug: string,
sessionId: string,
cwd: string,
timestamp: string,
usage: Record<string, unknown> = { input_tokens: 100, output_tokens: 50 },
model = 'claude-sonnet-4-5',
): Promise<void> {
const projectDir = join(tmpDir, 'projects', projectSlug)
await mkdir(projectDir, { recursive: true })
const filePath = join(projectDir, `${sessionId}.jsonl`)
@ -44,12 +51,9 @@ async function writeClaudeSession(projectSlug: string, sessionId: string, cwd: s
id: `msg-${sessionId}`,
type: 'message',
role: 'assistant',
model: 'claude-sonnet-4-5',
model,
content: [],
usage: {
input_tokens: 100,
output_tokens: 50,
},
usage,
},
}) + '\n')
@ -158,3 +162,51 @@ describe('Claude cwd project paths', () => {
expect(projects[0]!.projectPath).toBe('fallback/slug')
})
})
describe('Claude cache creation pricing', () => {
  it('prices 1-hour cache writes from usage.cache_creation at the 2x input rate', async () => {
    // All 60,120 cache-write tokens are flagged as 1-hour via the
    // usage.cache_creation split; expected cost 60,120 * $10/MTok = $0.6012.
    await writeClaudeSession(
      'cache-pricing',
      'one-hour-cache',
      '/tmp/cache-pricing',
      '2099-05-05T10:00:00.000Z',
      {
        input_tokens: 0,
        output_tokens: 0,
        cache_creation_input_tokens: 60_120,
        cache_creation: {
          ephemeral_5m_input_tokens: 0,
          ephemeral_1h_input_tokens: 60_120,
        },
      },
      'claude-opus-4-7',
    )
    const projects = await parseAllSessions(dayRange('2099-05-05'), 'claude')
    expect(projects).toHaveLength(1)
    expect(projects[0]!.sessions[0]!.totalCacheWriteTokens).toBe(60_120)
    expect(projects[0]!.totalCostUSD).toBeCloseTo(0.6012, 6)
  })

  it('falls back to the legacy 5-minute cache write rate when split fields are absent', async () => {
    // Only the legacy cache_creation_input_tokens field is present, so all
    // 60,120 tokens bill at the 5-minute rate: 60,120 * $6.25/MTok = $0.37575.
    await writeClaudeSession(
      'legacy-cache-pricing',
      'legacy-cache',
      '/tmp/legacy-cache-pricing',
      '2099-05-06T10:00:00.000Z',
      {
        input_tokens: 0,
        output_tokens: 0,
        cache_creation_input_tokens: 60_120,
      },
      'claude-opus-4-7',
    )
    const projects = await parseAllSessions(dayRange('2099-05-06'), 'claude')
    expect(projects).toHaveLength(1)
    expect(projects[0]!.sessions[0]!.totalCacheWriteTokens).toBe(60_120)
    expect(projects[0]!.totalCostUSD).toBeCloseTo(0.37575, 6)
  })
})

View file

@ -3,7 +3,7 @@ import { providers, getAllProviders } from '../src/providers/index.js'
describe('provider registry', () => {
it('has core providers registered synchronously', () => {
expect(providers.map(p => p.name)).toEqual(['claude', 'codex', 'copilot', 'droid', 'gemini', 'kilo-code', 'kiro', 'openclaw', 'pi', 'omp', 'qwen', 'roo-code'])
expect(providers.map(p => p.name)).toEqual(['claude', 'codex', 'copilot', 'droid', 'gemini', 'ibm-bob', 'kilo-code', 'kiro', 'openclaw', 'pi', 'omp', 'qwen', 'roo-code'])
})
it('includes sqlite providers after async load', async () => {

View file

@ -0,0 +1,164 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
import { mkdtemp, mkdir, writeFile, rm } from 'fs/promises'
import { join } from 'path'
import { tmpdir } from 'os'
import { ibmBob, createIBMBobProvider } from '../../src/providers/ibm-bob.js'
import type { ParsedProviderCall } from '../../src/providers/types.js'
let tmpDir: string
// Builds a serialized ui_messages.json payload: an optional user_feedback
// entry followed by one api_req_started entry whose `text` is the JSON
// token/cost record the Cline-family parser consumes.
function makeUiMessages(opts: {
  tokensIn?: number
  tokensOut?: number
  cacheReads?: number
  cacheWrites?: number
  cost?: number
  userMessage?: string
  ts?: number
}): string {
  const entries: unknown[] = []
  if (opts.userMessage) {
    entries.push({ type: 'say', say: 'user_feedback', text: opts.userMessage, ts: 1_700_000_000_000 })
  }
  const usageRecord: Record<string, unknown> = {
    tokensIn: opts.tokensIn ?? 100,
    tokensOut: opts.tokensOut ?? 50,
    cacheReads: opts.cacheReads ?? 0,
    cacheWrites: opts.cacheWrites ?? 0,
    // `cost` is attached only when the caller provides one, mimicking
    // tasks where the provider did not report a price.
    ...(opts.cost !== undefined ? { cost: opts.cost } : {}),
  }
  entries.push({
    type: 'say',
    say: 'api_req_started',
    text: JSON.stringify(usageRecord),
    ts: opts.ts ?? 1_700_000_001_000,
  })
  return JSON.stringify(entries)
}
// Builds a serialized api_conversation_history.json with one user turn
// (optionally carrying a <model> tag inside environment details) and one
// assistant turn.
function makeApiHistory(model?: string): string {
  const tag = model ? `<model>${model}</model>` : ''
  const userTurn = { role: 'user', content: [{ type: 'text', text: `hello\n<environment_details>\n${tag}\n</environment_details>` }] }
  const assistantTurn = { role: 'assistant', content: [{ type: 'text', text: 'response' }] }
  return JSON.stringify([userTurn, assistantTurn])
}
describe('ibm-bob provider - discovery and parsing', () => {
  beforeEach(async () => {
    tmpDir = await mkdtemp(join(tmpdir(), 'ibm-bob-test-'))
  })

  afterEach(async () => {
    await rm(tmpDir, { recursive: true, force: true })
  })

  // Discovery keys on the presence of ui_messages.json under tasks/<id>/.
  it('discovers IBM Bob task directories with ui_messages.json', async () => {
    const task1 = join(tmpDir, 'tasks', 'task-a')
    const task2 = join(tmpDir, 'tasks', 'task-b')
    await mkdir(task1, { recursive: true })
    await mkdir(task2, { recursive: true })
    await writeFile(join(task1, 'ui_messages.json'), '[]')
    await writeFile(join(task2, 'ui_messages.json'), '[]')
    const provider = createIBMBobProvider(tmpDir)
    const sessions = await provider.discoverSessions()
    expect(sessions).toHaveLength(2)
    expect(sessions.every(s => s.provider === 'ibm-bob')).toBe(true)
    expect(sessions.every(s => s.project === 'IBM Bob')).toBe(true)
  })

  // A task dir with only the API history (no UI messages) is not a session.
  it('skips tasks without ui_messages.json', async () => {
    const task = join(tmpDir, 'tasks', 'task-no-ui')
    await mkdir(task, { recursive: true })
    await writeFile(join(task, 'api_conversation_history.json'), '[]')
    const provider = createIBMBobProvider(tmpDir)
    const sessions = await provider.discoverSessions()
    expect(sessions).toHaveLength(0)
  })

  // Token counts and the provider-reported cost come from the
  // api_req_started record; the model tag (provider prefix stripped)
  // comes from the API history.
  it('parses token usage and provider cost from Bob ui messages', async () => {
    const taskDir = join(tmpDir, 'tasks', 'task-001')
    await mkdir(taskDir, { recursive: true })
    await writeFile(join(taskDir, 'ui_messages.json'), makeUiMessages({
      tokensIn: 250,
      tokensOut: 125,
      cacheReads: 60,
      cacheWrites: 30,
      cost: 0.08,
      userMessage: 'modernize this class',
    }))
    await writeFile(join(taskDir, 'api_conversation_history.json'), makeApiHistory('anthropic/claude-sonnet-4-6'))
    const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' }
    const calls: ParsedProviderCall[] = []
    for await (const call of ibmBob.createSessionParser(source, new Set()).parse()) calls.push(call)
    expect(calls).toHaveLength(1)
    expect(calls[0]!).toMatchObject({
      provider: 'ibm-bob',
      model: 'claude-sonnet-4-6',
      inputTokens: 250,
      outputTokens: 125,
      cacheReadInputTokens: 60,
      cacheCreationInputTokens: 30,
      costUSD: 0.08,
      userMessage: 'modernize this class',
      sessionId: 'task-001',
    })
    expect(calls[0]!.deduplicationKey).toBe('ibm-bob:task-001:0')
  })

  // No <model> tag in the history: the ibm-bob-auto fallback still prices
  // the call (cost must be non-zero).
  it('falls back to IBM Bob auto model when history has no model tag', async () => {
    const taskDir = join(tmpDir, 'tasks', 'task-002')
    await mkdir(taskDir, { recursive: true })
    await writeFile(join(taskDir, 'ui_messages.json'), makeUiMessages({ tokensIn: 100, tokensOut: 50 }))
    await writeFile(join(taskDir, 'api_conversation_history.json'), makeApiHistory())
    const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' }
    const calls: ParsedProviderCall[] = []
    for await (const call of ibmBob.createSessionParser(source, new Set()).parse()) calls.push(call)
    expect(calls).toHaveLength(1)
    expect(calls[0]!.model).toBe('ibm-bob-auto')
    expect(calls[0]!.costUSD).toBeGreaterThan(0)
  })

  // A shared seenKeys set must suppress calls already yielded by an
  // earlier parse of the same task.
  it('deduplicates across parser runs', async () => {
    const taskDir = join(tmpDir, 'tasks', 'task-003')
    await mkdir(taskDir, { recursive: true })
    await writeFile(join(taskDir, 'ui_messages.json'), makeUiMessages({ tokensIn: 100, tokensOut: 50 }))
    const source = { path: taskDir, project: 'IBM Bob', provider: 'ibm-bob' }
    const seenKeys = new Set<string>()
    const calls1: ParsedProviderCall[] = []
    for await (const call of ibmBob.createSessionParser(source, seenKeys).parse()) calls1.push(call)
    const calls2: ParsedProviderCall[] = []
    for await (const call of ibmBob.createSessionParser(source, seenKeys).parse()) calls2.push(call)
    expect(calls1).toHaveLength(1)
    expect(calls2).toHaveLength(0)
  })
})
describe('ibm-bob provider - metadata', () => {
  it('has correct name and displayName', () => {
    expect(ibmBob.name).toBe('ibm-bob')
    expect(ibmBob.displayName).toBe('IBM Bob')
  })

  // Display names come from the shared model short-name table, including
  // the 'ibm-bob-auto' fallback label.
  it('uses shared short model display names', () => {
    expect(ibmBob.modelDisplayName('ibm-bob-auto')).toBe('IBM Bob (auto)')
    expect(ibmBob.modelDisplayName('claude-sonnet-4-6')).toBe('Sonnet 4.6')
  })
})