mirror of
https://github.com/AgentSeal/codeburn.git
synced 2026-04-28 06:59:37 +00:00
feat(mac): native Swift menubar app + one-command install
Introduces mac/ with a native SwiftUI menubar app that replaces the previous SwiftBar plugin entirely. Install via `npx codeburn menubar`, which downloads the .app from GitHub Releases, strips Gatekeeper quarantine, and drops it into ~/Applications. Highlights - mac/ SwiftUI app: agent tabs, Today/7/30/Month/All period switcher, Trend/Forecast/Pulse/Stats/Plan insights, activity + model breakdowns, optimize findings, CSV/JSON export, Star-on-GitHub banner, live 60s refresh, instant currency switching with offline FX cache. - Security: CodeburnCLI argv-based spawn (no shell interpretation), SafeFile symlink guards + O_NOFOLLOW writes, FX rate clamping to [0.0001, 1_000_000], keychain filtered to account == "default", removed byte-window credential log, in-flight refresh guard, POSIX flock on config.json writes, TerminalLauncher validates argv before AppleScript interpolation. - Performance: shared static NumberFormatter (thousands of allocations per popover redraw eliminated), concurrent pipe drain with 20 MB cap + 60s timeout in DataClient, Observation-tracked reactive UI, 5-min payload cache keyed on (period, provider). - CLI: new `codeburn menubar` subcommand that downloads + installs + launches the .app (no clone, no build). New `status --format menubar-json` payload builder. `export` rewritten to produce a folder of one-table-per-file CSVs with a `.codeburn-export` marker so arbitrary -o paths cannot be silently deleted. - Removed: src/menubar.ts (SwiftBar plugin generator), install-menubar / uninstall-menubar subcommands, `status --format menubar` directive output, tests/menubar.test.ts, tests/security/menubar-injection.test.ts. - Release: .github/workflows/release-menubar.yml builds universal binary, assembles .app, ad-hoc signs, zips, uploads on mac-v* tag push. Runs on the free macos-latest runner. Tests - 230 TypeScript tests pass - 10 Swift CapacityEstimator tests pass - TypeScript typecheck clean - Swift release build clean
This commit is contained in:
parent
69268a9e91
commit
495a254338
46 changed files with 6433 additions and 575 deletions
70
.github/workflows/release-menubar.yml
vendored
Normal file
70
.github/workflows/release-menubar.yml
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
name: Release macOS Menubar
|
||||||
|
|
||||||
|
# Triggers on a `mac-v*` tag push (e.g. `git tag mac-v0.8.0 && git push origin mac-v0.8.0`),
|
||||||
|
# or manually via the Actions tab. Runs entirely on the free macos-latest runner -- no Apple
|
||||||
|
# Developer Program membership, no signing certificates, no secrets required. The bundle ships
|
||||||
|
# ad-hoc signed; `npx codeburn menubar` strips the download quarantine flag on install so
|
||||||
|
# Gatekeeper stays quiet.
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'mac-v*'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version label for the bundle (e.g. v0.8.0 or dev-preview)'
|
||||||
|
required: true
|
||||||
|
default: 'dev-preview'
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write # Needed to create the release + upload assets.
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Resolve version label
|
||||||
|
id: version
|
||||||
|
run: |
|
||||||
|
if [[ "${GITHUB_REF}" == refs/tags/mac-v* ]]; then
|
||||||
|
echo "value=${GITHUB_REF#refs/tags/mac-}" >> "$GITHUB_OUTPUT"
|
||||||
|
else
|
||||||
|
echo "value=${{ github.event.inputs.version }}" >> "$GITHUB_OUTPUT"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Show Swift toolchain
|
||||||
|
run: swift --version
|
||||||
|
|
||||||
|
- name: Build + bundle + zip
|
||||||
|
run: mac/Scripts/package-app.sh "${{ steps.version.outputs.value }}"
|
||||||
|
|
||||||
|
- name: Upload artifact (for manual runs)
|
||||||
|
if: github.event_name == 'workflow_dispatch'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: CodeBurnMenubar-${{ steps.version.outputs.value }}
|
||||||
|
path: mac/.build/dist/CodeBurnMenubar-*.zip
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
- name: Create / update GitHub Release
|
||||||
|
if: startsWith(github.ref, 'refs/tags/mac-v')
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref_name }}
|
||||||
|
name: Menubar ${{ steps.version.outputs.value }}
|
||||||
|
body: |
|
||||||
|
Install with:
|
||||||
|
|
||||||
|
```
|
||||||
|
npx codeburn menubar
|
||||||
|
```
|
||||||
|
|
||||||
|
Unsigned build. The installer clears Gatekeeper quarantine on download, so the
|
||||||
|
app launches without warnings. Direct-download users from this page may see
|
||||||
|
"cannot verify developer" -- right-click → Open once to dismiss it, or use the
|
||||||
|
npx command above.
|
||||||
|
files: mac/.build/dist/CodeBurnMenubar-*.zip
|
||||||
|
fail_on_unmatched_files: true
|
||||||
11
README.md
11
README.md
|
|
@ -19,7 +19,7 @@
|
||||||
<img src="https://raw.githubusercontent.com/AgentSeal/codeburn/main/assets/dashboard.jpg" alt="CodeBurn TUI dashboard" width="620" />
|
<img src="https://raw.githubusercontent.com/AgentSeal/codeburn/main/assets/dashboard.jpg" alt="CodeBurn TUI dashboard" width="620" />
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
By task type, tool, model, MCP server, and project. Supports **Claude Code**, **Codex** (OpenAI), **Cursor**, **OpenCode**, **Pi**, and **GitHub Copilot** with a provider plugin system. Tracks one-shot success rate per activity type so you can see where the AI nails it first try vs. burns tokens on edit/test/fix retries. Interactive TUI dashboard with gradient charts, responsive panels, and keyboard navigation. macOS menu bar widget via SwiftBar. CSV/JSON export.
|
By task type, tool, model, MCP server, and project. Supports **Claude Code**, **Codex** (OpenAI), **Cursor**, **OpenCode**, **Pi**, and **GitHub Copilot** with a provider plugin system. Tracks one-shot success rate per activity type so you can see where the AI nails it first try vs. burns tokens on edit/test/fix retries. Interactive TUI dashboard with gradient charts, responsive panels, and keyboard navigation. Native macOS menubar app in `mac/`. CSV/JSON export.
|
||||||
|
|
||||||
Works by reading session data directly from disk. No wrapper, no proxy, no API keys. Pricing from LiteLLM (auto-cached, all models supported).
|
Works by reading session data directly from disk. No wrapper, no proxy, no API keys. Pricing from LiteLLM (auto-cached, all models supported).
|
||||||
|
|
||||||
|
|
@ -156,14 +156,13 @@ The menu bar widget includes a currency picker with 17 common currencies. For an
|
||||||
|
|
||||||
## Menu Bar
|
## Menu Bar
|
||||||
|
|
||||||
<img src="https://cdn.jsdelivr.net/gh/AgentSeal/codeburn@main/assets/menubar.png" alt="CodeBurn SwiftBar menu bar widget" width="260" />
|
<img src="https://cdn.jsdelivr.net/gh/AgentSeal/codeburn@main/assets/menubar.png" alt="CodeBurn macOS menubar app" width="420" />
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
codeburn install-menubar # install SwiftBar/xbar plugin
|
npx codeburn menubar
|
||||||
codeburn uninstall-menubar # remove it
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Requires [SwiftBar](https://github.com/swiftbar/SwiftBar) (`brew install --cask swiftbar`). Shows today's cost in the menu bar with a flame icon. Dropdown shows activity breakdown, model costs, token stats, per-provider cost breakdown, and a currency picker. Refreshes every 5 minutes.
|
One command: downloads the latest `.app`, installs into `~/Applications`, and launches it. Re-run with `--force` to reinstall. Native Swift + SwiftUI app lives in `mac/` (see `mac/README.md` for build details). Shows today's cost with a flame icon, opens a popover with agent tabs, period switcher (Today / 7 Days / 30 Days / Month / All), Trend / Forecast / Pulse / Stats / Plan insights, activity and model breakdowns, optimize findings, and CSV/JSON export. Refreshes live via FSEvents plus a 60-second poll.
|
||||||
|
|
||||||
## What it tracks
|
## What it tracks
|
||||||
|
|
||||||
|
|
@ -269,7 +268,7 @@ src/
|
||||||
classifier.ts 13-category task classifier
|
classifier.ts 13-category task classifier
|
||||||
types.ts Type definitions
|
types.ts Type definitions
|
||||||
format.ts Text rendering (status bar)
|
format.ts Text rendering (status bar)
|
||||||
menubar.ts SwiftBar plugin generator
|
menubar-json.ts Payload builder consumed by the native macOS menubar app in mac/
|
||||||
export.ts CSV/JSON multi-period export
|
export.ts CSV/JSON multi-period export
|
||||||
config.ts Config file management (~/.config/codeburn/)
|
config.ts Config file management (~/.config/codeburn/)
|
||||||
currency.ts Currency conversion, exchange rates, Intl formatting
|
currency.ts Currency conversion, exchange rates, Intl formatting
|
||||||
|
|
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 298 KiB After Width: | Height: | Size: 300 KiB |
6
mac/.gitignore
vendored
Normal file
6
mac/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
.build/
|
||||||
|
.swiftpm/
|
||||||
|
Package.resolved
|
||||||
|
*.xcodeproj/
|
||||||
|
*.xcworkspace/
|
||||||
|
DerivedData/
|
||||||
29
mac/Package.swift
Normal file
29
mac/Package.swift
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
// swift-tools-version: 6.0
|
||||||
|
import PackageDescription
|
||||||
|
|
||||||
|
let package = Package(
|
||||||
|
name: "CodeBurnMenubar",
|
||||||
|
platforms: [
|
||||||
|
.macOS(.v14)
|
||||||
|
],
|
||||||
|
products: [
|
||||||
|
.executable(name: "CodeBurnMenubar", targets: ["CodeBurnMenubar"])
|
||||||
|
],
|
||||||
|
targets: [
|
||||||
|
.executableTarget(
|
||||||
|
name: "CodeBurnMenubar",
|
||||||
|
path: "Sources/CodeBurnMenubar",
|
||||||
|
resources: [
|
||||||
|
.process("../../Resources")
|
||||||
|
],
|
||||||
|
swiftSettings: [
|
||||||
|
.enableUpcomingFeature("StrictConcurrency")
|
||||||
|
]
|
||||||
|
),
|
||||||
|
.testTarget(
|
||||||
|
name: "CodeBurnMenubarTests",
|
||||||
|
dependencies: ["CodeBurnMenubar"],
|
||||||
|
path: "Tests/CodeBurnMenubarTests"
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
88
mac/README.md
Normal file
88
mac/README.md
Normal file
|
|
@ -0,0 +1,88 @@
|
||||||
|
# CodeBurn Menubar (macOS)
|
||||||
|
|
||||||
|
Native Swift + SwiftUI menubar app. The codeburn menubar surface.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- macOS 14+ (Sonoma)
|
||||||
|
- Swift 6.0+ toolchain (bundled with Xcode 16 or standalone)
|
||||||
|
- `codeburn` CLI installed globally (`npm install -g codeburn`) or available at a path you pass via `CODEBURN_BIN`
|
||||||
|
|
||||||
|
## Install (end users)
|
||||||
|
|
||||||
|
One command:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npx codeburn menubar
|
||||||
|
```
|
||||||
|
|
||||||
|
That's it. The command downloads the latest signed `.app` from GitHub Releases, drops it into `~/Applications`, clears Gatekeeper quarantine, and launches it. Re-running it upgrades in place with `--force`, or just launches the existing copy otherwise.
|
||||||
|
|
||||||
|
If you already have the CLI installed globally (`npm install -g codeburn`), `codeburn menubar` works the same way.
|
||||||
|
|
||||||
|
### Build from source
|
||||||
|
|
||||||
|
For contributors running a local build instead of the packaged release:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install -g codeburn # CLI the app shells out to for data
|
||||||
|
git clone https://github.com/AgentSeal/codeburn.git
|
||||||
|
cd codeburn/mac
|
||||||
|
swift build -c release
|
||||||
|
.build/release/CodeBurnMenubar # launch
|
||||||
|
```
|
||||||
|
|
||||||
|
## Build & run (dev against a local CLI checkout)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd mac
|
||||||
|
swift build
|
||||||
|
# Point the app at your dev CLI build instead of the globally installed `codeburn`:
|
||||||
|
npm --prefix .. run build
|
||||||
|
CODEBURN_BIN="node $(pwd)/../dist/cli.js" swift run
|
||||||
|
```
|
||||||
|
|
||||||
|
The app registers itself as a menubar accessory (`LSUIElement = true` at runtime). No Dock icon.
|
||||||
|
|
||||||
|
## Data source
|
||||||
|
|
||||||
|
On launch and every 60 seconds thereafter, the app spawns `codeburn status --format menubar-json --no-optimize` directly (argv, no shell) via `CodeburnCLI.makeProcess` and decodes the JSON into `MenubarPayload`. The manual refresh button in the footer invokes the same command without `--no-optimize`, which includes optimize findings but takes longer.
|
||||||
|
|
||||||
|
Override the binary via the `CODEBURN_BIN` environment variable (default: `codeburn` on PATH). The value is validated against a strict allowlist (alphanumerics plus `._/-` space) before use, so a malicious env var can't inject shell commands.
|
||||||
|
|
||||||
|
## Project layout
|
||||||
|
|
||||||
|
```
|
||||||
|
mac/
|
||||||
|
├── Package.swift SwiftPM manifest
|
||||||
|
├── Sources/CodeBurnMenubar/
|
||||||
|
│ ├── CodeBurnApp.swift @main + MenuBarExtra scene
|
||||||
|
│ ├── AppStore.swift @Observable store + enums
|
||||||
|
│ ├── Data/MenubarPayload.swift Codable payload types + placeholder
|
||||||
|
│ ├── Theme/Theme.swift Design tokens (warm terracotta palette)
|
||||||
|
│ └── Views/MenuBarContent.swift Popover layout + footer action bar
|
||||||
|
└── README.md This file
|
||||||
|
```
|
||||||
|
|
||||||
|
## Status
|
||||||
|
|
||||||
|
Live data wired. Next iterations:
|
||||||
|
|
||||||
|
1. FSEvents watch for `~/.claude/projects/` changes (debounced refresh on real edits)
|
||||||
|
2. Persistent disk cache for optimize findings so the default refresh can include them without the 30-second penalty
|
||||||
|
3. Currency metadata in the JSON payload + Swift-side formatting
|
||||||
|
4. Sparkle auto-update
|
||||||
|
5. DMG packaging + Homebrew Cask tap
|
||||||
|
|
||||||
|
## Design tokens
|
||||||
|
|
||||||
|
Sourced from `~/codeburn-menubar-mac-swiftui.html`. Warm terracotta-ember palette:
|
||||||
|
|
||||||
|
- Accent (light): `#C9521D`
|
||||||
|
- Accent (dark): `#E8774A`
|
||||||
|
- Ember deep: `#8B3E13`
|
||||||
|
- Ember glow: `#F0A070`
|
||||||
|
- Surface (light): `#FAF7F3`
|
||||||
|
- Surface (dark): `#1C1816`
|
||||||
|
|
||||||
|
SF Mono for currency values; SF Pro Rounded for hero.
|
||||||
103
mac/Scripts/package-app.sh
Executable file
103
mac/Scripts/package-app.sh
Executable file
|
|
@ -0,0 +1,103 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
# Builds a universal CodeBurnMenubar.app bundle from the SwiftPM target and drops a
|
||||||
|
# distributable zip alongside. Used by the GitHub release workflow; also runnable locally.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# mac/Scripts/package-app.sh [<version>]
|
||||||
|
# Defaults to `dev` if no version is given.
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
VERSION="${1:-dev}"
|
||||||
|
BUNDLE_NAME="CodeBurnMenubar.app"
|
||||||
|
BUNDLE_ID="org.agentseal.codeburn-menubar"
|
||||||
|
EXECUTABLE_NAME="CodeBurnMenubar"
|
||||||
|
MIN_MACOS="14.0"
|
||||||
|
|
||||||
|
repo_root() {
|
||||||
|
git rev-parse --show-toplevel 2>/dev/null || (cd "$(dirname "$0")/../.." && pwd)
|
||||||
|
}
|
||||||
|
|
||||||
|
ROOT=$(repo_root)
|
||||||
|
MAC_DIR="${ROOT}/mac"
|
||||||
|
DIST_DIR="${MAC_DIR}/.build/dist"
|
||||||
|
|
||||||
|
cd "${MAC_DIR}"
|
||||||
|
|
||||||
|
echo "▸ Cleaning previous dist..."
|
||||||
|
rm -rf "${DIST_DIR}"
|
||||||
|
mkdir -p "${DIST_DIR}"
|
||||||
|
|
||||||
|
echo "▸ Building universal binary (arm64 + x86_64)..."
|
||||||
|
swift build -c release --arch arm64 --arch x86_64
|
||||||
|
|
||||||
|
BIN_PATH=$(swift build -c release --arch arm64 --arch x86_64 --show-bin-path)
|
||||||
|
BUILT_BINARY="${BIN_PATH}/${EXECUTABLE_NAME}"
|
||||||
|
if [[ ! -x "${BUILT_BINARY}" ]]; then
|
||||||
|
echo "Binary not found at ${BUILT_BINARY}" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "▸ Assembling ${BUNDLE_NAME}..."
|
||||||
|
BUNDLE="${DIST_DIR}/${BUNDLE_NAME}"
|
||||||
|
mkdir -p "${BUNDLE}/Contents/MacOS"
|
||||||
|
mkdir -p "${BUNDLE}/Contents/Resources"
|
||||||
|
cp "${BUILT_BINARY}" "${BUNDLE}/Contents/MacOS/${EXECUTABLE_NAME}"
|
||||||
|
|
||||||
|
cat > "${BUNDLE}/Contents/Info.plist" <<PLIST
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
|
<plist version="1.0">
|
||||||
|
<dict>
|
||||||
|
<key>CFBundleDevelopmentRegion</key>
|
||||||
|
<string>en</string>
|
||||||
|
<key>CFBundleDisplayName</key>
|
||||||
|
<string>CodeBurn Menubar</string>
|
||||||
|
<key>CFBundleExecutable</key>
|
||||||
|
<string>${EXECUTABLE_NAME}</string>
|
||||||
|
<key>CFBundleIconFile</key>
|
||||||
|
<string>AppIcon</string>
|
||||||
|
<key>CFBundleIdentifier</key>
|
||||||
|
<string>${BUNDLE_ID}</string>
|
||||||
|
<key>CFBundleInfoDictionaryVersion</key>
|
||||||
|
<string>6.0</string>
|
||||||
|
<key>CFBundleName</key>
|
||||||
|
<string>${EXECUTABLE_NAME}</string>
|
||||||
|
<key>CFBundlePackageType</key>
|
||||||
|
<string>APPL</string>
|
||||||
|
<key>CFBundleShortVersionString</key>
|
||||||
|
<string>${VERSION}</string>
|
||||||
|
<key>CFBundleVersion</key>
|
||||||
|
<string>${VERSION}</string>
|
||||||
|
<key>LSMinimumSystemVersion</key>
|
||||||
|
<string>${MIN_MACOS}</string>
|
||||||
|
<key>LSUIElement</key>
|
||||||
|
<true/>
|
||||||
|
<key>NSHighResolutionCapable</key>
|
||||||
|
<true/>
|
||||||
|
<key>NSHumanReadableCopyright</key>
|
||||||
|
<string>© AgentSeal</string>
|
||||||
|
</dict>
|
||||||
|
</plist>
|
||||||
|
PLIST
|
||||||
|
|
||||||
|
cat > "${BUNDLE}/Contents/PkgInfo" <<'PKG'
|
||||||
|
APPL????
|
||||||
|
PKG
|
||||||
|
|
||||||
|
# Ad-hoc sign so macOS treats the bundle as internally consistent. This does NOT give us a
|
||||||
|
# recognisable developer name in Finder (that needs the $99 Developer ID cert), but it
|
||||||
|
# satisfies macOS's minimum bundle-validity checks on 14+ and prevents some Gatekeeper edge
|
||||||
|
# cases on managed Macs.
|
||||||
|
echo "▸ Ad-hoc signing..."
|
||||||
|
codesign --force --sign - --timestamp=none --deep "${BUNDLE}" 2>/dev/null || true
|
||||||
|
codesign --verify --deep --strict "${BUNDLE}" 2>/dev/null || echo " (signature verify skipped)"
|
||||||
|
|
||||||
|
ZIP_NAME="CodeBurnMenubar-${VERSION}.zip"
|
||||||
|
ZIP_PATH="${DIST_DIR}/${ZIP_NAME}"
|
||||||
|
echo "▸ Packaging ${ZIP_NAME}..."
|
||||||
|
(cd "${DIST_DIR}" && /usr/bin/ditto -c -k --keepParent "${BUNDLE_NAME}" "${ZIP_NAME}")
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "✓ Built ${ZIP_PATH}"
|
||||||
|
ls -la "${DIST_DIR}"
|
||||||
307
mac/Sources/CodeBurnMenubar/AppStore.swift
Normal file
307
mac/Sources/CodeBurnMenubar/AppStore.swift
Normal file
|
|
@ -0,0 +1,307 @@
|
||||||
|
import Foundation
|
||||||
|
import Observation
|
||||||
|
|
||||||
|
private let cacheTTLSeconds: TimeInterval = 300
|
||||||
|
|
||||||
|
struct CachedPayload {
|
||||||
|
let payload: MenubarPayload
|
||||||
|
let fetchedAt: Date
|
||||||
|
var isFresh: Bool { Date().timeIntervalSince(fetchedAt) < cacheTTLSeconds }
|
||||||
|
}
|
||||||
|
|
||||||
|
struct PayloadCacheKey: Hashable {
|
||||||
|
let period: Period
|
||||||
|
let provider: ProviderFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
@MainActor
|
||||||
|
@Observable
|
||||||
|
final class AppStore {
|
||||||
|
var selectedProvider: ProviderFilter = .all
|
||||||
|
var selectedPeriod: Period = .today
|
||||||
|
var selectedInsight: InsightMode = .trend
|
||||||
|
var currency: String = "USD"
|
||||||
|
var isLoading: Bool = false
|
||||||
|
var lastError: String?
|
||||||
|
var subscription: SubscriptionUsage?
|
||||||
|
var subscriptionError: String?
|
||||||
|
var subscriptionLoadState: SubscriptionLoadState = .idle
|
||||||
|
var capacityEstimates: [String: CapacityEstimate] = [:]
|
||||||
|
|
||||||
|
private var cache: [PayloadCacheKey: CachedPayload] = [:]
|
||||||
|
|
||||||
|
private var currentKey: PayloadCacheKey {
|
||||||
|
PayloadCacheKey(period: selectedPeriod, provider: selectedProvider)
|
||||||
|
}
|
||||||
|
|
||||||
|
var payload: MenubarPayload {
|
||||||
|
cache[currentKey]?.payload ?? .empty
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Today (across all providers) is pinned for the always-visible menubar icon, independent of
|
||||||
|
/// the popover's selected period or provider.
|
||||||
|
var todayPayload: MenubarPayload? {
|
||||||
|
cache[PayloadCacheKey(period: .today, provider: .all)]?.payload
|
||||||
|
}
|
||||||
|
|
||||||
|
var hasCachedData: Bool {
|
||||||
|
cache[currentKey] != nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var findingsCount: Int {
|
||||||
|
payload.optimize.findingCount
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Switch to a period. Uses cached payload if fresh; otherwise fetches.
|
||||||
|
func switchTo(period: Period) async {
|
||||||
|
selectedPeriod = period
|
||||||
|
if let cached = cache[currentKey], cached.isFresh { return }
|
||||||
|
await refresh(includeOptimize: true)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Switch to a provider filter. Uses cached payload if fresh; otherwise fetches.
|
||||||
|
func switchTo(provider: ProviderFilter) async {
|
||||||
|
selectedProvider = provider
|
||||||
|
if let cached = cache[currentKey], cached.isFresh { return }
|
||||||
|
await refresh(includeOptimize: true)
|
||||||
|
}
|
||||||
|
|
||||||
|
private var inFlightKeys: Set<PayloadCacheKey> = []
|
||||||
|
|
||||||
|
/// Refresh the currently selected (period, provider) combination. Guards against concurrent
|
||||||
|
/// fetches for the same key so a slow initial request can't overwrite a newer one that
|
||||||
|
/// finished first (which would show stale numbers the user has already moved past).
|
||||||
|
func refresh(includeOptimize: Bool) async {
|
||||||
|
let key = currentKey
|
||||||
|
guard !inFlightKeys.contains(key) else { return }
|
||||||
|
inFlightKeys.insert(key)
|
||||||
|
isLoading = true
|
||||||
|
defer {
|
||||||
|
inFlightKeys.remove(key)
|
||||||
|
isLoading = false
|
||||||
|
}
|
||||||
|
do {
|
||||||
|
let fresh = try await DataClient.fetch(period: key.period, provider: key.provider, includeOptimize: includeOptimize)
|
||||||
|
cache[key] = CachedPayload(payload: fresh, fetchedAt: Date())
|
||||||
|
lastError = nil
|
||||||
|
} catch {
|
||||||
|
lastError = String(describing: error)
|
||||||
|
NSLog("CodeBurn: fetch failed for \(key.period.rawValue)/\(key.provider.rawValue): \(error)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Background refresh for a period other than the visible one (e.g. keeping today fresh for the menubar badge).
|
||||||
|
/// Does not toggle isLoading, so the popover's loading overlay is unaffected.
|
||||||
|
/// Always uses the .all provider since the menubar badge shows total spend.
|
||||||
|
func refreshQuietly(period: Period) async {
|
||||||
|
do {
|
||||||
|
let fresh = try await DataClient.fetch(period: period, provider: .all, includeOptimize: true)
|
||||||
|
cache[PayloadCacheKey(period: period, provider: .all)] = CachedPayload(payload: fresh, fetchedAt: Date())
|
||||||
|
} catch {
|
||||||
|
NSLog("CodeBurn: quiet refresh failed for \(period.rawValue): \(error)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch Claude subscription usage. Sets subscription = nil on missing creds (API users / unauthenticated).
|
||||||
|
/// Triggered lazily when the user opens the Plan pill, so the Keychain prompt only fires on intent.
|
||||||
|
func refreshSubscription() async {
|
||||||
|
subscriptionLoadState = .loading
|
||||||
|
do {
|
||||||
|
let usage = try await SubscriptionClient.fetch()
|
||||||
|
subscription = usage
|
||||||
|
subscriptionError = nil
|
||||||
|
subscriptionLoadState = .loaded
|
||||||
|
await captureSnapshots(for: usage)
|
||||||
|
} catch SubscriptionError.noCredentials {
|
||||||
|
subscription = nil
|
||||||
|
subscriptionError = nil
|
||||||
|
subscriptionLoadState = .noCredentials
|
||||||
|
} catch {
|
||||||
|
subscription = nil
|
||||||
|
subscriptionError = String(describing: error)
|
||||||
|
subscriptionLoadState = .failed
|
||||||
|
NSLog("CodeBurn: subscription fetch failed: \(error)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist one snapshot per window so we can answer "what did the prior cycle end at?"
|
||||||
|
/// when the current window has just reset and projection from current data isn't meaningful.
|
||||||
|
/// Also computes the effective_tokens consumed inside each 7-day window from local history,
|
||||||
|
/// which the CapacityEstimator uses to derive the absolute token capacity per tier.
|
||||||
|
private func captureSnapshots(for usage: SubscriptionUsage) async {
|
||||||
|
let now = Date()
|
||||||
|
let history = payload.history.daily
|
||||||
|
|
||||||
|
let captures: [(key: String, percent: Double?, resetsAt: Date?, effective: Double?)] = [
|
||||||
|
("five_hour", usage.fiveHourPercent, usage.fiveHourResetsAt, nil),
|
||||||
|
("seven_day", usage.sevenDayPercent, usage.sevenDayResetsAt,
|
||||||
|
effectiveTokensInLast7Days(history: history, asOf: now)),
|
||||||
|
("seven_day_opus", usage.sevenDayOpusPercent, usage.sevenDayOpusResetsAt, nil),
|
||||||
|
("seven_day_sonnet", usage.sevenDaySonnetPercent, usage.sevenDaySonnetResetsAt, nil),
|
||||||
|
]
|
||||||
|
for capture in captures {
|
||||||
|
guard let percent = capture.percent, let resetsAt = capture.resetsAt else { continue }
|
||||||
|
await SubscriptionSnapshotStore.record(SubscriptionSnapshot(
|
||||||
|
windowKey: capture.key,
|
||||||
|
percent: percent,
|
||||||
|
resetsAt: resetsAt,
|
||||||
|
capturedAt: now,
|
||||||
|
effectiveTokens: capture.effective
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
await refreshCapacityEstimates()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sum effective tokens (input + 5*output + cache_creation + 0.1*cache_read) across the
|
||||||
|
/// last 7 days of dailyHistory. Used as the "tokens consumed in 7-day window" reading paired
|
||||||
|
/// with the API-reported percent for capacity estimation.
|
||||||
|
private func effectiveTokensInLast7Days(history: [DailyHistoryEntry], asOf now: Date) -> Double {
|
||||||
|
let cutoff = ISO8601DateFormatter().string(from: now.addingTimeInterval(-7 * 86400)).prefix(10)
|
||||||
|
return history
|
||||||
|
.filter { $0.date >= cutoff }
|
||||||
|
.reduce(0.0) { $0 + $1.effectiveTokens }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run CapacityEstimator over each window's accumulated snapshots. Only snapshots with a
|
||||||
|
/// non-nil effectiveTokens contribute. Result lives in capacityEstimates dict for UI gating.
|
||||||
|
private func refreshCapacityEstimates() async {
|
||||||
|
var next: [String: CapacityEstimate] = [:]
|
||||||
|
for key in ["seven_day", "seven_day_opus", "seven_day_sonnet"] {
|
||||||
|
let snaps = await SubscriptionSnapshotStore.snapshots(for: key)
|
||||||
|
let capacitySnaps = snaps.compactMap { s -> CapacitySnapshot? in
|
||||||
|
guard let effective = s.effectiveTokens, effective > 0 else { return nil }
|
||||||
|
return CapacitySnapshot(percent: s.percent, effectiveTokens: effective, capturedAt: s.capturedAt)
|
||||||
|
}
|
||||||
|
if let estimate = CapacityEstimator.estimate(capacitySnaps) {
|
||||||
|
next[key] = estimate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
capacityEstimates = next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum SupportedCurrency: String, CaseIterable, Identifiable {
|
||||||
|
case USD, GBP, EUR, AUD, CAD, NZD, JPY, CHF, INR, BRL, SEK, SGD, HKD, KRW, MXN, ZAR, DKK
|
||||||
|
var id: String { rawValue }
|
||||||
|
var displayName: String {
|
||||||
|
switch self {
|
||||||
|
case .USD: "US Dollar"
|
||||||
|
case .GBP: "British Pound"
|
||||||
|
case .EUR: "Euro"
|
||||||
|
case .AUD: "Australian Dollar"
|
||||||
|
case .CAD: "Canadian Dollar"
|
||||||
|
case .NZD: "New Zealand Dollar"
|
||||||
|
case .JPY: "Japanese Yen"
|
||||||
|
case .CHF: "Swiss Franc"
|
||||||
|
case .INR: "Indian Rupee"
|
||||||
|
case .BRL: "Brazilian Real"
|
||||||
|
case .SEK: "Swedish Krona"
|
||||||
|
case .SGD: "Singapore Dollar"
|
||||||
|
case .HKD: "Hong Kong Dollar"
|
||||||
|
case .KRW: "South Korean Won"
|
||||||
|
case .MXN: "Mexican Peso"
|
||||||
|
case .ZAR: "South African Rand"
|
||||||
|
case .DKK: "Danish Krone"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ProviderFilter: String, CaseIterable, Identifiable {
|
||||||
|
case all = "All"
|
||||||
|
case claude = "Claude"
|
||||||
|
case codex = "Codex"
|
||||||
|
case cursor = "Cursor"
|
||||||
|
case copilot = "Copilot"
|
||||||
|
|
||||||
|
var id: String { rawValue }
|
||||||
|
|
||||||
|
/// Maps to the CLI's `--provider` argument values.
|
||||||
|
var cliArg: String {
|
||||||
|
switch self {
|
||||||
|
case .all: "all"
|
||||||
|
case .claude: "claude"
|
||||||
|
case .codex: "codex"
|
||||||
|
case .cursor: "cursor"
|
||||||
|
case .copilot: "copilot"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum SubscriptionLoadState: Sendable, Equatable {
|
||||||
|
case idle // never tried, awaiting user intent
|
||||||
|
case loading // fetch in progress
|
||||||
|
case loaded // success; subscription is populated
|
||||||
|
case noCredentials // tried; user has no Claude OAuth (API user / not logged in)
|
||||||
|
case failed // tried; error occurred
|
||||||
|
}
|
||||||
|
|
||||||
|
enum InsightMode: String, CaseIterable, Identifiable {
|
||||||
|
case plan = "Plan"
|
||||||
|
case trend = "Trend"
|
||||||
|
case forecast = "Forecast"
|
||||||
|
case pulse = "Pulse"
|
||||||
|
case stats = "Stats"
|
||||||
|
var id: String { rawValue }
|
||||||
|
}
|
||||||
|
|
||||||
|
enum Period: String, CaseIterable, Identifiable {
|
||||||
|
case today = "Today"
|
||||||
|
case sevenDays = "7 Days"
|
||||||
|
case thirtyDays = "30 Days"
|
||||||
|
case month = "Month"
|
||||||
|
case all = "All"
|
||||||
|
|
||||||
|
var id: String { rawValue }
|
||||||
|
|
||||||
|
/// Maps to the CLI's `--period` argument values.
|
||||||
|
var cliArg: String {
|
||||||
|
switch self {
|
||||||
|
case .today: "today"
|
||||||
|
case .sevenDays: "week"
|
||||||
|
case .thirtyDays: "30days"
|
||||||
|
case .month: "month"
|
||||||
|
case .all: "all"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// NumberFormatter is expensive to instantiate (~microseconds each) and currency/token values
|
||||||
|
/// are formatted dozens of times per popover refresh. These shared instances avoid thousands of
|
||||||
|
/// allocations per frame while SwiftUI's Observation framework still triggers redraws when
|
||||||
|
/// CurrencyState.shared mutates.
|
||||||
|
private let groupedDecimalFormatter: NumberFormatter = {
|
||||||
|
let f = NumberFormatter()
|
||||||
|
f.numberStyle = .decimal
|
||||||
|
f.groupingSeparator = ","
|
||||||
|
f.decimalSeparator = "."
|
||||||
|
f.maximumFractionDigits = 2
|
||||||
|
f.minimumFractionDigits = 2
|
||||||
|
return f
|
||||||
|
}()
|
||||||
|
|
||||||
|
private let thousandsFormatter: NumberFormatter = {
|
||||||
|
let f = NumberFormatter()
|
||||||
|
f.numberStyle = .decimal
|
||||||
|
f.groupingSeparator = ","
|
||||||
|
return f
|
||||||
|
}()
|
||||||
|
|
||||||
|
extension Double {
|
||||||
|
func asCurrency() -> String {
|
||||||
|
let state = CurrencyState.shared
|
||||||
|
let converted = self * state.rate
|
||||||
|
return state.symbol + (groupedDecimalFormatter.string(from: NSNumber(value: converted)) ?? "\(converted)")
|
||||||
|
}
|
||||||
|
|
||||||
|
func asCompactCurrency() -> String {
|
||||||
|
let state = CurrencyState.shared
|
||||||
|
return String(format: "\(state.symbol)%.2f", self * state.rate)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
extension Int {
    /// Renders the integer with comma grouping; falls back to plain interpolation
    /// if the formatter declines the value.
    func asThousandsSeparated() -> String {
        guard let grouped = thousandsFormatter.string(from: NSNumber(value: self)) else {
            return "\(self)"
        }
        return grouped
    }
}
|
||||||
182
mac/Sources/CodeBurnMenubar/CodeBurnApp.swift
Normal file
182
mac/Sources/CodeBurnMenubar/CodeBurnApp.swift
Normal file
|
|
@ -0,0 +1,182 @@
|
||||||
|
import SwiftUI
|
||||||
|
import AppKit
|
||||||
|
import Observation
|
||||||
|
|
||||||
|
/// Cadence of the background refresh loop (see AppDelegate.startRefreshLoop).
private let refreshIntervalSeconds: UInt64 = 60
private let nanosPerSecond: UInt64 = 1_000_000_000
private let refreshIntervalNanos: UInt64 = refreshIntervalSeconds * nanosPerSecond
/// Fixed so the popover's anchor point doesn't shift each time today's cost changes.
private let statusItemFixedWidth: CGFloat = 130
/// Popover content dimensions; popoverWidth also pins the SwiftUI root view's frame.
private let popoverWidth: CGFloat = 360
private let popoverHeight: CGFloat = 660
/// Point size shared by the menubar title font and the inline flame symbol.
private let menubarTitleFontSize: CGFloat = 13
|
||||||
|
|
||||||
|
@main
struct CodeBurnApp: App {
    // All real UI lives in the NSStatusItem/NSPopover owned by AppDelegate.
    @NSApplicationDelegateAdaptor(AppDelegate.self) var delegate

    var body: some Scene {
        // SwiftUI App needs at least one scene. Settings is invisible by default.
        Settings {
            EmptyView()
        }
    }
}
|
||||||
|
|
||||||
|
@MainActor
final class AppDelegate: NSObject, NSApplicationDelegate, NSPopoverDelegate {
    private var statusItem: NSStatusItem!
    private var popover: NSPopover!
    private let store = AppStore()
    // Background refresh loop; cancelled in applicationWillTerminate.
    private var refreshTask: Task<Void, Never>?

    func applicationDidFinishLaunching(_ notification: Notification) {
        // Menubar accessory -- no Dock icon, no app switcher entry.
        NSApp.setActivationPolicy(.accessory)

        restorePersistedCurrency()
        setupStatusItem()
        setupPopover()
        observeStore()
        startRefreshLoop()
        // Subscription is fetched lazily when the user opens the Plan pill, so the macOS
        // Keychain prompt never fires until the user explicitly asks for it.
    }

    /// Loads the currency code persisted by `codeburn currency` so a relaunch picks up where
    /// the user left off. Rate is resolved from the on-disk FX cache if present, otherwise
    /// fetched live in the background.
    private func restorePersistedCurrency() {
        guard let code = CLICurrencyConfig.loadCode(), code != "USD" else { return }
        let symbol = CurrencyState.symbolForCode(code)
        store.currency = code

        Task {
            // Apply the (possibly stale) cached rate first so the UI converts immediately,
            // then overwrite with a live rate only when it actually differs.
            let cached = await FXRateCache.shared.cachedRate(for: code)
            await MainActor.run {
                CurrencyState.shared.apply(code: code, rate: cached, symbol: symbol)
            }
            let fresh = await FXRateCache.shared.rate(for: code)
            if let fresh, fresh != cached {
                await MainActor.run {
                    CurrencyState.shared.apply(code: code, rate: fresh, symbol: symbol)
                }
            }
        }
    }

    func applicationWillTerminate(_ notification: Notification) {
        refreshTask?.cancel()
    }

    /// Refreshes store data every `refreshIntervalSeconds`. The [weak self] capture plus
    /// per-iteration `guard let self` lets the loop end once the delegate is gone.
    private func startRefreshLoop() {
        refreshTask = Task { [weak self] in
            while !Task.isCancelled {
                guard let self else { return }
                if self.store.selectedPeriod != .today {
                    await self.store.refreshQuietly(period: .today)
                }
                // Optimize is fast (~1s warm-cache) so include findings on every refresh.
                await self.store.refresh(includeOptimize: true)
                try? await Task.sleep(nanoseconds: refreshIntervalNanos)
            }
        }
    }

    /// Observation tracking fires once per change, so the onChange handler re-arms itself
    /// after updating the status button.
    private func observeStore() {
        withObservationTracking {
            _ = store.payload
            _ = store.todayPayload
        } onChange: { [weak self] in
            Task { @MainActor in
                self?.refreshStatusButton()
                self?.observeStore()
            }
        }
    }

    // MARK: - Status Item

    private func setupStatusItem() {
        // Fixed width so the popover anchor (and thus popover position) doesn't shift
        // every time today's cost or findings badge changes.
        statusItem = NSStatusBar.system.statusItem(withLength: statusItemFixedWidth)
        guard let button = statusItem.button else { return }
        button.target = self
        button.action = #selector(handleButtonClick(_:))
        button.sendAction(on: [.leftMouseUp, .rightMouseUp])
        refreshStatusButton()
    }

    /// Composes the menubar title as a single attributed string with the flame as an inline
    /// NSTextAttachment. NSStatusItem's separate `image` + `attributedTitle` path leaves a
    /// stubborn gap between icon and text on some macOS releases (the icon hugs the left edge
    /// of the status item, the title starts at its own baseline), so we inline both so they
    /// flow as one typographic unit with a single, controllable gap.
    private func refreshStatusButton() {
        guard let button = statusItem.button else { return }

        // Clear any previously-set image so the attachment is the only glyph rendered.
        button.image = nil
        button.imagePosition = .noImage

        let font = NSFont.monospacedDigitSystemFont(ofSize: menubarTitleFontSize, weight: .medium)
        let flameConfig = NSImage.SymbolConfiguration(pointSize: menubarTitleFontSize, weight: .medium)
        let flame = NSImage(systemSymbolName: "flame.fill", accessibilityDescription: "CodeBurn")?
            .withSymbolConfiguration(flameConfig)
        // Template rendering lets the system tint the symbol for light/dark menubars.
        flame?.isTemplate = true

        let attachment = NSTextAttachment()
        attachment.image = flame
        if let size = flame?.size {
            // Nudge the image down ~2pt so its visual centre sits on the text baseline mid-line
            // rather than riding high. Exact value tuned against SF Pro Display 13pt.
            attachment.bounds = CGRect(x: 0, y: -2, width: size.width, height: size.height)
        }

        // Placeholder "$—" in secondary color until the first payload arrives.
        let hasPayload = store.todayPayload != nil
        let valueText = " " + (store.todayPayload?.current.cost.asCompactCurrency() ?? "$—")
        let color: NSColor = hasPayload ? .labelColor : .secondaryLabelColor

        let composed = NSMutableAttributedString()
        composed.append(NSAttributedString(attachment: attachment))
        composed.append(NSAttributedString(
            string: valueText,
            attributes: [.font: font, .foregroundColor: color]
        ))
        button.attributedTitle = composed
    }

    // MARK: - Popover

    private func setupPopover() {
        popover = NSPopover()
        popover.contentSize = NSSize(width: popoverWidth, height: popoverHeight)
        popover.behavior = .transient // auto-close only on explicit outside click
        popover.animates = true
        popover.delegate = self

        let content = MenuBarContent()
            .environment(store)
            .frame(width: popoverWidth)

        popover.contentViewController = NSHostingController(rootView: content)
    }

    /// Toggles the popover. Both left and right clicks land here (see sendAction mask).
    @objc private func handleButtonClick(_ sender: AnyObject?) {
        guard let button = statusItem.button else { return }
        if popover.isShown {
            popover.performClose(sender)
        } else {
            NSApp.activate(ignoringOtherApps: true)
            popover.show(relativeTo: button.bounds, of: button, preferredEdge: .minY)
            popover.contentViewController?.view.window?.makeKey()
        }
    }

    // MARK: - NSPopoverDelegate

    func popoverShouldDetach(_ popover: NSPopover) -> Bool {
        false
    }
}
|
||||||
209
mac/Sources/CodeBurnMenubar/CurrencyState.swift
Normal file
209
mac/Sources/CodeBurnMenubar/CurrencyState.swift
Normal file
|
|
@ -0,0 +1,209 @@
|
||||||
|
import Foundation
|
||||||
|
import Observation
|
||||||
|
|
||||||
|
/// FX rates older than this are considered stale and re-fetched.
private let fxCacheTTLSeconds: TimeInterval = 24 * 3600
private let frankfurterBaseURL = "https://api.frankfurter.app/latest?from=USD&to="
/// Defensive bounds on any fetched FX rate. Real-world USD→X rates sit in [0.0001, 200000]
/// for every ISO 4217 pair; anything outside is either a parser bug or a MITM poisoning
/// attempt. We clamp hard so UI can't render NaN, negative, or astronomical numbers.
/// (The upper bound is deliberately looser than the real-world max to leave headroom.)
private let minValidFXRate: Double = 0.0001
private let maxValidFXRate: Double = 1_000_000
private let fxFetchTimeoutSeconds: TimeInterval = 10
|
||||||
|
|
||||||
|
/// Process-wide active display currency. @Observable so SwiftUI views re-render when the
/// code/rate/symbol change; mutations must happen on the main actor (see `apply`).
@Observable
final class CurrencyState: @unchecked Sendable {
    static let shared = CurrencyState()

    // ISO 4217 code of the active currency.
    var code: String = "USD"
    // USD→code multiplier applied to all displayed costs.
    var rate: Double = 1.0
    // Display glyph prepended to formatted amounts.
    var symbol: String = "$"

    private init() {}

    /// Applies a new currency context. Callers must invoke on the main actor so @Observable
    /// view updates run on the UI thread. Rejects non-finite or out-of-band rates so a
    /// poisoned Frankfurter response can't corrupt displayed costs.
    func apply(code: String, rate: Double?, symbol: String) {
        self.code = code
        self.symbol = symbol
        // Note: code/symbol are applied even when the rate is rejected; the previous
        // (valid) rate is kept in that case.
        if let r = rate, r.isFinite, r >= minValidFXRate, r <= maxValidFXRate {
            self.rate = r
        }
    }

    /// Returns the display glyph for an ISO currency code.
    static func symbolForCode(_ code: String) -> String {
        // Some locales return "US$" for USD or "CA$" for CAD via NumberFormatter. Prefer the
        // plain glyph form everyone recognises.
        if let override = symbolOverrides[code] { return override }
        let formatter = NumberFormatter()
        formatter.numberStyle = .currency
        formatter.currencyCode = code
        // Heuristic locale from the code's first two letters (e.g. "IN" from "INR");
        // the overrides above cover the cases where this guess is unreliable.
        formatter.locale = Locale(identifier: "en_\(code.prefix(2))")
        return formatter.currencySymbol ?? code
    }

    private static let symbolOverrides: [String: String] = [
        "USD": "$",
        "CAD": "$",
        "AUD": "$",
        "NZD": "$",
        "HKD": "$",
        "SGD": "$",
        "MXN": "$",
        "EUR": "\u{20AC}",
        "GBP": "\u{00A3}",
        "JPY": "\u{00A5}",
        "CNY": "\u{00A5}",
        "KRW": "\u{20A9}",
        "INR": "\u{20B9}",
        "BRL": "R$",
        "CHF": "CHF",
        "SEK": "kr",
        "DKK": "kr",
        "ZAR": "R"
    ]
}
|
||||||
|
|
||||||
|
/// 24-hour on-disk cache of USD→X FX rates (Frankfurter-backed), persisted under the user
/// caches directory. All loaded and fetched rates are validated against the sanity bounds.
actor FXRateCache {
    static let shared = FXRateCache()

    private struct Entry: Codable {
        let rate: Double
        let savedAt: TimeInterval   // seconds since epoch at fetch time
    }

    private var entries: [String: Entry] = [:]
    private var loaded = false

    /// ~/Library/Caches/codeburn-mac/fx-rates.json
    private var cacheFilePath: String {
        let base = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0]
        return base
            .appendingPathComponent("codeburn-mac", isDirectory: true)
            .appendingPathComponent("fx-rates.json")
            .path
    }

    /// Lazily loads the persisted cache once per process. A missing or corrupt file
    /// resets to an empty cache rather than failing.
    private func loadIfNeeded() {
        guard !loaded else { return }
        loaded = true
        do {
            let data = try SafeFile.read(from: cacheFilePath)
            let decoded = try JSONDecoder().decode([String: Entry].self, from: data)
            // Drop any persisted entries whose rate violates the sanity bounds -- covers an
            // old cache that was written before the clamp was introduced.
            entries = decoded.filter { _, entry in
                entry.rate.isFinite && entry.rate >= minValidFXRate && entry.rate <= maxValidFXRate
            }
        } catch {
            entries = [:]
        }
    }

    /// Best-effort write-back; failures are silently ignored (it is only a cache).
    private func persist() {
        guard let data = try? JSONEncoder().encode(entries) else { return }
        try? SafeFile.write(data, to: cacheFilePath)
    }

    /// Returns a cached rate regardless of freshness. Nil if never fetched.
    func cachedRate(for code: String) -> Double? {
        if code == "USD" { return 1.0 }
        loadIfNeeded()
        return entries[code]?.rate
    }

    /// Returns a fresh rate, fetching from Frankfurter when cache is stale or absent. Nil on
    /// failure. The returned rate is always finite, positive, and within the sanity bounds.
    func rate(for code: String) async -> Double? {
        if code == "USD" { return 1.0 }
        loadIfNeeded()

        if let entry = entries[code],
           Date().timeIntervalSince1970 - entry.savedAt < fxCacheTTLSeconds {
            return entry.rate
        }

        guard let url = URL(string: "\(frankfurterBaseURL)\(code)") else { return entries[code]?.rate }

        let config = URLSessionConfiguration.ephemeral
        config.timeoutIntervalForRequest = fxFetchTimeoutSeconds
        config.tlsMinimumSupportedProtocolVersion = .TLSv12
        let session = URLSession(configuration: config)
        // Fix: a URLSession created with init(configuration:) retains internal state until
        // it is explicitly invalidated; creating one per stale-cache fetch without
        // invalidation leaks a session object each time. Invalidate once we're done.
        defer { session.finishTasksAndInvalidate() }

        do {
            let (data, response) = try await session.data(from: url)
            guard let http = response as? HTTPURLResponse, http.statusCode == 200 else {
                return entries[code]?.rate
            }
            struct Response: Decodable { let rates: [String: Double] }
            let decoded = try JSONDecoder().decode(Response.self, from: data)
            guard let fresh = decoded.rates[code],
                  fresh.isFinite, fresh >= minValidFXRate, fresh <= maxValidFXRate else {
                NSLog("CodeBurn: discarding out-of-band FX rate for \(code)")
                return entries[code]?.rate
            }
            entries[code] = Entry(rate: fresh, savedAt: Date().timeIntervalSince1970)
            persist()
            return fresh
        } catch {
            // Network failure: fall back to whatever (stale) rate we still hold.
            return entries[code]?.rate
        }
    }
}
|
||||||
|
|
||||||
|
/// Reads and writes the CLI's persisted currency config (~/.config/codeburn/config.json).
/// Uses an on-disk flock so a concurrent `codeburn currency ...` invocation from a terminal
/// can't race the menubar and silently drop each other's writes (TOCTOU on config.json).
enum CLICurrencyConfig {
    private static var configDir: String {
        (NSHomeDirectory() as NSString).appendingPathComponent(".config/codeburn")
    }
    private static var configPath: String {
        (configDir as NSString).appendingPathComponent("config.json")
    }
    // Separate lock file so locking never touches config.json itself.
    private static var lockPath: String {
        (configDir as NSString).appendingPathComponent(".config.lock")
    }

    /// Returns the persisted ISO currency code (uppercased), or nil when the config is
    /// missing, unreadable, or has no currency section.
    static func loadCode() -> String? {
        guard
            let data = try? SafeFile.read(from: configPath),
            let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
            let currency = json["currency"] as? [String: Any],
            let code = currency["code"] as? String
        else {
            return nil
        }
        return code.uppercased()
    }

    /// Read-modify-write of config.json under an exclusive flock. USD (the default) is
    /// stored as the absence of a currency section. Failures are logged, not thrown.
    static func persist(code: String) {
        do {
            try SafeFile.withExclusiveLock(at: lockPath) {
                // Preserve any unrelated keys already present in the config.
                var existing: [String: Any] = [:]
                if let data = try? SafeFile.read(from: configPath),
                   let parsed = try? JSONSerialization.jsonObject(with: data) as? [String: Any] {
                    existing = parsed
                }

                if code == "USD" {
                    existing.removeValue(forKey: "currency")
                } else {
                    existing["currency"] = [
                        "code": code,
                        "symbol": CurrencyState.symbolForCode(code)
                    ]
                }

                guard let data = try? JSONSerialization.data(
                    withJSONObject: existing,
                    options: [.prettyPrinted, .sortedKeys]
                ) else {
                    return
                }
                // 0o600: config may reference user-specific settings; keep it owner-only.
                try SafeFile.write(data, to: configPath, mode: 0o600)
            }
        } catch {
            NSLog("CodeBurn: failed to persist currency config: \(error)")
        }
    }
}
|
||||||
127
mac/Sources/CodeBurnMenubar/Data/CapacityEstimator.swift
Normal file
127
mac/Sources/CodeBurnMenubar/Data/CapacityEstimator.swift
Normal file
|
|
@ -0,0 +1,127 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// One observation pairing Anthropic's reported utilization percentage with the effective
/// token total consumed at the moment of capture. Input to `CapacityEstimator`.
public struct CapacitySnapshot: Sendable, Equatable {
    public let percent: Double // 0..100, Anthropic-reported utilization
    public let effectiveTokens: Double // weighted sum of input/output/cache tokens consumed at capture
    public let capturedAt: Date

    public init(percent: Double, effectiveTokens: Double, capturedAt: Date) {
        self.percent = percent
        self.effectiveTokens = effectiveTokens
        self.capturedAt = capturedAt
    }
}
|
||||||
|
|
||||||
|
/// Qualitative confidence in a capacity estimate, derived from sample count and fit R².
public enum CapacityConfidence: String, Sendable {
    case low, medium, solid
}
|
||||||
|
|
||||||
|
/// Result of fitting snapshots to a linear percent→tokens model.
public struct CapacityEstimate: Sendable, Equatable {
    public let capacity: Double // tokens equivalent to 100%
    public let confidence: CapacityConfidence
    public let sampleSize: Int // post-decorrelation count
    // True when residuals suggest the percent/tokens relationship isn't linear.
    public let nonLinearityWarning: Bool

    public init(capacity: Double, confidence: CapacityConfidence, sampleSize: Int, nonLinearityWarning: Bool) {
        self.capacity = capacity
        self.confidence = confidence
        self.sampleSize = sampleSize
        self.nonLinearityWarning = nonLinearityWarning
    }
}
|
||||||
|
|
||||||
|
/// Estimates the token count equivalent to 100% utilization by fitting a recency-weighted
/// through-origin line to (percent, effectiveTokens) snapshots.
public enum CapacityEstimator {
    // Minimum snapshots (and minimum nonzero residuals) needed for any estimate.
    private static let minSampleSize = 5
    // Require at least this much spread in percent, or the fit is underdetermined.
    private static let minPercentRange = 15.0
    // Exponential recency weighting: a snapshot's weight halves every 30 days.
    private static let recencyHalfLifeSeconds: Double = 30 * 86400
    // R² and sample-count thresholds that gate the confidence tiers.
    private static let solidR2 = 0.97
    private static let mediumR2 = 0.85
    private static let solidSampleThreshold = 15
    private static let mediumSampleThreshold = 6
    // Fraction of same-sign residuals (longest run / total) that flags non-linearity.
    private static let nonLinearityRunLengthThreshold = 0.7

    /// Returns nil when there is too little data or too little percent spread to fit.
    /// `now` is injectable for deterministic tests of the recency weighting.
    public static func estimate(_ snapshots: [CapacitySnapshot], asOf now: Date = Date()) -> CapacityEstimate? {
        guard snapshots.count >= minSampleSize else { return nil }
        let percents = snapshots.map(\.percent)
        let range = (percents.max() ?? 0) - (percents.min() ?? 0)
        guard range >= minPercentRange else { return nil }

        // (percent, tokens, recency weight) triples; max(0, age) guards future-dated captures.
        let weighted = snapshots.map { snap -> (p: Double, t: Double, w: Double) in
            let ageSeconds = now.timeIntervalSince(snap.capturedAt)
            let weight = pow(0.5, max(0, ageSeconds) / recencyHalfLifeSeconds)
            return (snap.percent, snap.effectiveTokens, weight)
        }

        // Weighted least squares through origin: minimize sum(w * (t - p * cap/100)^2)
        // Solution: cap = 100 * sum(w * t * p) / sum(w * p * p)
        let numerator = weighted.reduce(0.0) { $0 + $1.w * $1.t * $1.p }
        let denominator = weighted.reduce(0.0) { $0 + $1.w * $1.p * $1.p }
        guard denominator > 0 else { return nil }
        let capacity = 100.0 * numerator / denominator
        guard capacity > 0 else { return nil }

        // Weighted R^2 against the through-origin fit.
        let weightedTokenSum = weighted.reduce(0.0) { $0 + $1.w * $1.t }
        let weightSum = weighted.reduce(0.0) { $0 + $1.w }
        // .ulpOfOne floor avoids division by zero when all weights underflow.
        let weightedMeanT = weightedTokenSum / max(weightSum, .ulpOfOne)
        let ssRes = weighted.reduce(0.0) { acc, s in
            let predicted = s.p * capacity / 100
            let diff = s.t - predicted
            return acc + s.w * diff * diff
        }
        let ssTot = weighted.reduce(0.0) { acc, s in
            let diff = s.t - weightedMeanT
            return acc + s.w * diff * diff
        }
        let r2 = ssTot > 0 ? max(0.0, 1.0 - ssRes / ssTot) : 0.0

        let n = snapshots.count
        let confidence: CapacityConfidence = {
            if n >= solidSampleThreshold && r2 >= solidR2 { return .solid }
            if n >= mediumSampleThreshold && r2 >= mediumR2 { return .medium }
            return .low
        }()

        let nonLinearityWarning = detectNonLinearity(snapshots: weighted, capacity: capacity)

        return CapacityEstimate(
            capacity: capacity,
            confidence: confidence,
            sampleSize: n,
            nonLinearityWarning: nonLinearityWarning
        )
    }

    /// Sign-test on residuals across the percent range. If residuals form a long monotonic run
    /// (e.g. all-negative in low percents then all-positive at high), the relationship isn't linear.
    private static func detectNonLinearity(
        snapshots: [(p: Double, t: Double, w: Double)],
        capacity: Double
    ) -> Bool {
        let sorted = snapshots.sorted { $0.p < $1.p }
        // Residual signs in ascending-percent order; near-zero residuals are dropped.
        let signs = sorted.map { s -> Int in
            let predicted = s.p * capacity / 100
            let diff = s.t - predicted
            if abs(diff) < .ulpOfOne { return 0 }
            return diff > 0 ? 1 : -1
        }.filter { $0 != 0 }
        guard signs.count >= minSampleSize else { return false }

        // Longest single-sign run length / total
        var longestRun = 0
        var currentRun = 0
        var currentSign = 0
        for s in signs {
            if s == currentSign {
                currentRun += 1
            } else {
                longestRun = max(longestRun, currentRun)
                currentSign = s
                currentRun = 1
            }
        }
        longestRun = max(longestRun, currentRun)
        let runFraction = Double(longestRun) / Double(signs.count)
        return runFraction >= nonLinearityRunLengthThreshold
    }
}
|
||||||
107
mac/Sources/CodeBurnMenubar/Data/DataClient.swift
Normal file
107
mac/Sources/CodeBurnMenubar/Data/DataClient.swift
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Upper bound on payload + stderr bytes read from the CLI. Real payloads top out near 500 KB
/// (365 days of history with dozens of models); anything larger is pathological and truncating
/// prevents unbounded memory growth. Hard timeout guards against a hung CLI keeping Process and
/// Pipe file descriptors pinned forever.
private let maxPayloadBytes = 20 * 1024 * 1024
private let maxStderrBytes = 256 * 1024
private let spawnTimeoutSeconds: UInt64 = 60
|
||||||
|
|
||||||
|
enum DataClientError: Error {
|
||||||
|
case spawn(String)
|
||||||
|
case nonZeroExit(code: Int32, stderr: String)
|
||||||
|
case decode(Error)
|
||||||
|
case timeout
|
||||||
|
case outputTooLarge
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Runs the CLI via argv (no shell interpretation). See `CodeburnCLI` for why we never route
/// commands through `/bin/zsh -c` anymore.
struct DataClient {
    /// Runs `codeburn status --format menubar-json` for the given slice and decodes the payload.
    /// Throws `DataClientError` on spawn failure, non-zero exit, oversized output, or bad JSON.
    static func fetch(period: Period, provider: ProviderFilter, includeOptimize: Bool) async throws -> MenubarPayload {
        var subcommand = [
            "status",
            "--format", "menubar-json",
            "--period", period.cliArg,
            "--provider", provider.cliArg,
        ]
        if !includeOptimize {
            subcommand.append("--no-optimize")
        }

        let result = try await runCLI(subcommand: subcommand)
        guard result.exitCode == 0 else {
            throw DataClientError.nonZeroExit(code: result.exitCode, stderr: result.stderr)
        }
        do {
            return try JSONDecoder().decode(MenubarPayload.self, from: result.stdout)
        } catch {
            throw DataClientError.decode(error)
        }
    }

    private struct ProcessResult {
        let stdout: Data
        let stderr: String
        let exitCode: Int32
    }

    /// Spawns the CLI, drains stdout/stderr concurrently with byte caps, and enforces a
    /// wall-clock timeout by terminating a hung child.
    /// NOTE(review): `.timeout` is declared on DataClientError but never thrown here — a
    /// timed-out child is terminated and surfaces as `nonZeroExit`; confirm that's intended.
    private static func runCLI(subcommand: [String]) async throws -> ProcessResult {
        let process = CodeburnCLI.makeProcess(subcommand: subcommand)

        let outPipe = Pipe()
        let errPipe = Pipe()
        process.standardOutput = outPipe
        process.standardError = errPipe

        do {
            try process.run()
        } catch {
            throw DataClientError.spawn(error.localizedDescription)
        }

        // Drain both pipes concurrently so a large stderr can't deadlock stdout (the child
        // blocks on write once the pipe buffer fills). `drain` also enforces a byte cap.
        async let stdoutData = drain(outPipe.fileHandleForReading, limit: maxPayloadBytes)
        async let stderrData = drain(errPipe.fileHandleForReading, limit: maxStderrBytes)

        // Wall-clock timeout: if the CLI hangs (parser stuck, disk stall), kill it.
        let timeoutTask = Task.detached(priority: .utility) {
            try? await Task.sleep(nanoseconds: spawnTimeoutSeconds * 1_000_000_000)
            if process.isRunning {
                process.terminate()
            }
        }
        defer { timeoutTask.cancel() }

        let (out, err) = await (stdoutData, stderrData)
        // Both pipes hit EOF by here, so waitUntilExit returns promptly.
        process.waitUntilExit()

        // drain caps at exactly `limit` bytes, so == limit means output was truncated.
        if out.count >= maxPayloadBytes {
            throw DataClientError.outputTooLarge
        }

        let stderrString = String(data: err, encoding: .utf8) ?? ""
        return ProcessResult(stdout: out, stderr: stderrString, exitCode: process.terminationStatus)
    }

    /// Pulls bytes off a pipe until EOF or `limit`. Intentionally uses `availableData`, which
    /// returns empty on EOF -- no blocking once the child exits.
    private static func drain(_ handle: FileHandle, limit: Int) async -> Data {
        await Task.detached(priority: .utility) {
            var buffer = Data()
            while buffer.count < limit {
                let chunk = handle.availableData
                if chunk.isEmpty { break }
                let remaining = limit - buffer.count
                if chunk.count > remaining {
                    buffer.append(chunk.prefix(remaining))
                    break
                }
                buffer.append(chunk)
            }
            return buffer
        }.value
    }
}
|
||||||
123
mac/Sources/CodeBurnMenubar/Data/MenubarPayload.swift
Normal file
123
mac/Sources/CodeBurnMenubar/Data/MenubarPayload.swift
Normal file
|
|
@ -0,0 +1,123 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Shape of `codeburn status --format menubar-json --period <period>`.
/// `current` is scoped to the requested period; the whole payload reflects that slice.
struct MenubarPayload: Codable, Sendable {
    // ISO-ish timestamp string emitted by the CLI at generation time.
    let generated: String
    let current: CurrentBlock
    let optimize: OptimizeBlock
    let history: HistoryBlock
}
|
||||||
|
|
||||||
|
/// Per-day history rows for the requested period.
struct HistoryBlock: Codable, Sendable {
    let daily: [DailyHistoryEntry]
}
|
||||||
|
|
||||||
|
/// One model's share of a single day's usage.
struct DailyModelBreakdown: Codable, Sendable {
    let name: String
    let cost: Double
    let calls: Int
    let inputTokens: Int
    let outputTokens: Int

    // Convenience sum; cache tokens are not included here.
    var totalTokens: Int { inputTokens + outputTokens }
}
|
||||||
|
|
||||||
|
/// One day of aggregated usage, with its top per-model breakdown.
struct DailyHistoryEntry: Codable, Sendable {
    let date: String
    let cost: Double
    let calls: Int
    let inputTokens: Int
    let outputTokens: Int
    let cacheReadTokens: Int
    let cacheWriteTokens: Int
    let topModels: [DailyModelBreakdown]

    /// Pricing-ratio prior: input + 5x output + cache_creation + 0.1x cache_read.
    /// Matches Anthropic's published per-token pricing on Sonnet/Opus closely enough to be a useful proxy.
    var effectiveTokens: Double {
        Double(inputTokens) + 5.0 * Double(outputTokens) + Double(cacheWriteTokens) + 0.1 * Double(cacheReadTokens)
    }
}
|
||||||
|
|
||||||
|
extension DailyHistoryEntry {
    /// Custom decoding so legacy payloads (which did not emit `topModels`) still decode:
    /// every other key is required, `topModels` defaults to [].
    enum CodingKeys: String, CodingKey {
        case date, cost, calls, inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens, topModels
    }
    init(from decoder: Decoder) throws {
        let c = try decoder.container(keyedBy: CodingKeys.self)
        date = try c.decode(String.self, forKey: .date)
        cost = try c.decode(Double.self, forKey: .cost)
        calls = try c.decode(Int.self, forKey: .calls)
        inputTokens = try c.decode(Int.self, forKey: .inputTokens)
        outputTokens = try c.decode(Int.self, forKey: .outputTokens)
        cacheReadTokens = try c.decode(Int.self, forKey: .cacheReadTokens)
        cacheWriteTokens = try c.decode(Int.self, forKey: .cacheWriteTokens)
        // decodeIfPresent keeps old payloads decodable.
        topModels = try c.decodeIfPresent([DailyModelBreakdown].self, forKey: .topModels) ?? []
    }
}
|
||||||
|
|
||||||
|
/// Aggregates for the requested period slice (label is the human-readable period name).
struct CurrentBlock: Codable, Sendable {
    let label: String
    let cost: Double
    let calls: Int
    let sessions: Int
    // Optional: older payloads may omit one-shot rate.
    let oneShotRate: Double?
    let inputTokens: Int
    let outputTokens: Int
    let cacheHitPercent: Double
    let topActivities: [ActivityEntry]
    let topModels: [ModelEntry]
    // Provider name → cost share for the period.
    let providers: [String: Double]
}
|
||||||
|
|
||||||
|
/// One activity row in the period's activity breakdown.
struct ActivityEntry: Codable, Sendable {
    let name: String
    let cost: Double
    let turns: Int
    let oneShotRate: Double?
}
|
||||||
|
|
||||||
|
/// One model row in the period's model breakdown.
struct ModelEntry: Codable, Sendable {
    let name: String
    let cost: Double
    let calls: Int
}
|
||||||
|
|
||||||
|
/// Summary of `codeburn optimize` findings included in the payload.
struct OptimizeBlock: Codable, Sendable {
    let findingCount: Int
    let savingsUSD: Double
    let topFindings: [FindingEntry]
}
|
||||||
|
|
||||||
|
/// One optimize finding with its estimated monthly savings in USD.
struct FindingEntry: Codable, Sendable {
    let title: String
    let impact: String
    let savingsUSD: Double
}
|
||||||
|
|
||||||
|
// MARK: - Empty fallback

extension MenubarPayload {
    /// Strictly-empty payload. Used as the fallback before real data arrives, so no
    /// plausible-looking fake numbers leak into the UI.
    static let empty = MenubarPayload(
        generated: "",
        current: CurrentBlock(
            label: "",
            cost: 0,
            calls: 0,
            sessions: 0,
            oneShotRate: nil,
            inputTokens: 0,
            outputTokens: 0,
            cacheHitPercent: 0,
            topActivities: [],
            topModels: [],
            providers: [:]
        ),
        optimize: OptimizeBlock(findingCount: 0, savingsUSD: 0, topFindings: []),
        history: HistoryBlock(daily: [])
    )
}
|
||||||
306
mac/Sources/CodeBurnMenubar/Data/SubscriptionClient.swift
Normal file
306
mac/Sources/CodeBurnMenubar/Data/SubscriptionClient.swift
Normal file
|
|
@ -0,0 +1,306 @@
|
||||||
|
import Foundation
|
||||||
|
import Security
|
||||||
|
|
||||||
|
// Location of Claude Code's on-disk credential file, relative to $HOME.
private let credentialsRelativePath = ".claude/.credentials.json"
// Keychain service name Claude Code uses for its generic-password item.
private let keychainService = "Claude Code-credentials"
// OAuth client_id sent on token refresh. NOTE(review): mirrors Claude Code's own client
// registration -- confirm it stays in sync across CLI releases.
private let oauthClientID = "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
// NOTE(review): refresh goes to platform.claude.com while usage is read from
// api.anthropic.com -- looks intentional, but verify both hosts against the CLI's config.
private let refreshURL = URL(string: "https://platform.claude.com/v1/oauth/token")!
private let usageURL = URL(string: "https://api.anthropic.com/api/oauth/usage")!
// Beta header and User-Agent the usage endpoint expects.
private let betaHeader = "oauth-2025-04-20"
private let userAgent = "claude-code/2.1.0"
// Per-request network timeout, seconds.
private let requestTimeout: TimeInterval = 30

/// Claude Code writes Keychain items with `kSecAttrAccount = "default"`. Filtering on this
/// prevents a planted Keychain item from another app (or a stale install with a mangled
/// account) from being accepted as our source of OAuth credentials.
private let expectedKeychainAccounts: Set<String> = ["default"]
// Hard cap on credential blob size; anything larger is treated as corrupt or hostile.
private let maxCredentialBytes = 64 * 1024
|
||||||
|
|
||||||
|
/// Failure modes surfaced by SubscriptionClient. Conforms to LocalizedError so callers can
/// show a human-readable reason straight from `localizedDescription`.
enum SubscriptionError: Error, LocalizedError {
    case noCredentials
    case credentialsInvalid
    case refreshFailed(Int, String?)
    case usageFetchFailed(Int, String?)
    case decodeFailed(Error)

    /// Renders an optional response body as a ": <body>" suffix; absent bodies add nothing.
    private func bodySuffix(_ body: String?) -> String {
        guard let body else { return "" }
        return ": \(body)"
    }

    var errorDescription: String? {
        switch self {
        case .noCredentials:
            return "No Claude OAuth credentials found"
        case .credentialsInvalid:
            return "Claude OAuth credentials malformed"
        case .refreshFailed(let code, let body):
            return "Token refresh failed (\(code))\(bodySuffix(body))"
        case .usageFetchFailed(let code, let body):
            return "Usage fetch failed (\(code))\(bodySuffix(body))"
        case .decodeFailed(let underlying):
            return "Decode failed: \(underlying.localizedDescription)"
        }
    }
}
|
||||||
|
|
||||||
|
/// Fetches Anthropic subscription usage on behalf of the menubar UI. Stateless; every
/// member is static. Credential discovery order: on-disk file first, then Keychain.
struct SubscriptionClient {
    /// End-to-end flow: load credentials, hit the usage endpoint, and map the response.
    /// Refreshes the access token only in direct response to a 401 -- any other failure
    /// propagates to the caller untouched.
    static func fetch() async throws -> SubscriptionUsage {
        let creds = try loadCredentials()

        // Try the usage call with the existing token first. Only refresh on 401.
        do {
            let response = try await fetchUsage(token: creds.accessToken)
            return mapResponse(response, rawTier: creds.rateLimitTier)
        } catch SubscriptionError.usageFetchFailed(401, _) {
            guard let refreshToken = creds.refreshToken, !refreshToken.isEmpty else {
                throw SubscriptionError.usageFetchFailed(401, "no refresh token available")
            }
            let newToken = try await refreshAccessToken(refreshToken: refreshToken)
            let response = try await fetchUsage(token: newToken)
            return mapResponse(response, rawTier: creds.rateLimitTier)
        }
    }

    // MARK: - Credentials

    /// File credentials win over Keychain credentials when both exist.
    private static func loadCredentials() throws -> StoredCredentials {
        if let data = try readFileCredentials() {
            // File data goes through the same newline-stripping sanitizer as Keychain data,
            // in case the file was produced by the same line-wrapping writer.
            return try parseCredentials(data: sanitizeKeychainData(data))
        }
        if let creds = try readKeychainCredentials() {
            return creds
        }
        throw SubscriptionError.noCredentials
    }

    /// Returns nil (not an error) when the credentials file simply doesn't exist.
    private static func readFileCredentials() throws -> Data? {
        let url = FileManager.default.homeDirectoryForCurrentUser.appendingPathComponent(credentialsRelativePath)
        guard FileManager.default.fileExists(atPath: url.path) else { return nil }
        // SafeFile refuses to follow symlinks and caps the read, so a 6 GB /dev/urandom
        // masquerading as the creds file can't blow up the app.
        return try SafeFile.read(from: url.path, maxBytes: maxCredentialBytes)
    }

    /// Two-phase keychain enumeration: (1) list persistent refs + accounts, (2) fetch each
    /// item's data by ref. The combination kSecMatchLimitAll + kSecReturnData errors with -50,
    /// so the data fetch has to be per-item.
    /// Returns the parseable item with the latest expiry; nil when nothing usable exists.
    private static func readKeychainCredentials() throws -> StoredCredentials? {
        let listQuery: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrService as String: keychainService,
            kSecMatchLimit as String: kSecMatchLimitAll,
            kSecReturnAttributes as String: true,
            kSecReturnPersistentRef as String: true,
        ]
        var listResult: CFTypeRef?
        let listStatus = SecItemCopyMatching(listQuery as CFDictionary, &listResult)
        if listStatus == errSecItemNotFound {
            NSLog("CodeBurn: keychain query found no items for service \(keychainService)")
            return nil
        }
        guard listStatus == errSecSuccess, let rows = listResult as? [[String: Any]] else {
            NSLog("CodeBurn: keychain enumerate failed status=\(listStatus)")
            return nil
        }

        var best: StoredCredentials? = nil
        for row in rows {
            guard let ref = row[kSecValuePersistentRef as String] as? Data else { continue }
            let account = (row[kSecAttrAccount as String] as? String) ?? ""
            // Ignore rows whose account doesn't match Claude Code's known writer. Stops another
            // app's item (or a legacy install with an unexpected account) from being accepted.
            guard expectedKeychainAccounts.contains(account) else { continue }
            let dataQuery: [String: Any] = [
                kSecClass as String: kSecClassGenericPassword,
                kSecValuePersistentRef as String: ref,
                kSecMatchLimit as String: kSecMatchLimitOne,
                kSecReturnData as String: true,
            ]
            var dataResult: CFTypeRef?
            let dataStatus = SecItemCopyMatching(dataQuery as CFDictionary, &dataResult)
            guard dataStatus == errSecSuccess, let data = dataResult as? Data else { continue }
            let sanitized = sanitizeKeychainData(data)
            guard let parsed = try? parseCredentials(data: sanitized) else { continue }
            // Keep whichever parseable item expires latest; items with no expiry sort first
            // (.distantPast) and therefore lose to any dated item.
            if let current = best {
                if (parsed.expiresAt ?? .distantPast) > (current.expiresAt ?? .distantPast) {
                    best = parsed
                }
            } else {
                best = parsed
            }
        }
        return best
    }

    /// Claude Code's keychain writer line-wraps long string values (newline + leading spaces)
    /// mid-token, producing JSON with literal control chars and stray spaces inside string
    /// values. Replace every newline (CR/LF) plus the run of spaces/tabs that follows it.
    /// Drops both the wrapping in tokens AND pretty-print indentation between fields (both
    /// produce valid, compact JSON afterward).
    private static func sanitizeKeychainData(_ data: Data) -> Data {
        // Non-UTF-8 input is returned unchanged; parseCredentials will reject it downstream.
        guard var s = String(data: data, encoding: .utf8) else { return data }
        s = s.replacingOccurrences(of: "\r", with: "")
        let regex = try? NSRegularExpression(pattern: "\\n[ \\t]*", options: [])
        if let regex {
            let range = NSRange(s.startIndex..<s.endIndex, in: s)
            s = regex.stringByReplacingMatches(in: s, options: [], range: range, withTemplate: "")
        }
        s = s.trimmingCharacters(in: .whitespacesAndNewlines)
        return s.data(using: .utf8) ?? data
    }

    /// Decodes the credential JSON blob. Never logs the blob contents or any slice of it --
    /// even a partial access token reaching Console.app is a leak, and the byte-window
    /// diagnostic that used to live here could overlap the `accessToken` field bytes.
    /// Throws .credentialsInvalid for structurally-valid JSON missing a usable token, and
    /// .decodeFailed for anything the decoder itself rejects.
    private static func parseCredentials(data: Data) throws -> StoredCredentials {
        do {
            let root = try JSONDecoder().decode(CredentialsRoot.self, from: data)
            guard let oauth = root.claudeAiOauth else { throw SubscriptionError.credentialsInvalid }
            let token = oauth.accessToken?.trimmingCharacters(in: .whitespacesAndNewlines) ?? ""
            guard !token.isEmpty else { throw SubscriptionError.credentialsInvalid }
            // Blob stores expiry as epoch milliseconds; Date wants seconds.
            let expiresAt = oauth.expiresAt.map { Date(timeIntervalSince1970: $0 / 1000.0) }
            return StoredCredentials(
                accessToken: token,
                refreshToken: oauth.refreshToken,
                expiresAt: expiresAt,
                rateLimitTier: oauth.rateLimitTier
            )
        } catch let err as SubscriptionError {
            throw err
        } catch {
            throw SubscriptionError.decodeFailed(error)
        }
    }

    // MARK: - Refresh

    /// Standard OAuth refresh_token grant, form-encoded. Returns the new access token only;
    /// the rotated refresh token (if any) is NOT persisted back -- TODO confirm that is
    /// acceptable for Claude Code's token rotation policy.
    private static func refreshAccessToken(refreshToken: String) async throws -> String {
        var request = URLRequest(url: refreshURL)
        request.httpMethod = "POST"
        request.timeoutInterval = requestTimeout
        request.setValue("application/x-www-form-urlencoded", forHTTPHeaderField: "Content-Type")
        request.setValue("application/json", forHTTPHeaderField: "Accept")
        // URLComponents handles percent-encoding of the token values for the form body.
        var components = URLComponents()
        components.queryItems = [
            URLQueryItem(name: "grant_type", value: "refresh_token"),
            URLQueryItem(name: "refresh_token", value: refreshToken),
            URLQueryItem(name: "client_id", value: oauthClientID),
        ]
        request.httpBody = (components.percentEncodedQuery ?? "").data(using: .utf8)

        let (data, response) = try await URLSession.shared.data(for: request)
        guard let http = response as? HTTPURLResponse else {
            throw SubscriptionError.refreshFailed(-1, nil)
        }
        guard http.statusCode == 200 else {
            let body = String(data: data, encoding: .utf8)
            throw SubscriptionError.refreshFailed(http.statusCode, body)
        }
        do {
            let decoded = try JSONDecoder().decode(TokenRefreshResponse.self, from: data)
            return decoded.accessToken
        } catch {
            throw SubscriptionError.decodeFailed(error)
        }
    }

    // MARK: - Usage fetch

    /// GET to the OAuth usage endpoint with bearer auth plus the beta header and
    /// Claude-Code User-Agent the endpoint requires.
    private static func fetchUsage(token: String) async throws -> UsageResponse {
        var request = URLRequest(url: usageURL)
        request.httpMethod = "GET"
        request.timeoutInterval = requestTimeout
        request.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        request.setValue("application/json", forHTTPHeaderField: "Accept")
        request.setValue(betaHeader, forHTTPHeaderField: "anthropic-beta")
        request.setValue(userAgent, forHTTPHeaderField: "User-Agent")

        let (data, response) = try await URLSession.shared.data(for: request)
        guard let http = response as? HTTPURLResponse else {
            throw SubscriptionError.usageFetchFailed(-1, nil)
        }
        guard http.statusCode == 200 else {
            let body = String(data: data, encoding: .utf8)
            throw SubscriptionError.usageFetchFailed(http.statusCode, body)
        }
        do {
            return try JSONDecoder().decode(UsageResponse.self, from: data)
        } catch {
            throw SubscriptionError.decodeFailed(error)
        }
    }

    // MARK: - Mapping

    /// Flattens the wire response into the app-facing SubscriptionUsage value, stamping
    /// fetchedAt with the current time.
    private static func mapResponse(_ r: UsageResponse, rawTier: String?) -> SubscriptionUsage {
        SubscriptionUsage(
            tier: SubscriptionUsage.tier(from: rawTier),
            rawTier: rawTier,
            fiveHourPercent: r.fiveHour?.utilization,
            fiveHourResetsAt: parseDate(r.fiveHour?.resetsAt),
            sevenDayPercent: r.sevenDay?.utilization,
            sevenDayResetsAt: parseDate(r.sevenDay?.resetsAt),
            sevenDayOpusPercent: r.sevenDayOpus?.utilization,
            sevenDayOpusResetsAt: parseDate(r.sevenDayOpus?.resetsAt),
            sevenDaySonnetPercent: r.sevenDaySonnet?.utilization,
            sevenDaySonnetResetsAt: parseDate(r.sevenDaySonnet?.resetsAt),
            fetchedAt: Date()
        )
    }

    /// ISO-8601 parse that tolerates both fractional-second and whole-second timestamps.
    private static func parseDate(_ s: String?) -> Date? {
        guard let s, !s.isEmpty else { return nil }
        let f = ISO8601DateFormatter()
        f.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
        if let d = f.date(from: s) { return d }
        f.formatOptions = [.withInternetDateTime]
        return f.date(from: s)
    }
}
|
||||||
|
|
||||||
|
// MARK: - Internal models

/// Parsed, validated credential material extracted from the file or Keychain blob.
private struct StoredCredentials {
    let accessToken: String
    let refreshToken: String?
    let expiresAt: Date?        // derived from the blob's epoch-milliseconds expiry
    let rateLimitTier: String?  // free-form tier string; mapped via SubscriptionUsage.tier(from:)
}

/// Top-level shape of Claude Code's credential JSON.
private struct CredentialsRoot: Decodable {
    let claudeAiOauth: OAuthBlock?
}

/// OAuth sub-object. Every field is optional so partially-written blobs still decode and
/// get rejected by parseCredentials' explicit checks rather than by the decoder.
private struct OAuthBlock: Decodable {
    let accessToken: String?
    let refreshToken: String?
    let expiresAt: Double?      // epoch milliseconds
    let rateLimitTier: String?
}

/// Body of the OAuth token refresh response (snake_case on the wire).
private struct TokenRefreshResponse: Decodable {
    let accessToken: String
    let refreshToken: String?
    let expiresIn: Int?

    enum CodingKeys: String, CodingKey {
        case accessToken = "access_token"
        case refreshToken = "refresh_token"
        case expiresIn = "expires_in"
    }
}

/// Usage endpoint response: one window per rate-limit bucket (snake_case on the wire).
private struct UsageResponse: Decodable {
    let fiveHour: Window?
    let sevenDay: Window?
    let sevenDayOpus: Window?
    let sevenDaySonnet: Window?

    enum CodingKeys: String, CodingKey {
        case fiveHour = "five_hour"
        case sevenDay = "seven_day"
        case sevenDayOpus = "seven_day_opus"
        case sevenDaySonnet = "seven_day_sonnet"
    }
}

/// A single rate-limit window: utilization plus its ISO-8601 reset timestamp.
private struct Window: Decodable {
    let utilization: Double?    // percent used; semantics mirror SubscriptionUsage's *Percent fields
    let resetsAt: String?       // parsed later by SubscriptionClient.parseDate

    enum CodingKeys: String, CodingKey {
        case utilization
        case resetsAt = "resets_at"
    }
}
|
||||||
102
mac/Sources/CodeBurnMenubar/Data/SubscriptionSnapshotStore.swift
Normal file
102
mac/Sources/CodeBurnMenubar/Data/SubscriptionSnapshotStore.swift
Normal file
|
|
@ -0,0 +1,102 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Persisted snapshot of a single utilization reading. We capture one per window every time
/// SubscriptionClient.fetch() succeeds so we can answer "what did the prior 7-day cycle finish at?"
/// when the current window has no usable data yet (just reset).
/// Dates round-trip through SubscriptionSnapshotStore's .iso8601 coding strategy -- keep any
/// new Date fields compatible with that.
struct SubscriptionSnapshot: Codable, Sendable {
    let windowKey: String // "five_hour", "seven_day", "seven_day_opus", "seven_day_sonnet"
    let percent: Double // 0..100
    let resetsAt: Date // resets_at active at capture (identifies which window cycle this belongs to)
    let capturedAt: Date // when the snapshot was recorded
    let effectiveTokens: Double? // tokens consumed in window at capture (nil if not computed)
}
|
||||||
|
|
||||||
|
// Filename of the persisted snapshot list inside the cache directory.
private let snapshotFilename = "subscription-snapshots.json"
// Snapshots older than this are pruned on every write (30 days).
private let pruneOlderThanSeconds: TimeInterval = 30 * 24 * 3600

/// Cache directory for snapshot persistence. Honours the CODEBURN_CACHE_DIR override
/// (test seam / sandbox relocation) and defaults to ~/.cache/codeburn otherwise.
private func snapshotsCacheDir() -> String {
    if let overridden = ProcessInfo.processInfo.environment["CODEBURN_CACHE_DIR"] {
        return overridden
    }
    return (NSHomeDirectory() as NSString).appendingPathComponent(".cache/codeburn")
}

/// Absolute path of the snapshot JSON file.
private func snapshotsPath() -> String {
    let dir = snapshotsCacheDir() as NSString
    return dir.appendingPathComponent(snapshotFilename)
}

/// Serializes all snapshot-file access through one actor so concurrent refreshes in this
/// process can't interleave read-modify-write cycles.
private actor SnapshotLock {
    static let shared = SnapshotLock()
    func run<T>(_ fn: () throws -> T) rethrows -> T { try fn() }
}
|
||||||
|
|
||||||
|
/// Disk-backed store for SubscriptionSnapshot readings. All public entry points funnel
/// through SnapshotLock, so within this process reads and writes never interleave;
/// cross-process safety comes from SafeFile's atomic tmp+rename write.
enum SubscriptionSnapshotStore {
    /// Append a snapshot. Auto-prunes entries older than 30 days. Idempotent: if a snapshot
    /// with the same windowKey + resetsAt already exists, only update percent if new is higher
    /// (so "final" reading near reset is preserved).
    /// Best-effort: persistence failures are logged, never thrown to the caller.
    static func record(_ snapshot: SubscriptionSnapshot) async {
        await SnapshotLock.shared.run {
            do {
                var all = loadAll()
                let key = "\(snapshot.windowKey)|\(snapshot.resetsAt.timeIntervalSince1970)"
                if let idx = all.firstIndex(where: { "\($0.windowKey)|\($0.resetsAt.timeIntervalSince1970)" == key }) {
                    if snapshot.percent > all[idx].percent {
                        all[idx] = snapshot
                    }
                } else {
                    all.append(snapshot)
                }
                let cutoff = Date().addingTimeInterval(-pruneOlderThanSeconds)
                all = all.filter { $0.capturedAt >= cutoff }
                try save(all)
            } catch {
                NSLog("CodeBurn: snapshot record failed: \(error)")
            }
        }
    }

    /// Returns the final percent of the immediately-prior cycle for this window, or nil if no
    /// prior data is available. Logic: among snapshots whose resetsAt < currentResetsAt, pick
    /// the group with the largest resetsAt (most recent prior cycle), then return the max
    /// percent in that group (the closest-to-final reading we have).
    static func previousWindowFinal(windowKey: String, currentResetsAt: Date) async -> Double? {
        await SnapshotLock.shared.run {
            let all = loadAll()
            let priors = all.filter { $0.windowKey == windowKey && $0.resetsAt < currentResetsAt }
            guard let mostRecentPriorReset = priors.map({ $0.resetsAt }).max() else { return nil }
            let priorWindow = priors.filter { $0.resetsAt == mostRecentPriorReset }
            return priorWindow.map(\.percent).max()
        }
    }

    /// Return all snapshots for a given window key, useful for capacity estimation.
    static func snapshots(for windowKey: String) async -> [SubscriptionSnapshot] {
        await SnapshotLock.shared.run {
            loadAll().filter { $0.windowKey == windowKey }
        }
    }

    /// Test seam: clear all snapshots.
    static func resetForTesting() async {
        await SnapshotLock.shared.run {
            try? FileManager.default.removeItem(atPath: snapshotsPath())
        }
    }

    // MARK: - Internals

    /// Loads the full snapshot list. Any read or decode failure yields [] -- a corrupt cache
    /// self-heals on the next save rather than wedging the store.
    private static func loadAll() -> [SubscriptionSnapshot] {
        let path = snapshotsPath()
        guard FileManager.default.fileExists(atPath: path) else { return [] }
        guard let data = try? SafeFile.read(from: path) else { return [] }
        let decoder = JSONDecoder()
        decoder.dateDecodingStrategy = .iso8601
        return (try? decoder.decode([SubscriptionSnapshot].self, from: data)) ?? []
    }

    /// Persists the full list, replacing the previous file contents.
    private static func save(_ snapshots: [SubscriptionSnapshot]) throws {
        let encoder = JSONEncoder()
        encoder.dateEncodingStrategy = .iso8601
        let data = try encoder.encode(snapshots)
        // SafeFile.write refuses symlinked targets and does the tmp+rename atomic dance.
        try SafeFile.write(data, to: snapshotsPath(), mode: 0o600)
    }
}
|
||||||
46
mac/Sources/CodeBurnMenubar/Data/SubscriptionUsage.swift
Normal file
46
mac/Sources/CodeBurnMenubar/Data/SubscriptionUsage.swift
Normal file
|
|
@ -0,0 +1,46 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Snapshot of Anthropic subscription rate-limit usage across all rolling windows, plus the
/// subscription tier parsed from the stored credential blob. Percent fields are nil when the
/// corresponding window was absent from the API response.
struct SubscriptionUsage: Sendable, Equatable {
    enum Tier: String, Sendable, Equatable {
        case pro
        case max5x
        case max20x
        case team
        case enterprise
        case unknown

        /// Human-facing label; `.unknown` renders as the generic "Subscription".
        var displayName: String {
            switch self {
            case .pro: return "Pro"
            case .max5x: return "Max 5x"
            case .max20x: return "Max 20x"
            case .team: return "Team"
            case .enterprise: return "Enterprise"
            case .unknown: return "Subscription"
            }
        }
    }

    let tier: Tier
    let rawTier: String?
    let fiveHourPercent: Double?
    let fiveHourResetsAt: Date?
    let sevenDayPercent: Double?
    let sevenDayResetsAt: Date?
    let sevenDayOpusPercent: Double?
    let sevenDayOpusResetsAt: Date?
    let sevenDaySonnetPercent: Double?
    let sevenDaySonnetResetsAt: Date?
    let fetchedAt: Date

    /// Maps a free-form tier string onto a known Tier via case-insensitive substring checks.
    /// Entries run most-specific first so "max_20x" is matched before the bare "max"
    /// fallback (which deliberately maps to the conservative 5x tier).
    static func tier(from raw: String?) -> Tier {
        guard let needle = raw?.lowercased() else { return .unknown }
        let table: [(markers: [String], tier: Tier)] = [
            (["max_20x", "max20x", "max-20x"], .max20x),
            (["max_5x", "max5x", "max-5x"], .max5x),
            (["max"], .max5x),
            (["pro"], .pro),
            (["team"], .team),
            (["enterprise"], .enterprise),
        ]
        for entry in table where entry.markers.contains(where: needle.contains) {
            return entry.tier
        }
        return .unknown
    }
}
|
||||||
59
mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift
Normal file
59
mac/Sources/CodeBurnMenubar/Security/CodeburnCLI.swift
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Single entry point for spawning the `codeburn` CLI. All callers route through here so the
/// binary argv is validated once and no code path ever passes user-influenced strings through
/// a shell (`/bin/zsh -c`, `open --args`, AppleScript). This closes the shell-injection attack
/// surface end-to-end.
enum CodeburnCLI {
    /// Matches a plain file path / program name: alphanumerics, dot, underscore, slash, hyphen,
    /// space. Deliberately excludes shell metacharacters (`$`, `;`, `&`, `|`, quotes, backticks,
    /// newlines) so a malicious `CODEBURN_BIN="codeburn; rm -rf ~"` can't slip through.
    /// Anchored with \A/\z instead of ^/$: ICU's `$` also matches just BEFORE a trailing line
    /// terminator, so "codeburn\n" would have satisfied a ^...$ pattern and smuggled a literal
    /// newline into argv (and from there into TerminalLauncher's AppleScript interpolation).
    private static let safeArgPattern = try! NSRegularExpression(pattern: "\\A[A-Za-z0-9 ._/\\-]+\\z")

    /// PATH additions for GUI-launched apps, which otherwise get a minimal PATH that misses
    /// Homebrew and npm global installs.
    private static let additionalPathEntries = ["/opt/homebrew/bin", "/usr/local/bin"]

    /// Returns the argv that launches the CLI. Dev override via `CODEBURN_BIN` is honoured only
    /// if every whitespace-delimited token passes `safeArgPattern`. Otherwise falls back to the
    /// plain `codeburn` name (resolved via PATH).
    static func baseArgv() -> [String] {
        guard let raw = ProcessInfo.processInfo.environment["CODEBURN_BIN"], !raw.isEmpty else {
            return ["codeburn"]
        }
        // Split only on spaces; any other whitespace (tab, newline) stays inside a token and
        // is then rejected by isSafe below.
        let parts = raw.split(separator: " ", omittingEmptySubsequences: true).map(String.init)
        guard parts.allSatisfy(isSafe) else {
            NSLog("CodeBurn: refusing unsafe CODEBURN_BIN; using default 'codeburn'")
            return ["codeburn"]
        }
        return parts
    }

    /// Builds a `Process` that runs the CLI with the given subcommand args. Uses `/usr/bin/env`
    /// so PATH lookup happens without involving a shell, and augments PATH with Homebrew
    /// defaults. Caller sets stdout/stderr pipes and calls `run()`.
    static func makeProcess(subcommand: [String]) -> Process {
        let process = Process()
        process.executableURL = URL(fileURLWithPath: "/usr/bin/env")
        var environment = ProcessInfo.processInfo.environment
        environment["PATH"] = augmentedPath(environment["PATH"] ?? "")
        process.environment = environment
        // `env --` treats everything following as argv, not VAR=val pairs -- guards against an
        // argument accidentally resembling an env assignment.
        process.arguments = ["--"] + baseArgv() + subcommand
        return process
    }

    /// True when `s` consists solely of safe-set characters. The match must cover the entire
    /// string -- merely finding a match somewhere inside `s` is not enough -- so neither a
    /// partial match nor the `$`-before-final-newline quirk can admit an unsafe token.
    static func isSafe(_ s: String) -> Bool {
        let range = NSRange(s.startIndex..<s.endIndex, in: s)
        guard let match = safeArgPattern.firstMatch(in: s, range: range) else { return false }
        return match.range == range
    }

    /// Appends the extra PATH entries unless already present, preserving existing order.
    private static func augmentedPath(_ existing: String) -> String {
        var parts = existing.split(separator: ":", omittingEmptySubsequences: true).map(String.init)
        for extra in additionalPathEntries where !parts.contains(extra) {
            parts.append(extra)
        }
        return parts.joined(separator: ":")
    }
}
|
||||||
128
mac/Sources/CodeBurnMenubar/Security/SafeFile.swift
Normal file
128
mac/Sources/CodeBurnMenubar/Security/SafeFile.swift
Normal file
|
|
@ -0,0 +1,128 @@
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Symlink-safe file I/O with atomic writes and optional cross-process flock.
///
/// Every cache file we touch (`~/Library/Caches/codeburn-mac/fx-rates.json`,
/// `~/.cache/codeburn/subscription-snapshots.json`, `~/.config/codeburn/config.json`) is a
/// legitimate target for a local-symlink attack: if an attacker plants a symlink from one of
/// those paths to, say, `~/.ssh/config`, a naive `Data.write(to:)` blindly follows the link and
/// clobbers the real file. `O_NOFOLLOW` on the write() refuses the operation instead.
enum SafeFile {
    enum Error: Swift.Error {
        case symlinkDetected(String)
        case openFailed(String, Int32)       // (path, errno)
        case writeFailed(String, Int32)      // (tmp path, errno from write(2))
        case renameFailed(String, Int32)     // (final path, errno)
        case readFailed(String, Int32)       // (path, errno)
        case sizeLimitExceeded(String, Int)  // (path, actual byte size)
    }

    /// Default max bytes when reading untrusted cache files. Prevents a malicious cache file
    /// from exhausting memory in the Swift process.
    static let defaultReadLimit = 8 * 1024 * 1024

    /// Refuses to follow symlinks and writes atomically via a tmp file + rename. `mode` is the
    /// final file permission (0o600 by default so cache files stay user-private).
    static func write(_ data: Data, to path: String, mode: mode_t = 0o600) throws {
        let parent = (path as NSString).deletingLastPathComponent
        try FileManager.default.createDirectory(
            atPath: parent,
            withIntermediateDirectories: true,
            attributes: [.posixPermissions: NSNumber(value: 0o700)]
        )

        // Reject if the existing file is a symlink. We use lstat so the link itself is
        // inspected, not its target.
        var linkInfo = stat()
        if lstat(path, &linkInfo) == 0, (linkInfo.st_mode & S_IFMT) == S_IFLNK {
            throw Error.symlinkDetected(path)
        }

        // Random tmp name in the same directory keeps rename(2) on one filesystem (atomic);
        // O_EXCL refuses an attacker-precreated tmp file, O_NOFOLLOW refuses a planted link.
        let tmpPath = parent + "/.codeburn-" + UUID().uuidString + ".tmp"
        let flags: Int32 = O_CREAT | O_WRONLY | O_EXCL | O_NOFOLLOW
        let fd = Darwin.open(tmpPath, flags, mode)
        guard fd >= 0 else {
            throw Error.openFailed(tmpPath, errno)
        }

        let writeResult: Int = data.withUnsafeBytes { buffer -> Int in
            guard let base = buffer.baseAddress else { return 0 }
            return Darwin.write(fd, base, buffer.count)
        }
        // Capture errno immediately after write(2), before fsync/close can clobber it.
        let writeErrno = errno
        // fsync result deliberately ignored: durability is best-effort for cache files.
        fsync(fd)
        Darwin.close(fd)

        // A short write is treated as failure and the tmp file is discarded.
        guard writeResult == data.count else {
            unlink(tmpPath)
            throw Error.writeFailed(tmpPath, writeErrno)
        }

        if rename(tmpPath, path) != 0 {
            let renameErrno = errno
            unlink(tmpPath)
            throw Error.renameFailed(path, renameErrno)
        }
    }

    /// Refuses to read through a symlink. `maxBytes` bounds the read so a tampered cache file
    /// can't balloon the process.
    static func read(from path: String, maxBytes: Int = defaultReadLimit) throws -> Data {
        // lstat gives a symlink-aware existence + size check up front ...
        var linkInfo = stat()
        guard lstat(path, &linkInfo) == 0 else {
            throw Error.readFailed(path, errno)
        }
        if (linkInfo.st_mode & S_IFMT) == S_IFLNK {
            throw Error.symlinkDetected(path)
        }

        // ... and O_NOFOLLOW closes the lstat->open race: a symlink swapped in between the
        // two calls still fails at open(2).
        let fd = Darwin.open(path, O_RDONLY | O_NOFOLLOW)
        guard fd >= 0 else {
            throw Error.readFailed(path, errno)
        }
        defer { Darwin.close(fd) }

        // Size limit enforced before any buffer allocation.
        let size = Int(linkInfo.st_size)
        if size > maxBytes {
            throw Error.sizeLimitExceeded(path, size)
        }

        var data = Data(count: size)
        let readBytes: Int = data.withUnsafeMutableBytes { buffer -> Int in
            guard let base = buffer.baseAddress else { return 0 }
            return Darwin.read(fd, base, buffer.count)
        }
        guard readBytes >= 0 else {
            throw Error.readFailed(path, errno)
        }
        // A single read(2) may legally return fewer bytes than requested; truncate to what
        // actually arrived rather than returning zero-padded tail bytes.
        if readBytes < size {
            data = data.prefix(readBytes)
        }
        return data
    }

    /// Runs `body` while holding an exclusive POSIX advisory lock on `path`. The lock file is
    /// created if missing (with 0o600 permissions) and released on scope exit, so other
    /// codeburn processes (the CLI running in a terminal, say) block on the same file instead
    /// of racing on a shared config.
    static func withExclusiveLock<T>(at path: String, body: () throws -> T) throws -> T {
        let parent = (path as NSString).deletingLastPathComponent
        try FileManager.default.createDirectory(
            atPath: parent,
            withIntermediateDirectories: true,
            attributes: [.posixPermissions: NSNumber(value: 0o700)]
        )
        let fd = Darwin.open(path, O_CREAT | O_RDWR | O_NOFOLLOW, 0o600)
        guard fd >= 0 else {
            throw Error.openFailed(path, errno)
        }
        defer { Darwin.close(fd) }

        // LOCK_EX blocks until granted. Lock failures reuse .openFailed for simplicity.
        guard flock(fd, LOCK_EX) == 0 else {
            throw Error.openFailed(path, errno)
        }
        defer { _ = flock(fd, LOCK_UN) }

        return try body()
    }
}
|
||||||
40
mac/Sources/CodeBurnMenubar/Security/TerminalLauncher.swift
Normal file
40
mac/Sources/CodeBurnMenubar/Security/TerminalLauncher.swift
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
import AppKit
|
||||||
|
import Foundation
|
||||||
|
|
||||||
|
/// Opens a codeburn subcommand in the user's Terminal. The argv is validated through
/// `CodeburnCLI.isSafe` before it's interpolated into AppleScript so there's no path for a
/// rogue environment variable to smuggle shell metacharacters into the `do script` call.
/// Falls back to a detached headless spawn on machines without Terminal.app (iTerm/Ghostty/Warp
/// users) so the subcommand still runs.
enum TerminalLauncher {
    // Known install locations of the stock Terminal.app (modern macOS ships it under
    // /System/Applications).
    private static let terminalPaths = [
        "/System/Applications/Utilities/Terminal.app",
        "/Applications/Utilities/Terminal.app",
    ]

    /// Launches `codeburn <subcommand>` in Terminal.app, or headless when Terminal is absent.
    /// Fire-and-forget: launch failures are silently ignored (`try?`).
    static func open(subcommand: [String]) {
        let argv = CodeburnCLI.baseArgv() + subcommand
        guard argv.allSatisfy(CodeburnCLI.isSafe) else {
            NSLog("CodeBurn: refusing to open terminal with unsafe argv")
            return
        }
        // NOTE(review): joining with spaces loses token boundaries -- a validated token that
        // itself contains a space would be re-split by the shell Terminal runs. Harmless for
        // the current safe character set, but revisit if the pattern ever admits quoting.
        let command = argv.joined(separator: " ")

        if terminalPaths.contains(where: FileManager.default.fileExists(atPath:)) {
            let script = """
            tell application "Terminal"
                activate
                do script "\(command)"
            end tell
            """
            let process = Process()
            process.executableURL = URL(fileURLWithPath: "/usr/bin/osascript")
            process.arguments = ["-e", script]
            try? process.run()
            return
        }

        // No Terminal.app found: run the CLI detached with no attached TTY instead.
        let headless = CodeburnCLI.makeProcess(subcommand: subcommand)
        try? headless.run()
    }
}
|
||||||
32
mac/Sources/CodeBurnMenubar/Theme/Theme.swift
Normal file
32
mac/Sources/CodeBurnMenubar/Theme/Theme.swift
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Design tokens. Warm terracotta-ember palette, not generic orange.
enum Theme {
    /// Builds a Color from 8-bit sRGB channel values (0...255 each).
    private static func rgb(_ r: Double, _ g: Double, _ b: Double) -> Color {
        Color(red: r / 255.0, green: g / 255.0, blue: b / 255.0)
    }

    // Brand accents, light-to-dark ember range.
    static let brandAccent = rgb(0xC9, 0x52, 0x1D)
    static let brandAccentDark = rgb(0xE8, 0x77, 0x4A)
    static let brandEmberDeep = rgb(0x8B, 0x3E, 0x13)
    static let brandEmberGlow = rgb(0xF0, 0xA0, 0x70)

    // Background surfaces for light and dark appearance.
    static let warmSurface = rgb(0xFA, 0xF7, 0xF3)
    static let warmSurfaceDark = rgb(0x1C, 0x18, 0x16)

    // One distinguishable hue per agent series in charts.
    static let categoricalClaude = rgb(0xC9, 0x52, 0x1D)
    static let categoricalCursor = rgb(0x3F, 0x6B, 0x8C)
    static let categoricalCodex = rgb(0x4A, 0x7D, 0x5C)

    // Traffic-light scale for one-shot rate.
    static let oneShotGood = rgb(0x30, 0xD1, 0x58)
    static let oneShotMid = rgb(0xFF, 0x9F, 0x0A)
    static let oneShotLow = rgb(0xFF, 0x45, 0x3A)

    // Semantic colors -- tuned to sit alongside the terracotta accent without clashing.
    static let semanticDanger = rgb(0xC8, 0x3F, 0x2C) // brick-red, terracotta-leaning
    static let semanticWarning = rgb(0xD9, 0x8F, 0x29) // amber, warmer than vanilla
    static let semanticSuccess = rgb(0x4E, 0xA8, 0x65) // muted green that holds against terracotta
}
|
||||||
|
|
||||||
|
extension Font {
    /// SF Mono for currency values -- developer-tool identity.
    /// - Parameters:
    ///   - size: point size of the font.
    ///   - weight: font weight; defaults to `.regular`.
    static func codeMono(size: CGFloat, weight: Font.Weight = .regular) -> Font {
        return Font.system(size: size, weight: weight, design: .monospaced)
    }
}
|
||||||
87
mac/Sources/CodeBurnMenubar/Views/ActivitySection.swift
Normal file
87
mac/Sources/CodeBurnMenubar/Views/ActivitySection.swift
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Collapsible "Activity" panel: one row per top activity, with column headers for
/// cost / turns / one-shot rate rendered in the section's trailing slot.
struct ActivitySection: View {
    @Environment(AppStore.self) private var store
    // Expanded by default; state is per-popover-instance, not persisted.
    @State private var isExpanded: Bool = true

    var body: some View {
        CollapsibleSection(
            caption: "Activity",
            isExpanded: $isExpanded,
            trailing: {
                // Column headers; min widths mirror the matching value columns in
                // ActivityRow so header and data stay aligned.
                HStack(spacing: 8) {
                    Text("Cost").frame(minWidth: 54, alignment: .trailing)
                    Text("Turns").frame(minWidth: 52, alignment: .trailing)
                    Text("1-shot").frame(minWidth: 44, alignment: .trailing)
                }
                .font(.system(size: 10, weight: .medium))
                .foregroundStyle(.tertiary)
                .tracking(-0.05)
            }
        ) {
            VStack(alignment: .leading, spacing: 7) {
                // Scale every bar against the most expensive activity; fall back to 1
                // so an empty list can't produce a division by zero in the rows.
                let maxCost = store.payload.current.topActivities.map(\.cost).max() ?? 1
                ForEach(store.payload.current.topActivities, id: \.name) { activity in
                    ActivityRow(activity: activity, maxCost: maxCost)
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// One activity line: proportional cost bar, name, cost, turn count, one-shot rate.
struct ActivityRow: View {
    let activity: ActivityEntry
    // Largest cost in the list; used to normalize this row's bar fraction.
    let maxCost: Double

    var body: some View {
        HStack(spacing: 8) {
            // Bar length is this activity's share of the most expensive activity.
            FixedBar(fraction: activity.cost / maxCost)
                .frame(width: 56, height: 6)

            Text(activity.name)
                .font(.system(size: 12.5, weight: .medium))
                .frame(maxWidth: .infinity, alignment: .leading)

            Text(activity.cost.asCompactCurrency())
                .font(.codeMono(size: 12, weight: .medium))
                .tracking(-0.2)
                .frame(minWidth: 54, alignment: .trailing)

            Text("\(activity.turns)")
                .font(.system(size: 11))
                .monospacedDigit()
                .foregroundStyle(.secondary)
                .frame(minWidth: 52, alignment: .trailing)

            Text(oneShotText)
                .font(.system(size: 10.5))
                .monospacedDigit()
                .foregroundStyle(.secondary)
                .frame(minWidth: 44, alignment: .trailing)
        }
        .padding(.horizontal, 2)
        .padding(.vertical, 1)
    }

    /// "NN%" when a one-shot rate exists; em-dash when the rate is unknown.
    private var oneShotText: String {
        guard let rate = activity.oneShotRate else { return "—" }
        return "\(Int(rate * 100))%"
    }
}
|
||||||
|
|
||||||
|
/// Fixed-width horizontal bar that shows a fill fraction.
struct FixedBar: View {
    let fraction: Double

    var body: some View {
        GeometryReader { geo in
            // Clamp the fill to [0, track width] so out-of-range fractions
            // can never draw outside the track.
            let fillWidth = max(0, min(geo.size.width, geo.size.width * CGFloat(fraction)))
            ZStack(alignment: .leading) {
                RoundedRectangle(cornerRadius: 2)
                    .fill(.secondary.opacity(0.15))
                RoundedRectangle(cornerRadius: 2)
                    .fill(Theme.brandAccent)
                    .frame(width: fillWidth)
            }
        }
    }
}
|
||||||
92
mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift
Normal file
92
mac/Sources/CodeBurnMenubar/Views/AgentTabStrip.swift
Normal file
|
|
@ -0,0 +1,92 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Horizontal strip of provider tabs (All / per-agent), scrollable when narrow.
struct AgentTabStrip: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        ScrollView(.horizontal, showsIndicators: false) {
            HStack(spacing: 5) {
                ForEach(visibleFilters) { filter in
                    Button {
                        // Provider switch triggers an async payload refresh.
                        Task { await store.switchTo(provider: filter) }
                    } label: {
                        AgentTab(
                            filter: filter,
                            cost: cost(for: filter),
                            isActive: store.selectedProvider == filter
                        )
                    }
                    .buttonStyle(.plain)
                }
            }
        }
        .padding(.horizontal, 12)
        .padding(.top, 8)
        .padding(.bottom, 4)
    }

    /// Drive tab visibility and per-tab cost labels from the *all-provider* payload (today),
    /// not the currently selected provider's payload. Without this, switching to Codex (which
    /// has no data) would hide every other tab including Claude.
    private var allProvidersToday: MenubarPayload {
        store.todayPayload ?? store.payload
    }

    /// The .all tab is always visible; a provider tab appears only when that provider
    /// has an entry in the all-provider payload (case-insensitive key match).
    private var visibleFilters: [ProviderFilter] {
        let activeKeys = Set(allProvidersToday.current.providers.keys.map { $0.lowercased() })
        return ProviderFilter.allCases.filter { filter in
            if filter == .all { return true }
            return activeKeys.contains(filter.rawValue.lowercased())
        }
    }

    /// Cost badge for a tab: aggregate total for .all, per-provider lookup otherwise
    /// (nil when the provider has no entry).
    private func cost(for filter: ProviderFilter) -> Double? {
        switch filter {
        case .all:
            return allProvidersToday.current.cost
        default:
            let key = filter.rawValue.lowercased()
            return allProvidersToday.current.providers[key]
        }
    }
}
|
||||||
|
|
||||||
|
/// One pill-shaped tab: provider name plus (when nonzero) its compact cost badge.
/// Active tab is filled with the brand accent and switches to white text.
private struct AgentTab: View {
    let filter: ProviderFilter
    let cost: Double?
    let isActive: Bool

    var body: some View {
        HStack(spacing: 5) {
            Text(filter.rawValue)
                .font(.system(size: 11.5, weight: .medium))
                .tracking(-0.05)
            // Cost badge only when there is actual spend to show.
            if let cost, cost > 0 {
                Text(cost.asCompactCurrency())
                    .font(.codeMono(size: 10.5, weight: .medium))
                    .foregroundStyle(isActive ? AnyShapeStyle(.white.opacity(0.8)) : AnyShapeStyle(.secondary))
                    .tracking(-0.2)
            }
        }
        .padding(.horizontal, 10)
        .padding(.vertical, 4)
        .background(
            RoundedRectangle(cornerRadius: 6)
                .fill(isActive ? AnyShapeStyle(Theme.brandAccent) : AnyShapeStyle(Color.secondary.opacity(0.08)))
        )
        .foregroundStyle(isActive ? AnyShapeStyle(.white) : AnyShapeStyle(.secondary))
        // Make the whole pill (padding included) hit-testable for the parent Button.
        .contentShape(Rectangle())
    }
}
|
||||||
|
|
||||||
|
extension ProviderFilter {
    /// Accent color used for this provider across tabs and chart series.
    var color: Color {
        switch self {
        case .all: Theme.brandAccent
        case .claude: Theme.categoricalClaude
        case .codex: Theme.categoricalCodex
        case .cursor: Theme.categoricalCursor
        case .copilot: Color(red: 0x6D/255.0, green: 0x8F/255.0, blue: 0xA6/255.0)
        }
    }
}
|
||||||
290
mac/Sources/CodeBurnMenubar/Views/FindingsSection.swift
Normal file
290
mac/Sources/CodeBurnMenubar/Views/FindingsSection.swift
Normal file
|
|
@ -0,0 +1,290 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
// All three tip categories currently share the brand accent; kept as separate
// constants so the categories can diverge later without touching computeTipGroups.
private let winColor = Theme.brandAccent
private let riskColor = Theme.brandAccent
private let improveColor = Theme.brandAccent
|
||||||
|
|
||||||
|
/// Three-category insights panel: wins, improvements, risks.
/// Wins/risks are derived from current + history; improvements come from the optimize findings.
struct FindingsSection: View {
    @Environment(AppStore.self) private var store
    @State private var isExpanded: Bool = true

    var body: some View {
        // Groups are recomputed on every body evaluation from the live payload.
        // AnyView lets the body use statements (let/if/return) instead of a single
        // ViewBuilder expression.
        let groups = computeTipGroups(payload: store.payload)
        // Nothing in any category -> render nothing at all.
        if groups.allSatisfy({ $0.items.isEmpty }) { return AnyView(EmptyView()) }

        return AnyView(
            VStack(alignment: .leading, spacing: 8) {
                // Header row doubles as the expand/collapse toggle.
                Button {
                    withAnimation(.easeInOut(duration: 0.18)) { isExpanded.toggle() }
                } label: {
                    HStack(alignment: .firstTextBaseline) {
                        HStack(spacing: 6) {
                            Image(systemName: "lightbulb.fill")
                                .font(.system(size: 11, weight: .semibold))
                                .foregroundStyle(Theme.brandAccent)
                            Text("Tips for you")
                                .font(.system(size: 12.5, weight: .semibold))
                                .foregroundStyle(.primary)
                        }
                        Spacer()
                        Text("\(groups.flatMap { $0.items }.count) signals")
                            .font(.system(size: 10.5))
                            .foregroundStyle(.secondary)
                        // Chevron rotates to point down when expanded.
                        Image(systemName: "chevron.right")
                            .font(.system(size: 9, weight: .semibold))
                            .rotationEffect(.degrees(isExpanded ? 90 : 0))
                            .opacity(0.55)
                            .foregroundStyle(.secondary)
                    }
                    .contentShape(Rectangle())
                }
                .buttonStyle(.plain)

                if isExpanded {
                    VStack(alignment: .leading, spacing: 10) {
                        ForEach(groups) { group in
                            if !group.items.isEmpty {
                                TipsGroup(group: group)
                            }
                        }

                        // Deep-link into the full optimize report when findings exist.
                        if store.payload.optimize.findingCount > 0 {
                            Button {
                                openOptimize()
                            } label: {
                                HStack(spacing: 4) {
                                    Text("Open Full Optimize")
                                        .font(.system(size: 11.5, weight: .semibold))
                                    Image(systemName: "arrow.forward")
                                        .font(.system(size: 9, weight: .semibold))
                                }
                                .foregroundStyle(Theme.brandAccent)
                            }
                            .buttonStyle(.plain)
                        }
                    }
                    .transition(.opacity)
                }
            }
            .padding(12)
            .background(
                RoundedRectangle(cornerRadius: 8)
                    .fill(Color.secondary.opacity(0.06))
            )
            .padding(.horizontal, 14)
            .padding(.vertical, 8)
        )
    }

    /// Launches `codeburn optimize` in the user's terminal via TerminalLauncher.
    private func openOptimize() {
        TerminalLauncher.open(subcommand: ["optimize"])
    }
}
|
||||||
|
|
||||||
|
/// Renders one tip category: colored uppercase label header plus bulleted items,
/// each with an optional monospaced trailing value (e.g. estimated savings).
private struct TipsGroup: View {
    let group: TipGroup

    var body: some View {
        VStack(alignment: .leading, spacing: 5) {
            HStack(spacing: 5) {
                Image(systemName: group.icon)
                    .font(.system(size: 10, weight: .bold))
                    .foregroundStyle(group.color)
                Text(group.label)
                    .font(.system(size: 10.5, weight: .semibold))
                    .foregroundStyle(group.color)
                    .textCase(.uppercase)
                    .tracking(0.4)
            }
            VStack(alignment: .leading, spacing: 4) {
                ForEach(group.items) { item in
                    HStack(alignment: .firstTextBaseline, spacing: 6) {
                        // 3pt bullet dot, nudged down to sit near the text baseline.
                        Circle().fill(group.color).frame(width: 3, height: 3).padding(.top, 4)
                        Text(item.text)
                            .font(.system(size: 11.5))
                            .foregroundStyle(.primary)
                            .frame(maxWidth: .infinity, alignment: .leading)
                        if let trailing = item.trailing {
                            Text(trailing)
                                .font(.codeMono(size: 11, weight: .medium))
                                .foregroundStyle(.secondary)
                                .tracking(-0.2)
                        }
                    }
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// One tip category (label/icon/color) with its items. The UUID id is generated
/// per construction, so identity is per-render -- fine for this static ForEach use.
private struct TipGroup: Identifiable {
    let id = UUID()
    let label: String
    let icon: String
    let color: Color
    let items: [TipItem]
}

/// A single tip line; `trailing` is an optional right-aligned value (e.g. savings).
private struct TipItem: Identifiable {
    let id = UUID()
    let text: String
    let trailing: String?
}
|
||||||
|
|
||||||
|
/// Builds the three tip groups (wins / improvements / risks) from the payload.
/// Thresholds below are fixed heuristics tuned for the menubar surface.
private func computeTipGroups(payload: MenubarPayload) -> [TipGroup] {
    let stats = computeHistoryStats(history: payload.history.daily)

    // What's working
    var wins: [TipItem] = []
    let cacheHit = payload.current.cacheHitPercent
    if cacheHit >= 80 {
        wins.append(TipItem(
            text: "Cache hit at \(Int(cacheHit))% — most prompts reuse cache",
            trailing: nil
        ))
    }
    if let oneShot = payload.current.oneShotRate, oneShot >= 0.75 {
        wins.append(TipItem(
            text: "\(Int(oneShot * 100))% one-shot — edits landing first try",
            trailing: nil
        ))
    }
    // Negative delta means week-over-week spend went down.
    if let delta = stats.weekDeltaPercent, delta < -10 {
        wins.append(TipItem(
            text: "Spend down \(Int(abs(delta)))% vs last 7 days",
            trailing: nil
        ))
    }
    if stats.activeStreakDays >= 5 {
        wins.append(TipItem(
            text: "\(stats.activeStreakDays)-day usage streak",
            trailing: nil
        ))
    }

    // What to improve (existing optimize findings)
    var improvements: [TipItem] = []
    for finding in payload.optimize.topFindings.prefix(3) {
        improvements.append(TipItem(
            text: finding.title,
            trailing: finding.savingsUSD.asCompactCurrency()
        ))
    }

    // Risks
    var risks: [TipItem] = []
    if let delta = stats.weekDeltaPercent, delta > 25 {
        risks.append(TipItem(
            text: "Spend up \(Int(delta))% vs prior 7 days",
            trailing: nil
        ))
    }
    // cacheHit == 0 is treated as "no cache data", not as a genuine 0% hit rate.
    if cacheHit > 0 && cacheHit < 50 {
        risks.append(TipItem(
            text: "Cache hit only \(Int(cacheHit))% — paying for cold prompts",
            trailing: nil
        ))
    }
    if let oneShot = payload.current.oneShotRate, oneShot < 0.5 {
        risks.append(TipItem(
            text: "\(Int(oneShot * 100))% one-shot — lots of iteration",
            trailing: nil
        ))
    }
    // Flag when the month's run-rate projects 30%+ over last month's total.
    if let projected = stats.projectedMonth, let prevMonth = stats.previousMonthTotal, projected > prevMonth * 1.3 {
        risks.append(TipItem(
            text: "On pace for \(projected.asCompactCurrency()) this month (+\(Int(((projected - prevMonth) / prevMonth) * 100))% vs last)",
            trailing: nil
        ))
    }

    // Group order fixes the on-screen order: wins, improvements, risks.
    return [
        TipGroup(label: "What's working", icon: "checkmark.circle.fill", color: winColor, items: wins),
        TipGroup(label: "What to improve", icon: "arrow.up.right.circle.fill", color: improveColor, items: improvements),
        TipGroup(label: "Risks", icon: "exclamationmark.triangle.fill", color: riskColor, items: risks),
    ]
}
|
||||||
|
|
||||||
|
/// Aggregates derived from daily history; nil fields mean "not enough data".
private struct HistoryStats {
    // Percent change of the last 7 days vs the 7 days before (negative = spend dropped).
    let weekDeltaPercent: Double?
    // Consecutive days with nonzero spend, counting back from today (capped at 60).
    let activeStreakDays: Int
    // Linear projection of this calendar month's total from month-to-date spend.
    let projectedMonth: Double?
    // Previous calendar month's total; nil when zero or absent.
    let previousMonthTotal: Double?
}
|
||||||
|
|
||||||
|
/// Derives week-over-week delta, active-day streak, month projection, and previous-month
/// total from the daily history. Date keys are "yyyy-MM-dd" strings, which sort
/// lexicographically in chronological order, so the range filters below use plain
/// string comparison.
/// NOTE(review): all calendar math is pinned to UTC; assumes history entries are keyed
/// by UTC date — confirm against the payload producer.
private func computeHistoryStats(history: [DailyHistoryEntry]) -> HistoryStats {
    var calendar = Calendar(identifier: .gregorian)
    calendar.timeZone = TimeZone(identifier: "UTC")!
    let formatter: DateFormatter = {
        let f = DateFormatter()
        f.dateFormat = "yyyy-MM-dd"
        f.timeZone = TimeZone(identifier: "UTC")
        return f
    }()
    let now = Date()
    let today = calendar.startOfDay(for: now)
    // Fast cost-per-date lookup. NOTE(review): uniqueKeysWithValues traps on duplicate
    // dates — assumes the producer emits at most one entry per day; confirm.
    let costByDate = Dictionary(uniqueKeysWithValues: history.map { ($0.date, $0.cost) })

    // Rolling window: last 7 days (today-6...today) vs the prior 7 (today-13...today-7).
    let lastWeekStart = calendar.date(byAdding: .day, value: -6, to: today)
    let priorWeekStart = calendar.date(byAdding: .day, value: -13, to: today)
    let priorWeekEnd = calendar.date(byAdding: .day, value: -7, to: today)
    var weekDeltaPercent: Double? = nil
    if let lws = lastWeekStart, let pws = priorWeekStart, let pwe = priorWeekEnd {
        let lwsStr = formatter.string(from: lws)
        let pwsStr = formatter.string(from: pws)
        let pweStr = formatter.string(from: pwe)
        let thisWeek = history.filter { $0.date >= lwsStr }.reduce(0.0) { $0 + $1.cost }
        let prior = history.filter { $0.date >= pwsStr && $0.date <= pweStr }.reduce(0.0) { $0 + $1.cost }
        // No baseline spend -> leave the delta nil rather than dividing by zero.
        if prior > 0 {
            weekDeltaPercent = ((thisWeek - prior) / prior) * 100
        }
    }

    // Consecutive days with nonzero spend, counting back from today, capped at 60.
    var streak = 0
    for offset in 0..<60 {
        guard let d = calendar.date(byAdding: .day, value: -offset, to: today) else { break }
        let key = formatter.string(from: d)
        if (costByDate[key] ?? 0) > 0 { streak += 1 } else { break }
    }

    // Linear month projection: month-to-date daily average times days in the month.
    var projectedMonth: Double? = nil
    var previousMonthTotal: Double? = nil
    let comps = calendar.dateComponents([.year, .month, .day], from: now)
    if
        let firstOfMonth = calendar.date(from: DateComponents(year: comps.year, month: comps.month, day: 1)),
        let rangeOfMonth = calendar.range(of: .day, in: .month, for: firstOfMonth)
    {
        let firstStr = formatter.string(from: firstOfMonth)
        let mtd = history.filter { $0.date >= firstStr }.reduce(0.0) { $0 + $1.cost }
        let dayOfMonth = comps.day ?? 1
        if dayOfMonth > 0 {
            projectedMonth = (mtd / Double(dayOfMonth)) * Double(rangeOfMonth.count)
        }

        // Previous calendar month's total; only reported when nonzero so callers
        // can treat nil as "no baseline to compare against".
        if
            let prevMonth = calendar.date(byAdding: .month, value: -1, to: firstOfMonth),
            let prevRange = calendar.range(of: .day, in: .month, for: prevMonth),
            let prevFirst = calendar.date(from: DateComponents(
                year: calendar.component(.year, from: prevMonth),
                month: calendar.component(.month, from: prevMonth),
                day: 1
            )),
            let prevLast = calendar.date(byAdding: .day, value: prevRange.count - 1, to: prevFirst)
        {
            let prevFirstStr = formatter.string(from: prevFirst)
            let prevLastStr = formatter.string(from: prevLast)
            let prevTotal = history.filter { $0.date >= prevFirstStr && $0.date <= prevLastStr }
                .reduce(0.0) { $0 + $1.cost }
            if prevTotal > 0 { previousMonthTotal = prevTotal }
        }
    }

    return HistoryStats(
        weekDeltaPercent: weekDeltaPercent,
        activeStreakDays: streak,
        projectedMonth: projectedMonth,
        previousMonthTotal: previousMonthTotal
    )
}
|
||||||
1219
mac/Sources/CodeBurnMenubar/Views/HeatmapSection.swift
Normal file
1219
mac/Sources/CodeBurnMenubar/Views/HeatmapSection.swift
Normal file
File diff suppressed because it is too large
Load diff
55
mac/Sources/CodeBurnMenubar/Views/HeroSection.swift
Normal file
55
mac/Sources/CodeBurnMenubar/Views/HeroSection.swift
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Large gradient cost figure for the selected period, with call/session counts
/// right-aligned on the same baseline row.
struct HeroSection: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        VStack(alignment: .leading, spacing: 8) {
            SectionCaption(text: caption)

            HStack(alignment: .firstTextBaseline) {
                Text(store.payload.current.cost.asCurrency())
                    .font(.system(size: 32, weight: .semibold, design: .rounded))
                    .monospacedDigit()
                    .tracking(-1)
                    .foregroundStyle(
                        LinearGradient(
                            colors: [Theme.brandAccent, Theme.brandEmberDeep],
                            startPoint: .top,
                            endPoint: .bottom
                        )
                    )

                Spacer()

                VStack(alignment: .trailing, spacing: 2) {
                    Text("\(store.payload.current.calls.asThousandsSeparated()) calls")
                        .font(.system(size: 11))
                        .monospacedDigit()
                        .foregroundStyle(.secondary)
                    Text("\(store.payload.current.sessions) sessions")
                        .font(.system(size: 10.5))
                        .monospacedDigit()
                        .foregroundStyle(.tertiary)
                }
            }
        }
        .padding(.horizontal, 14)
        .padding(.top, 10)
        .padding(.bottom, 12)
    }

    /// Period label from the payload (falling back to the period's raw name);
    /// the Today period also gets the current date appended.
    private var caption: String {
        let label = store.payload.current.label.isEmpty ? store.selectedPeriod.rawValue : store.payload.current.label
        if store.selectedPeriod == .today {
            return "\(label) · \(todayDate)"
        }
        return label
    }

    // e.g. "Tue Mar 4". NOTE(review): DateFormatter uses the device's current locale
    // implicitly — day/month names will localize; confirm that's intended.
    private var todayDate: String {
        let formatter = DateFormatter()
        formatter.dateFormat = "EEE MMM d"
        return formatter.string(from: Date())
    }
}
|
||||||
401
mac/Sources/CodeBurnMenubar/Views/MenuBarContent.swift
Normal file
401
mac/Sources/CodeBurnMenubar/Views/MenuBarContent.swift
Normal file
|
|
@ -0,0 +1,401 @@
|
||||||
|
import AppKit
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Popover root. Assembles all sections matching the HTML design spec.
struct MenuBarContent: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        VStack(spacing: 0) {
            Header()

            Divider()

            AgentTabStrip()

            Divider()

            // Scrollable content sits under a loading overlay in a ZStack so the
            // previous data stays visible (blurred) while a refresh is in flight.
            ZStack {
                ScrollView(.vertical, showsIndicators: false) {
                    VStack(spacing: 0) {
                        HeroSection()
                        Divider().opacity(0.5)
                        PeriodSegmentedControl()
                        Divider().opacity(0.5)
                        if isFilteredEmpty {
                            EmptyProviderState(provider: store.selectedProvider, period: store.selectedPeriod)
                        } else {
                            HeatmapSection()
                                .padding(.horizontal, 14)
                                .padding(.top, 10)
                                .padding(.bottom, 10)
                                // NOTE(review): zIndex presumably keeps heatmap overlays
                                // above the sibling sections below — confirm.
                                .zIndex(10)
                            Divider().opacity(0.5)
                            ActivitySection()
                            Divider().opacity(0.5)
                            ModelsSection()
                            Divider().opacity(0.5)
                            FindingsSection()
                        }
                    }
                }

                if store.isLoading {
                    BurnLoadingOverlay(periodLabel: store.selectedPeriod.rawValue)
                        .transition(.opacity)
                }
            }
            .frame(height: 520)
            .animation(.easeInOut(duration: 0.2), value: store.isLoading)

            Divider()

            FooterBar()

            StarBanner()
        }
    }

    /// True when a specific provider tab is selected and that provider has no spend in the
    /// currently selected period. The .all tab is exempt -- it always shows aggregated data.
    private var isFilteredEmpty: Bool {
        guard store.selectedProvider != .all else { return false }
        return store.payload.current.cost <= 0 && store.payload.current.calls == 0
    }

}
|
||||||
|
|
||||||
|
/// Placeholder shown when the selected provider has no data in the selected period.
private struct EmptyProviderState: View {
    let provider: ProviderFilter
    let period: Period

    var body: some View {
        VStack(spacing: 10) {
            Image(systemName: "tray")
                .font(.system(size: 26))
                .foregroundStyle(.tertiary)
            Text("No \(provider.rawValue) data for \(periodPhrase)")
                .font(.system(size: 12, weight: .medium))
                .foregroundStyle(.secondary)
                .multilineTextAlignment(.center)
        }
        .frame(maxWidth: .infinity)
        .padding(.vertical, 60)
    }

    /// Human-readable period phrase for use mid-sentence in the message above.
    private var periodPhrase: String {
        switch period {
        case .today: "today"
        case .sevenDays: "the last 7 days"
        case .thirtyDays: "the last 30 days"
        case .month: "this month"
        case .all: "all time"
        }
    }
}
|
||||||
|
|
||||||
|
/// Translucent overlay that blurs whatever's behind it (the previous tab/period content)
/// and centers an animated burning flame -- the brand mark filling up bottom-to-top in
/// yellow→orange→red, looping.
private struct BurnLoadingOverlay: View {
    let periodLabel: String
    // Animation state: flame fill (0→1) and glow pulse, both kicked off in onAppear.
    @State private var fillProgress: CGFloat = 0
    @State private var glowing: Bool = false

    private let flameSize: CGFloat = 64

    var body: some View {
        ZStack {
            // Blur backdrop -- ultraThinMaterial uses live blur of underlying content.
            Rectangle()
                .fill(.ultraThinMaterial)

            VStack(spacing: 14) {
                BurnFlame(size: flameSize, fillProgress: fillProgress, glowing: glowing)
                Text("Loading \(periodLabel)…")
                    .font(.system(size: 11.5, weight: .medium))
                    .foregroundStyle(.secondary)
            }
        }
        .onAppear {
            // Two repeat-forever animations at different cadences so the fill and
            // the glow drift in and out of phase instead of pulsing in lockstep.
            withAnimation(.easeInOut(duration: 1.4).repeatForever(autoreverses: true)) {
                fillProgress = 1.0
            }
            withAnimation(.easeInOut(duration: 0.9).repeatForever(autoreverses: true)) {
                glowing = true
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// The animated flame mark: pulsing glow, cool outline base, and a gradient fill
/// masked bottom-up by `fillProgress` (0 = unlit, 1 = fully lit).
private struct BurnFlame: View {
    let size: CGFloat
    let fillProgress: CGFloat
    let glowing: Bool

    var body: some View {
        ZStack {
            // Soft outer glow that pulses, matching the brand terracotta palette.
            Image(systemName: "flame.fill")
                .font(.system(size: size, weight: .regular))
                .foregroundStyle(Theme.brandEmberGlow.opacity(glowing ? 0.55 : 0.20))
                .blur(radius: glowing ? 14 : 6)

            // Empty (cool) flame as base
            Image(systemName: "flame")
                .font(.system(size: size, weight: .regular))
                .foregroundStyle(Theme.brandAccent.opacity(0.25))

            // Burning gradient (brand orange) masked by an animated bottom-up rectangle
            Image(systemName: "flame.fill")
                .font(.system(size: size, weight: .regular))
                .foregroundStyle(
                    LinearGradient(
                        colors: [
                            Theme.brandEmberGlow,
                            Theme.brandAccentDark,
                            Theme.brandAccent,
                            Theme.brandEmberDeep
                        ],
                        startPoint: .bottom,
                        endPoint: .top
                    )
                )
                .mask(
                    GeometryReader { geo in
                        // Bottom-aligned rectangle whose height tracks fillProgress.
                        Rectangle()
                            .frame(height: geo.size.height * fillProgress)
                            .frame(maxHeight: .infinity, alignment: .bottom)
                    }
                )
        }
        .frame(width: size, height: size)
    }
}
|
||||||
|
|
||||||
|
/// Popover title block: two-tone "CodeBurn" wordmark plus subtitle.
private struct Header: View {
    var body: some View {
        VStack(alignment: .leading, spacing: 1) {
            // Concatenated Texts so the two halves carry different colors while
            // laying out as a single line of text.
            (
                Text("Code").foregroundStyle(.primary)
                + Text("Burn").foregroundStyle(Theme.brandAccent)
            )
            .font(.system(size: 13, weight: .semibold))
            .tracking(-0.15)
            Text("AI Coding Cost Tracker")
                .font(.system(size: 10.5))
                .foregroundStyle(.secondary)
        }
        .frame(maxWidth: .infinity, alignment: .leading)
        .padding(.horizontal, 14)
        .padding(.top, 10)
        .padding(.bottom, 8)
    }
}
|
||||||
|
|
||||||
|
/// Small rounded-square app glyph: gradient tile with a white flame on top.
struct FlameMark: View {
    var body: some View {
        ZStack {
            RoundedRectangle(cornerRadius: 5)
                .fill(
                    LinearGradient(
                        colors: [Theme.brandAccentDark, Theme.brandEmberDeep],
                        startPoint: .topLeading,
                        endPoint: .bottomTrailing
                    )
                )
                .shadow(color: .black.opacity(0.2), radius: 1, y: 0.5)
            Image(systemName: "flame.fill")
                .font(.system(size: 12, weight: .semibold))
                .foregroundStyle(.white)
        }
    }
}
|
||||||
|
|
||||||
|
// Force-unwrap is safe: the URL literal is statically valid.
private let starBannerGitHubURL = URL(string: "https://github.com/AgentSeal/codeburn")!

/// Shown at the very bottom on first launch. A small terracotta strip nudges users to star the
/// repo; clicking opens GitHub, clicking the close icon hides it forever (persisted to
/// UserDefaults so it never returns across launches).
struct StarBanner: View {
    // Backed by the UserDefaults key "codeburn.starBannerDismissed".
    @AppStorage("codeburn.starBannerDismissed") private var dismissed: Bool = false

    var body: some View {
        if !dismissed {
            HStack(spacing: 8) {
                Image(systemName: "star.fill")
                    .font(.system(size: 10, weight: .semibold))
                    .foregroundStyle(Theme.brandAccent)

                // Main call-to-action: opens the repo in the default browser.
                Button {
                    NSWorkspace.shared.open(starBannerGitHubURL)
                } label: {
                    HStack(spacing: 4) {
                        Text("Enjoying CodeBurn?")
                            .foregroundStyle(.primary)
                        Text("Star us on GitHub")
                            .foregroundStyle(Theme.brandAccent)
                            .underline(true, pattern: .solid)
                    }
                    .font(.system(size: 10.5, weight: .medium))
                    .contentShape(Rectangle())
                }
                .buttonStyle(.plain)

                Spacer()

                // Dismiss button: sets the persisted flag, removing the banner for good.
                Button {
                    dismissed = true
                } label: {
                    Image(systemName: "xmark")
                        .font(.system(size: 9, weight: .semibold))
                        .foregroundStyle(.secondary)
                        .padding(4)
                        .contentShape(Rectangle())
                }
                .buttonStyle(.plain)
                .help("Hide this banner")
            }
            .padding(.horizontal, 12)
            .padding(.vertical, 6)
            .background(Theme.brandAccent.opacity(0.08))
            .overlay(alignment: .top) {
                // Hairline top border separating the banner from the footer above.
                Rectangle()
                    .fill(Color.secondary.opacity(0.18))
                    .frame(height: 0.5)
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Bottom control strip of the popover: currency picker, manual refresh,
/// export menu, and the prominent "Open Full Report" terminal launcher.
struct FooterBar: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        HStack(spacing: 6) {
            currencyMenu
            refreshButton
            exportMenu
            Spacer()
            reportButton
        }
        .padding(.horizontal, 12)
        .padding(.vertical, 8)
    }

    /// Currency dropdown; the active code carries a checkmark.
    private var currencyMenu: some View {
        Menu {
            ForEach(SupportedCurrency.allCases) { currency in
                Button {
                    applyCurrency(code: currency.rawValue)
                } label: {
                    if currency.rawValue == store.currency {
                        Label("\(currency.displayName) (\(currency.rawValue))", systemImage: "checkmark")
                    } else {
                        Text("\(currency.displayName) (\(currency.rawValue))")
                    }
                }
            }
        } label: {
            Label(store.currency, systemImage: "dollarsign.circle")
                .font(.system(size: 11, weight: .medium))
                .labelStyle(.titleAndIcon)
        }
        .menuStyle(.button)
        .menuIndicator(.hidden)
        .buttonStyle(.bordered)
        .controlSize(.small)
        .fixedSize()
    }

    /// Manual refresh; disabled (with a swapped icon) while a refresh is in flight.
    private var refreshButton: some View {
        Button {
            Task { await store.refresh(includeOptimize: true) }
        } label: {
            Image(systemName: store.isLoading ? "arrow.triangle.2.circlepath" : "arrow.clockwise")
                .font(.system(size: 11, weight: .medium))
        }
        .buttonStyle(.bordered)
        .controlSize(.small)
        .disabled(store.isLoading)
    }

    /// Export dropdown: CSV folder or single JSON file, both landing in ~/Downloads.
    private var exportMenu: some View {
        Menu {
            Button("CSV (folder)") { runExport(format: .csv) }
            Button("JSON") { runExport(format: .json) }
        } label: {
            Label("Export", systemImage: "square.and.arrow.down")
                .font(.system(size: 11, weight: .medium))
                .labelStyle(.titleAndIcon)
        }
        .menuStyle(.button)
        .menuIndicator(.hidden)
        .buttonStyle(.bordered)
        .controlSize(.small)
        .fixedSize()
    }

    /// Launches the full terminal report.
    private var reportButton: some View {
        Button { openReport() } label: {
            Label("Open Full Report", systemImage: "terminal")
                .font(.system(size: 11, weight: .semibold))
                .labelStyle(.titleAndIcon)
        }
        .buttonStyle(.borderedProminent)
        .controlSize(.small)
        .tint(Theme.brandAccent)
    }

    private func openReport() {
        TerminalLauncher.open(subcommand: ["report"])
    }

    private enum ExportFormat {
        case csv, json
        // Value passed to the CLI's -f flag.
        var cliName: String { self == .csv ? "csv" : "json" }
        // CSV exports are a folder (no extension); JSON is a single file.
        var suffix: String { self == .csv ? "" : ".json" }
    }

    /// Runs `codeburn export` directly into ~/Downloads and reveals the result in Finder. CSV
    /// produces a folder of clean one-table-per-file CSVs; JSON produces a single structured
    /// file. The CLI is spawned with argv (no shell interpretation), so the output path cannot
    /// be abused to inject shell commands even if a pathological value slips through.
    private func runExport(format: ExportFormat) {
        Task {
            let downloads = (NSHomeDirectory() as NSString).appendingPathComponent("Downloads")
            let stamp = DateFormatter()
            stamp.dateFormat = "yyyy-MM-dd"
            let base = "codeburn-\(stamp.string(from: Date()))"
            let outputPath = (downloads as NSString).appendingPathComponent(base + format.suffix)

            let process = CodeburnCLI.makeProcess(subcommand: [
                "export", "-f", format.cliName, "-o", outputPath
            ])

            do {
                try process.run()
                // NOTE(review): waitUntilExit() blocks this cooperative-pool thread until
                // the CLI finishes — fine for a short export; confirm the CLI is bounded.
                process.waitUntilExit()
                if process.terminationStatus == 0 {
                    NSWorkspace.shared.activateFileViewerSelecting([URL(fileURLWithPath: outputPath)])
                } else {
                    NSLog("CodeBurn: \(format.cliName.uppercased()) export exited with status \(process.terminationStatus)")
                }
            } catch {
                NSLog("CodeBurn: \(format.cliName.uppercased()) export failed: \(error)")
            }
        }
    }

    /// Instant-feeling currency switch. Updates the symbol and any cached FX rate on the main
    /// thread right away so the UI redraws the next frame, then fetches a fresh rate in the
    /// background. CLI config is persisted so other codeburn commands stay in sync.
    private func applyCurrency(code: String) {
        store.currency = code
        let symbol = CurrencyState.symbolForCode(code)

        Task {
            // Apply the cached rate first for an immediate redraw…
            let cached = await FXRateCache.shared.cachedRate(for: code)
            await MainActor.run {
                CurrencyState.shared.apply(code: code, rate: cached, symbol: symbol)
            }

            // …then swap in a fresh rate only if it actually differs.
            let fresh = await FXRateCache.shared.rate(for: code)
            if let fresh, fresh != cached {
                await MainActor.run {
                    CurrencyState.shared.apply(code: code, rate: fresh, symbol: symbol)
                }
            }
        }

        CLICurrencyConfig.persist(code: code)
    }
}
|
||||||
97
mac/Sources/CodeBurnMenubar/Views/ModelsSection.swift
Normal file
97
mac/Sources/CodeBurnMenubar/Views/ModelsSection.swift
Normal file
|
|
@ -0,0 +1,97 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Collapsible "Models" table: per-model relative cost bars with cost/call
/// columns, followed by a one-line token summary.
struct ModelsSection: View {
    @Environment(AppStore.self) private var store
    @State private var isExpanded: Bool = true

    /// Right-aligned column headers shown inline with the section caption.
    /// Widths mirror the value columns in ModelRow so headers line up.
    private var columnHeaders: some View {
        HStack(spacing: 8) {
            Text("Cost").frame(minWidth: 54, alignment: .trailing)
            Text("Calls").frame(minWidth: 52, alignment: .trailing)
        }
        .font(.system(size: 10, weight: .medium))
        .foregroundStyle(.tertiary)
        .tracking(-0.05)
    }

    var body: some View {
        CollapsibleSection(
            caption: "Models",
            isExpanded: $isExpanded,
            trailing: { columnHeaders }
        ) {
            VStack(alignment: .leading, spacing: 7) {
                // Bars are scaled relative to the most expensive model.
                let maxCost = store.payload.current.topModels.map(\.cost).max() ?? 1
                ForEach(store.payload.current.topModels, id: \.name) { model in
                    ModelRow(model: model, maxCost: maxCost)
                }

                TokensLine()
                    .padding(.top, 5)
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Single model row: relative cost bar, model name, formatted cost, call count.
private struct ModelRow: View {
    let model: ModelEntry
    let maxCost: Double

    /// Bar fill relative to the most expensive model. Guarded against
    /// maxCost == 0: when every visible model costs $0, the caller's
    /// `.max() ?? 1` still yields 0, and 0/0 would feed NaN into
    /// FixedBar's frame math (layout warnings, blank bar). Render an
    /// empty bar instead.
    private var barFraction: Double {
        maxCost > 0 ? model.cost / maxCost : 0
    }

    var body: some View {
        HStack(spacing: 8) {
            FixedBar(fraction: barFraction)
                .frame(width: 56, height: 6)

            Text(model.name)
                .font(.system(size: 12.5, weight: .medium))
                .frame(maxWidth: .infinity, alignment: .leading)

            Text(model.cost.asCompactCurrency())
                .font(.codeMono(size: 12, weight: .medium))
                .tracking(-0.2)
                .frame(minWidth: 54, alignment: .trailing)

            Text("\(model.calls)")
                .font(.system(size: 11))
                .monospacedDigit()
                .foregroundStyle(.secondary)
                .frame(minWidth: 52, alignment: .trailing)
        }
        .padding(.horizontal, 2)
        .padding(.vertical, 1)
    }
}
|
||||||
|
|
||||||
|
/// One-line token summary for the current period: input/output volume
/// and the cache-hit rate, dot-separated.
private struct TokensLine: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        let totals = store.payload.current
        let cacheHit = String(format: "%.0f", totals.cacheHitPercent)

        HStack(spacing: 4) {
            Text("Tokens")
                .foregroundStyle(.tertiary)
            Text(formatTokens(totals.inputTokens) + " in")
                .foregroundStyle(.secondary)
            Text("·")
                .foregroundStyle(.tertiary)
            Text(formatTokens(totals.outputTokens) + " out")
                .foregroundStyle(.secondary)
            Text("·")
                .foregroundStyle(.tertiary)
            Text(cacheHit + "% cache hit")
                .foregroundStyle(.secondary)
            Spacer()
        }
        .font(.system(size: 10.5))
        .monospacedDigit()
    }

    /// Compact token count: "1.2M" above a million, "3.4K" above a
    /// thousand, raw digits below that.
    private func formatTokens(_ count: Int) -> String {
        switch count {
        case 1_000_000...:
            return String(format: "%.1fM", Double(count) / 1_000_000)
        case 1_000...:
            return String(format: "%.1fK", Double(count) / 1_000)
        default:
            return "\(count)"
        }
    }
}
|
||||||
|
|
@ -0,0 +1,36 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Hand-rolled segmented control for the period switcher. Custom (rather than
/// Picker) so the selected segment can carry the elevated-card look.
struct PeriodSegmentedControl: View {
    @Environment(AppStore.self) private var store

    var body: some View {
        HStack(spacing: 1) {
            ForEach(Period.allCases) { period in
                segment(for: period)
            }
        }
        .padding(2)
        .background(
            RoundedRectangle(cornerRadius: 7)
                .fill(Color.secondary.opacity(0.08))
        )
        .padding(.horizontal, 12)
        .padding(.top, 6)
        .padding(.bottom, 10)
    }

    /// One tappable segment; the selected segment gets a raised background
    /// plus a faint drop shadow.
    private func segment(for period: Period) -> some View {
        let isSelected = store.selectedPeriod == period
        return Button {
            Task { await store.switchTo(period: period) }
        } label: {
            Text(period.rawValue)
                .font(.system(size: 11, weight: .medium))
                .foregroundStyle(isSelected ? AnyShapeStyle(.primary) : AnyShapeStyle(.secondary))
                .frame(maxWidth: .infinity)
                .padding(.vertical, 4)
                .contentShape(Rectangle())
        }
        .buttonStyle(.plain)
        .background(
            RoundedRectangle(cornerRadius: 5)
                .fill(isSelected ? Color(NSColor.windowBackgroundColor).opacity(0.85) : .clear)
                .shadow(color: .black.opacity(isSelected ? 0.06 : 0), radius: 1, y: 0.5)
        )
    }
}
|
||||||
85
mac/Sources/CodeBurnMenubar/Views/SectionCaption.swift
Normal file
85
mac/Sources/CodeBurnMenubar/Views/SectionCaption.swift
Normal file
|
|
@ -0,0 +1,85 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Small section label: a tiny accent dot followed by muted caption text.
struct SectionCaption: View {
    let text: String

    /// The 3pt accent dot that precedes the caption.
    private var accentDot: some View {
        Circle()
            .fill(Theme.brandAccent.opacity(0.7))
            .frame(width: 3, height: 3)
    }

    var body: some View {
        HStack(spacing: 5) {
            accentDot
            Text(text)
                .font(.system(size: 11.5, weight: .medium))
                .foregroundStyle(.secondary)
                .tracking(-0.1)
        }
    }
}
|
||||||
|
|
||||||
|
/// Collapsible section shell with a clickable caption, optional inline trailing
/// view (e.g. column headers), and a chevron.
struct CollapsibleSection<Trailing: View, Content: View>: View {
    let caption: String
    @Binding var isExpanded: Bool
    let trailing: Trailing
    let content: Content

    init(
        caption: String,
        isExpanded: Binding<Bool>,
        @ViewBuilder trailing: () -> Trailing,
        @ViewBuilder content: () -> Content
    ) {
        self.caption = caption
        self._isExpanded = isExpanded
        self.trailing = trailing()
        self.content = content()
    }

    var body: some View {
        VStack(alignment: .leading, spacing: 7) {
            headerRow
            if isExpanded {
                content
                    .transition(.opacity)
            }
        }
        .padding(.horizontal, 14)
        .padding(.vertical, 11)
    }

    /// Caption row — accent dot + title, the trailing accessory, and a chevron
    /// that rotates when expanded. The entire row is one plain button that
    /// toggles expansion with a short fade.
    private var headerRow: some View {
        Button {
            withAnimation(.easeInOut(duration: 0.18)) {
                isExpanded.toggle()
            }
        } label: {
            HStack(spacing: 8) {
                HStack(spacing: 5) {
                    Circle()
                        .fill(Theme.brandAccent.opacity(0.7))
                        .frame(width: 3, height: 3)
                    Text(caption)
                        .font(.system(size: 11.5, weight: .medium))
                        .tracking(-0.1)
                }
                Spacer()
                trailing
                Image(systemName: "chevron.right")
                    .font(.system(size: 9, weight: .semibold))
                    .rotationEffect(.degrees(isExpanded ? 90 : 0))
                    .opacity(0.55)
            }
            .foregroundStyle(.secondary)
            .contentShape(Rectangle())
        }
        .buttonStyle(.plain)
    }
}
|
||||||
|
|
||||||
|
/// Convenience initializer for sections that have no trailing accessory;
/// forwards to the designated init with an EmptyView trailing slot.
extension CollapsibleSection where Trailing == EmptyView {
    init(
        caption: String,
        isExpanded: Binding<Bool>,
        @ViewBuilder content: () -> Content
    ) {
        self.init(caption: caption, isExpanded: isExpanded, trailing: { EmptyView() }, content: content)
    }
}
|
||||||
99
mac/Sources/CodeBurnMenubar/Views/SparklineView.swift
Normal file
99
mac/Sources/CodeBurnMenubar/Views/SparklineView.swift
Normal file
|
|
@ -0,0 +1,99 @@
|
||||||
|
import SwiftUI
|
||||||
|
|
||||||
|
/// Smoothed area sparkline for the daily-cost trend: a gradient fill under a
/// Catmull-Rom smoothed stroke, with the newest data point highlighted.
struct SparklineView: View {
    let points: [Double]

    var body: some View {
        GeometryReader { geo in
            let anchors = makePoints(in: geo.size)
            let curve = smoothPath(anchors)

            ZStack {
                // Gradient fill under the curve
                closedPath(curve, width: geo.size.width, height: geo.size.height)
                    .fill(
                        LinearGradient(
                            colors: [Theme.brandAccent.opacity(0.25), .clear],
                            startPoint: .top,
                            endPoint: .bottom
                        )
                    )

                // Smooth accent stroke
                curve.stroke(
                    Theme.brandAccent.opacity(0.85),
                    style: StrokeStyle(lineWidth: 1.6, lineCap: .round, lineJoin: .round)
                )

                // Highlighted current-day point
                if let newest = anchors.last {
                    Circle()
                        .fill(Theme.brandAccent)
                        .frame(width: 6, height: 6)
                        .overlay(
                            Circle()
                                .stroke(Color(NSColor.windowBackgroundColor).opacity(0.9), lineWidth: 1.3)
                        )
                        .position(newest)
                }
            }
        }
    }

    // MARK: - Geometry

    /// Maps the raw series onto view coordinates, normalised to the series
    /// min/max with 5pt vertical padding. Y is flipped so larger values sit
    /// higher; a flat series (range 0) is clamped to a 1-unit range.
    private func makePoints(in size: CGSize) -> [CGPoint] {
        guard !points.isEmpty else { return [] }
        let maxV = points.max() ?? 1
        let minV = points.min() ?? 0
        let range = max(maxV - minV, 1)
        let count = max(points.count - 1, 1)
        let topPad: CGFloat = 5
        let bottomPad: CGFloat = 5
        let usable = max(size.height - topPad - bottomPad, 1)

        return points.enumerated().map { idx, v in
            CGPoint(
                x: size.width * CGFloat(idx) / CGFloat(count),
                y: size.height - bottomPad - usable * CGFloat(v - minV) / CGFloat(range)
            )
        }
    }

    /// Catmull-Rom → cubic bezier. Standard smooth interpolation, no overshoot.
    private func smoothPath(_ pts: [CGPoint]) -> Path {
        var path = Path()
        guard pts.count >= 2 else { return path }
        path.move(to: pts[0])

        let tension: CGFloat = 0.5
        for i in 0..<(pts.count - 1) {
            // Neighbour points are clamped at the ends of the series.
            let p0 = i > 0 ? pts[i - 1] : pts[i]
            let p1 = pts[i]
            let p2 = pts[i + 1]
            let p3 = i + 2 < pts.count ? pts[i + 2] : p2

            path.addCurve(
                to: p2,
                control1: CGPoint(
                    x: p1.x + (p2.x - p0.x) * tension / 3,
                    y: p1.y + (p2.y - p0.y) * tension / 3
                ),
                control2: CGPoint(
                    x: p2.x - (p3.x - p1.x) * tension / 3,
                    y: p2.y - (p3.y - p1.y) * tension / 3
                )
            )
        }
        return path
    }

    /// Close the path along the bottom to form a fill region.
    private func closedPath(_ line: Path, width: CGFloat, height: CGFloat) -> Path {
        var region = line
        region.addLine(to: CGPoint(x: width, y: height))
        region.addLine(to: CGPoint(x: 0, y: height))
        region.closeSubpath()
        return region
    }
}
|
||||||
158
mac/Tests/CodeBurnMenubarTests/CapacityEstimatorTests.swift
Normal file
158
mac/Tests/CodeBurnMenubarTests/CapacityEstimatorTests.swift
Normal file
|
|
@ -0,0 +1,158 @@
|
||||||
|
import Foundation
|
||||||
|
import Testing
|
||||||
|
@testable import CodeBurnMenubar
|
||||||
|
|
||||||
|
// Fixed reference "now" so age-based recency weighting in the estimator is
// deterministic across test runs.
private let now = Date(timeIntervalSince1970: 1_734_000_000)

/// Builds a CapacitySnapshot captured `ageDays` days before the fixed `now`.
private func snap(_ percent: Double, _ tokens: Double, ageDays: Double = 0) -> CapacitySnapshot {
    CapacitySnapshot(
        percent: percent,
        effectiveTokens: tokens,
        capturedAt: now.addingTimeInterval(-ageDays * 86400)
    )
}
|
||||||
|
|
||||||
|
/// Input gates: the estimator must refuse to extrapolate from data that is
/// empty, too small, or too narrow a percent range.
@Suite("CapacityEstimator -- gating")
struct CapacityEstimatorGatingTests {
    @Test("returns nil with no snapshots")
    func emptyReturnsNil() {
        #expect(CapacityEstimator.estimate([], asOf: now) == nil)
    }

    @Test("returns nil with fewer than 5 snapshots")
    func tooFewReturnsNil() {
        // Four snapshots — one short of the minimum sample gate.
        let snaps = (1...4).map { snap(Double($0 * 10), Double($0) * 100_000) }
        #expect(CapacityEstimator.estimate(snaps, asOf: now) == nil)
    }

    @Test("returns nil when percent range is below 15 points")
    func tooNarrowReturnsNil() {
        // Six samples but only a 10-point percent spread (40 → 50): too
        // narrow to extrapolate the capacity line with any confidence.
        let snaps = [
            snap(40, 4_000_000),
            snap(42, 4_200_000),
            snap(44, 4_400_000),
            snap(46, 4_600_000),
            snap(48, 4_800_000),
            snap(50, 5_000_000),
        ]
        #expect(CapacityEstimator.estimate(snaps, asOf: now) == nil)
    }
}
|
||||||
|
|
||||||
|
/// Recovery accuracy: the estimator should reconstruct the true capacity from
/// both noise-free and moderately noisy snapshot sets.
@Suite("CapacityEstimator -- recovery")
struct CapacityEstimatorRecoveryTests {
    @Test("recovers capacity from 10 noise-free snapshots within 0.5%")
    func recoverFromCleanData() {
        let trueCapacity: Double = 10_000_000
        let percents = [5.0, 12, 20, 28, 35, 47, 55, 68, 80, 92]
        // Points lie exactly on the line tokens = percent/100 * capacity.
        let snaps = percents.map { p in snap(p, p / 100 * trueCapacity) }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        #expect(est!.capacity > trueCapacity * 0.995)
        #expect(est!.capacity < trueCapacity * 1.005)
        // 10 perfect samples is below the solid sample threshold (15) but easily medium.
        #expect(est!.confidence == .medium || est!.confidence == .solid)
    }

    @Test("recovers capacity within 5% from 30 noisy snapshots")
    func recoverFromNoisyData() {
        let trueCapacity: Double = 8_000_000
        var rng = LinearCongruentialGenerator(seed: 42)
        let snaps: [CapacitySnapshot] = (0..<30).map { i in
            let p = 5.0 + Double(i) * 3.0 // 5..92, spanning enough
            let noise = (rng.nextDouble() - 0.5) * 0.10 // ±5%
            let tokens = (p / 100) * trueCapacity * (1 + noise)
            return snap(p, tokens)
        }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        let ratio = est!.capacity / trueCapacity
        #expect(ratio > 0.95 && ratio < 1.05)
        #expect(est!.confidence == .solid || est!.confidence == .medium)
    }
}
|
||||||
|
|
||||||
|
/// Confidence tiers: clean wide-range data should rate at least medium;
/// heavily noisy small samples should fall to low.
@Suite("CapacityEstimator -- confidence tiers")
struct CapacityEstimatorConfidenceTests {
    @Test("six clean snapshots span sufficient range -> at least medium")
    func sixCleanSnapshotsMedium() {
        let trueCapacity: Double = 5_000_000
        // Wide percent spread (5 → 88) with zero noise.
        let percents = [5.0, 18, 32, 51, 70, 88]
        let snaps = percents.map { p in snap(p, p / 100 * trueCapacity) }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        #expect(est!.confidence == .medium || est!.confidence == .solid)
    }

    @Test("noisy small-sample data falls to low confidence")
    func noisySmallSampleLow() {
        let trueCapacity: Double = 5_000_000
        var rng = LinearCongruentialGenerator(seed: 7)
        let percents = [5.0, 22, 40, 60, 80, 95]
        let snaps: [CapacitySnapshot] = percents.map { p in
            let noise = (rng.nextDouble() - 0.5) * 1.6 // ±80% noise -> drops R^2 below medium gate
            return snap(p, p / 100 * trueCapacity * (1 + noise))
        }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        #expect(est!.confidence == .low)
    }
}
|
||||||
|
|
||||||
|
/// Recency weighting: newer snapshots should dominate when old and new data
/// imply different capacities.
@Suite("CapacityEstimator -- recency weighting")
struct CapacityEstimatorRecencyTests {
    @Test("recent snapshots dominate over old ones with different capacity")
    func recencyShiftsEstimate() {
        // Old data: capacity = 5M (45 days ago)
        // New data: capacity = 10M (today)
        // With 30-day half-life, recent data should win.
        let oldSnaps = (0..<10).map { i -> CapacitySnapshot in
            let p = 10.0 + Double(i) * 8
            return snap(p, p / 100 * 5_000_000, ageDays: 45)
        }
        let newSnaps = (0..<10).map { i -> CapacitySnapshot in
            let p = 10.0 + Double(i) * 8
            return snap(p, p / 100 * 10_000_000, ageDays: 1)
        }
        let est = CapacityEstimator.estimate(oldSnaps + newSnaps, asOf: now)
        #expect(est != nil)
        // Recent capacity is 10M; estimate should be closer to 10M than 5M.
        #expect(est!.capacity > 7_500_000)
    }
}
|
||||||
|
|
||||||
|
/// Non-linearity detection: a knee in the percent→tokens relationship should
/// set the warning flag; clean linear data should not.
@Suite("CapacityEstimator -- non-linearity")
struct CapacityEstimatorNonLinearityTests {
    @Test("flags non-linearity when residuals show systematic sign pattern")
    func detectsKneePattern() {
        // Data follows a knee: linear up to 60%, then flatter (Anthropic capping).
        let snaps: [CapacitySnapshot] = (0..<20).map { i in
            let p = 5.0 + Double(i) * 5
            let tokens: Double = p < 60 ? p / 100 * 8_000_000 : 0.6 * 8_000_000 + (p - 60) / 100 * 4_000_000
            return snap(p, tokens)
        }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        #expect(est!.nonLinearityWarning == true)
    }

    @Test("does not flag clean linear data")
    func cleanLinearNoFlag() {
        let trueCapacity: Double = 6_000_000
        // Perfectly linear 5..90 in 5-point steps: residuals carry no pattern.
        let percents = stride(from: 5.0, to: 95.0, by: 5.0).map { $0 }
        let snaps = percents.map { p in snap(p, p / 100 * trueCapacity) }
        let est = CapacityEstimator.estimate(snaps, asOf: now)
        #expect(est != nil)
        #expect(est!.nonLinearityWarning == false)
    }
}
|
||||||
|
|
||||||
|
// Lightweight deterministic RNG for reproducible noise in tests.
struct LinearCongruentialGenerator {
    // Knuth MMIX LCG multiplier / increment.
    private static let multiplier: UInt64 = 6364136223846793005
    private static let increment: UInt64 = 1442695040888963407

    private var state: UInt64

    init(seed: UInt64) {
        state = seed
    }

    /// Next value in [0, 1), taken from the top 53 bits of the state
    /// (the low bits of an LCG have poor statistical quality).
    mutating func nextDouble() -> Double {
        state = state &* Self.multiplier &+ Self.increment
        return Double(state >> 11) / Double(1 << 53)
    }
}
|
||||||
217
src/cli.ts
217
src/cli.ts
|
|
@ -1,13 +1,17 @@
|
||||||
import { Command } from 'commander'
|
import { Command } from 'commander'
|
||||||
|
import { installMenubarApp } from './menubar-installer.js'
|
||||||
import { exportCsv, exportJson, type PeriodExport } from './export.js'
|
import { exportCsv, exportJson, type PeriodExport } from './export.js'
|
||||||
import { loadPricing } from './models.js'
|
import { loadPricing } from './models.js'
|
||||||
import { parseAllSessions, filterProjectsByName } from './parser.js'
|
import { parseAllSessions, filterProjectsByName } from './parser.js'
|
||||||
import { convertCost } from './currency.js'
|
import { convertCost } from './currency.js'
|
||||||
import { renderStatusBar } from './format.js'
|
import { renderStatusBar } from './format.js'
|
||||||
import { installMenubar, renderMenubarFormat, type PeriodData, type ProviderCost, uninstallMenubar } from './menubar.js'
|
import { type PeriodData, type ProviderCost } from './menubar-json.js'
|
||||||
|
import { buildMenubarPayload } from './menubar-json.js'
|
||||||
|
import { addNewDays, getDaysInRange, loadDailyCache, saveDailyCache, withDailyCacheLock } from './daily-cache.js'
|
||||||
|
import { aggregateProjectsIntoDays, buildPeriodDataFromDays } from './day-aggregator.js'
|
||||||
import { CATEGORY_LABELS, type DateRange, type ProjectSummary, type TaskCategory } from './types.js'
|
import { CATEGORY_LABELS, type DateRange, type ProjectSummary, type TaskCategory } from './types.js'
|
||||||
import { renderDashboard } from './dashboard.js'
|
import { renderDashboard } from './dashboard.js'
|
||||||
import { runOptimize } from './optimize.js'
|
import { runOptimize, scanAndDetect } from './optimize.js'
|
||||||
import { getAllProviders } from './providers/index.js'
|
import { getAllProviders } from './providers/index.js'
|
||||||
import { readConfig, saveConfig, getConfigFilePath } from './config.js'
|
import { readConfig, saveConfig, getConfigFilePath } from './config.js'
|
||||||
import { createRequire } from 'node:module'
|
import { createRequire } from 'node:module'
|
||||||
|
|
@ -16,6 +20,13 @@ const require = createRequire(import.meta.url)
|
||||||
const { version } = require('../package.json')
|
const { version } = require('../package.json')
|
||||||
import { loadCurrency, getCurrency, isValidCurrencyCode } from './currency.js'
|
import { loadCurrency, getCurrency, isValidCurrencyCode } from './currency.js'
|
||||||
|
|
||||||
|
const MS_PER_DAY = 24 * 60 * 60 * 1000
|
||||||
|
const BACKFILL_DAYS = 365
|
||||||
|
|
||||||
|
function toDateString(date: Date): string {
|
||||||
|
return date.toISOString().slice(0, 10)
|
||||||
|
}
|
||||||
|
|
||||||
function getDateRange(period: string): { range: DateRange; label: string } {
|
function getDateRange(period: string): { range: DateRange; label: string } {
|
||||||
const now = new Date()
|
const now = new Date()
|
||||||
const end = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 23, 59, 59, 999)
|
const end = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 23, 59, 59, 999)
|
||||||
|
|
@ -43,7 +54,11 @@ function getDateRange(period: string): { range: DateRange; label: string } {
|
||||||
return { range: { start, end }, label: 'Last 30 Days' }
|
return { range: { start, end }, label: 'Last 30 Days' }
|
||||||
}
|
}
|
||||||
case 'all': {
|
case 'all': {
|
||||||
return { range: { start: new Date(0), end }, label: 'All Time' }
|
// Cap "All Time" to the last 6 months. Older data is rarely actionable for a cost
|
||||||
|
// tracker and keeps the parse path bounded so providers like Codex/Cursor with sparse
|
||||||
|
// data still load in seconds.
|
||||||
|
const start = new Date(now.getFullYear(), now.getMonth() - 6, now.getDate())
|
||||||
|
return { range: { start, end }, label: 'Last 6 months' }
|
||||||
}
|
}
|
||||||
default: {
|
default: {
|
||||||
const start = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7)
|
const start = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7)
|
||||||
|
|
@ -98,8 +113,10 @@ function buildJsonReport(projects: ProjectSummary[], period: string, periodKey:
|
||||||
const totalOutput = sessions.reduce((s, sess) => s + sess.totalOutputTokens, 0)
|
const totalOutput = sessions.reduce((s, sess) => s + sess.totalOutputTokens, 0)
|
||||||
const totalCacheRead = sessions.reduce((s, sess) => s + sess.totalCacheReadTokens, 0)
|
const totalCacheRead = sessions.reduce((s, sess) => s + sess.totalCacheReadTokens, 0)
|
||||||
const totalCacheWrite = sessions.reduce((s, sess) => s + sess.totalCacheWriteTokens, 0)
|
const totalCacheWrite = sessions.reduce((s, sess) => s + sess.totalCacheWriteTokens, 0)
|
||||||
const allInput = totalInput + totalCacheRead + totalCacheWrite
|
// Match src/menubar-json.ts:cacheHitPercent: reads over reads+fresh-input. cache_write
|
||||||
const cacheHitPercent = allInput > 0 ? Math.round((totalCacheRead / allInput) * 1000) / 10 : 0
|
// counts tokens being stored, not served, so it doesn't belong in the denominator.
|
||||||
|
const cacheHitDenom = totalInput + totalCacheRead
|
||||||
|
const cacheHitPercent = cacheHitDenom > 0 ? Math.round((totalCacheRead / cacheHitDenom) * 1000) / 10 : 0
|
||||||
|
|
||||||
const dailyMap: Record<string, { cost: number; calls: number }> = {}
|
const dailyMap: Record<string, { cost: number; calls: number }> = {}
|
||||||
for (const sess of sessions) {
|
for (const sess of sessions) {
|
||||||
|
|
@ -262,6 +279,7 @@ function buildPeriodData(label: string, projects: ProjectSummary[]): PeriodData
|
||||||
label,
|
label,
|
||||||
cost: projects.reduce((s, p) => s + p.totalCostUSD, 0),
|
cost: projects.reduce((s, p) => s + p.totalCostUSD, 0),
|
||||||
calls: projects.reduce((s, p) => s + p.totalApiCalls, 0),
|
calls: projects.reduce((s, p) => s + p.totalApiCalls, 0),
|
||||||
|
sessions: projects.reduce((s, p) => s + p.sessions.length, 0),
|
||||||
inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens,
|
inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens,
|
||||||
categories: Object.entries(catTotals)
|
categories: Object.entries(catTotals)
|
||||||
.sort(([, a], [, b]) => b.cost - a.cost)
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
|
|
@ -275,27 +293,148 @@ function buildPeriodData(label: string, projects: ProjectSummary[]): PeriodData
|
||||||
program
|
program
|
||||||
.command('status')
|
.command('status')
|
||||||
.description('Compact status output (today + week + month)')
|
.description('Compact status output (today + week + month)')
|
||||||
.option('--format <format>', 'Output format: terminal, menubar, json', 'terminal')
|
.option('--format <format>', 'Output format: terminal, menubar-json, json', 'terminal')
|
||||||
.option('--provider <provider>', 'Filter by provider: all, claude, codex, cursor', 'all')
|
.option('--provider <provider>', 'Filter by provider: all, claude, codex, cursor', 'all')
|
||||||
.option('--project <name>', 'Show only projects matching name (repeatable)', collect, [])
|
.option('--project <name>', 'Show only projects matching name (repeatable)', collect, [])
|
||||||
.option('--exclude <name>', 'Exclude projects matching name (repeatable)', collect, [])
|
.option('--exclude <name>', 'Exclude projects matching name (repeatable)', collect, [])
|
||||||
|
.option('--period <period>', 'Primary period for menubar-json: today, week, 30days, month, all', 'today')
|
||||||
|
.option('--no-optimize', 'Skip optimize findings (menubar-json only, faster)')
|
||||||
.action(async (opts) => {
|
.action(async (opts) => {
|
||||||
await loadPricing()
|
await loadPricing()
|
||||||
const pf = opts.provider
|
const pf = opts.provider
|
||||||
const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude)
|
const fp = (p: ProjectSummary[]) => filterProjectsByName(p, opts.project, opts.exclude)
|
||||||
if (opts.format === 'menubar') {
|
if (opts.format === 'menubar-json') {
|
||||||
const todayRange = getDateRange('today').range
|
const periodInfo = getDateRange(opts.period)
|
||||||
const todayData = buildPeriodData('Today', fp(await parseAllSessions(todayRange, pf)))
|
const now = new Date()
|
||||||
const weekData = buildPeriodData('7 Days', fp(await parseAllSessions(getDateRange('week').range, pf)))
|
const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate())
|
||||||
const thirtyDayData = buildPeriodData('30 Days', fp(await parseAllSessions(getDateRange('30days').range, pf)))
|
const yesterdayEnd = new Date(todayStart.getTime() - 1)
|
||||||
const monthData = buildPeriodData('Month', fp(await parseAllSessions(getDateRange('month').range, pf)))
|
const yesterdayStr = toDateString(new Date(todayStart.getTime() - MS_PER_DAY))
|
||||||
const todayProviders: ProviderCost[] = []
|
const isAllProviders = pf === 'all'
|
||||||
for (const p of await getAllProviders()) {
|
|
||||||
const data = fp(await parseAllSessions(todayRange, p.name))
|
// The daily cache is provider-agnostic: always backfill it from .all so subsequent
|
||||||
const cost = data.reduce((s, proj) => s + proj.totalCostUSD, 0)
|
// provider-filtered reads can derive per-provider cost+calls from DailyEntry.providers.
|
||||||
if (cost > 0) todayProviders.push({ name: p.displayName, cost })
|
const cache = await withDailyCacheLock(async () => {
|
||||||
|
let c = await loadDailyCache()
|
||||||
|
const gapStart = c.lastComputedDate
|
||||||
|
? new Date(new Date(`${c.lastComputedDate}T00:00:00.000Z`).getTime() + MS_PER_DAY)
|
||||||
|
: new Date(todayStart.getTime() - BACKFILL_DAYS * MS_PER_DAY)
|
||||||
|
|
||||||
|
if (gapStart.getTime() <= yesterdayEnd.getTime()) {
|
||||||
|
const gapRange: DateRange = { start: gapStart, end: yesterdayEnd }
|
||||||
|
const gapProjects = filterProjectsByName(await parseAllSessions(gapRange, 'all'), opts.project, opts.exclude)
|
||||||
|
const gapDays = aggregateProjectsIntoDays(gapProjects)
|
||||||
|
c = addNewDays(c, gapDays, yesterdayStr)
|
||||||
|
await saveDailyCache(c)
|
||||||
|
}
|
||||||
|
return c
|
||||||
|
})
|
||||||
|
|
||||||
|
// CURRENT PERIOD DATA
|
||||||
|
// - .all provider: assemble from cache + today (fast)
|
||||||
|
// - specific provider: parse the period range with provider filter (correct, but slower)
|
||||||
|
let currentData: PeriodData
|
||||||
|
let scanProjects: ProjectSummary[]
|
||||||
|
let scanRange: DateRange
|
||||||
|
|
||||||
|
if (isAllProviders) {
|
||||||
|
const todayRange: DateRange = { start: todayStart, end: now }
|
||||||
|
const todayProjects = fp(await parseAllSessions(todayRange, 'all'))
|
||||||
|
const todayDays = aggregateProjectsIntoDays(todayProjects)
|
||||||
|
const rangeStartStr = toDateString(periodInfo.range.start)
|
||||||
|
const rangeEndStr = toDateString(periodInfo.range.end)
|
||||||
|
const historicalDays = getDaysInRange(cache, rangeStartStr, yesterdayStr)
|
||||||
|
const todayInRange = todayDays.filter(d => d.date >= rangeStartStr && d.date <= rangeEndStr)
|
||||||
|
const allDays = [...historicalDays, ...todayInRange].sort((a, b) => a.date.localeCompare(b.date))
|
||||||
|
currentData = buildPeriodDataFromDays(allDays, periodInfo.label)
|
||||||
|
scanProjects = todayProjects
|
||||||
|
scanRange = todayRange
|
||||||
|
} else {
|
||||||
|
const projects = fp(await parseAllSessions(periodInfo.range, pf))
|
||||||
|
currentData = buildPeriodData(periodInfo.label, projects)
|
||||||
|
scanProjects = projects
|
||||||
|
scanRange = periodInfo.range
|
||||||
}
|
}
|
||||||
console.log(renderMenubarFormat(todayData, weekData, thirtyDayData, monthData, todayProviders))
|
|
||||||
|
// PROVIDERS
|
||||||
|
// For .all: enumerate every provider with cost across the period (from cache) + installed-but-zero.
|
||||||
|
// For specific: just this single provider with its scoped cost.
|
||||||
|
const allProviders = await getAllProviders()
|
||||||
|
const displayNameByName = new Map(allProviders.map(p => [p.name, p.displayName]))
|
||||||
|
const providers: ProviderCost[] = []
|
||||||
|
if (isAllProviders) {
|
||||||
|
const todayRangeForProviders: DateRange = { start: todayStart, end: now }
|
||||||
|
const todayDaysForProviders = aggregateProjectsIntoDays(fp(await parseAllSessions(todayRangeForProviders, 'all')))
|
||||||
|
const rangeStartStr = toDateString(periodInfo.range.start)
|
||||||
|
const allDaysForProviders = [
|
||||||
|
...getDaysInRange(cache, rangeStartStr, yesterdayStr),
|
||||||
|
...todayDaysForProviders.filter(d => d.date >= rangeStartStr),
|
||||||
|
]
|
||||||
|
const providerTotals: Record<string, number> = {}
|
||||||
|
for (const d of allDaysForProviders) {
|
||||||
|
for (const [name, p] of Object.entries(d.providers)) {
|
||||||
|
providerTotals[name] = (providerTotals[name] ?? 0) + p.cost
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const [name, cost] of Object.entries(providerTotals)) {
|
||||||
|
providers.push({ name: displayNameByName.get(name) ?? name, cost })
|
||||||
|
}
|
||||||
|
for (const p of allProviders) {
|
||||||
|
if (providers.some(pc => pc.name === p.displayName)) continue
|
||||||
|
const sources = await p.discoverSessions()
|
||||||
|
if (sources.length > 0) providers.push({ name: p.displayName, cost: 0 })
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const display = displayNameByName.get(pf) ?? pf
|
||||||
|
providers.push({ name: display, cost: currentData.cost })
|
||||||
|
}
|
||||||
|
|
||||||
|
// DAILY HISTORY (last 365 days)
|
||||||
|
// Cache stores per-provider cost+calls per day in DailyEntry.providers, so we can derive
|
||||||
|
// a provider-filtered history without re-parsing. Tokens aren't broken down per provider
|
||||||
|
// in the cache, so the filtered view shows zero tokens (heatmap/trend still works on cost).
|
||||||
|
const historyStartStr = toDateString(new Date(todayStart.getTime() - BACKFILL_DAYS * MS_PER_DAY))
|
||||||
|
const allCacheDays = getDaysInRange(cache, historyStartStr, yesterdayStr)
|
||||||
|
const allTodayDaysForHistory = aggregateProjectsIntoDays(fp(await parseAllSessions({ start: todayStart, end: now }, 'all')))
|
||||||
|
const fullHistory = [...allCacheDays, ...allTodayDaysForHistory]
|
||||||
|
const dailyHistory = fullHistory.map(d => {
|
||||||
|
if (isAllProviders) {
|
||||||
|
const topModels = Object.entries(d.models)
|
||||||
|
.filter(([name]) => name !== '<synthetic>')
|
||||||
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
|
.slice(0, 5)
|
||||||
|
.map(([name, m]) => ({
|
||||||
|
name,
|
||||||
|
cost: m.cost,
|
||||||
|
calls: m.calls,
|
||||||
|
inputTokens: m.inputTokens,
|
||||||
|
outputTokens: m.outputTokens,
|
||||||
|
}))
|
||||||
|
return {
|
||||||
|
date: d.date,
|
||||||
|
cost: d.cost,
|
||||||
|
calls: d.calls,
|
||||||
|
inputTokens: d.inputTokens,
|
||||||
|
outputTokens: d.outputTokens,
|
||||||
|
cacheReadTokens: d.cacheReadTokens,
|
||||||
|
cacheWriteTokens: d.cacheWriteTokens,
|
||||||
|
topModels,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const prov = d.providers[pf] ?? { calls: 0, cost: 0 }
|
||||||
|
return {
|
||||||
|
date: d.date,
|
||||||
|
cost: prov.cost,
|
||||||
|
calls: prov.calls,
|
||||||
|
inputTokens: 0,
|
||||||
|
outputTokens: 0,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
topModels: [],
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const optimize = opts.optimize === false ? null : await scanAndDetect(scanProjects, scanRange)
|
||||||
|
console.log(JSON.stringify(buildMenubarPayload(currentData, providers, optimize, dailyHistory)))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -374,29 +513,37 @@ program
|
||||||
const outputPath = opts.output ?? `${defaultName}.${opts.format}`
|
const outputPath = opts.output ?? `${defaultName}.${opts.format}`
|
||||||
|
|
||||||
let savedPath: string
|
let savedPath: string
|
||||||
if (opts.format === 'json') {
|
try {
|
||||||
savedPath = await exportJson(periods, outputPath)
|
if (opts.format === 'json') {
|
||||||
} else {
|
savedPath = await exportJson(periods, outputPath)
|
||||||
savedPath = await exportCsv(periods, outputPath)
|
} else {
|
||||||
|
savedPath = await exportCsv(periods, outputPath)
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// Protection guards in export.ts (symlink refusal, non-codeburn folder refusal, etc.)
|
||||||
|
// throw with a user-readable message. Print just the message, not the stack, so the CLI
|
||||||
|
// doesn't spray its internals at the user.
|
||||||
|
const message = err instanceof Error ? err.message : String(err)
|
||||||
|
console.error(`\n Export failed: ${message}\n`)
|
||||||
|
process.exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`\n Exported (Today + 7 Days + 30 Days) to: ${savedPath}\n`)
|
console.log(`\n Exported (Today + 7 Days + 30 Days) to: ${savedPath}\n`)
|
||||||
})
|
})
|
||||||
|
|
||||||
program
|
program
|
||||||
.command('install-menubar')
|
.command('menubar')
|
||||||
.description('Install macOS menu bar plugin (SwiftBar/xbar)')
|
.description('Install and launch the macOS menubar app (one command, no clone)')
|
||||||
.action(async () => {
|
.option('--force', 'Reinstall even if an older copy is already in ~/Applications')
|
||||||
const result = await installMenubar()
|
.action(async (opts: { force?: boolean }) => {
|
||||||
console.log(result)
|
try {
|
||||||
})
|
const result = await installMenubarApp({ force: opts.force })
|
||||||
|
console.log(`\n Ready. ${result.installedPath}\n`)
|
||||||
program
|
} catch (err) {
|
||||||
.command('uninstall-menubar')
|
const message = err instanceof Error ? err.message : String(err)
|
||||||
.description('Remove macOS menu bar plugin')
|
console.error(`\n Menubar install failed: ${message}\n`)
|
||||||
.action(async () => {
|
process.exit(1)
|
||||||
const result = await uninstallMenubar()
|
}
|
||||||
console.log(result)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
program
|
program
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,17 @@ type CurrencyState = {
|
||||||
|
|
||||||
const CACHE_TTL_MS = 24 * 60 * 60 * 1000
|
const CACHE_TTL_MS = 24 * 60 * 60 * 1000
|
||||||
const FRANKFURTER_URL = 'https://api.frankfurter.app/latest?from=USD&to='
|
const FRANKFURTER_URL = 'https://api.frankfurter.app/latest?from=USD&to='
|
||||||
|
// Defensive bounds on any fetched FX rate. Outside this band the rate is either a parser bug
|
||||||
|
// or a tampered Frankfurter response, and we refuse to multiply it into displayed costs.
|
||||||
|
const MIN_VALID_FX_RATE = 0.0001
|
||||||
|
const MAX_VALID_FX_RATE = 1_000_000
|
||||||
|
|
||||||
|
function isValidRate(value: unknown): value is number {
|
||||||
|
return typeof value === 'number'
|
||||||
|
&& Number.isFinite(value)
|
||||||
|
&& value >= MIN_VALID_FX_RATE
|
||||||
|
&& value <= MAX_VALID_FX_RATE
|
||||||
|
}
|
||||||
|
|
||||||
let active: CurrencyState = { code: 'USD', rate: 1, symbol: '$' }
|
let active: CurrencyState = { code: 'USD', rate: 1, symbol: '$' }
|
||||||
|
|
||||||
|
|
@ -54,18 +65,22 @@ function getRateCachePath(): string {
|
||||||
async function fetchRate(code: string): Promise<number> {
|
async function fetchRate(code: string): Promise<number> {
|
||||||
const response = await fetch(`${FRANKFURTER_URL}${code}`)
|
const response = await fetch(`${FRANKFURTER_URL}${code}`)
|
||||||
if (!response.ok) throw new Error(`HTTP ${response.status}`)
|
if (!response.ok) throw new Error(`HTTP ${response.status}`)
|
||||||
const data = await response.json() as { rates: Record<string, number> }
|
const data = await response.json() as { rates?: Record<string, unknown> }
|
||||||
const rate = data.rates[code]
|
const rate = data.rates?.[code]
|
||||||
if (!rate) throw new Error(`No rate returned for ${code}`)
|
if (!isValidRate(rate)) throw new Error(`Invalid rate returned for ${code}`)
|
||||||
return rate
|
return rate
|
||||||
}
|
}
|
||||||
|
|
||||||
async function loadCachedRate(code: string): Promise<number | null> {
|
async function loadCachedRate(code: string): Promise<number | null> {
|
||||||
try {
|
try {
|
||||||
const raw = await readFile(getRateCachePath(), 'utf-8')
|
const raw = await readFile(getRateCachePath(), 'utf-8')
|
||||||
const cached = JSON.parse(raw) as { timestamp: number; code: string; rate: number }
|
const cached = JSON.parse(raw) as Partial<{ timestamp: number; code: string; rate: number }>
|
||||||
if (cached.code !== code) return null
|
// Validate every field -- a tampered cache file could set rate to a string, null, or
|
||||||
|
// Infinity and break downstream math silently.
|
||||||
|
if (typeof cached.code !== 'string' || cached.code !== code) return null
|
||||||
|
if (typeof cached.timestamp !== 'number' || !Number.isFinite(cached.timestamp)) return null
|
||||||
if (Date.now() - cached.timestamp > CACHE_TTL_MS) return null
|
if (Date.now() - cached.timestamp > CACHE_TTL_MS) return null
|
||||||
|
if (!isValidRate(cached.rate)) return null
|
||||||
return cached.rate
|
return cached.rate
|
||||||
} catch {
|
} catch {
|
||||||
return null
|
return null
|
||||||
|
|
|
||||||
118
src/daily-cache.ts
Normal file
118
src/daily-cache.ts
Normal file
|
|
@ -0,0 +1,118 @@
|
||||||
|
import { randomBytes } from 'crypto'
|
||||||
|
import { existsSync } from 'fs'
|
||||||
|
import { mkdir, open, readFile, rename, unlink } from 'fs/promises'
|
||||||
|
import { homedir } from 'os'
|
||||||
|
import { join } from 'path'
|
||||||
|
|
||||||
|
export const DAILY_CACHE_VERSION = 2
|
||||||
|
const DAILY_CACHE_FILENAME = 'daily-cache.json'
|
||||||
|
|
||||||
|
export type DailyEntry = {
|
||||||
|
date: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
sessions: number
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
cacheReadTokens: number
|
||||||
|
cacheWriteTokens: number
|
||||||
|
editTurns: number
|
||||||
|
oneShotTurns: number
|
||||||
|
models: Record<string, {
|
||||||
|
calls: number
|
||||||
|
cost: number
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
cacheReadTokens: number
|
||||||
|
cacheWriteTokens: number
|
||||||
|
}>
|
||||||
|
categories: Record<string, { turns: number; cost: number; editTurns: number; oneShotTurns: number }>
|
||||||
|
providers: Record<string, { calls: number; cost: number }>
|
||||||
|
}
|
||||||
|
|
||||||
|
export type DailyCache = {
|
||||||
|
version: number
|
||||||
|
lastComputedDate: string | null
|
||||||
|
days: DailyEntry[]
|
||||||
|
}
|
||||||
|
|
||||||
|
function getCacheDir(): string {
|
||||||
|
return process.env['CODEBURN_CACHE_DIR'] ?? join(homedir(), '.cache', 'codeburn')
|
||||||
|
}
|
||||||
|
|
||||||
|
function getCachePath(): string {
|
||||||
|
return join(getCacheDir(), DAILY_CACHE_FILENAME)
|
||||||
|
}
|
||||||
|
|
||||||
|
function emptyCache(): DailyCache {
|
||||||
|
return { version: DAILY_CACHE_VERSION, lastComputedDate: null, days: [] }
|
||||||
|
}
|
||||||
|
|
||||||
|
function isValidCache(parsed: unknown): parsed is DailyCache {
|
||||||
|
if (!parsed || typeof parsed !== 'object') return false
|
||||||
|
const c = parsed as Partial<DailyCache>
|
||||||
|
if (c.version !== DAILY_CACHE_VERSION) return false
|
||||||
|
if (!Array.isArray(c.days)) return false
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadDailyCache(): Promise<DailyCache> {
|
||||||
|
const path = getCachePath()
|
||||||
|
if (!existsSync(path)) return emptyCache()
|
||||||
|
try {
|
||||||
|
const raw = await readFile(path, 'utf-8')
|
||||||
|
const parsed: unknown = JSON.parse(raw)
|
||||||
|
if (!isValidCache(parsed)) return emptyCache()
|
||||||
|
return parsed
|
||||||
|
} catch {
|
||||||
|
return emptyCache()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveDailyCache(cache: DailyCache): Promise<void> {
|
||||||
|
const dir = getCacheDir()
|
||||||
|
if (!existsSync(dir)) await mkdir(dir, { recursive: true })
|
||||||
|
const finalPath = getCachePath()
|
||||||
|
const tempPath = `${finalPath}.${randomBytes(8).toString('hex')}.tmp`
|
||||||
|
const payload = JSON.stringify(cache)
|
||||||
|
const handle = await open(tempPath, 'w', 0o600)
|
||||||
|
try {
|
||||||
|
await handle.writeFile(payload, { encoding: 'utf-8' })
|
||||||
|
await handle.sync()
|
||||||
|
} finally {
|
||||||
|
await handle.close()
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await rename(tempPath, finalPath)
|
||||||
|
} catch (err) {
|
||||||
|
try { await unlink(tempPath) } catch { /* ignore */ }
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function addNewDays(cache: DailyCache, incoming: DailyEntry[], newestDate: string): DailyCache {
|
||||||
|
const seen = new Set(cache.days.map(d => d.date))
|
||||||
|
const merged = [...cache.days]
|
||||||
|
for (const day of incoming) {
|
||||||
|
if (seen.has(day.date)) continue
|
||||||
|
seen.add(day.date)
|
||||||
|
merged.push(day)
|
||||||
|
}
|
||||||
|
merged.sort((a, b) => a.date.localeCompare(b.date))
|
||||||
|
const nextLast = cache.lastComputedDate && cache.lastComputedDate > newestDate
|
||||||
|
? cache.lastComputedDate
|
||||||
|
: newestDate
|
||||||
|
return { version: DAILY_CACHE_VERSION, lastComputedDate: nextLast, days: merged }
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDaysInRange(cache: DailyCache, start: string, end: string): DailyEntry[] {
|
||||||
|
return cache.days.filter(d => d.date >= start && d.date <= end)
|
||||||
|
}
|
||||||
|
|
||||||
|
let lockChain: Promise<unknown> = Promise.resolve()
|
||||||
|
|
||||||
|
export function withDailyCacheLock<T>(fn: () => Promise<T>): Promise<T> {
|
||||||
|
const next = lockChain.then(() => fn())
|
||||||
|
lockChain = next.catch(() => undefined)
|
||||||
|
return next
|
||||||
|
}
|
||||||
142
src/day-aggregator.ts
Normal file
142
src/day-aggregator.ts
Normal file
|
|
@ -0,0 +1,142 @@
|
||||||
|
import type { DailyEntry } from './daily-cache.js'
|
||||||
|
import type { PeriodData } from './menubar-json.js'
|
||||||
|
import { CATEGORY_LABELS, type ProjectSummary, type TaskCategory } from './types.js'
|
||||||
|
|
||||||
|
function emptyEntry(date: string): DailyEntry {
|
||||||
|
return {
|
||||||
|
date,
|
||||||
|
cost: 0,
|
||||||
|
calls: 0,
|
||||||
|
sessions: 0,
|
||||||
|
inputTokens: 0,
|
||||||
|
outputTokens: 0,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
editTurns: 0,
|
||||||
|
oneShotTurns: 0,
|
||||||
|
models: {},
|
||||||
|
categories: {},
|
||||||
|
providers: {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function dateKey(iso: string): string {
|
||||||
|
return iso.slice(0, 10)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function aggregateProjectsIntoDays(projects: ProjectSummary[]): DailyEntry[] {
|
||||||
|
const byDate = new Map<string, DailyEntry>()
|
||||||
|
const ensure = (date: string): DailyEntry => {
|
||||||
|
let d = byDate.get(date)
|
||||||
|
if (!d) { d = emptyEntry(date); byDate.set(date, d) }
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const project of projects) {
|
||||||
|
for (const session of project.sessions) {
|
||||||
|
const sessionDate = dateKey(session.firstTimestamp)
|
||||||
|
ensure(sessionDate).sessions += 1
|
||||||
|
|
||||||
|
for (const turn of session.turns) {
|
||||||
|
if (turn.assistantCalls.length === 0) continue
|
||||||
|
const turnDate = dateKey(turn.assistantCalls[0]!.timestamp)
|
||||||
|
const turnDay = ensure(turnDate)
|
||||||
|
|
||||||
|
const editTurns = turn.hasEdits ? 1 : 0
|
||||||
|
const oneShotTurns = turn.hasEdits && turn.retries === 0 ? 1 : 0
|
||||||
|
const turnCost = turn.assistantCalls.reduce((s, c) => s + c.costUSD, 0)
|
||||||
|
|
||||||
|
turnDay.editTurns += editTurns
|
||||||
|
turnDay.oneShotTurns += oneShotTurns
|
||||||
|
|
||||||
|
const cat = turnDay.categories[turn.category] ?? { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 }
|
||||||
|
cat.turns += 1
|
||||||
|
cat.cost += turnCost
|
||||||
|
cat.editTurns += editTurns
|
||||||
|
cat.oneShotTurns += oneShotTurns
|
||||||
|
turnDay.categories[turn.category] = cat
|
||||||
|
|
||||||
|
for (const call of turn.assistantCalls) {
|
||||||
|
const callDate = dateKey(call.timestamp)
|
||||||
|
const callDay = ensure(callDate)
|
||||||
|
|
||||||
|
callDay.cost += call.costUSD
|
||||||
|
callDay.calls += 1
|
||||||
|
callDay.inputTokens += call.usage.inputTokens
|
||||||
|
callDay.outputTokens += call.usage.outputTokens
|
||||||
|
callDay.cacheReadTokens += call.usage.cacheReadInputTokens
|
||||||
|
callDay.cacheWriteTokens += call.usage.cacheCreationInputTokens
|
||||||
|
|
||||||
|
const model = callDay.models[call.model] ?? {
|
||||||
|
calls: 0, cost: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0,
|
||||||
|
cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
}
|
||||||
|
model.calls += 1
|
||||||
|
model.cost += call.costUSD
|
||||||
|
model.inputTokens += call.usage.inputTokens
|
||||||
|
model.outputTokens += call.usage.outputTokens
|
||||||
|
model.cacheReadTokens += call.usage.cacheReadInputTokens
|
||||||
|
model.cacheWriteTokens += call.usage.cacheCreationInputTokens
|
||||||
|
callDay.models[call.model] = model
|
||||||
|
|
||||||
|
const provider = callDay.providers[call.provider] ?? { calls: 0, cost: 0 }
|
||||||
|
provider.calls += 1
|
||||||
|
provider.cost += call.costUSD
|
||||||
|
callDay.providers[call.provider] = provider
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...byDate.values()].sort((a, b) => a.date.localeCompare(b.date))
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildPeriodDataFromDays(days: DailyEntry[], label: string): PeriodData {
|
||||||
|
let cost = 0, calls = 0, sessions = 0
|
||||||
|
let inputTokens = 0, outputTokens = 0, cacheReadTokens = 0, cacheWriteTokens = 0
|
||||||
|
const catTotals: Record<string, { turns: number; cost: number; editTurns: number; oneShotTurns: number }> = {}
|
||||||
|
const modelTotals: Record<string, { calls: number; cost: number }> = {}
|
||||||
|
|
||||||
|
for (const d of days) {
|
||||||
|
cost += d.cost
|
||||||
|
calls += d.calls
|
||||||
|
sessions += d.sessions
|
||||||
|
inputTokens += d.inputTokens
|
||||||
|
outputTokens += d.outputTokens
|
||||||
|
cacheReadTokens += d.cacheReadTokens
|
||||||
|
cacheWriteTokens += d.cacheWriteTokens
|
||||||
|
|
||||||
|
for (const [name, m] of Object.entries(d.models)) {
|
||||||
|
const acc = modelTotals[name] ?? { calls: 0, cost: 0 }
|
||||||
|
acc.calls += m.calls
|
||||||
|
acc.cost += m.cost
|
||||||
|
modelTotals[name] = acc
|
||||||
|
}
|
||||||
|
for (const [cat, c] of Object.entries(d.categories)) {
|
||||||
|
const acc = catTotals[cat] ?? { turns: 0, cost: 0, editTurns: 0, oneShotTurns: 0 }
|
||||||
|
acc.turns += c.turns
|
||||||
|
acc.cost += c.cost
|
||||||
|
acc.editTurns += c.editTurns
|
||||||
|
acc.oneShotTurns += c.oneShotTurns
|
||||||
|
catTotals[cat] = acc
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
label,
|
||||||
|
cost,
|
||||||
|
calls,
|
||||||
|
sessions,
|
||||||
|
inputTokens,
|
||||||
|
outputTokens,
|
||||||
|
cacheReadTokens,
|
||||||
|
cacheWriteTokens,
|
||||||
|
categories: Object.entries(catTotals)
|
||||||
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
|
.map(([cat, d]) => ({ name: CATEGORY_LABELS[cat as TaskCategory] ?? cat, ...d })),
|
||||||
|
models: Object.entries(modelTotals)
|
||||||
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
|
.map(([name, d]) => ({ name, ...d })),
|
||||||
|
}
|
||||||
|
}
|
||||||
310
src/export.ts
310
src/export.ts
|
|
@ -1,8 +1,8 @@
|
||||||
import { writeFile } from 'fs/promises'
|
import { writeFile, mkdir, readdir, stat, rm } from 'fs/promises'
|
||||||
import { resolve } from 'path'
|
import { dirname, join, resolve } from 'path'
|
||||||
|
|
||||||
import { CATEGORY_LABELS, type ProjectSummary, type TaskCategory } from './types.js'
|
import { CATEGORY_LABELS, type ProjectSummary, type TaskCategory } from './types.js'
|
||||||
import { getCostColumnHeader, convertCost } from './currency.js'
|
import { getCurrency, convertCost } from './currency.js'
|
||||||
|
|
||||||
function escCsv(s: string): string {
|
function escCsv(s: string): string {
|
||||||
const sanitized = /^[=+\-@]/.test(s) ? `'${s}` : s
|
const sanitized = /^[=+\-@]/.test(s) ? `'${s}` : s
|
||||||
|
|
@ -12,15 +12,47 @@ function escCsv(s: string): string {
|
||||||
return sanitized
|
return sanitized
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildDailyRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
type Row = Record<string, string | number>
|
||||||
const daily: Record<string, { cost: number; calls: number; input: number; output: number; cacheRead: number; cacheWrite: number }> = {}
|
|
||||||
|
|
||||||
|
function rowsToCsv(rows: Row[]): string {
|
||||||
|
if (rows.length === 0) return ''
|
||||||
|
const headers = Object.keys(rows[0])
|
||||||
|
const lines = [headers.map(escCsv).join(',')]
|
||||||
|
for (const row of rows) {
|
||||||
|
lines.push(headers.map(h => escCsv(String(row[h] ?? ''))).join(','))
|
||||||
|
}
|
||||||
|
return lines.join('\n') + '\n'
|
||||||
|
}
|
||||||
|
|
||||||
|
function round2(n: number): number {
|
||||||
|
return Math.round(n * 100) / 100
|
||||||
|
}
|
||||||
|
|
||||||
|
function pct(n: number, total: number): number {
|
||||||
|
return total > 0 ? round2((n / total) * 100) : 0
|
||||||
|
}
|
||||||
|
|
||||||
|
type DailyAgg = {
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
input: number
|
||||||
|
output: number
|
||||||
|
cacheRead: number
|
||||||
|
cacheWrite: number
|
||||||
|
sessions: Set<string>
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildDailyRows(projects: ProjectSummary[], period: string): Row[] {
|
||||||
|
const daily: Record<string, DailyAgg> = {}
|
||||||
for (const project of projects) {
|
for (const project of projects) {
|
||||||
for (const session of project.sessions) {
|
for (const session of project.sessions) {
|
||||||
for (const turn of session.turns) {
|
for (const turn of session.turns) {
|
||||||
if (!turn.timestamp) continue
|
if (!turn.timestamp) continue
|
||||||
const day = turn.timestamp.slice(0, 10)
|
const day = turn.timestamp.slice(0, 10)
|
||||||
if (!daily[day]) daily[day] = { cost: 0, calls: 0, input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }
|
if (!daily[day]) {
|
||||||
|
daily[day] = { cost: 0, calls: 0, input: 0, output: 0, cacheRead: 0, cacheWrite: 0, sessions: new Set() }
|
||||||
|
}
|
||||||
|
daily[day].sessions.add(session.sessionId)
|
||||||
for (const call of turn.assistantCalls) {
|
for (const call of turn.assistantCalls) {
|
||||||
daily[day].cost += call.costUSD
|
daily[day].cost += call.costUSD
|
||||||
daily[day].calls++
|
daily[day].calls++
|
||||||
|
|
@ -32,11 +64,13 @@ function buildDailyRows(projects: ProjectSummary[]): Array<Record<string, string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const { code } = getCurrency()
|
||||||
return Object.entries(daily).sort().map(([date, d]) => ({
|
return Object.entries(daily).sort().map(([date, d]) => ({
|
||||||
|
Period: period,
|
||||||
Date: date,
|
Date: date,
|
||||||
[getCostColumnHeader()]: convertCost(d.cost),
|
[`Cost (${code})`]: round2(convertCost(d.cost)),
|
||||||
'API Calls': d.calls,
|
'API Calls': d.calls,
|
||||||
|
Sessions: d.sessions.size,
|
||||||
'Input Tokens': d.input,
|
'Input Tokens': d.input,
|
||||||
'Output Tokens': d.output,
|
'Output Tokens': d.output,
|
||||||
'Cache Read Tokens': d.cacheRead,
|
'Cache Read Tokens': d.cacheRead,
|
||||||
|
|
@ -44,7 +78,7 @@ function buildDailyRows(projects: ProjectSummary[]): Array<Record<string, string
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildActivityRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
function buildActivityRows(projects: ProjectSummary[], period: string): Row[] {
|
||||||
const catTotals: Record<string, { turns: number; cost: number }> = {}
|
const catTotals: Record<string, { turns: number; cost: number }> = {}
|
||||||
for (const project of projects) {
|
for (const project of projects) {
|
||||||
for (const session of project.sessions) {
|
for (const session of project.sessions) {
|
||||||
|
|
@ -55,40 +89,53 @@ function buildActivityRows(projects: ProjectSummary[]): Array<Record<string, str
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const totalCost = Object.values(catTotals).reduce((s, d) => s + d.cost, 0)
|
||||||
|
const { code } = getCurrency()
|
||||||
return Object.entries(catTotals)
|
return Object.entries(catTotals)
|
||||||
.sort(([, a], [, b]) => b.cost - a.cost)
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
.map(([cat, d]) => ({
|
.map(([cat, d]) => ({
|
||||||
|
Period: period,
|
||||||
Activity: CATEGORY_LABELS[cat as TaskCategory] ?? cat,
|
Activity: CATEGORY_LABELS[cat as TaskCategory] ?? cat,
|
||||||
[getCostColumnHeader()]: convertCost(d.cost),
|
[`Cost (${code})`]: round2(convertCost(d.cost)),
|
||||||
|
'Share (%)': pct(d.cost, totalCost),
|
||||||
Turns: d.turns,
|
Turns: d.turns,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildModelRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
function buildModelRows(projects: ProjectSummary[], period: string): Row[] {
|
||||||
const modelTotals: Record<string, { calls: number; cost: number; input: number; output: number }> = {}
|
const modelTotals: Record<string, { calls: number; cost: number; input: number; output: number; cacheRead: number; cacheWrite: number }> = {}
|
||||||
for (const project of projects) {
|
for (const project of projects) {
|
||||||
for (const session of project.sessions) {
|
for (const session of project.sessions) {
|
||||||
for (const [model, d] of Object.entries(session.modelBreakdown)) {
|
for (const [model, d] of Object.entries(session.modelBreakdown)) {
|
||||||
if (!modelTotals[model]) modelTotals[model] = { calls: 0, cost: 0, input: 0, output: 0 }
|
if (!modelTotals[model]) modelTotals[model] = { calls: 0, cost: 0, input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }
|
||||||
modelTotals[model].calls += d.calls
|
modelTotals[model].calls += d.calls
|
||||||
modelTotals[model].cost += d.costUSD
|
modelTotals[model].cost += d.costUSD
|
||||||
modelTotals[model].input += d.tokens.inputTokens
|
modelTotals[model].input += d.tokens.inputTokens
|
||||||
modelTotals[model].output += d.tokens.outputTokens
|
modelTotals[model].output += d.tokens.outputTokens
|
||||||
|
modelTotals[model].cacheRead += d.tokens.cacheReadInputTokens ?? 0
|
||||||
|
modelTotals[model].cacheWrite += d.tokens.cacheCreationInputTokens ?? 0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const totalCost = Object.values(modelTotals).reduce((s, d) => s + d.cost, 0)
|
||||||
|
const { code } = getCurrency()
|
||||||
return Object.entries(modelTotals)
|
return Object.entries(modelTotals)
|
||||||
|
.filter(([name]) => name !== '<synthetic>')
|
||||||
.sort(([, a], [, b]) => b.cost - a.cost)
|
.sort(([, a], [, b]) => b.cost - a.cost)
|
||||||
.map(([model, d]) => ({
|
.map(([model, d]) => ({
|
||||||
|
Period: period,
|
||||||
Model: model,
|
Model: model,
|
||||||
[getCostColumnHeader()]: convertCost(d.cost),
|
[`Cost (${code})`]: round2(convertCost(d.cost)),
|
||||||
|
'Share (%)': pct(d.cost, totalCost),
|
||||||
'API Calls': d.calls,
|
'API Calls': d.calls,
|
||||||
'Input Tokens': d.input,
|
'Input Tokens': d.input,
|
||||||
'Output Tokens': d.output,
|
'Output Tokens': d.output,
|
||||||
|
'Cache Read Tokens': d.cacheRead,
|
||||||
|
'Cache Write Tokens': d.cacheWrite,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildToolRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
function buildToolRows(projects: ProjectSummary[]): Row[] {
|
||||||
const toolTotals: Record<string, number> = {}
|
const toolTotals: Record<string, number> = {}
|
||||||
for (const project of projects) {
|
for (const project of projects) {
|
||||||
for (const session of project.sessions) {
|
for (const session of project.sessions) {
|
||||||
|
|
@ -97,12 +144,17 @@ function buildToolRows(projects: ProjectSummary[]): Array<Record<string, string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const total = Object.values(toolTotals).reduce((s, n) => s + n, 0)
|
||||||
return Object.entries(toolTotals)
|
return Object.entries(toolTotals)
|
||||||
.sort(([, a], [, b]) => b - a)
|
.sort(([, a], [, b]) => b - a)
|
||||||
.map(([tool, calls]) => ({ Tool: tool, Calls: calls }))
|
.map(([tool, calls]) => ({
|
||||||
|
Tool: tool,
|
||||||
|
Calls: calls,
|
||||||
|
'Share (%)': pct(calls, total),
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildBashRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
function buildBashRows(projects: ProjectSummary[]): Row[] {
|
||||||
const bashTotals: Record<string, number> = {}
|
const bashTotals: Record<string, number> = {}
|
||||||
for (const project of projects) {
|
for (const project of projects) {
|
||||||
for (const session of project.sessions) {
|
for (const session of project.sessions) {
|
||||||
|
|
@ -111,28 +163,47 @@ function buildBashRows(projects: ProjectSummary[]): Array<Record<string, string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const total = Object.values(bashTotals).reduce((s, n) => s + n, 0)
|
||||||
return Object.entries(bashTotals)
|
return Object.entries(bashTotals)
|
||||||
.sort(([, a], [, b]) => b - a)
|
.sort(([, a], [, b]) => b - a)
|
||||||
.map(([cmd, calls]) => ({ Command: cmd, Calls: calls }))
|
.map(([cmd, calls]) => ({
|
||||||
|
Command: cmd,
|
||||||
|
Calls: calls,
|
||||||
|
'Share (%)': pct(calls, total),
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildProjectRows(projects: ProjectSummary[]): Array<Record<string, string | number>> {
|
function buildProjectRows(projects: ProjectSummary[]): Row[] {
|
||||||
return projects.map(p => ({
|
const { code } = getCurrency()
|
||||||
Project: p.projectPath,
|
const total = projects.reduce((s, p) => s + p.totalCostUSD, 0)
|
||||||
[getCostColumnHeader()]: convertCost(p.totalCostUSD),
|
return projects
|
||||||
'API Calls': p.totalApiCalls,
|
.slice()
|
||||||
Sessions: p.sessions.length,
|
.sort((a, b) => b.totalCostUSD - a.totalCostUSD)
|
||||||
}))
|
.map(p => ({
|
||||||
|
Project: p.projectPath,
|
||||||
|
[`Cost (${code})`]: round2(convertCost(p.totalCostUSD)),
|
||||||
|
'Share (%)': pct(p.totalCostUSD, total),
|
||||||
|
'API Calls': p.totalApiCalls,
|
||||||
|
Sessions: p.sessions.length,
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
function rowsToCsv(rows: Array<Record<string, string | number>>): string {
|
function buildSessionRows(projects: ProjectSummary[]): Row[] {
|
||||||
if (rows.length === 0) return ''
|
const { code } = getCurrency()
|
||||||
const headers = Object.keys(rows[0])
|
const rows: Row[] = []
|
||||||
const lines = [headers.map(escCsv).join(',')]
|
for (const p of projects) {
|
||||||
for (const row of rows) {
|
for (const s of p.sessions) {
|
||||||
lines.push(headers.map(h => escCsv(String(row[h] ?? ''))).join(','))
|
rows.push({
|
||||||
|
Project: p.projectPath,
|
||||||
|
'Session ID': s.sessionId,
|
||||||
|
'Started At': s.firstTimestamp ?? '',
|
||||||
|
[`Cost (${code})`]: round2(convertCost(s.totalCostUSD)),
|
||||||
|
'API Calls': s.apiCalls,
|
||||||
|
Turns: s.turns.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return lines.join('\n')
|
return rows.sort((a, b) => (b[`Cost (${code})`] as number) - (a[`Cost (${code})`] as number))
|
||||||
}
|
}
|
||||||
|
|
||||||
export type PeriodExport = {
|
export type PeriodExport = {
|
||||||
|
|
@ -140,77 +211,140 @@ export type PeriodExport = {
|
||||||
projects: ProjectSummary[]
|
projects: ProjectSummary[]
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildSummaryRow(period: PeriodExport): Record<string, string | number> {
|
function buildSummaryRows(periods: PeriodExport[]): Row[] {
|
||||||
const cost = period.projects.reduce((s, p) => s + p.totalCostUSD, 0)
|
const { code } = getCurrency()
|
||||||
const calls = period.projects.reduce((s, p) => s + p.totalApiCalls, 0)
|
return periods.map(p => {
|
||||||
const sessions = period.projects.reduce((s, p) => s + p.sessions.length, 0)
|
const cost = p.projects.reduce((s, proj) => s + proj.totalCostUSD, 0)
|
||||||
return { Period: period.label, [getCostColumnHeader()]: convertCost(cost), 'API Calls': calls, Sessions: sessions }
|
const calls = p.projects.reduce((s, proj) => s + proj.totalApiCalls, 0)
|
||||||
|
const sessions = p.projects.reduce((s, proj) => s + proj.sessions.length, 0)
|
||||||
|
const projectCount = p.projects.filter(proj => proj.totalCostUSD > 0).length
|
||||||
|
return {
|
||||||
|
Period: p.label,
|
||||||
|
[`Cost (${code})`]: round2(convertCost(cost)),
|
||||||
|
'API Calls': calls,
|
||||||
|
Sessions: sessions,
|
||||||
|
Projects: projectCount,
|
||||||
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function buildReadme(periods: PeriodExport[]): string {
|
||||||
|
const { code } = getCurrency()
|
||||||
|
const generated = new Date().toISOString()
|
||||||
|
const lines = [
|
||||||
|
'CodeBurn Usage Export',
|
||||||
|
'====================',
|
||||||
|
'',
|
||||||
|
`Generated: ${generated}`,
|
||||||
|
`Currency: ${code}`,
|
||||||
|
`Periods: ${periods.map(p => p.label).join(', ')}`,
|
||||||
|
'',
|
||||||
|
'Files',
|
||||||
|
'-----',
|
||||||
|
' summary.csv One row per period. Headline totals.',
|
||||||
|
' daily.csv Day-by-day breakdown, Period column distinguishes the window.',
|
||||||
|
' activity.csv Time spent per task category (Coding, Debugging, Exploration, etc.).',
|
||||||
|
' models.csv Spend per model with token totals and cache usage.',
|
||||||
|
' projects.csv Spend per project folder (30-day window).',
|
||||||
|
' sessions.csv One row per session (30-day window) with session IDs and costs.',
|
||||||
|
' tools.csv Tool invocations and share (30-day window).',
|
||||||
|
' shell-commands.csv Shell commands executed via Bash tool (30-day window).',
|
||||||
|
'',
|
||||||
|
'Notes',
|
||||||
|
'-----',
|
||||||
|
' Every cost column is already converted to the active currency. Tokens are raw integer',
|
||||||
|
' counts from provider telemetry. Share (%) is relative to the period/table total.',
|
||||||
|
'',
|
||||||
|
]
|
||||||
|
return lines.join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sentinel file dropped into every folder we create so we can safely overwrite an older
|
||||||
|
/// codeburn export without ever deleting a user's unrelated files by accident.
|
||||||
|
const EXPORT_MARKER_FILE = '.codeburn-export'
|
||||||
|
|
||||||
|
async function isCodeburnExportFolder(path: string): Promise<boolean> {
|
||||||
|
const markerStat = await stat(join(path, EXPORT_MARKER_FILE)).catch(() => null)
|
||||||
|
return markerStat?.isFile() ?? false
|
||||||
|
}
|
||||||
|
|
||||||
|
async function clearCodeburnExportFolder(path: string): Promise<void> {
|
||||||
|
const entries = await readdir(path)
|
||||||
|
for (const entry of entries) {
|
||||||
|
await rm(join(path, entry), { recursive: true, force: true })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Writes a folder of one-table-per-file CSVs. The outputPath is treated as a directory. If it
|
||||||
|
/// ends in `.csv` the extension is stripped to form the folder name. Refuses to delete a
|
||||||
|
/// pre-existing file or a non-codeburn folder, so a typo like `-o ~/.ssh/id_ed25519` can't
|
||||||
|
/// wipe a sensitive file (prior versions did `rm(path, { force: true })` unconditionally).
|
||||||
export async function exportCsv(periods: PeriodExport[], outputPath: string): Promise<string> {
|
export async function exportCsv(periods: PeriodExport[], outputPath: string): Promise<string> {
|
||||||
const allProjects = periods.find(p => p.label === '30 Days')?.projects
|
const thirtyDays = periods.find(p => p.label === '30 Days')
|
||||||
?? periods[periods.length - 1].projects
|
const thirtyDayProjects = thirtyDays?.projects ?? periods[periods.length - 1].projects
|
||||||
|
|
||||||
const parts: string[] = []
|
let folder = resolve(outputPath)
|
||||||
|
if (folder.toLowerCase().endsWith('.csv')) {
|
||||||
parts.push('# Summary')
|
folder = folder.slice(0, -4)
|
||||||
parts.push(rowsToCsv(periods.map(buildSummaryRow)))
|
|
||||||
parts.push('')
|
|
||||||
|
|
||||||
for (const period of periods) {
|
|
||||||
parts.push(`# Daily - ${period.label}`)
|
|
||||||
parts.push(rowsToCsv(buildDailyRows(period.projects)))
|
|
||||||
parts.push('')
|
|
||||||
|
|
||||||
parts.push(`# Activity - ${period.label}`)
|
|
||||||
parts.push(rowsToCsv(buildActivityRows(period.projects)))
|
|
||||||
parts.push('')
|
|
||||||
|
|
||||||
parts.push(`# Models - ${period.label}`)
|
|
||||||
parts.push(rowsToCsv(buildModelRows(period.projects)))
|
|
||||||
parts.push('')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
parts.push('# Tools - All')
|
const existingStat = await stat(folder).catch(() => null)
|
||||||
parts.push(rowsToCsv(buildToolRows(allProjects)))
|
if (existingStat?.isFile()) {
|
||||||
parts.push('')
|
throw new Error(`Refusing to overwrite existing file at ${folder}. Pass a directory path instead.`)
|
||||||
|
}
|
||||||
|
if (existingStat?.isDirectory()) {
|
||||||
|
if (!(await isCodeburnExportFolder(folder))) {
|
||||||
|
throw new Error(
|
||||||
|
`Refusing to reuse non-empty directory ${folder}: no ${EXPORT_MARKER_FILE} marker. ` +
|
||||||
|
`Delete it manually or pick a different -o path.`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
await clearCodeburnExportFolder(folder)
|
||||||
|
}
|
||||||
|
await mkdir(folder, { recursive: true })
|
||||||
|
await writeFile(join(folder, EXPORT_MARKER_FILE), '', 'utf-8')
|
||||||
|
|
||||||
parts.push('# Shell Commands - All')
|
const dailyRows = periods.flatMap(p => buildDailyRows(p.projects, p.label))
|
||||||
parts.push(rowsToCsv(buildBashRows(allProjects)))
|
const activityRows = periods.flatMap(p => buildActivityRows(p.projects, p.label))
|
||||||
parts.push('')
|
const modelRows = periods.flatMap(p => buildModelRows(p.projects, p.label))
|
||||||
|
|
||||||
parts.push('# Projects - All')
|
await writeFile(join(folder, 'README.txt'), buildReadme(periods), 'utf-8')
|
||||||
parts.push(rowsToCsv(buildProjectRows(allProjects)))
|
await writeFile(join(folder, 'summary.csv'), rowsToCsv(buildSummaryRows(periods)), 'utf-8')
|
||||||
parts.push('')
|
await writeFile(join(folder, 'daily.csv'), rowsToCsv(dailyRows), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'activity.csv'), rowsToCsv(activityRows), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'models.csv'), rowsToCsv(modelRows), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'projects.csv'), rowsToCsv(buildProjectRows(thirtyDayProjects)), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'sessions.csv'), rowsToCsv(buildSessionRows(thirtyDayProjects)), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'tools.csv'), rowsToCsv(buildToolRows(thirtyDayProjects)), 'utf-8')
|
||||||
|
await writeFile(join(folder, 'shell-commands.csv'), rowsToCsv(buildBashRows(thirtyDayProjects)), 'utf-8')
|
||||||
|
|
||||||
const fullPath = resolve(outputPath)
|
return folder
|
||||||
await writeFile(fullPath, parts.join('\n'), 'utf-8')
|
|
||||||
return fullPath
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function exportJson(periods: PeriodExport[], outputPath: string): Promise<string> {
|
export async function exportJson(periods: PeriodExport[], outputPath: string): Promise<string> {
|
||||||
const allProjects = periods.find(p => p.label === '30 Days')?.projects
|
const thirtyDays = periods.find(p => p.label === '30 Days')
|
||||||
?? periods[periods.length - 1].projects
|
const thirtyDayProjects = thirtyDays?.projects ?? periods[periods.length - 1].projects
|
||||||
|
const { code, rate, symbol } = getCurrency()
|
||||||
const periodData: Record<string, unknown> = {}
|
|
||||||
for (const period of periods) {
|
|
||||||
periodData[period.label] = {
|
|
||||||
summary: buildSummaryRow(period),
|
|
||||||
daily: buildDailyRows(period.projects),
|
|
||||||
activity: buildActivityRows(period.projects),
|
|
||||||
models: buildModelRows(period.projects),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = {
|
const data = {
|
||||||
|
schema: 'codeburn.export.v2',
|
||||||
generated: new Date().toISOString(),
|
generated: new Date().toISOString(),
|
||||||
periods: periodData,
|
currency: { code, rate, symbol },
|
||||||
tools: buildToolRows(allProjects),
|
summary: buildSummaryRows(periods),
|
||||||
shellCommands: buildBashRows(allProjects),
|
periods: periods.map(p => ({
|
||||||
projects: buildProjectRows(allProjects),
|
label: p.label,
|
||||||
|
daily: buildDailyRows(p.projects, p.label),
|
||||||
|
activity: buildActivityRows(p.projects, p.label),
|
||||||
|
models: buildModelRows(p.projects, p.label),
|
||||||
|
})),
|
||||||
|
projects: buildProjectRows(thirtyDayProjects),
|
||||||
|
sessions: buildSessionRows(thirtyDayProjects),
|
||||||
|
tools: buildToolRows(thirtyDayProjects),
|
||||||
|
shellCommands: buildBashRows(thirtyDayProjects),
|
||||||
}
|
}
|
||||||
|
|
||||||
const fullPath = resolve(outputPath)
|
const target = resolve(outputPath.toLowerCase().endsWith('.json') ? outputPath : `${outputPath}.json`)
|
||||||
await writeFile(fullPath, JSON.stringify(data, null, 2), 'utf-8')
|
await mkdir(dirname(target), { recursive: true })
|
||||||
return fullPath
|
await writeFile(target, JSON.stringify(data, null, 2), 'utf-8')
|
||||||
|
return target
|
||||||
}
|
}
|
||||||
|
|
|
||||||
173
src/menubar-installer.ts
Normal file
173
src/menubar-installer.ts
Normal file
|
|
@ -0,0 +1,173 @@
|
||||||
|
import { spawn } from 'node:child_process'
|
||||||
|
import { createWriteStream } from 'node:fs'
|
||||||
|
import { mkdir, mkdtemp, rename, rm, stat } from 'node:fs/promises'
|
||||||
|
import { homedir, platform, tmpdir } from 'node:os'
|
||||||
|
import { join } from 'node:path'
|
||||||
|
import { pipeline } from 'node:stream/promises'
|
||||||
|
import { Readable } from 'node:stream'
|
||||||
|
|
||||||
|
/// Public GitHub repo that hosts signed macOS release builds. `/releases/latest` returns the
|
||||||
|
/// newest tagged release; we filter its assets list for our zipped .app bundle.
|
||||||
|
const RELEASE_API = 'https://api.github.com/repos/AgentSeal/codeburn/releases/latest'
|
||||||
|
const APP_BUNDLE_NAME = 'CodeBurnMenubar.app'
|
||||||
|
const ASSET_PATTERN = /^CodeBurnMenubar-.*\.zip$/
|
||||||
|
const APP_PROCESS_NAME = 'CodeBurnMenubar'
|
||||||
|
const SUPPORTED_OS = 'darwin'
|
||||||
|
const MIN_MACOS_MAJOR = 14
|
||||||
|
|
||||||
|
export type InstallResult = { installedPath: string; launched: boolean }
|
||||||
|
|
||||||
|
type ReleaseAsset = { name: string; browser_download_url: string }
|
||||||
|
type ReleaseResponse = { tag_name: string; assets: ReleaseAsset[] }
|
||||||
|
|
||||||
|
function userApplicationsDir(): string {
|
||||||
|
return join(homedir(), 'Applications')
|
||||||
|
}
|
||||||
|
|
||||||
|
async function exists(path: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await stat(path)
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function ensureSupportedPlatform(): Promise<void> {
|
||||||
|
if (platform() !== SUPPORTED_OS) {
|
||||||
|
throw new Error(`The menubar app is macOS only (detected: ${platform()}).`)
|
||||||
|
}
|
||||||
|
const major = Number((process.env.CODEBURN_FORCE_MACOS_MAJOR ?? '')
|
||||||
|
|| (await sysProductVersion()).split('.')[0])
|
||||||
|
if (!Number.isFinite(major) || major < MIN_MACOS_MAJOR) {
|
||||||
|
throw new Error(`macOS ${MIN_MACOS_MAJOR}+ required (detected ${major}).`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function sysProductVersion(): Promise<string> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const proc = spawn('/usr/bin/sw_vers', ['-productVersion'])
|
||||||
|
let out = ''
|
||||||
|
proc.stdout.on('data', (chunk: Buffer) => { out += chunk.toString() })
|
||||||
|
proc.on('error', reject)
|
||||||
|
proc.on('close', (code) => {
|
||||||
|
if (code !== 0) reject(new Error(`sw_vers exited with ${code}`))
|
||||||
|
else resolve(out.trim())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchLatestReleaseAsset(): Promise<ReleaseAsset> {
|
||||||
|
const response = await fetch(RELEASE_API, {
|
||||||
|
headers: {
|
||||||
|
// Identify the installer so GitHub's abuse heuristics treat us as a known client.
|
||||||
|
'User-Agent': 'codeburn-menubar-installer',
|
||||||
|
Accept: 'application/vnd.github+json',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`GitHub release lookup failed: HTTP ${response.status}`)
|
||||||
|
}
|
||||||
|
const body = await response.json() as ReleaseResponse
|
||||||
|
const asset = body.assets.find(a => ASSET_PATTERN.test(a.name))
|
||||||
|
if (!asset) {
|
||||||
|
throw new Error(
|
||||||
|
`No ${APP_BUNDLE_NAME} zip found in release ${body.tag_name}. ` +
|
||||||
|
`Check https://github.com/AgentSeal/codeburn/releases.`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return asset
|
||||||
|
}
|
||||||
|
|
||||||
|
async function downloadToFile(url: string, destPath: string): Promise<void> {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
headers: { 'User-Agent': 'codeburn-menubar-installer' },
|
||||||
|
redirect: 'follow',
|
||||||
|
})
|
||||||
|
if (!response.ok || response.body === null) {
|
||||||
|
throw new Error(`Download failed: HTTP ${response.status}`)
|
||||||
|
}
|
||||||
|
// fetch's ReadableStream needs to be wrapped for Node streams.
|
||||||
|
const nodeStream = Readable.fromWeb(response.body as never)
|
||||||
|
await pipeline(nodeStream, createWriteStream(destPath))
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runCommand(command: string, args: string[]): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const proc = spawn(command, args, { stdio: 'inherit' })
|
||||||
|
proc.on('error', reject)
|
||||||
|
proc.on('close', (code) => {
|
||||||
|
if (code === 0) resolve()
|
||||||
|
else reject(new Error(`${command} exited with status ${code}`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function isAppRunning(): Promise<boolean> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const proc = spawn('/usr/bin/pgrep', ['-f', APP_PROCESS_NAME])
|
||||||
|
proc.on('close', (code) => resolve(code === 0))
|
||||||
|
proc.on('error', () => resolve(false))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function killRunningApp(): Promise<void> {
|
||||||
|
await new Promise<void>((resolve) => {
|
||||||
|
const proc = spawn('/usr/bin/pkill', ['-f', APP_PROCESS_NAME])
|
||||||
|
proc.on('close', () => resolve())
|
||||||
|
proc.on('error', () => resolve())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function installMenubarApp(options: { force?: boolean } = {}): Promise<InstallResult> {
|
||||||
|
await ensureSupportedPlatform()
|
||||||
|
|
||||||
|
const appsDir = userApplicationsDir()
|
||||||
|
const targetPath = join(appsDir, APP_BUNDLE_NAME)
|
||||||
|
const alreadyInstalled = await exists(targetPath)
|
||||||
|
|
||||||
|
if (alreadyInstalled && !options.force) {
|
||||||
|
if (!(await isAppRunning())) {
|
||||||
|
await runCommand('/usr/bin/open', [targetPath])
|
||||||
|
}
|
||||||
|
return { installedPath: targetPath, launched: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Looking up the latest CodeBurn Menubar release...')
|
||||||
|
const asset = await fetchLatestReleaseAsset()
|
||||||
|
|
||||||
|
const stagingDir = await mkdtemp(join(tmpdir(), 'codeburn-menubar-'))
|
||||||
|
try {
|
||||||
|
const archivePath = join(stagingDir, asset.name)
|
||||||
|
console.log(`Downloading ${asset.name}...`)
|
||||||
|
await downloadToFile(asset.browser_download_url, archivePath)
|
||||||
|
|
||||||
|
console.log('Unpacking...')
|
||||||
|
await runCommand('/usr/bin/unzip', ['-q', archivePath, '-d', stagingDir])
|
||||||
|
|
||||||
|
const unpackedApp = join(stagingDir, APP_BUNDLE_NAME)
|
||||||
|
if (!(await exists(unpackedApp))) {
|
||||||
|
throw new Error(`Archive did not contain ${APP_BUNDLE_NAME}.`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear Gatekeeper's quarantine xattr. Without this, the first launch shows the
|
||||||
|
// "cannot verify developer" prompt even for a signed + notarized app when the bundle
|
||||||
|
// was delivered via curl/fetch instead of the Mac App Store.
|
||||||
|
await runCommand('/usr/bin/xattr', ['-dr', 'com.apple.quarantine', unpackedApp]).catch(() => {})
|
||||||
|
|
||||||
|
await mkdir(appsDir, { recursive: true })
|
||||||
|
if (alreadyInstalled) {
|
||||||
|
// Kill the running copy before replacing its bundle so `mv` can proceed cleanly and the
|
||||||
|
// user ends up on the new version.
|
||||||
|
await killRunningApp()
|
||||||
|
await rm(targetPath, { recursive: true, force: true })
|
||||||
|
}
|
||||||
|
await rename(unpackedApp, targetPath)
|
||||||
|
|
||||||
|
console.log('Launching CodeBurn Menubar...')
|
||||||
|
await runCommand('/usr/bin/open', [targetPath])
|
||||||
|
return { installedPath: targetPath, launched: true }
|
||||||
|
} finally {
|
||||||
|
await rm(stagingDir, { recursive: true, force: true })
|
||||||
|
}
|
||||||
|
}
|
||||||
182
src/menubar-json.ts
Normal file
182
src/menubar-json.ts
Normal file
|
|
@ -0,0 +1,182 @@
|
||||||
|
/// Rollup of one time window (today / 7 days / 30 days / month / all) used as the canonical
|
||||||
|
/// input to the menubar payload. Built inside the CLI and also consumed by the day-aggregator
|
||||||
|
/// when hydrating per-day cache entries.
|
||||||
|
export type PeriodData = {
|
||||||
|
label: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
sessions: number
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
cacheReadTokens: number
|
||||||
|
cacheWriteTokens: number
|
||||||
|
categories: Array<{ name: string; cost: number; turns: number; editTurns: number; oneShotTurns: number }>
|
||||||
|
models: Array<{ name: string; cost: number; calls: number }>
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ProviderCost = {
|
||||||
|
name: string
|
||||||
|
cost: number
|
||||||
|
}
|
||||||
|
import type { OptimizeResult } from './optimize.js'
|
||||||
|
|
||||||
|
const TOP_ACTIVITIES_LIMIT = 20
|
||||||
|
const TOP_MODELS_LIMIT = 20
|
||||||
|
const TOP_FINDINGS_LIMIT = 10
|
||||||
|
const HISTORY_DAYS_LIMIT = 365
|
||||||
|
const SYNTHETIC_MODEL_NAME = '<synthetic>'
|
||||||
|
|
||||||
|
export type DailyModelBreakdown = {
|
||||||
|
name: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export type DailyHistoryEntry = {
|
||||||
|
date: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
cacheReadTokens: number
|
||||||
|
cacheWriteTokens: number
|
||||||
|
topModels: DailyModelBreakdown[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export type MenubarPayload = {
|
||||||
|
generated: string
|
||||||
|
current: {
|
||||||
|
label: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
sessions: number
|
||||||
|
oneShotRate: number | null
|
||||||
|
inputTokens: number
|
||||||
|
outputTokens: number
|
||||||
|
cacheHitPercent: number
|
||||||
|
topActivities: Array<{
|
||||||
|
name: string
|
||||||
|
cost: number
|
||||||
|
turns: number
|
||||||
|
oneShotRate: number | null
|
||||||
|
}>
|
||||||
|
topModels: Array<{
|
||||||
|
name: string
|
||||||
|
cost: number
|
||||||
|
calls: number
|
||||||
|
}>
|
||||||
|
providers: Record<string, number>
|
||||||
|
}
|
||||||
|
optimize: {
|
||||||
|
findingCount: number
|
||||||
|
savingsUSD: number
|
||||||
|
topFindings: Array<{
|
||||||
|
title: string
|
||||||
|
impact: 'high' | 'medium' | 'low'
|
||||||
|
savingsUSD: number
|
||||||
|
}>
|
||||||
|
}
|
||||||
|
history: {
|
||||||
|
daily: DailyHistoryEntry[]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function oneShotRateFor(editTurns: number, oneShotTurns: number): number | null {
|
||||||
|
if (editTurns === 0) return null
|
||||||
|
return oneShotTurns / editTurns
|
||||||
|
}
|
||||||
|
|
||||||
|
function aggregateOneShotRate(categories: PeriodData['categories']): number | null {
|
||||||
|
let edits = 0
|
||||||
|
let oneShots = 0
|
||||||
|
for (const cat of categories) {
|
||||||
|
edits += cat.editTurns
|
||||||
|
oneShots += cat.oneShotTurns
|
||||||
|
}
|
||||||
|
if (edits === 0) return null
|
||||||
|
return oneShots / edits
|
||||||
|
}
|
||||||
|
|
||||||
|
function cacheHitPercent(inputTokens: number, cacheReadTokens: number): number {
|
||||||
|
const denom = inputTokens + cacheReadTokens
|
||||||
|
if (denom === 0) return 0
|
||||||
|
return (cacheReadTokens / denom) * 100
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildTopActivities(categories: PeriodData['categories']): MenubarPayload['current']['topActivities'] {
|
||||||
|
return categories.slice(0, TOP_ACTIVITIES_LIMIT).map(cat => ({
|
||||||
|
name: cat.name,
|
||||||
|
cost: cat.cost,
|
||||||
|
turns: cat.turns,
|
||||||
|
oneShotRate: oneShotRateFor(cat.editTurns, cat.oneShotTurns),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildTopModels(models: PeriodData['models']): MenubarPayload['current']['topModels'] {
|
||||||
|
return models
|
||||||
|
.filter(m => m.name !== SYNTHETIC_MODEL_NAME)
|
||||||
|
.slice(0, TOP_MODELS_LIMIT)
|
||||||
|
.map(m => ({ name: m.name, cost: m.cost, calls: m.calls }))
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildOptimize(optimize: OptimizeResult | null): MenubarPayload['optimize'] {
|
||||||
|
if (!optimize || optimize.findings.length === 0) {
|
||||||
|
return { findingCount: 0, savingsUSD: 0, topFindings: [] }
|
||||||
|
}
|
||||||
|
const { findings, costRate } = optimize
|
||||||
|
const totalSavingsUSD = findings.reduce((s, f) => s + f.tokensSaved * costRate, 0)
|
||||||
|
const topFindings = findings.slice(0, TOP_FINDINGS_LIMIT).map(f => ({
|
||||||
|
title: f.title,
|
||||||
|
impact: f.impact,
|
||||||
|
savingsUSD: f.tokensSaved * costRate,
|
||||||
|
}))
|
||||||
|
return {
|
||||||
|
findingCount: findings.length,
|
||||||
|
savingsUSD: totalSavingsUSD,
|
||||||
|
topFindings,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildProviders(providers: ProviderCost[]): Record<string, number> {
|
||||||
|
const map: Record<string, number> = {}
|
||||||
|
for (const p of providers) {
|
||||||
|
if (p.cost < 0) continue
|
||||||
|
map[p.name.toLowerCase()] = p.cost
|
||||||
|
}
|
||||||
|
return map
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildHistory(daily: DailyHistoryEntry[] | undefined): MenubarPayload['history'] {
|
||||||
|
if (!daily || daily.length === 0) return { daily: [] }
|
||||||
|
const sorted = [...daily].sort((a, b) => a.date.localeCompare(b.date))
|
||||||
|
const trimmed = sorted.slice(-HISTORY_DAYS_LIMIT)
|
||||||
|
return { daily: trimmed }
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildMenubarPayload(
|
||||||
|
current: PeriodData,
|
||||||
|
providers: ProviderCost[],
|
||||||
|
optimize: OptimizeResult | null,
|
||||||
|
dailyHistory?: DailyHistoryEntry[],
|
||||||
|
): MenubarPayload {
|
||||||
|
return {
|
||||||
|
generated: new Date().toISOString(),
|
||||||
|
current: {
|
||||||
|
label: current.label,
|
||||||
|
cost: current.cost,
|
||||||
|
calls: current.calls,
|
||||||
|
sessions: current.sessions,
|
||||||
|
oneShotRate: aggregateOneShotRate(current.categories),
|
||||||
|
inputTokens: current.inputTokens,
|
||||||
|
outputTokens: current.outputTokens,
|
||||||
|
cacheHitPercent: cacheHitPercent(current.inputTokens, current.cacheReadTokens),
|
||||||
|
topActivities: buildTopActivities(current.categories),
|
||||||
|
topModels: buildTopModels(current.models),
|
||||||
|
providers: buildProviders(providers),
|
||||||
|
},
|
||||||
|
optimize: buildOptimize(optimize),
|
||||||
|
history: buildHistory(dailyHistory),
|
||||||
|
}
|
||||||
|
}
|
||||||
334
src/menubar.ts
334
src/menubar.ts
|
|
@ -1,334 +0,0 @@
|
||||||
import { execFileSync, execSync } from 'child_process'
|
|
||||||
import { existsSync } from 'fs'
|
|
||||||
import { chmod, mkdir, unlink, writeFile } from 'fs/promises'
|
|
||||||
import { homedir, platform } from 'os'
|
|
||||||
import { join } from 'path'
|
|
||||||
import { formatCost, formatTokens } from './format.js'
|
|
||||||
import { getCurrency } from './currency.js'
|
|
||||||
|
|
||||||
const PLUGIN_REFRESH = '5m'
|
|
||||||
const SWIFTBAR_PREFERENCES_DOMAIN = 'com.ameba.SwiftBar'
|
|
||||||
const SWIFTBAR_PLUGIN_DIRECTORY_KEY = 'PluginDirectory'
|
|
||||||
|
|
||||||
const MENUBAR_LABEL_MAX_LENGTH = 14
|
|
||||||
const MENUBAR_LABEL_ALLOWLIST = /[^A-Za-z0-9 ._/-]/g
|
|
||||||
|
|
||||||
// SwiftBar/xbar parse `|` as the metadata separator and interpret ANSI escapes
|
|
||||||
// on some paths. Replace anything outside a conservative allowlist with `?`
|
|
||||||
// and truncate before padEnd.
|
|
||||||
function sanitizeMenubarLabel(name: string): string {
|
|
||||||
return name.replace(MENUBAR_LABEL_ALLOWLIST, '?').slice(0, MENUBAR_LABEL_MAX_LENGTH)
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSwiftBarPluginDir(): string {
|
|
||||||
return join(homedir(), 'Library', 'Application Support', 'SwiftBar', 'plugins')
|
|
||||||
}
|
|
||||||
|
|
||||||
function getXbarPluginDir(): string {
|
|
||||||
return join(homedir(), 'Library', 'Application Support', 'xbar', 'plugins')
|
|
||||||
}
|
|
||||||
|
|
||||||
export function parsePluginDirectoryPreference(value: string): string | undefined {
|
|
||||||
const pluginDir = value.trim()
|
|
||||||
if (!pluginDir) return undefined
|
|
||||||
if (pluginDir === '~') return homedir()
|
|
||||||
if (pluginDir.startsWith('~/')) return join(homedir(), pluginDir.slice(2))
|
|
||||||
return pluginDir
|
|
||||||
}
|
|
||||||
|
|
||||||
function getConfiguredSwiftBarPluginDir(): string | undefined {
|
|
||||||
if (platform() !== 'darwin') return undefined
|
|
||||||
|
|
||||||
try {
|
|
||||||
return parsePluginDirectoryPreference(execFileSync('defaults', [
|
|
||||||
'read',
|
|
||||||
SWIFTBAR_PREFERENCES_DOMAIN,
|
|
||||||
SWIFTBAR_PLUGIN_DIRECTORY_KEY,
|
|
||||||
], { encoding: 'utf-8' }))
|
|
||||||
} catch {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSwiftBarPluginDirs(): string[] {
|
|
||||||
const dirs = [getConfiguredSwiftBarPluginDir(), getSwiftBarPluginDir()]
|
|
||||||
return dirs.filter((dir, index): dir is string => dir !== undefined && dirs.indexOf(dir) === index)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function chooseMenubarPluginDir(
|
|
||||||
swiftBarPluginDirs: string[],
|
|
||||||
xbarPluginDir: string,
|
|
||||||
pathExists: (path: string) => boolean,
|
|
||||||
): { pluginDir: string; appName: string } {
|
|
||||||
const preferredSwiftBarDir = swiftBarPluginDirs[0] ?? getSwiftBarPluginDir()
|
|
||||||
|
|
||||||
for (const pluginDir of swiftBarPluginDirs) {
|
|
||||||
if (pathExists(pluginDir)) return { pluginDir, appName: 'SwiftBar' }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathExists(xbarPluginDir)) return { pluginDir: xbarPluginDir, appName: 'xbar' }
|
|
||||||
|
|
||||||
return { pluginDir: preferredSwiftBarDir, appName: 'SwiftBar' }
|
|
||||||
}
|
|
||||||
|
|
||||||
function getCodeburnBin(): string {
|
|
||||||
try {
|
|
||||||
return execSync('which codeburn', { encoding: 'utf-8' }).trim()
|
|
||||||
} catch {
|
|
||||||
return 'npx --yes codeburn'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function generatePlugin(bin: string): string {
|
|
||||||
const home = homedir()
|
|
||||||
// Resolve the directory of the node binary used at install time so the
|
|
||||||
// plugin uses the same Node version codeburn was installed with — even
|
|
||||||
// when SwiftBar/xbar launch with a minimal PATH that finds an older
|
|
||||||
// system Node first. Fixes #63.
|
|
||||||
const nodeBinDir = join(process.execPath, '..')
|
|
||||||
return `#!/bin/bash
|
|
||||||
# <xbar.title>CodeBurn</xbar.title>
|
|
||||||
# <xbar.version>v0.1.0</xbar.version>
|
|
||||||
# <xbar.author>AgentSeal</xbar.author>
|
|
||||||
# <xbar.author.github>agentseal</xbar.author.github>
|
|
||||||
# <xbar.desc>See where your AI coding tokens burn. Tracks cost, activity, and model usage across Claude Code, Cursor, and Codex by task type, tool, MCP server, and project.</xbar.desc>
|
|
||||||
# <xbar.image>file://${home}/codeburn/assets/logo.png</xbar.image>
|
|
||||||
# <xbar.abouturl>https://github.com/agentseal/codeburn</xbar.abouturl>
|
|
||||||
# <xbar.dependencies>node</xbar.dependencies>
|
|
||||||
|
|
||||||
export HOME="${home}"
|
|
||||||
export PATH="${nodeBinDir}:$HOME/.local/bin:$HOME/.npm-global/bin:/opt/homebrew/bin:/usr/local/bin:$PATH"
|
|
||||||
|
|
||||||
${bin} status --format menubar 2>/dev/null || echo "-- | sfimage=flame.fill"
|
|
||||||
`
|
|
||||||
}
|
|
||||||
|
|
||||||
function miniBar(value: number, max: number, width: number = 10): string {
|
|
||||||
if (max === 0) return '·'.repeat(width)
|
|
||||||
const filled = Math.round((value / max) * width)
|
|
||||||
return '█'.repeat(Math.min(filled, width)) + '·'.repeat(Math.max(width - filled, 0))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Aggregated cost/usage totals for one reporting window ("Today", "7 Days", …)
// as consumed by renderMenubarFormat.
export type PeriodData = {
  // Human-readable window name (e.g. "7 Days").
  label: string
  // Total cost for the window; rendered via formatCost.
  cost: number
  // Number of API calls in the window.
  calls: number
  inputTokens: number
  outputTokens: number
  // Cache read/write token counts; the renderer derives the cache-hit
  // percentage from inputTokens + cacheReadTokens.
  cacheReadTokens: number
  cacheWriteTokens: number
  // Per-activity breakdown; only the first 8 entries are rendered.
  categories: Array<{ name: string; cost: number; turns: number; editTurns: number; oneShotTurns: number }>
  // Per-model breakdown; only the first 5 entries are rendered, and the
  // '<synthetic>' placeholder name is skipped by the renderer.
  models: Array<{ name: string; cost: number; calls: number }>
}

// One provider's cost contribution, used for the per-provider rows shown
// when more than one provider contributed today.
export type ProviderCost = {
  name: string
  cost: number
}
|
|
||||||
|
|
||||||
export function renderMenubarFormat(
|
|
||||||
today: PeriodData,
|
|
||||||
week: PeriodData,
|
|
||||||
thirtyDays: PeriodData,
|
|
||||||
month: PeriodData,
|
|
||||||
todayProviders?: ProviderCost[],
|
|
||||||
): string {
|
|
||||||
const lines: string[] = []
|
|
||||||
|
|
||||||
lines.push(`${formatCost(today.cost)} | sfimage=flame.fill color=#FF8C42`)
|
|
||||||
lines.push('---')
|
|
||||||
|
|
||||||
lines.push(`CodeBurn | size=15 color=#FF8C42`)
|
|
||||||
lines.push(`AI Coding Cost Tracker | size=11`)
|
|
||||||
if (todayProviders && todayProviders.length > 1) {
|
|
||||||
for (const p of todayProviders) {
|
|
||||||
lines.push(` ${p.name.padEnd(10)} ${formatCost(p.cost).padStart(10)} | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
lines.push('---')
|
|
||||||
|
|
||||||
lines.push(`Today ${formatCost(today.cost)} ${today.calls.toLocaleString()} calls | size=14`)
|
|
||||||
lines.push('---')
|
|
||||||
|
|
||||||
const maxCat = Math.max(...today.categories.map(c => c.cost), 0.01)
|
|
||||||
lines.push(`Activity - Today | size=12 color=#FF8C42`)
|
|
||||||
for (const cat of today.categories.slice(0, 8)) {
|
|
||||||
const bar = miniBar(cat.cost, maxCat)
|
|
||||||
const name = sanitizeMenubarLabel(cat.name).padEnd(14)
|
|
||||||
lines.push(`${bar} ${name} ${formatCost(cat.cost).padStart(8)} ${String(cat.turns).padStart(4)} turns | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
lines.push('---')
|
|
||||||
|
|
||||||
const maxModel = Math.max(...today.models.filter(m => m.name !== '<synthetic>').map(m => m.cost), 0.01)
|
|
||||||
lines.push(`Models - Today | size=12 color=#FF8C42`)
|
|
||||||
for (const model of today.models.slice(0, 5)) {
|
|
||||||
if (model.name === '<synthetic>') continue
|
|
||||||
const bar = miniBar(model.cost, maxModel)
|
|
||||||
const name = sanitizeMenubarLabel(model.name).padEnd(14)
|
|
||||||
lines.push(`${bar} ${name} ${formatCost(model.cost).padStart(8)} ${String(model.calls).padStart(5)} calls | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const cacheHit = today.inputTokens + today.cacheReadTokens > 0
|
|
||||||
? ((today.cacheReadTokens / (today.inputTokens + today.cacheReadTokens)) * 100).toFixed(0)
|
|
||||||
: '0'
|
|
||||||
lines.push(`Tokens: ${formatTokens(today.inputTokens)} in · ${formatTokens(today.outputTokens)} out · ${cacheHit}% cache hit | font=Menlo size=10`)
|
|
||||||
lines.push('---')
|
|
||||||
|
|
||||||
lines.push(`7 Days ${formatCost(week.cost)} ${week.calls.toLocaleString()} calls | size=14`)
|
|
||||||
const weekMaxCat = Math.max(...week.categories.map(c => c.cost), 0.01)
|
|
||||||
const weekMaxModel = Math.max(...week.models.filter(m => m.name !== '<synthetic>').map(m => m.cost), 0.01)
|
|
||||||
lines.push(`--Activity | size=12 color=#FF8C42`)
|
|
||||||
for (const cat of week.categories.slice(0, 8)) {
|
|
||||||
const bar = miniBar(cat.cost, weekMaxCat)
|
|
||||||
const name = sanitizeMenubarLabel(cat.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(cat.cost).padStart(8)} ${String(cat.turns).padStart(4)} turns | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
lines.push(`-----`)
|
|
||||||
lines.push(`--Models | size=12 color=#FF8C42`)
|
|
||||||
for (const model of week.models.slice(0, 5)) {
|
|
||||||
if (model.name === '<synthetic>') continue
|
|
||||||
const bar = miniBar(model.cost, weekMaxModel)
|
|
||||||
const name = sanitizeMenubarLabel(model.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(model.cost).padStart(8)} ${String(model.calls).padStart(5)} calls | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
|
|
||||||
lines.push(`30 Days ${formatCost(thirtyDays.cost)} ${thirtyDays.calls.toLocaleString()} calls | size=14`)
|
|
||||||
const tdMaxCat = Math.max(...thirtyDays.categories.map(c => c.cost), 0.01)
|
|
||||||
const tdMaxModel = Math.max(...thirtyDays.models.filter(m => m.name !== '<synthetic>').map(m => m.cost), 0.01)
|
|
||||||
lines.push(`--Activity | size=12 color=#FF8C42`)
|
|
||||||
for (const cat of thirtyDays.categories.slice(0, 8)) {
|
|
||||||
const bar = miniBar(cat.cost, tdMaxCat)
|
|
||||||
const name = sanitizeMenubarLabel(cat.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(cat.cost).padStart(8)} ${String(cat.turns).padStart(4)} turns | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
lines.push(`-----`)
|
|
||||||
lines.push(`--Models | size=12 color=#FF8C42`)
|
|
||||||
for (const model of thirtyDays.models.slice(0, 5)) {
|
|
||||||
if (model.name === '<synthetic>') continue
|
|
||||||
const bar = miniBar(model.cost, tdMaxModel)
|
|
||||||
const name = sanitizeMenubarLabel(model.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(model.cost).padStart(8)} ${String(model.calls).padStart(5)} calls | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
|
|
||||||
lines.push(`Month ${formatCost(month.cost)} ${month.calls.toLocaleString()} calls | size=14`)
|
|
||||||
const monthMaxCat = Math.max(...month.categories.map(c => c.cost), 0.01)
|
|
||||||
const monthMaxModel = Math.max(...month.models.filter(m => m.name !== '<synthetic>').map(m => m.cost), 0.01)
|
|
||||||
lines.push(`--Activity | size=12 color=#FF8C42`)
|
|
||||||
for (const cat of month.categories.slice(0, 8)) {
|
|
||||||
const bar = miniBar(cat.cost, monthMaxCat)
|
|
||||||
const name = sanitizeMenubarLabel(cat.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(cat.cost).padStart(8)} ${String(cat.turns).padStart(4)} turns | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
lines.push(`-----`)
|
|
||||||
lines.push(`--Models | size=12 color=#FF8C42`)
|
|
||||||
for (const model of month.models.slice(0, 5)) {
|
|
||||||
if (model.name === '<synthetic>') continue
|
|
||||||
const bar = miniBar(model.cost, monthMaxModel)
|
|
||||||
const name = sanitizeMenubarLabel(model.name).padEnd(14)
|
|
||||||
lines.push(`--${bar} ${name} ${formatCost(model.cost).padStart(8)} ${String(model.calls).padStart(5)} calls | font=Menlo size=11`)
|
|
||||||
}
|
|
||||||
|
|
||||||
lines.push('---')
|
|
||||||
const home = process.env.HOME ?? '~'
|
|
||||||
const bin = getCodeburnBin()
|
|
||||||
// Invoke the resolved `codeburn` binary directly. SwiftBar/xbar deliver
|
|
||||||
// each `paramN=` value as its own argv entry, so there's no shell
|
|
||||||
// quoting involved — and we don't ship the user to a `~/codeburn`
|
|
||||||
// checkout that only exists when running from a dev clone (#32).
|
|
||||||
lines.push(`Open Full Report | terminal=true shell=${bin} param1=report`)
|
|
||||||
lines.push(`Export CSV to Desktop | terminal=false shell=${bin} param1=export param2=-o param3=${home}/Desktop/codeburn-report.csv`)
|
|
||||||
|
|
||||||
// Currency submenu -- common currencies as clickable items.
|
|
||||||
// Clicking one runs 'codeburn currency XXX' and refreshes the plugin.
|
|
||||||
const activeCurrency = getCurrency().code
|
|
||||||
const currencies = [
|
|
||||||
{ code: 'USD', name: 'US Dollar' },
|
|
||||||
{ code: 'GBP', name: 'British Pound' },
|
|
||||||
{ code: 'EUR', name: 'Euro' },
|
|
||||||
{ code: 'AUD', name: 'Australian Dollar' },
|
|
||||||
{ code: 'CAD', name: 'Canadian Dollar' },
|
|
||||||
{ code: 'NZD', name: 'New Zealand Dollar' },
|
|
||||||
{ code: 'JPY', name: 'Japanese Yen' },
|
|
||||||
{ code: 'CHF', name: 'Swiss Franc' },
|
|
||||||
{ code: 'INR', name: 'Indian Rupee' },
|
|
||||||
{ code: 'BRL', name: 'Brazilian Real' },
|
|
||||||
{ code: 'SEK', name: 'Swedish Krona' },
|
|
||||||
{ code: 'SGD', name: 'Singapore Dollar' },
|
|
||||||
{ code: 'HKD', name: 'Hong Kong Dollar' },
|
|
||||||
{ code: 'KRW', name: 'South Korean Won' },
|
|
||||||
{ code: 'MXN', name: 'Mexican Peso' },
|
|
||||||
{ code: 'ZAR', name: 'South African Rand' },
|
|
||||||
{ code: 'DKK', name: 'Danish Krone' },
|
|
||||||
]
|
|
||||||
lines.push(`Currency: ${activeCurrency} | size=14`)
|
|
||||||
for (const { code, name } of currencies) {
|
|
||||||
const check = code === activeCurrency ? ' *' : ''
|
|
||||||
// The real CLI subcommand is `codeburn currency [code]` (with `--reset`
|
|
||||||
// for USD), not `codeburn config currency` — the latter doesn't exist
|
|
||||||
// and silently fails when SwiftBar runs it. Fixes #27.
|
|
||||||
if (code === 'USD') {
|
|
||||||
lines.push(`--${name} (${code})${check} | terminal=false refresh=true shell=${bin} param1=currency param2=--reset`)
|
|
||||||
} else {
|
|
||||||
lines.push(`--${name} (${code})${check} | terminal=false refresh=true shell=${bin} param1=currency param2=${code}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
lines.push(`Refresh | refresh=true`)
|
|
||||||
|
|
||||||
return lines.join('\n')
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function installMenubar(): Promise<string> {
|
|
||||||
if (platform() !== 'darwin') {
|
|
||||||
return 'Menu bar integration is only available on macOS. Use `codeburn watch` or `codeburn status` instead.'
|
|
||||||
}
|
|
||||||
|
|
||||||
const bin = getCodeburnBin()
|
|
||||||
const pluginContent = generatePlugin(bin)
|
|
||||||
|
|
||||||
const { pluginDir, appName } = chooseMenubarPluginDir(getSwiftBarPluginDirs(), getXbarPluginDir(), existsSync)
|
|
||||||
|
|
||||||
if (!existsSync(pluginDir)) {
|
|
||||||
await mkdir(pluginDir, { recursive: true })
|
|
||||||
}
|
|
||||||
|
|
||||||
const pluginPath = join(pluginDir, `codeburn.${PLUGIN_REFRESH}.sh`)
|
|
||||||
await writeFile(pluginPath, pluginContent, 'utf-8')
|
|
||||||
await chmod(pluginPath, 0o755)
|
|
||||||
|
|
||||||
const swiftbarInstalled = existsSync('/Applications/SwiftBar.app') || existsSync(join(homedir(), 'Applications', 'SwiftBar.app'))
|
|
||||||
const xbarInstalled = existsSync('/Applications/xbar.app') || existsSync(join(homedir(), 'Applications', 'xbar.app'))
|
|
||||||
|
|
||||||
const lines: string[] = []
|
|
||||||
lines.push(`\n Plugin installed to: ${pluginPath}`)
|
|
||||||
|
|
||||||
if (swiftbarInstalled || xbarInstalled) {
|
|
||||||
lines.push(` ${appName} detected - plugin should appear in your menu bar shortly.`)
|
|
||||||
lines.push(` If not, open ${appName} and refresh plugins.\n`)
|
|
||||||
} else {
|
|
||||||
lines.push(`\n To see CodeBurn in your menu bar, install SwiftBar:`)
|
|
||||||
lines.push(` brew install --cask swiftbar`)
|
|
||||||
lines.push(`\n Then launch SwiftBar - the plugin will load automatically.\n`)
|
|
||||||
}
|
|
||||||
|
|
||||||
return lines.join('\n')
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function uninstallMenubar(): Promise<string> {
|
|
||||||
const paths = [
|
|
||||||
...getSwiftBarPluginDirs().map(dir => join(dir, `codeburn.${PLUGIN_REFRESH}.sh`)),
|
|
||||||
join(getXbarPluginDir(), `codeburn.${PLUGIN_REFRESH}.sh`),
|
|
||||||
]
|
|
||||||
|
|
||||||
let removed = false
|
|
||||||
for (const p of paths) {
|
|
||||||
if (existsSync(p)) {
|
|
||||||
await unlink(p)
|
|
||||||
removed = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return removed
|
|
||||||
? '\n Menu bar plugin removed.\n'
|
|
||||||
: '\n No menu bar plugin found.\n'
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
import { readdir } from 'fs/promises'
|
import { readdir, stat } from 'fs/promises'
|
||||||
import { basename, join } from 'path'
|
import { basename, join } from 'path'
|
||||||
import { readSessionFile } from './fs-utils.js'
|
import { readSessionFile } from './fs-utils.js'
|
||||||
import { calculateCost, getShortModelName } from './models.js'
|
import { calculateCost, getShortModelName } from './models.js'
|
||||||
|
|
@ -266,6 +266,15 @@ async function parseSessionFile(
|
||||||
seenMsgIds: Set<string>,
|
seenMsgIds: Set<string>,
|
||||||
dateRange?: DateRange,
|
dateRange?: DateRange,
|
||||||
): Promise<SessionSummary | null> {
|
): Promise<SessionSummary | null> {
|
||||||
|
// Skip files whose mtime is older than the range start. A session file
|
||||||
|
// can only contain entries up to its last-modified time; if that predates
|
||||||
|
// the requested range, nothing in this file can match.
|
||||||
|
if (dateRange) {
|
||||||
|
try {
|
||||||
|
const s = await stat(filePath)
|
||||||
|
if (s.mtimeMs < dateRange.start.getTime()) return null
|
||||||
|
} catch { /* fall through to normal read; missing stat shouldn't break parsing */ }
|
||||||
|
}
|
||||||
const content = await readSessionFile(filePath)
|
const content = await readSessionFile(filePath)
|
||||||
if (content === null) return null
|
if (content === null) return null
|
||||||
const lines = content.split('\n').filter(l => l.trim())
|
const lines = content.split('\n').filter(l => l.trim())
|
||||||
|
|
@ -388,6 +397,12 @@ async function parseProviderSources(
|
||||||
const sessionMap = new Map<string, { project: string; turns: ClassifiedTurn[] }>()
|
const sessionMap = new Map<string, { project: string; turns: ClassifiedTurn[] }>()
|
||||||
|
|
||||||
for (const source of sources) {
|
for (const source of sources) {
|
||||||
|
if (dateRange) {
|
||||||
|
try {
|
||||||
|
const s = await stat(source.path)
|
||||||
|
if (s.mtimeMs < dateRange.start.getTime()) continue
|
||||||
|
} catch { /* fall through; treat unknown stat as "may contain data" */ }
|
||||||
|
}
|
||||||
const parser = provider.createSessionParser(
|
const parser = provider.createSessionParser(
|
||||||
{ path: source.path, project: source.project, provider: providerName },
|
{ path: source.path, project: source.project, provider: providerName },
|
||||||
seenKeys,
|
seenKeys,
|
||||||
|
|
|
||||||
189
tests/daily-cache.test.ts
Normal file
189
tests/daily-cache.test.ts
Normal file
|
|
@ -0,0 +1,189 @@
|
||||||
|
import { afterEach, beforeEach, describe, expect, it } from 'vitest'
|
||||||
|
import { readFile, rm } from 'fs/promises'
|
||||||
|
import { existsSync } from 'fs'
|
||||||
|
import { tmpdir } from 'os'
|
||||||
|
import { join } from 'path'
|
||||||
|
|
||||||
|
import {
|
||||||
|
addNewDays,
|
||||||
|
DAILY_CACHE_VERSION,
|
||||||
|
type DailyCache,
|
||||||
|
type DailyEntry,
|
||||||
|
getDaysInRange,
|
||||||
|
loadDailyCache,
|
||||||
|
saveDailyCache,
|
||||||
|
withDailyCacheLock,
|
||||||
|
} from '../src/daily-cache.js'
|
||||||
|
|
||||||
|
function emptyDay(date: string, cost = 0, calls = 0): DailyEntry {
|
||||||
|
return {
|
||||||
|
date,
|
||||||
|
cost,
|
||||||
|
calls,
|
||||||
|
sessions: 0,
|
||||||
|
inputTokens: 0,
|
||||||
|
outputTokens: 0,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
editTurns: 0,
|
||||||
|
oneShotTurns: 0,
|
||||||
|
models: {},
|
||||||
|
categories: {},
|
||||||
|
providers: {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const TMP_CACHE_ROOT = join(tmpdir(), `codeburn-cache-test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`)
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env['CODEBURN_CACHE_DIR'] = TMP_CACHE_ROOT
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
delete process.env['CODEBURN_CACHE_DIR']
|
||||||
|
if (existsSync(TMP_CACHE_ROOT)) {
|
||||||
|
await rm(TMP_CACHE_ROOT, { recursive: true, force: true })
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('loadDailyCache', () => {
|
||||||
|
it('returns an empty cache when the file does not exist', async () => {
|
||||||
|
const cache = await loadDailyCache()
|
||||||
|
expect(cache.version).toBe(DAILY_CACHE_VERSION)
|
||||||
|
expect(cache.lastComputedDate).toBeNull()
|
||||||
|
expect(cache.days).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns an empty cache when the file contains invalid JSON', async () => {
|
||||||
|
const { writeFile, mkdir } = await import('fs/promises')
|
||||||
|
await mkdir(TMP_CACHE_ROOT, { recursive: true })
|
||||||
|
await writeFile(join(TMP_CACHE_ROOT, 'daily-cache.json'), 'not valid json{{', 'utf-8')
|
||||||
|
const cache = await loadDailyCache()
|
||||||
|
expect(cache.days).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns an empty cache when the version does not match', async () => {
|
||||||
|
const saved: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION - 999,
|
||||||
|
lastComputedDate: '2026-04-10',
|
||||||
|
days: [emptyDay('2026-04-10', 10)],
|
||||||
|
}
|
||||||
|
const { writeFile, mkdir } = await import('fs/promises')
|
||||||
|
await mkdir(TMP_CACHE_ROOT, { recursive: true })
|
||||||
|
await writeFile(join(TMP_CACHE_ROOT, 'daily-cache.json'), JSON.stringify(saved), 'utf-8')
|
||||||
|
const cache = await loadDailyCache()
|
||||||
|
expect(cache.days).toEqual([])
|
||||||
|
expect(cache.lastComputedDate).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('round-trips a valid cache through save and load', async () => {
|
||||||
|
const saved: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-10',
|
||||||
|
days: [emptyDay('2026-04-09', 12.5, 40), emptyDay('2026-04-10', 7.25, 28)],
|
||||||
|
}
|
||||||
|
await saveDailyCache(saved)
|
||||||
|
const loaded = await loadDailyCache()
|
||||||
|
expect(loaded).toEqual(saved)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('saveDailyCache', () => {
|
||||||
|
it('writes atomically so no temp file is left after a successful save', async () => {
|
||||||
|
const saved: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-10',
|
||||||
|
days: [emptyDay('2026-04-10', 5)],
|
||||||
|
}
|
||||||
|
await saveDailyCache(saved)
|
||||||
|
const { readdir } = await import('fs/promises')
|
||||||
|
const files = await readdir(TMP_CACHE_ROOT)
|
||||||
|
const tempLeftovers = files.filter(f => f.endsWith('.tmp'))
|
||||||
|
expect(tempLeftovers).toEqual([])
|
||||||
|
const finalFile = await readFile(join(TMP_CACHE_ROOT, 'daily-cache.json'), 'utf-8')
|
||||||
|
expect(JSON.parse(finalFile)).toEqual(saved)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('addNewDays', () => {
|
||||||
|
it('returns a new cache with the added days sorted ascending by date', () => {
|
||||||
|
const base: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-08',
|
||||||
|
days: [emptyDay('2026-04-07', 3), emptyDay('2026-04-08', 5)],
|
||||||
|
}
|
||||||
|
const updated = addNewDays(base, [emptyDay('2026-04-10', 9), emptyDay('2026-04-09', 7)], '2026-04-10')
|
||||||
|
expect(updated.days.map(d => d.date)).toEqual(['2026-04-07', '2026-04-08', '2026-04-09', '2026-04-10'])
|
||||||
|
expect(updated.lastComputedDate).toBe('2026-04-10')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('skips days already present in the cache (first write wins)', () => {
|
||||||
|
const base: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-08',
|
||||||
|
days: [emptyDay('2026-04-08', 5)],
|
||||||
|
}
|
||||||
|
const updated = addNewDays(base, [emptyDay('2026-04-08', 99)], '2026-04-08')
|
||||||
|
const aprilEight = updated.days.find(d => d.date === '2026-04-08')!
|
||||||
|
expect(aprilEight.cost).toBe(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not regress lastComputedDate if incoming newestDate is older', () => {
|
||||||
|
const base: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-10',
|
||||||
|
days: [emptyDay('2026-04-10', 5)],
|
||||||
|
}
|
||||||
|
const updated = addNewDays(base, [emptyDay('2026-04-05', 3)], '2026-04-05')
|
||||||
|
expect(updated.lastComputedDate).toBe('2026-04-10')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('getDaysInRange', () => {
|
||||||
|
const cache: DailyCache = {
|
||||||
|
version: DAILY_CACHE_VERSION,
|
||||||
|
lastComputedDate: '2026-04-10',
|
||||||
|
days: [
|
||||||
|
emptyDay('2026-04-05', 1),
|
||||||
|
emptyDay('2026-04-06', 2),
|
||||||
|
emptyDay('2026-04-07', 3),
|
||||||
|
emptyDay('2026-04-08', 4),
|
||||||
|
emptyDay('2026-04-09', 5),
|
||||||
|
emptyDay('2026-04-10', 6),
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
it('returns inclusive start and end range', () => {
|
||||||
|
const days = getDaysInRange(cache, '2026-04-07', '2026-04-09')
|
||||||
|
expect(days.map(d => d.date)).toEqual(['2026-04-07', '2026-04-08', '2026-04-09'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns empty when range is entirely outside cache', () => {
|
||||||
|
expect(getDaysInRange(cache, '2026-03-01', '2026-03-10')).toEqual([])
|
||||||
|
expect(getDaysInRange(cache, '2026-05-01', '2026-05-10')).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('clips to available cache days when range extends beyond', () => {
|
||||||
|
const days = getDaysInRange(cache, '2026-04-09', '2026-04-20')
|
||||||
|
expect(days.map(d => d.date)).toEqual(['2026-04-09', '2026-04-10'])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('withDailyCacheLock', () => {
|
||||||
|
it('serializes concurrent operations', async () => {
|
||||||
|
const sequence: string[] = []
|
||||||
|
const op = async (tag: string): Promise<void> => {
|
||||||
|
await withDailyCacheLock(async () => {
|
||||||
|
sequence.push(`start-${tag}`)
|
||||||
|
await new Promise(r => setTimeout(r, 20))
|
||||||
|
sequence.push(`end-${tag}`)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
await Promise.all([op('a'), op('b'), op('c')])
|
||||||
|
for (let i = 0; i < sequence.length; i += 2) {
|
||||||
|
expect(sequence[i]?.startsWith('start-')).toBe(true)
|
||||||
|
expect(sequence[i + 1]?.startsWith('end-')).toBe(true)
|
||||||
|
expect(sequence[i]!.slice(6)).toBe(sequence[i + 1]!.slice(4))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
258
tests/day-aggregator.test.ts
Normal file
258
tests/day-aggregator.test.ts
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
import { describe, expect, it } from 'vitest'
|
||||||
|
|
||||||
|
import { aggregateProjectsIntoDays, buildPeriodDataFromDays } from '../src/day-aggregator.js'
|
||||||
|
import type { ProjectSummary } from '../src/types.js'
|
||||||
|
|
||||||
|
function makeProject(overrides: Partial<ProjectSummary> & { sessions: ProjectSummary['sessions'] }): ProjectSummary {
|
||||||
|
return {
|
||||||
|
project: 'p',
|
||||||
|
projectPath: '/p',
|
||||||
|
totalCostUSD: overrides.sessions.reduce((s, sess) => s + sess.totalCostUSD, 0),
|
||||||
|
totalApiCalls: overrides.sessions.reduce((s, sess) => s + sess.apiCalls, 0),
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeCall(timestamp: string, costUSD: number, model = 'Opus 4.7', provider = 'claude') {
|
||||||
|
return {
|
||||||
|
provider,
|
||||||
|
model,
|
||||||
|
usage: {
|
||||||
|
inputTokens: 100,
|
||||||
|
outputTokens: 200,
|
||||||
|
cacheCreationInputTokens: 0,
|
||||||
|
cacheReadInputTokens: 50,
|
||||||
|
cachedInputTokens: 0,
|
||||||
|
reasoningTokens: 0,
|
||||||
|
webSearchRequests: 0,
|
||||||
|
},
|
||||||
|
costUSD,
|
||||||
|
tools: [],
|
||||||
|
mcpTools: [],
|
||||||
|
hasAgentSpawn: false,
|
||||||
|
hasPlanMode: false,
|
||||||
|
speed: 'standard' as const,
|
||||||
|
timestamp,
|
||||||
|
bashCommands: [],
|
||||||
|
deduplicationKey: `dk-${timestamp}-${costUSD}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('aggregateProjectsIntoDays', () => {
|
||||||
|
it('buckets api calls by calendar date derived from timestamp', () => {
|
||||||
|
const projects: ProjectSummary[] = [
|
||||||
|
makeProject({
|
||||||
|
sessions: [{
|
||||||
|
sessionId: 's1',
|
||||||
|
project: 'p',
|
||||||
|
firstTimestamp: '2026-04-09T10:00:00Z',
|
||||||
|
lastTimestamp: '2026-04-10T08:00:00Z',
|
||||||
|
totalCostUSD: 10,
|
||||||
|
totalInputTokens: 0,
|
||||||
|
totalOutputTokens: 0,
|
||||||
|
totalCacheReadTokens: 0,
|
||||||
|
totalCacheWriteTokens: 0,
|
||||||
|
apiCalls: 2,
|
||||||
|
turns: [
|
||||||
|
{
|
||||||
|
userMessage: 'hi',
|
||||||
|
timestamp: '2026-04-09T10:00:00Z',
|
||||||
|
sessionId: 's1',
|
||||||
|
category: 'coding',
|
||||||
|
retries: 0,
|
||||||
|
hasEdits: true,
|
||||||
|
assistantCalls: [
|
||||||
|
makeCall('2026-04-09T10:00:00Z', 4),
|
||||||
|
makeCall('2026-04-10T08:00:00Z', 6),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
modelBreakdown: {},
|
||||||
|
toolBreakdown: {},
|
||||||
|
mcpBreakdown: {},
|
||||||
|
bashBreakdown: {},
|
||||||
|
categoryBreakdown: {} as never,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
]
|
||||||
|
|
||||||
|
const days = aggregateProjectsIntoDays(projects)
|
||||||
|
expect(days.map(d => d.date)).toEqual(['2026-04-09', '2026-04-10'])
|
||||||
|
expect(days[0]!.cost).toBe(4)
|
||||||
|
expect(days[0]!.calls).toBe(1)
|
||||||
|
expect(days[1]!.cost).toBe(6)
|
||||||
|
expect(days[1]!.calls).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('attributes category turns + editTurns + oneShotTurns to the first call date of the turn', () => {
|
||||||
|
const projects: ProjectSummary[] = [
|
||||||
|
makeProject({
|
||||||
|
sessions: [{
|
||||||
|
sessionId: 's1',
|
||||||
|
project: 'p',
|
||||||
|
firstTimestamp: '2026-04-09T10:00:00Z',
|
||||||
|
lastTimestamp: '2026-04-09T10:05:00Z',
|
||||||
|
totalCostUSD: 3,
|
||||||
|
totalInputTokens: 0,
|
||||||
|
totalOutputTokens: 0,
|
||||||
|
totalCacheReadTokens: 0,
|
||||||
|
totalCacheWriteTokens: 0,
|
||||||
|
apiCalls: 1,
|
||||||
|
turns: [
|
||||||
|
{
|
||||||
|
userMessage: 'hi',
|
||||||
|
timestamp: '2026-04-09T10:00:00Z',
|
||||||
|
sessionId: 's1',
|
||||||
|
category: 'coding',
|
||||||
|
retries: 0,
|
||||||
|
hasEdits: true,
|
||||||
|
assistantCalls: [makeCall('2026-04-09T10:00:00Z', 3)],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
modelBreakdown: {},
|
||||||
|
toolBreakdown: {},
|
||||||
|
mcpBreakdown: {},
|
||||||
|
bashBreakdown: {},
|
||||||
|
categoryBreakdown: {} as never,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
]
|
||||||
|
const days = aggregateProjectsIntoDays(projects)
|
||||||
|
const day = days[0]!
|
||||||
|
expect(day.editTurns).toBe(1)
|
||||||
|
expect(day.oneShotTurns).toBe(1)
|
||||||
|
expect(day.categories['coding']).toEqual({
|
||||||
|
turns: 1,
|
||||||
|
cost: 3,
|
||||||
|
editTurns: 1,
|
||||||
|
oneShotTurns: 1,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it('counts a session under its firstTimestamp date', () => {
|
||||||
|
const projects: ProjectSummary[] = [
|
||||||
|
makeProject({
|
||||||
|
sessions: [{
|
||||||
|
sessionId: 's1',
|
||||||
|
project: 'p',
|
||||||
|
firstTimestamp: '2026-04-09T23:59:00Z',
|
||||||
|
lastTimestamp: '2026-04-10T00:10:00Z',
|
||||||
|
totalCostUSD: 1,
|
||||||
|
totalInputTokens: 0, totalOutputTokens: 0, totalCacheReadTokens: 0, totalCacheWriteTokens: 0,
|
||||||
|
apiCalls: 0,
|
||||||
|
turns: [],
|
||||||
|
modelBreakdown: {}, toolBreakdown: {}, mcpBreakdown: {}, bashBreakdown: {},
|
||||||
|
categoryBreakdown: {} as never,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
]
|
||||||
|
const days = aggregateProjectsIntoDays(projects)
|
||||||
|
expect(days[0]!.date).toBe('2026-04-09')
|
||||||
|
expect(days[0]!.sessions).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('aggregates per-model and per-provider totals inside each day', () => {
|
||||||
|
const projects: ProjectSummary[] = [
|
||||||
|
makeProject({
|
||||||
|
sessions: [{
|
||||||
|
sessionId: 's1',
|
||||||
|
project: 'p',
|
||||||
|
firstTimestamp: '2026-04-10T10:00:00Z',
|
||||||
|
lastTimestamp: '2026-04-10T10:00:00Z',
|
||||||
|
totalCostUSD: 10,
|
||||||
|
totalInputTokens: 0, totalOutputTokens: 0, totalCacheReadTokens: 0, totalCacheWriteTokens: 0,
|
||||||
|
apiCalls: 2,
|
||||||
|
turns: [
|
||||||
|
{
|
||||||
|
userMessage: 'x', timestamp: '2026-04-10T10:00:00Z', sessionId: 's1',
|
||||||
|
category: 'coding', retries: 0, hasEdits: false,
|
||||||
|
assistantCalls: [
|
||||||
|
makeCall('2026-04-10T10:00:00Z', 7, 'Opus 4.7', 'claude'),
|
||||||
|
makeCall('2026-04-10T10:00:00Z', 3, 'gpt-5', 'codex'),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
modelBreakdown: {}, toolBreakdown: {}, mcpBreakdown: {}, bashBreakdown: {},
|
||||||
|
categoryBreakdown: {} as never,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
]
|
||||||
|
const days = aggregateProjectsIntoDays(projects)
|
||||||
|
const day = days[0]!
|
||||||
|
expect(day.models['Opus 4.7']).toEqual({
|
||||||
|
calls: 1, cost: 7,
|
||||||
|
inputTokens: 100, outputTokens: 200,
|
||||||
|
cacheReadTokens: 50, cacheWriteTokens: 0,
|
||||||
|
})
|
||||||
|
expect(day.models['gpt-5']).toEqual({
|
||||||
|
calls: 1, cost: 3,
|
||||||
|
inputTokens: 100, outputTokens: 200,
|
||||||
|
cacheReadTokens: 50, cacheWriteTokens: 0,
|
||||||
|
})
|
||||||
|
expect(day.providers['claude']).toEqual({ calls: 1, cost: 7 })
|
||||||
|
expect(day.providers['codex']).toEqual({ calls: 1, cost: 3 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('buildPeriodDataFromDays', () => {
|
||||||
|
function makeDay(date: string, cost: number) {
|
||||||
|
return {
|
||||||
|
date,
|
||||||
|
cost,
|
||||||
|
calls: 10,
|
||||||
|
sessions: 2,
|
||||||
|
inputTokens: 100,
|
||||||
|
outputTokens: 200,
|
||||||
|
cacheReadTokens: 300,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
editTurns: 3,
|
||||||
|
oneShotTurns: 2,
|
||||||
|
models: {
|
||||||
|
'Opus 4.7': { calls: 8, cost: cost * 0.8, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 },
|
||||||
|
'Haiku 4.5': { calls: 2, cost: cost * 0.2, inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0 },
|
||||||
|
},
|
||||||
|
categories: { 'coding': { turns: 2, cost: cost * 0.5, editTurns: 2, oneShotTurns: 1 } },
|
||||||
|
providers: { 'claude': { calls: 10, cost } },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
it('sums cost, calls, sessions, tokens across days', () => {
|
||||||
|
const days = [makeDay('2026-04-09', 10), makeDay('2026-04-10', 20)]
|
||||||
|
const pd = buildPeriodDataFromDays(days, '7 Days')
|
||||||
|
expect(pd.label).toBe('7 Days')
|
||||||
|
expect(pd.cost).toBe(30)
|
||||||
|
expect(pd.calls).toBe(20)
|
||||||
|
expect(pd.sessions).toBe(4)
|
||||||
|
expect(pd.inputTokens).toBe(200)
|
||||||
|
expect(pd.outputTokens).toBe(400)
|
||||||
|
expect(pd.cacheReadTokens).toBe(600)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('merges per-model totals across days and sorts by cost desc', () => {
|
||||||
|
const days = [makeDay('2026-04-09', 10), makeDay('2026-04-10', 20)]
|
||||||
|
const pd = buildPeriodDataFromDays(days, 'Today')
|
||||||
|
expect(pd.models[0]!.name).toBe('Opus 4.7')
|
||||||
|
expect(pd.models[0]!.cost).toBeCloseTo(24)
|
||||||
|
expect(pd.models[1]!.name).toBe('Haiku 4.5')
|
||||||
|
expect(pd.models[1]!.cost).toBeCloseTo(6)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('merges per-category totals and keeps editTurns + oneShotTurns per category', () => {
|
||||||
|
const days = [makeDay('2026-04-09', 10), makeDay('2026-04-10', 20)]
|
||||||
|
const pd = buildPeriodDataFromDays(days, 'Today')
|
||||||
|
const coding = pd.categories.find(c => c.name === 'Coding')!
|
||||||
|
expect(coding.turns).toBe(4)
|
||||||
|
expect(coding.editTurns).toBe(4)
|
||||||
|
expect(coding.oneShotTurns).toBe(2)
|
||||||
|
expect(coding.cost).toBeCloseTo(15)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns empty period totals when no days supplied', () => {
|
||||||
|
const pd = buildPeriodDataFromDays([], 'Today')
|
||||||
|
expect(pd.cost).toBe(0)
|
||||||
|
expect(pd.calls).toBe(0)
|
||||||
|
expect(pd.sessions).toBe(0)
|
||||||
|
expect(pd.categories).toEqual([])
|
||||||
|
expect(pd.models).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -120,8 +120,15 @@ describe('exportCsv', () => {
|
||||||
]
|
]
|
||||||
|
|
||||||
const outputPath = join(tmpDir, 'report.csv')
|
const outputPath = join(tmpDir, 'report.csv')
|
||||||
await exportCsv(periods, outputPath)
|
const folder = await exportCsv(periods, outputPath)
|
||||||
const content = await readFile(outputPath, 'utf-8')
|
// exportCsv now writes a folder of clean one-table-per-file CSVs, so the formula-prefix
|
||||||
|
// guard is scattered across files. Concatenate them for the assertion surface.
|
||||||
|
const [projects, models, shell] = await Promise.all([
|
||||||
|
readFile(join(folder, 'projects.csv'), 'utf-8'),
|
||||||
|
readFile(join(folder, 'models.csv'), 'utf-8'),
|
||||||
|
readFile(join(folder, 'shell-commands.csv'), 'utf-8'),
|
||||||
|
])
|
||||||
|
const content = projects + models + shell
|
||||||
|
|
||||||
expect(content).toContain("\"'=cmd,calc\"")
|
expect(content).toContain("\"'=cmd,calc\"")
|
||||||
expect(content).toContain("'+danger-model")
|
expect(content).toContain("'+danger-model")
|
||||||
|
|
|
||||||
234
tests/menubar-json.test.ts
Normal file
234
tests/menubar-json.test.ts
Normal file
|
|
@ -0,0 +1,234 @@
|
||||||
|
import { describe, expect, it } from 'vitest'
|
||||||
|
|
||||||
|
import { buildMenubarPayload, type PeriodData, type ProviderCost } from '../src/menubar-json.js'
|
||||||
|
import type { OptimizeResult } from '../src/optimize.js'
|
||||||
|
|
||||||
|
function emptyPeriod(label: string): PeriodData {
|
||||||
|
return {
|
||||||
|
label,
|
||||||
|
cost: 0,
|
||||||
|
calls: 0,
|
||||||
|
sessions: 0,
|
||||||
|
inputTokens: 0,
|
||||||
|
outputTokens: 0,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
categories: [],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('buildMenubarPayload', () => {
|
||||||
|
it('emits the full schema with current-period metrics and iso timestamp', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: '7 Days',
|
||||||
|
cost: 1248.01,
|
||||||
|
calls: 11231,
|
||||||
|
sessions: 97,
|
||||||
|
inputTokens: 19100,
|
||||||
|
outputTokens: 675600,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
categories: [],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
|
||||||
|
expect(payload.generated).toMatch(/^\d{4}-\d{2}-\d{2}T/)
|
||||||
|
expect(payload.current.label).toBe('7 Days')
|
||||||
|
expect(payload.current.cost).toBe(1248.01)
|
||||||
|
expect(payload.current.calls).toBe(11231)
|
||||||
|
expect(payload.current.sessions).toBe(97)
|
||||||
|
expect(payload.current.inputTokens).toBe(19100)
|
||||||
|
expect(payload.current.outputTokens).toBe(675600)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('computes per-category oneShotRate from editTurns and skips categories without edits', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
categories: [
|
||||||
|
{ name: 'Coding', cost: 15.83, turns: 7, editTurns: 7, oneShotTurns: 6 },
|
||||||
|
{ name: 'Conversation', cost: 16.69, turns: 47, editTurns: 0, oneShotTurns: 0 },
|
||||||
|
],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
|
||||||
|
const coding = payload.current.topActivities.find(a => a.name === 'Coding')!
|
||||||
|
expect(coding.oneShotRate).toBeCloseTo(6 / 7)
|
||||||
|
|
||||||
|
const conv = payload.current.topActivities.find(a => a.name === 'Conversation')!
|
||||||
|
expect(conv.oneShotRate).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('computes aggregate oneShotRate across categories with edits', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
categories: [
|
||||||
|
{ name: 'Coding', cost: 1, turns: 7, editTurns: 10, oneShotTurns: 8 },
|
||||||
|
{ name: 'Debugging', cost: 1, turns: 5, editTurns: 10, oneShotTurns: 6 },
|
||||||
|
{ name: 'Conversation', cost: 1, turns: 40, editTurns: 0, oneShotTurns: 0 },
|
||||||
|
],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
expect(payload.current.oneShotRate).toBeCloseTo((8 + 6) / (10 + 10))
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns null aggregate oneShotRate when no categories have editTurns', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
categories: [{ name: 'Conversation', cost: 1, turns: 5, editTurns: 0, oneShotTurns: 0 }],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
expect(payload.current.oneShotRate).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('filters out the synthetic model and caps topModels at 20 so multi-model users see all their models', () => {
|
||||||
|
const models = Array.from({ length: 30 }, (_, i) => ({
|
||||||
|
name: `Model${i}`, cost: 30 - i, calls: 100,
|
||||||
|
}))
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
categories: [],
|
||||||
|
models: [{ name: '<synthetic>', cost: 99, calls: 0 }, ...models],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
expect(payload.current.topModels.find(m => m.name === '<synthetic>')).toBeUndefined()
|
||||||
|
expect(payload.current.topModels).toHaveLength(20)
|
||||||
|
expect(payload.current.topModels[0].name).toBe('Model0')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('caps topActivities at 20 so all task categories can surface', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheWriteTokens: 0,
|
||||||
|
categories: Array.from({ length: 25 }, (_, i) => ({
|
||||||
|
name: `Cat${i}`, cost: 1, turns: 1, editTurns: 1, oneShotTurns: 1,
|
||||||
|
})),
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
expect(payload.current.topActivities).toHaveLength(20)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('computes cacheHitPercent from cache reads over input plus cache reads', () => {
|
||||||
|
const period: PeriodData = {
|
||||||
|
label: 'Today',
|
||||||
|
cost: 0, calls: 0, sessions: 0,
|
||||||
|
inputTokens: 100,
|
||||||
|
outputTokens: 200,
|
||||||
|
cacheReadTokens: 900,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
categories: [],
|
||||||
|
models: [],
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(period, [], null)
|
||||||
|
expect(payload.current.cacheHitPercent).toBeCloseTo(90)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns zero cacheHitPercent when there is no input or cache traffic', () => {
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], null)
|
||||||
|
expect(payload.current.cacheHitPercent).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('handles null optimize as empty findings block', () => {
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], null)
|
||||||
|
expect(payload.optimize).toEqual({ findingCount: 0, savingsUSD: 0, topFindings: [] })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('converts tokensSaved to savingsUSD via costRate and caps topFindings at 10', () => {
|
||||||
|
const findings = Array.from({ length: 15 }, (_, i) => ({
|
||||||
|
title: `F${i}`, explanation: '', impact: 'low' as const, tokensSaved: 1000,
|
||||||
|
fix: { type: 'paste' as const, label: '', text: '' },
|
||||||
|
}))
|
||||||
|
const optimize: OptimizeResult = {
|
||||||
|
findings,
|
||||||
|
costRate: 0.00002,
|
||||||
|
healthScore: 60,
|
||||||
|
healthGrade: 'C',
|
||||||
|
}
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], optimize)
|
||||||
|
|
||||||
|
expect(payload.optimize.findingCount).toBe(15)
|
||||||
|
expect(payload.optimize.topFindings).toHaveLength(10)
|
||||||
|
expect(payload.optimize.topFindings[0].title).toBe('F0')
|
||||||
|
expect(payload.optimize.topFindings[0].savingsUSD).toBeCloseTo(1000 * 0.00002)
|
||||||
|
expect(payload.optimize.savingsUSD).toBeCloseTo(15 * 1000 * 0.00002)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('maps providers into a lowercased dict inside the current-period block', () => {
|
||||||
|
const providers: ProviderCost[] = [
|
||||||
|
{ name: 'Claude Code', cost: 76.45 },
|
||||||
|
{ name: 'Cursor', cost: 2.18 },
|
||||||
|
{ name: 'Codex', cost: 1.5 },
|
||||||
|
]
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), providers, null)
|
||||||
|
expect(payload.current.providers).toEqual({ 'claude code': 76.45, cursor: 2.18, codex: 1.5 })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('keeps zero-cost providers in the dict so installed-but-unused providers still render as tabs', () => {
|
||||||
|
const providers: ProviderCost[] = [
|
||||||
|
{ name: 'Claude', cost: 76.45 },
|
||||||
|
{ name: 'Codex', cost: 0 },
|
||||||
|
{ name: 'Cursor', cost: 2.18 },
|
||||||
|
]
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), providers, null)
|
||||||
|
expect(payload.current.providers).toEqual({ claude: 76.45, codex: 0, cursor: 2.18 })
|
||||||
|
})
|
||||||
|
|
||||||
|
it('includes up to 365 daily history entries sorted ascending by date', () => {
|
||||||
|
const history = Array.from({ length: 400 }, (_, i) => {
|
||||||
|
const d = new Date(2025, 0, 1)
|
||||||
|
d.setDate(d.getDate() + i)
|
||||||
|
return {
|
||||||
|
date: d.toISOString().slice(0, 10),
|
||||||
|
cost: i,
|
||||||
|
calls: i * 10,
|
||||||
|
inputTokens: 0,
|
||||||
|
outputTokens: 0,
|
||||||
|
cacheReadTokens: 0,
|
||||||
|
cacheWriteTokens: 0,
|
||||||
|
topModels: [],
|
||||||
|
}
|
||||||
|
})
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], null, history)
|
||||||
|
expect(payload.history.daily).toHaveLength(365)
|
||||||
|
expect(payload.history.daily[0]!.date < payload.history.daily[364]!.date).toBe(true)
|
||||||
|
expect(payload.history.daily[364]!.date).toBe(history[399]!.date)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('preserves token fields in dailyHistory entries', () => {
|
||||||
|
const history = [
|
||||||
|
{ date: '2026-04-15', cost: 10, calls: 50, inputTokens: 100, outputTokens: 200, cacheReadTokens: 5000, cacheWriteTokens: 800, topModels: [{ name: 'Opus 4.7', cost: 8, calls: 40, inputTokens: 80, outputTokens: 160 }] },
|
||||||
|
{ date: '2026-04-16', cost: 20, calls: 75, inputTokens: 150, outputTokens: 350, cacheReadTokens: 8000, cacheWriteTokens: 1200, topModels: [] },
|
||||||
|
]
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], null, history)
|
||||||
|
expect(payload.history.daily[0]).toEqual(history[0])
|
||||||
|
expect(payload.history.daily[1]).toEqual(history[1])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns empty history when none supplied', () => {
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), [], null)
|
||||||
|
expect(payload.history.daily).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('drops providers with negative cost defensively', () => {
|
||||||
|
const providers: ProviderCost[] = [
|
||||||
|
{ name: 'Claude', cost: 76.45 },
|
||||||
|
{ name: 'Broken', cost: -1 },
|
||||||
|
]
|
||||||
|
const payload = buildMenubarPayload(emptyPeriod('Today'), providers, null)
|
||||||
|
expect(payload.current.providers).toEqual({ claude: 76.45 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
@ -1,57 +0,0 @@
|
||||||
import { describe, expect, it } from 'vitest'
|
|
||||||
import { join } from 'path'
|
|
||||||
import { homedir } from 'os'
|
|
||||||
|
|
||||||
import { chooseMenubarPluginDir, parsePluginDirectoryPreference } from '../src/menubar.js'
|
|
||||||
|
|
||||||
describe('parsePluginDirectoryPreference', () => {
|
|
||||||
it('trims defaults output and preserves spaces in paths', () => {
|
|
||||||
expect(parsePluginDirectoryPreference('/Users/test/Documents/Tech stuff/swiftbar_plugins\n')).toBe('/Users/test/Documents/Tech stuff/swiftbar_plugins')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('expands tilde paths', () => {
|
|
||||||
expect(parsePluginDirectoryPreference('~/swiftbar_plugins')).toBe(join(homedir(), 'swiftbar_plugins'))
|
|
||||||
})
|
|
||||||
|
|
||||||
it('ignores blank preference values', () => {
|
|
||||||
expect(parsePluginDirectoryPreference(' \n')).toBeUndefined()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('chooseMenubarPluginDir', () => {
|
|
||||||
const configuredSwiftBarDir = '/Users/test/Documents/Tech stuff/swiftbar_plugins'
|
|
||||||
const defaultSwiftBarDir = '/Users/test/Library/Application Support/SwiftBar/plugins'
|
|
||||||
const xbarDir = '/Users/test/Library/Application Support/xbar/plugins'
|
|
||||||
|
|
||||||
it('uses SwiftBar configured plugin directory before the default directory', () => {
|
|
||||||
const existing = new Set([configuredSwiftBarDir, defaultSwiftBarDir])
|
|
||||||
const result = chooseMenubarPluginDir(
|
|
||||||
[configuredSwiftBarDir, defaultSwiftBarDir],
|
|
||||||
xbarDir,
|
|
||||||
path => existing.has(path),
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(result).toEqual({ pluginDir: configuredSwiftBarDir, appName: 'SwiftBar' })
|
|
||||||
})
|
|
||||||
|
|
||||||
it('falls back to xbar when no SwiftBar plugin directory exists', () => {
|
|
||||||
const existing = new Set([xbarDir])
|
|
||||||
const result = chooseMenubarPluginDir(
|
|
||||||
[defaultSwiftBarDir],
|
|
||||||
xbarDir,
|
|
||||||
path => existing.has(path),
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(result).toEqual({ pluginDir: xbarDir, appName: 'xbar' })
|
|
||||||
})
|
|
||||||
|
|
||||||
it('creates the preferred SwiftBar directory when no plugin directory exists', () => {
|
|
||||||
const result = chooseMenubarPluginDir(
|
|
||||||
[configuredSwiftBarDir, defaultSwiftBarDir],
|
|
||||||
xbarDir,
|
|
||||||
() => false,
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(result).toEqual({ pluginDir: configuredSwiftBarDir, appName: 'SwiftBar' })
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
@ -1,47 +0,0 @@
|
||||||
import { describe, it, expect } from 'vitest'
|
|
||||||
|
|
||||||
import { renderMenubarFormat, type PeriodData } from '../../src/menubar.js'
|
|
||||||
|
|
||||||
const ESC = '\u001b'
|
|
||||||
|
|
||||||
function period(name: string): PeriodData {
|
|
||||||
return {
|
|
||||||
label: 'x',
|
|
||||||
cost: 0.01,
|
|
||||||
calls: 1,
|
|
||||||
inputTokens: 1,
|
|
||||||
outputTokens: 1,
|
|
||||||
cacheReadTokens: 0,
|
|
||||||
cacheWriteTokens: 0,
|
|
||||||
categories: [{ name, cost: 0.01, turns: 1, editTurns: 0, oneShotTurns: 1 }],
|
|
||||||
models: [{ name, cost: 0.01, calls: 1 }],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function linesWithToken(output: string, token: string): string[] {
|
|
||||||
return output.split('\n').filter(l => l.includes(token))
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('MEDIUM-2 menubar directive separator injection', () => {
|
|
||||||
it('strips pipe separators from model names', () => {
|
|
||||||
const p = period('foo | href=https://attacker.example/pwn')
|
|
||||||
const out = renderMenubarFormat(p, p, p, p)
|
|
||||||
for (const line of linesWithToken(out, 'foo')) {
|
|
||||||
expect(line.split('|').length).toBeLessThanOrEqual(2)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
it('strips ANSI escapes from model names', () => {
|
|
||||||
const p = period(`foo${ESC}[31mMODEL${ESC}[0m`)
|
|
||||||
const out = renderMenubarFormat(p, p, p, p)
|
|
||||||
expect(out).not.toContain(ESC)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('strips pipe separators from category names', () => {
|
|
||||||
const p = period('cat | color=red')
|
|
||||||
const out = renderMenubarFormat(p, p, p, p)
|
|
||||||
for (const line of linesWithToken(out, 'cat')) {
|
|
||||||
expect(line.split('|').length).toBeLessThanOrEqual(2)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue