Ruview/ui/services/training.service.js
rUv 113011e704
fix: WebSocket race condition, data source indicators, auto-start pose detection (#96)
* feat: RVF training pipeline & UI integration (ADR-036)

Implement full model training, management, and inference pipeline:

Backend (Rust):
- recording.rs: CSI recording API (start/stop/list/download/delete)
- model_manager.rs: RVF model loading, LoRA profile switching, model library
- training_api.rs: Training API with WebSocket progress streaming, simulated
  training mode with realistic loss curves, auto-RVF export on completion
- main.rs: Wire new modules, recording hooks in all CSI paths, data dirs

UI (new components):
- ModelPanel.js: Dark-mode model library with load/unload, LoRA dropdown
- TrainingPanel.js: Recording controls, training config, live Canvas charts
- model.service.js: Model REST API client with events
- training.service.js: Training + recording API client with WebSocket progress

UI (enhancements):
- LiveDemoTab: Model selector, LoRA profile switcher, A/B split view toggle,
  training quick-panel with 60s recording shortcut
- SettingsPanel: Full dark mode conversion (issue #92), model configuration
  (device, threads, auto-load), training configuration (epochs, LR, patience)
- PoseDetectionCanvas: 10-frame pose trail with ghost keypoints and motion
  trajectory lines, cyan trail toggle button
- pose.service.js: Model-inference confidence thresholds

UI (plumbing):
- index.html: Training tab (8th tab)
- app.js: Panel initialization and tab routing
- style.css: ~250 lines of training/model panel dark-mode styles

191 Rust tests pass, 0 failures. Closes #92.

Refs: ADR-036, #93

Co-Authored-By: claude-flow <ruv@ruv.net>

* fix: real RuVector training pipeline + UI service fixes

Training pipeline (training_api.rs):
- Replace simulated training with real signal-based training loop
- Load actual CSI data from .csi.jsonl recordings or live frame history
- Extract 180 features per frame: subcarrier amplitudes, temporal variance,
  Goertzel frequency analysis (9 bands), motion gradients, global stats
- Train calibrated linear CSI-to-pose mapping via mini-batch gradient descent
  with L2 regularization (ridge regression), Xavier init, cosine LR decay
- Self-supervised: teacher targets from derive_pose_from_sensing() heuristics
- Real validation metrics: MSE and PCK@0.2 on 80/20 train/val split
- Export trained .rvf with real weights, feature normalization stats, witness
- Add infer_pose_from_model() for live inference from trained model
- 16 new tests covering features, training, inference, serialization

UI fixes:
- Fix double-URL bug in model.service.js and training.service.js
  (buildApiUrl was called twice — once in service, once in apiService)
- Fix route paths to match Rust backend (/api/v1/train/*, /api/v1/recording/*)
- Fix request body formats (session_name, nested config object)
- Fix top-level await in LiveDemoTab.js blocking module graph
- Dynamic imports for ModelPanel/TrainingPanel in app.js
- Center nav tabs with flex-wrap for 8-tab layout

Co-Authored-By: claude-flow <ruv@ruv.net>

* fix: WebSocket onOpen race condition, data source indicators, auto-start pose detection

- Fix WebSocket onOpen race condition in websocket.service.js where
  setupEventHandlers replaced onopen after socket was already open,
  preventing pose service from receiving connection signal
- Add 4-state data source indicator (LIVE/SIMULATED/RECONNECTING/OFFLINE)
  across Dashboard, Sensing, and Live Demo tabs via sensing.service.js
- Add hot-plug ESP32 auto-detection in sensing server (auto mode runs
  both UDP listener and simulation, switches on ESP32_TIMEOUT)
- Auto-start pose detection when backend is reachable
- Hide duplicate PoseDetectionCanvas controls when enableControls=false
- Add standalone Demo button in LiveDemoTab for offline animated demo
- Add data source banner and status styling

Co-Authored-By: claude-flow <ruv@ruv.net>
2026-03-02 13:47:49 -05:00

211 lines
5.8 KiB
JavaScript

// Training Service for WiFi-DensePose UI
// Manages training lifecycle, progress streaming, and CSI recordings.
import { buildWsUrl } from '../config/api.config.js';
import { apiService } from './api.service.js';
export class TrainingService {
  /**
   * Client for the training + CSI recording REST API.
   * Provides a WebSocket progress stream and a small in-process event
   * emitter ('training-started', 'recording-started', 'progress', ...).
   */
  constructor() {
    // Active progress WebSocket, or null when disconnected.
    this.progressSocket = null;
    // Map of event name -> array of subscriber callbacks.
    this.listeners = {};
    this.logger = this.createLogger();
  }

  /** Builds a console logger tagged with a [TRAIN-*] prefix and ISO timestamp. */
  createLogger() {
    return {
      debug: (...args) => console.debug('[TRAIN-DEBUG]', new Date().toISOString(), ...args),
      info: (...args) => console.info('[TRAIN-INFO]', new Date().toISOString(), ...args),
      warn: (...args) => console.warn('[TRAIN-WARN]', new Date().toISOString(), ...args),
      error: (...args) => console.error('[TRAIN-ERROR]', new Date().toISOString(), ...args)
    };
  }

  // --- Event emitter helpers ---

  /**
   * Subscribe a callback to an event.
   * @param {string} event - Event name.
   * @param {Function} callback - Invoked with the event payload.
   * @returns {() => void} Unsubscribe function.
   */
  on(event, callback) {
    if (!this.listeners[event]) {
      this.listeners[event] = [];
    }
    this.listeners[event].push(callback);
    return () => this.off(event, callback);
  }

  /** Remove a previously registered callback; no-op if it was never added. */
  off(event, callback) {
    if (!this.listeners[event]) return;
    this.listeners[event] = this.listeners[event].filter(cb => cb !== callback);
  }

  /**
   * Invoke every listener registered for `event` with `data`.
   * Listener exceptions are logged and swallowed so one bad subscriber
   * cannot break the others.
   */
  emit(event, data) {
    const callbacks = this.listeners[event];
    if (!callbacks) return;
    // Iterate a snapshot so a listener may subscribe/unsubscribe during
    // dispatch without affecting the current emission.
    for (const cb of [...callbacks]) {
      try {
        cb(data);
      } catch (err) {
        this.logger.error('Listener error', { event, err });
      }
    }
  }

  /**
   * Shared POST helper: optional start log, POST request, optional event
   * emit on success, uniform error logging + rethrow. Per-endpoint log
   * messages, routes, and event names are passed in unchanged so the
   * observable contract of each public method is preserved.
   * @param {string} path - API route.
   * @param {object} body - JSON request body.
   * @param {{startMsg?: string, startMeta?: object, event?: string, errorMsg: string}} opts
   * @returns {Promise<any>} Parsed response payload.
   * @throws Rethrows any request failure after logging it.
   */
  async #post(path, body, { startMsg, startMeta, event, errorMsg }) {
    try {
      if (startMsg) {
        // Preserve original arity: some call sites log with metadata, some without.
        if (startMeta !== undefined) {
          this.logger.info(startMsg, startMeta);
        } else {
          this.logger.info(startMsg);
        }
      }
      const data = await apiService.post(path, body);
      if (event) this.emit(event, data);
      return data;
    } catch (error) {
      this.logger.error(errorMsg, { error: error.message });
      throw error;
    }
  }

  // --- Training API methods ---

  /** Start a training run with the given config. Emits 'training-started'. */
  async startTraining(config) {
    return this.#post('/api/v1/train/start', config, {
      startMsg: 'Starting training',
      startMeta: { config },
      event: 'training-started',
      errorMsg: 'Failed to start training'
    });
  }

  /** Stop the active training run. Emits 'training-stopped'. */
  async stopTraining() {
    return this.#post('/api/v1/train/stop', {}, {
      startMsg: 'Stopping training',
      event: 'training-stopped',
      errorMsg: 'Failed to stop training'
    });
  }

  /**
   * Fetch the current training status.
   * @returns {Promise<any>} Backend status payload.
   */
  async getTrainingStatus() {
    try {
      const data = await apiService.get('/api/v1/train/status');
      return data;
    } catch (error) {
      this.logger.error('Failed to get training status', { error: error.message });
      throw error;
    }
  }

  /** Start a pretraining run. Emits 'training-started'. */
  async startPretraining(config) {
    return this.#post('/api/v1/train/pretrain', config, {
      startMsg: 'Starting pretraining',
      startMeta: { config },
      event: 'training-started',
      errorMsg: 'Failed to start pretraining'
    });
  }

  /** Start a LoRA fine-tuning run. Emits 'training-started'. */
  async startLoraTraining(config) {
    return this.#post('/api/v1/train/lora', config, {
      startMsg: 'Starting LoRA training',
      startMeta: { config },
      event: 'training-started',
      errorMsg: 'Failed to start LoRA training'
    });
  }

  // --- Recording API methods ---

  /**
   * List stored CSI recordings.
   * @returns {Promise<Array>} Recording descriptors; [] when none/missing.
   */
  async listRecordings() {
    try {
      const data = await apiService.get('/api/v1/recording/list');
      return data?.recordings ?? [];
    } catch (error) {
      this.logger.error('Failed to list recordings', { error: error.message });
      throw error;
    }
  }

  /** Start a CSI recording session. Emits 'recording-started'. */
  async startRecording(config) {
    return this.#post('/api/v1/recording/start', config, {
      startMsg: 'Starting recording',
      startMeta: { config },
      event: 'recording-started',
      errorMsg: 'Failed to start recording'
    });
  }

  /** Stop the active CSI recording. Emits 'recording-stopped'. */
  async stopRecording() {
    return this.#post('/api/v1/recording/stop', {}, {
      startMsg: 'Stopping recording',
      event: 'recording-stopped',
      errorMsg: 'Failed to stop recording'
    });
  }

  /**
   * Delete a recording by id (URL-encoded into the route).
   * @param {string} id - Recording identifier.
   */
  async deleteRecording(id) {
    try {
      this.logger.info('Deleting recording', { id });
      const data = await apiService.delete(
        `/api/v1/recording/${encodeURIComponent(id)}`
      );
      return data;
    } catch (error) {
      this.logger.error('Failed to delete recording', { id, error: error.message });
      throw error;
    }
  }

  // --- WebSocket progress stream ---

  /**
   * Open (or reuse) the training-progress WebSocket.
   * Emits 'progress-connected', 'progress' (per parsed message),
   * 'progress-error', and 'progress-disconnected'.
   * @returns {WebSocket} The active socket.
   */
  connectProgressStream() {
    if (this.progressSocket) {
      this.logger.warn('Progress stream already connected');
      return this.progressSocket;
    }
    const url = buildWsUrl('/ws/train/progress');
    this.logger.info('Connecting progress stream', { url });
    const ws = new WebSocket(url);
    ws.onopen = () => {
      this.logger.info('Progress stream connected');
      this.emit('progress-connected', {});
    };
    ws.onmessage = (event) => {
      try {
        const data = JSON.parse(event.data);
        this.emit('progress', data);
      } catch (err) {
        // Malformed frames are dropped, not fatal.
        this.logger.warn('Failed to parse progress message', { error: err.message });
      }
    };
    ws.onerror = (error) => {
      this.logger.error('Progress stream error', { error });
      this.emit('progress-error', { error });
    };
    ws.onclose = () => {
      // Clearing the handle here lets a later connect create a fresh socket.
      this.logger.info('Progress stream disconnected');
      this.progressSocket = null;
      this.emit('progress-disconnected', {});
    };
    this.progressSocket = ws;
    return ws;
  }

  /** Close the progress WebSocket if open; onclose does the final cleanup. */
  disconnectProgressStream() {
    if (this.progressSocket) {
      this.progressSocket.close();
      this.progressSocket = null;
    }
  }

  /** Tear down the socket and drop all event subscribers. */
  dispose() {
    this.disconnectProgressStream();
    this.listeners = {};
    this.logger.info('TrainingService disposed');
  }
}
// Shared singleton — all UI panels use this one instance so event
// subscriptions and the progress socket are not duplicated.
const trainingService = new TrainingService();
export { trainingService };