@classytic/react-stream
v0.1.0
Published
Clean, reusable React hooks for browser media device control (camera, microphone, screen share)
Maintainers
Readme
@classytic/react-stream
Clean, reusable React hooks for browser media device control (camera, microphone, screen share).
Similar to Google Meet's approach - centralized control of media devices with proper state management.
✨ Features
- 🎯 Throttled Audio Analysis: 10Hz updates (not 60Hz RAF) to prevent render thrashing
- 🔇 Clean Mute/Unmute: Disable tracks without stopping stream
- 📷 Hard Stop Camera: Full track.stop() + re-acquire pattern (like Google Meet)
- 📡 Event Callbacks:
onMicrophoneChange, onCameraChange, onScreenShareChange, onAudioLevel
- ⚠️ Error Handling: Proper error types and user-friendly messages
- 📝 TypeScript: Full type safety with comprehensive interfaces
- 🌲 Tree-shakeable: Import only what you need
- ⚡ Split Contexts: Separate state/actions contexts for optimal re-renders
- 🔄 React 18/19: Full support for latest React versions
📦 Installation
# pnpm (recommended)
pnpm add @classytic/react-stream
# npm
npm install @classytic/react-stream
# yarn
yarn add @classytic/react-stream
🚀 Quick Start
Option 1: Direct Hook (Simple use cases)
import { useMediaManager } from "@classytic/react-stream";
import { useEffect, useRef } from "react";
function InterviewRoom() {
const videoRef = useRef<HTMLVideoElement>(null);
const media = useMediaManager({
onMicrophoneChange: (state) => console.log("Mic:", state.status),
onAudioLevel: (data) => console.log("Level:", data.level),
});
useEffect(() => {
media.initialize();
return () => media.cleanup();
}, []);
useEffect(() => {
if (videoRef.current && media.cameraStream) {
videoRef.current.srcObject = media.cameraStream;
}
}, [media.cameraStream]);
return (
<div>
<video ref={videoRef} autoPlay muted />
<button onClick={media.toggleMicrophone}>
{media.microphone.trackEnabled ? "Mute" : "Unmute"}
</button>
<button onClick={media.toggleCamera}>
{media.camera.trackEnabled ? "Cam Off" : "Cam On"}
</button>
<meter value={media.audioLevel} max={100} />
</div>
);
}
Option 2: Context Provider (Multi-component apps)
import {
MediaProvider,
useMediaContext,
useMediaState,
useMediaActions,
} from "@classytic/react-stream";
// Wrap your app
function App() {
return (
<MediaProvider autoInitialize onAudioLevel={(d) => console.log(d.level)}>
<VideoPreview />
<ControlBar />
</MediaProvider>
);
}
// State consumer (optimized - doesn't re-render on action changes)
function VideoPreview() {
const { cameraStream, camera } = useMediaState();
// ... render video
}
// Actions consumer (optimized - doesn't re-render on state changes)
function ControlBar() {
const { toggleMicrophone, toggleCamera } = useMediaActions();
// ... render controls
}
Option 3: Individual Device Control
import { useMediaDevice, useAudioAnalyzer } from "@classytic/react-stream";
function CustomMicControl() {
const mic = useMediaDevice("microphone");
const { level, isSpeaking } = useAudioAnalyzer(mic.stream);
return (
<div>
<button onClick={() => mic.acquire()}>Start</button>
<button onClick={() => mic.toggle()}>Toggle</button>
<meter value={level} max={100} />
</div>
);
}
📐 Architecture
┌─────────────────────────────────────────────────────┐
│ MediaProvider │
│ (Optional context wrapper for multi-component) │
└────────────────────────┬────────────────────────────┘
│
┌────────────────────────▼────────────────────────────┐
│ useMediaManager │
│ (Orchestrates camera + mic + screen together) │
└─────────┬──────────────┬──────────────┬─────────────┘
│ │ │
┌───────▼────────┐ ┌───────▼────────┐ ┌───────▼────────┐
│ useMediaDevice │ │ useMediaDevice │ │ useMediaDevice │
│    (camera)    │ │  (microphone)  │ │    (screen)    │
└────────────────┘ └───────┬────────┘ └────────────────┘
│
┌────────▼────────┐
│ useAudioAnalyzer │
│ (throttled 10Hz) │
└─────────────────┘
📘 API Reference
useMediaManager(options?)
Main hook for unified media device management.
interface UseMediaManagerOptions {
/** Video constraints for camera */
videoConstraints?: VideoConstraints;
/** Audio constraints for microphone */
audioConstraints?: AudioConstraints;
/** Screen share options */
screenShareOptions?: ScreenShareOptions;
/** Audio analyzer configuration */
audioAnalyzerConfig?: AudioAnalyzerConfig;
/** Auto-initialize on mount */
autoInitialize?: boolean;
/** Callbacks */
onMicrophoneChange?: (state: DeviceState) => void;
onCameraChange?: (state: DeviceState) => void;
onScreenShareChange?: (state: DeviceState) => void;
onAudioLevel?: (data: AudioLevelData) => void;
onError?: (device: MediaDeviceType, error: Error) => void;
}
interface UseMediaManagerReturn {
// State
state: MediaManagerState;
microphone: DeviceState;
camera: DeviceState;
screen: DeviceState;
cameraStream: MediaStream | null;
screenStream: MediaStream | null;
audioLevel: number;
isSpeaking: boolean;
isInitialized: boolean;
isInitializing: boolean;
// Actions
initialize: () => Promise<boolean>;
toggleMicrophone: () => void;
setMicrophoneEnabled: (enabled: boolean) => void;
toggleCamera: () => Promise<void>;
setCameraEnabled: (enabled: boolean) => Promise<void>;
startScreenShare: () => Promise<boolean>;
stopScreenShare: () => void;
toggleScreenShare: () => Promise<void>;
cleanup: () => void;
getState: () => MediaManagerState;
}
useMediaDevice(type, options?)
Hook for individual device control.
type MediaDeviceType = "camera" | "microphone" | "screen";
interface UseMediaDeviceReturn {
state: DeviceState;
status: DeviceStatus;
stream: MediaStream | null;
isEnabled: boolean;
hasError: boolean;
acquire: () => Promise<boolean>;
release: () => void;
mute: () => void;
unmute: () => Promise<void>;
toggle: () => Promise<void>;
setEnabled: (enabled: boolean) => Promise<void>;
getTrack: () => MediaStreamTrack | null;
}
useAudioAnalyzer(stream, options?)
Hook for audio level analysis.
interface UseAudioAnalyzerOptions {
fftSize?: number; // Default: 256
smoothingTimeConstant?: number; // Default: 0.8
updateInterval?: number; // Default: 100 (10Hz)
minDecibels?: number; // Default: -90
maxDecibels?: number; // Default: -10
onLevelChange?: (data: AudioLevelData) => void;
}
interface UseAudioAnalyzerReturn {
level: number; // 0-100
raw: number; // Raw average
isSpeaking: boolean;
isActive: boolean;
start: () => void;
stop: () => void;
}
Context Hooks
// Full access (state + actions)
useMediaContext(): UseMediaManagerReturn
// State only (optimized re-renders)
useMediaState(): MediaStateContextValue
// Actions only (optimized re-renders)
useMediaActions(): MediaActionsContextValue
// Fine-grained selectors
useMicrophoneState(): DeviceState
useCameraState(): DeviceState
useScreenShareState(): DeviceState
useAudioLevel(): { level: number; isSpeaking: boolean }
useCameraStream(): { stream: MediaStream | null; isEnabled: boolean }
useScreenStream(): { stream: MediaStream | null; isEnabled: boolean }
🧪 Testing
# Run tests
pnpm test
# Watch mode
pnpm test:watch
# Coverage
pnpm test:coverage
🔧 Browser Support
- Chrome 74+
- Firefox 66+
- Safari 14+
- Edge 79+
🌐 WebRTC Integration
This library handles device management only. To send streams to other users, integrate with a WebRTC transport:
// 1. react-stream manages the stream
const media = useMediaManager();
// 2. Get raw tracks from the stream
const videoTrack = media.cameraStream?.getVideoTracks()[0];
// 3. Pass tracks to your WebRTC provider (LiveKit, Daily, Agora, etc.)
localParticipant.publishTrack(videoTrack);
See docs/webrtc-integration.md for complete examples with:
- LiveKit
- Daily.co
- Agora
🐛 Debugging
Enable debug logs in development:
import { enableDebug } from "@classytic/react-stream";
// Enable via code
enableDebug();
// Or via localStorage
localStorage.setItem("DEBUG_REACT_MEDIA", "true");📄 License
MIT © Classytic
🔗 Related
- @classytic/react-video - Video recording and upload library
