Track Types
lib-jitsi-meet provides two main track classes:
- JitsiLocalTrack: Represents local media (your camera/microphone)
- JitsiRemoteTrack: Represents remote participant media
Creating Local Tracks
Request user media
Create audio and video tracks:
// Request camera and microphone access and create the local tracks.
const trackOptions = {
  devices: ['audio', 'video'],
  resolution: 720,
  cameraDeviceId: 'default',
  micDeviceId: 'default'
};

JitsiMeetJS.createLocalTracks(trackOptions)
  .then((tracks) => {
    console.log('Created tracks:', tracks);
    localTracks = tracks;
  })
  .catch((error) => {
    console.error('Failed to create tracks:', error);
  });
Track Creation Options
// Options accepted by JitsiMeetJS.createLocalTracks().
interface ICreateLocalTracksOptions {
// Device selection: which media kinds to capture and from which devices
devices?: ('audio' | 'video' | 'desktop')[];
cameraDeviceId?: string;
micDeviceId?: string;
// Video options
resolution?: number | string; // e.g. 180, 360, 720, 1080, '4K'
minFps?: number;
maxFps?: number;
facingMode?: 'user' | 'environment'; // Mobile camera facing (front/back)
// Audio options
disableAGC?: boolean; // Disable automatic gain control
disableAP?: boolean; // Disable all audio processing
disableHPF?: boolean; // Disable high-pass filter
disableNS?: boolean; // Disable noise suppression
// Desktop sharing
desktopSharingSourceDevice?: string;
desktopSharingSources?: string[]; // ['screen', 'window', 'tab']
// Effects (e.g. background blur) applied to the captured stream
effects?: IStreamEffect[];
}
Track Types and Properties
Check Track Type
// Inspect what kind of media a track carries.
const mediaKind = track.getType();
if (mediaKind === 'video') {
  console.log('This is a video track');
}
if (track.isAudioTrack()) {
  console.log('Audio track');
}
if (track.isVideoTrack()) {
  console.log('Video track');
}
// Distinguish camera capture from screen capture.
switch (track.getVideoType()) {
  case 'camera':
    console.log('Camera track');
    break;
  case 'desktop':
    console.log('Screen sharing track');
    break;
}
// Distinguish our own tracks from remote participants' tracks.
if (track.isLocal()) {
  console.log('Local track');
}
Track Properties
// Basic identifying properties available on any JitsiTrack.
const trackId = track.getId();
const mediaType = track.getType(); // 'audio' or 'video'
const participantId = track.getParticipantId();
const isMuted = track.isMuted();
const deviceId = track.getDeviceId();
// Resolution helpers only make sense on video tracks.
if (track.isVideoTrack()) {
const resolution = track.resolution; // e.g., 720
const width = track.getWidth();
const height = track.getHeight();
}
// Source name used to identify this track in signaling.
const sourceName = track.getSourceName();
Managing Track State
Mute and Unmute
// Mute audio. mute() is asynchronous and may reject, so always attach a
// rejection handler.
audioTrack.mute().then(() => {
  console.log('Audio muted');
}).catch(error => {
  console.error('Failed to mute:', error);
});
// Mute video. Handle the rejection here too -- the bare call left a
// floating Promise whose failure became an unhandled rejection.
videoTrack.mute().catch(error => {
  console.error('Failed to mute video:', error);
});
Track Events
// Fired whenever this track is muted or unmuted (locally or remotely).
track.addEventListener(
JitsiMeetJS.events.track.TRACK_MUTE_CHANGED,
() => {
console.log('Mute state:', track.isMuted());
}
);
// Periodic audio level updates for local tracks (useful for VU meters).
track.addEventListener(
JitsiMeetJS.events.track.TRACK_AUDIO_LEVEL_CHANGED,
(audioLevel) => {
console.log('Audio level:', audioLevel);
}
);
// Track stopped (user revoked permissions, device unplugged).
// Dispose it so its resources are released.
track.addEventListener(
JitsiMeetJS.events.track.LOCAL_TRACK_STOPPED,
() => {
console.log('Track stopped');
track.dispose();
}
);
// The capture source stopped producing frames/samples.
track.addEventListener(
JitsiMeetJS.events.track.NO_DATA_FROM_SOURCE,
() => {
console.warn('No data received from track');
}
);
Attaching and Detaching Tracks
Attach to DOM
// Render a track by binding it to a freshly created <video> element.
const videoElement = document.createElement('video');
videoElement.id = `video-${track.getId()}`;
videoElement.autoplay = true;
// Bind the track's media stream to the element, then insert it.
track.attach(videoElement);
document.getElementById('video-container').appendChild(videoElement);
Detach from DOM
// Stop rendering: unbind one element, or every element the track is attached to.
track.detach(videoElement); // a specific element
track.detach(); // all attached elements
// Finally drop the element itself from the DOM.
videoElement.remove();
Switching Devices
Enumerate Devices
// List every available media device (cameras, microphones, speakers).
JitsiMeetJS.mediaDevices.enumerateDevices((devices) => {
  for (const device of devices) {
    console.log('Device:', device.kind, device.label, device.deviceId);
  }
});
// Or query one kind of device at a time.
JitsiMeetJS.mediaDevices.getAudioInputDevices()
  .then((devices) => console.log('Microphones:', devices));
JitsiMeetJS.mediaDevices.getVideoInputDevices()
  .then((devices) => console.log('Cameras:', devices));
JitsiMeetJS.mediaDevices.getAudioOutputDevices()
  .then((devices) => console.log('Speakers:', devices));
Switch Camera
/**
 * Switch the active camera: create the replacement track first, swap it
 * into the conference, then dispose the old one.
 *
 * Guards against there being no current video track -- the previous
 * version crashed on `oldTrack.dispose()` and wrote to
 * `localTracks[-1]` in that case.
 *
 * @param {string} newDeviceId - deviceId of the camera to switch to.
 */
async function switchCamera(newDeviceId) {
  try {
    const [newTrack] = await JitsiMeetJS.createLocalTracks({
      devices: ['video'],
      cameraDeviceId: newDeviceId
    });
    const oldTrack = localTracks.find(t => t.getType() === 'video');
    // replaceTrack accepts null for the old track; the new one is simply added.
    await conference.replaceTrack(oldTrack ?? null, newTrack);
    if (oldTrack) {
      oldTrack.dispose();
      localTracks[localTracks.indexOf(oldTrack)] = newTrack;
    } else {
      localTracks.push(newTrack);
    }
    console.log('Camera switched successfully');
  } catch (error) {
    console.error('Failed to switch camera:', error);
  }
}
Switch Microphone
/**
 * Switch the active microphone, mirroring switchCamera().
 *
 * The previous version had no error handling (any failure became an
 * unhandled rejection) and crashed when no current audio track existed.
 *
 * @param {string} newDeviceId - deviceId of the microphone to switch to.
 */
async function switchMicrophone(newDeviceId) {
  try {
    const [newTrack] = await JitsiMeetJS.createLocalTracks({
      devices: ['audio'],
      micDeviceId: newDeviceId
    });
    const oldTrack = localTracks.find(t => t.getType() === 'audio');
    // replaceTrack accepts null for the old track; the new one is simply added.
    await conference.replaceTrack(oldTrack ?? null, newTrack);
    if (oldTrack) {
      oldTrack.dispose();
      localTracks[localTracks.indexOf(oldTrack)] = newTrack;
    } else {
      localTracks.push(newTrack);
    }
  } catch (error) {
    console.error('Failed to switch microphone:', error);
  }
}
Change Audio Output
// Route conference audio to a different output device (speakers/headset).
JitsiMeetJS.mediaDevices.setAudioOutputDevice(deviceId)
  .then(() => console.log('Audio output device changed'))
  .catch((error) => console.error('Failed to change audio output:', error));
// Be notified whenever the active output device changes.
JitsiMeetJS.mediaDevices.addEventListener(
  JitsiMeetJS.events.mediaDevices.AUDIO_OUTPUT_DEVICE_CHANGED,
  (deviceId) => console.log('Audio output changed to:', deviceId)
);
Track Effects
Apply Effects
// A stream effect is a plain object implementing the lib-jitsi-meet
// effect interface: isEnabled / startEffect / stopEffect.
const effect = {
isEnabled: () => true,
startEffect: (stream) => {
// Process stream and return modified stream.
// NOTE(review): `processedStream` is a placeholder in this example --
// a real effect must build and return an actual MediaStream derived
// from `stream`.
return processedStream;
},
stopEffect: () => {
// Clean up effect resources (timers, elements, contexts)
}
};
// Set effect on track (replaces any previously set effect)
await track.setEffect(effect);
// Remove effect
await track.setEffect(undefined);
Background Blur Example
// Background blur effect skeleton. Unlike the previous version, the
// requestAnimationFrame loop is cancelled and the helper <video> element
// released in stopEffect -- otherwise frame processing keeps running
// (and the stream stays referenced) after the effect is removed.
const blurEffect = {
  _rafId: null,
  _video: null,
  isEnabled: (track) => track.isVideoTrack(),
  startEffect(stream) {
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    // Play the source stream into an off-DOM video element so frames
    // can be drawn (and blurred) onto the canvas.
    const video = document.createElement('video');
    video.srcObject = stream;
    video.play();
    this._video = video;
    const processFrame = () => {
      ctx.drawImage(video, 0, 0);
      // Apply blur to background here
      this._rafId = requestAnimationFrame(processFrame);
    };
    processFrame();
    return canvas.captureStream();
  },
  stopEffect() {
    // Stop the frame loop and release the media element.
    if (this._rafId !== null) {
      cancelAnimationFrame(this._rafId);
      this._rafId = null;
    }
    if (this._video !== null) {
      this._video.srcObject = null;
      this._video = null;
    }
  }
};
await videoTrack.setEffect(blurEffect);
Remote Tracks
Receive Remote Tracks
// Render media from other participants as it arrives.
conference.on(
  JitsiMeetJS.events.conference.TRACK_ADDED,
  (track) => {
    // Local tracks are also reported here; only remote ones need rendering.
    if (track.isLocal()) {
      return;
    }
    const participantId = track.getParticipantId();
    console.log(`Remote track from ${participantId}:`, track.getType());
    // Lazily create a per-participant container.
    let container = document.getElementById(`participant-${participantId}`);
    if (container === null) {
      container = document.createElement('div');
      container.id = `participant-${participantId}`;
      document.getElementById('remote-videos').appendChild(container);
    }
    // Pick the element type matching the media kind, then bind the track.
    const tagName = track.isVideoTrack() ? 'video' : 'audio';
    const element = document.createElement(tagName);
    element.autoplay = true;
    element.id = `${participantId}-${track.getType()}`;
    container.appendChild(element);
    track.attach(element);
  }
);
Handle Track Removal
// Tear down rendering when a remote track goes away.
conference.on(
  JitsiMeetJS.events.conference.TRACK_REMOVED,
  (track) => {
    if (track.isLocal()) {
      return;
    }
    const participantId = track.getParticipantId();
    console.log(`Track removed from ${participantId}`);
    // Locate the element this track was attached to, unbind it, drop it.
    const elementId = `${participantId}-${track.getType()}`;
    const element = document.getElementById(elementId);
    if (element) {
      track.detach(element);
      element.remove();
    }
  }
);
Track Streaming Status
// Monitor whether media for this track is actually flowing, so the UI
// can react (e.g. show a spinner while the connection recovers).
track.addEventListener(
JitsiMeetJS.events.track.TRACK_STREAMING_STATUS_CHANGED,
(streamingStatus) => {
// streamingStatus: 'active', 'inactive', 'interrupted', 'restoring'
console.log('Streaming status:', streamingStatus);
if (streamingStatus === 'interrupted') {
// Show loading indicator
} else if (streamingStatus === 'active') {
// Hide loading indicator
}
}
);
Advanced Track Operations
Get Underlying MediaStream
// Drop down to the underlying WebRTC objects when the JitsiTrack API
// is not enough.
const stream = track.getOriginalStream();
const mediaStreamTrack = track.track;
// Actual settings the browser is using for this capture.
const settings = mediaStreamTrack.getSettings();
console.log('Track settings:', settings);
// Constraints that were requested for this capture.
const constraints = mediaStreamTrack.getConstraints();
console.log('Track constraints:', constraints);
Apply Constraints
// Ask the browser to retarget the capture (here: 720p at 30fps) without
// recreating the track, via the standard MediaStreamTrack API.
const constraints = {
  width: { ideal: 1280 },
  height: { ideal: 720 },
  frameRate: { ideal: 30 }
};
track.track
  .applyConstraints(constraints)
  .then(() => console.log('Constraints applied'))
  .catch((error) => console.error('Failed to apply constraints:', error));
Dispose Tracks
// Properly dispose of a track -- dispose() is asynchronous and may
// reject, so handle both outcomes.
track.dispose().then(() => {
  console.log('Track disposed');
}).catch(error => {
  console.error('Failed to dispose track:', error);
});
// Dispose all local tracks. Collect the returned Promises so failures
// surface instead of becoming unhandled rejections.
Promise.allSettled(localTracks.map(t => t.dispose()))
  .then(results => {
    results
      .filter(r => r.status === 'rejected')
      .forEach(r => console.error('Failed to dispose track:', r.reason));
  });
localTracks = [];
Best Practices
Always dispose tracks when done
// Clean up on conference leave. dispose() returns a Promise -- attach a
// rejection handler so failures are logged instead of silently dropped
// as unhandled rejections.
conference.on(
  JitsiMeetJS.events.conference.CONFERENCE_LEFT,
  () => {
    localTracks.forEach(track => {
      track.dispose().catch(error => {
        console.error('Failed to dispose track:', error);
      });
    });
    localTracks = [];
  }
);
Handle device changes gracefully
// React to devices being plugged in or removed.
JitsiMeetJS.mediaDevices.addEventListener(
  JitsiMeetJS.events.mediaDevices.DEVICE_LIST_CHANGED,
  (devices) => {
    console.log('Devices changed:', devices);
    // Refresh any device pickers shown in the UI.
  }
);
Check for track support
// Check whether the user already granted camera permission.
// isDevicePermissionGranted() resolves with a boolean -- it must be
// awaited/then'ed; testing the returned Promise directly is always truthy.
JitsiMeetJS.mediaDevices.isDevicePermissionGranted('video').then(granted => {
  if (granted) {
    // Create video track
  }
});
// Check for specific features
if (JitsiMeetJS.isDesktopSharingEnabled()) {
  // Enable screen sharing button
}
Next Steps
Screen Sharing
Learn about desktop sharing
Audio/Video Quality
Control media quality