Skip to main content
The Track.Source enum identifies the source of a media track (camera, microphone, screen share, etc.).

Cases

unknown
case
Unknown or unspecified track source.
camera
case
Track is from a camera device. Used for video tracks captured from the device’s camera.
microphone
case
Track is from a microphone device. Used for audio tracks captured from the device’s microphone.
screenShareVideo
case
Track is from screen sharing (video portion). Used for video tracks when sharing the screen.
screenShareAudio
case
Track is from screen sharing (audio portion). Used for audio tracks when sharing system audio during screen sharing.

Usage

Access the source of any track:
/// Inspects a track's source and routes it to the matching handling path.
/// Prints a short description for each source; the comments mark where
/// real presentation logic would go.
func handleTrack(_ track: Track) {
    let source = track.source
    if source == .camera {
        print("Camera track")
        // Attach to video view
    } else if source == .microphone {
        print("Microphone track")
        // Show audio indicator
    } else if source == .screenShareVideo {
        print("Screen share video")
        // Show in screen share view
    } else if source == .screenShareAudio {
        print("Screen share audio")
        // Handle system audio
    } else {
        // Remaining case is .unknown
        print("Unknown track source")
    }
}

Filtering Tracks by Source

You can filter tracks based on their source:
/// Returns the participant's first publication sourced from the camera,
/// or `nil` when no camera track is published.
func getParticipantCamera(_ participant: Participant) -> TrackPublication? {
    for publication in participant.trackPublications.values where publication.source == .camera {
        return publication
    }
    return nil
}

/// Returns the participant's first publication sourced from the microphone,
/// or `nil` when no microphone track is published.
func getParticipantMicrophone(_ participant: Participant) -> TrackPublication? {
    participant.trackPublications.values.first(where: { $0.source == .microphone })
}

/// Collects every publication belonging to a screen share — both the
/// video portion and the system-audio portion.
func getScreenShareTracks(_ participant: Participant) -> [TrackPublication] {
    var matches: [TrackPublication] = []
    for publication in participant.trackPublications.values {
        if publication.source == .screenShareVideo || publication.source == .screenShareAudio {
            matches.append(publication)
        }
    }
    return matches
}

Publishing Tracks with Source

When publishing tracks, the source is automatically set based on the track type:
// Camera track — the SDK sets source to .camera for camera-created tracks.
let cameraTrack = LocalVideoTrack.createCameraTrack()
try await room.localParticipant.publish(videoTrack: cameraTrack)
print(cameraTrack.source) // .camera

// Microphone track — source is set to .microphone automatically.
let micTrack = LocalAudioTrack.createTrack()
try await room.localParticipant.publish(audioTrack: micTrack)
print(micTrack.source) // .microphone

// Screen share — source is set to .screenShareVideo.
// NOTE(review): createScreenShareTrack() returns an optional — presumably nil
// when screen capture is unavailable; confirm against the SDK reference.
if let screenTrack = LocalVideoTrack.createScreenShareTrack() {
    try await room.localParticipant.publish(videoTrack: screenTrack)
    print(screenTrack.source) // .screenShareVideo
}

UI Organization by Source

Organize your UI based on track sources:
/// Hosts per-participant video surfaces, one per track source:
/// a main camera view and a separate screen-share view, each created
/// lazily the first time a matching publication arrives.
class ParticipantView: UIView {
    var cameraView: VideoView?
    var screenShareView: VideoView?
    
    /// Attaches the publication's video track to the view that matches
    /// its source. Non-video publications and other sources are ignored.
    func handlePublication(_ publication: TrackPublication) {
        guard let videoTrack = publication.track as? VideoTrack else { return }
        
        if publication.source == .camera {
            // Create the camera surface on first use, then render into it.
            let target = cameraView ?? {
                let view = VideoView()
                addSubview(view)
                cameraView = view
                return view
            }()
            videoTrack.add(videoRenderer: target)
        } else if publication.source == .screenShareVideo {
            // Same lazy-creation pattern for the screen-share surface.
            let target = screenShareView ?? {
                let view = VideoView()
                addSubview(view)
                screenShareView = view
                return view
            }()
            videoTrack.add(videoRenderer: target)
        }
        // Other sources (microphone, screenShareAudio, unknown): no video UI.
    }
}

SwiftUI Example

import SwiftUI
import LiveKit

/// Stacks a participant's camera feed (200pt) above their screen share
/// (400pt), showing each only when the corresponding track exists.
struct ParticipantTile: View {
    let participant: Participant
    @State private var cameraTrack: VideoTrack?
    @State private var screenTrack: VideoTrack?
    
    var body: some View {
        VStack {
            if let camera = cameraTrack {
                SwiftUIVideoView(track: camera)
                    .frame(height: 200)
            }
            if let screen = screenTrack {
                SwiftUIVideoView(track: screen)
                    .frame(height: 400)
            }
        }
        .onAppear(perform: updateTracks)
    }
    
    /// Refreshes state from the participant's current publications:
    /// the first camera-sourced video track and the first
    /// screen-share-sourced video track.
    func updateTracks() {
        let publications = participant.trackPublications.values
        cameraTrack = publications.first { $0.source == .camera }?.track as? VideoTrack
        screenTrack = publications.first { $0.source == .screenShareVideo }?.track as? VideoTrack
    }
}

Best Practices

  1. Always Check Source: When handling tracks, check the source to determine how to display or process them
  2. Separate Views: Use separate UI elements for camera and screen share tracks
  3. Handle Unknown: Always handle the .unknown case gracefully
  4. Filter Efficiently: Use source-based filtering to quickly find specific tracks
// Good: filter by source — avoids type-casting every publication just to
// find the camera feed.
let cameraTracks = participant.trackPublications.values.filter { 
    $0.source == .camera 
}

// Good: exhaustive switch with no `default` — the compiler will flag this
// site if Track.Source ever gains a new case, so nothing is silently dropped.
switch track.source {
case .camera:
    handleCamera(track)
case .microphone:
    handleMicrophone(track)
case .screenShareVideo:
    handleScreenVideo(track)
case .screenShareAudio:
    handleScreenAudio(track)
case .unknown:
    handleUnknown(track)
}

Build docs developers (and LLMs) love