Join ScreenMeetLive session
ScreenMeet.config.organizationKey = yourMobileAPIKey // provided by ScreenMeet
let code = "OdeWGubyvsUh" // session code

ScreenMeet.connect(code) { [weak self] error in
    if let error = error {
        // session start error
    } else {
        // session started
    }
}
Retrieve Connection state
let connectionState = ScreenMeet.getConnectionState()

switch connectionState {
case .connecting:
    print("Waiting to connect to the call ...")
case .connected:
    print("Joined the call")
case .reconnecting:
    print("Trying to restore the connection to the call ...")
case .disconnected(.callNotStarted):
    print("Call disconnected. The call has not started")
case .disconnected(.callEnded):
    print("Call disconnected. The call has ended")
case .disconnected(.leftCall):
    print("Call disconnected. The client left the call")
case .disconnected(.networkError):
    print("Call disconnected. Network error")
}
Share Camera
ScreenMeet.shareCamera() // starts front camera sharing by default

To specify the camera and/or its capture configuration, pass an `AVCaptureDevice`:

let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                 mediaType: .video,
                                                 position: .back)
if let device = discovery.devices.first {
    ScreenMeet.shareCamera(device)
}
Share Screen
ScreenMeet.shareScreen()
Share a stream of images (can be used for screen sharing by continuously providing raw images, for example screenshots).
`SMImageHandler` exposes a single method, `transferImage(_ image: UIImage)`, that you can use to send images:
ScreenMeet.shareScreenWithImageTransfer(_ completion: @escaping ((SMImageHandler?) -> Void))
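For example, a minimal sketch of feeding frames through the handler. How you produce each `UIImage` is up to you; `makeSnapshotImage()` is a hypothetical helper, not part of the SDK:

```swift
ScreenMeet.shareScreenWithImageTransfer { imageHandler in
    guard let imageHandler = imageHandler else {
        return // image transfer could not be started
    }

    // Send a frame whenever new content is ready, e.g. from a timer or a drawing callback.
    if let frame = makeSnapshotImage() { // hypothetical helper returning a UIImage
        imageHandler.transferImage(frame)
    }
}
```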
Stop Video sharing
ScreenMeet.stopVideoSharing() // Stop Camera or Screen sharing
Share Microphone
ScreenMeet.shareMicrophone()
Stop Audio sharing
ScreenMeet.stopAudioSharing() // Stop audio sharing
Retrieve Audio and Video states
let state = ScreenMeet.getMediaState()
let isAudioActive = state.isAudioActive // true: unmuted, false: muted
let isVideoActive = state.isVideoActive // true: unmuted, false: muted
let videoState = state.videoState // VideoState enum [CAMERA, SCREEN, NONE]
let audioState = state.audioState // AudioState enum [MICROPHONE, NONE]
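For example, the media state can drive a simple mute toggle; a sketch using only the calls shown above:

```swift
func toggleMicrophone() {
    if ScreenMeet.getMediaState().isAudioActive {
        ScreenMeet.stopAudioSharing()  // currently unmuted: stop sharing the microphone
    } else {
        ScreenMeet.shareMicrophone()   // currently muted: resume microphone sharing
    }
}
```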
Leave Session
ScreenMeet.disconnect()
Call participants
let participantsList = ScreenMeet.getParticipants() // returns the list of call participants: [SMParticipant]
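For example, a quick sketch that inspects the current roster:

```swift
let participants = ScreenMeet.getParticipants()
print("Participants in call: \(participants.count)")

for participant in participants {
    // Each SMParticipant carries that participant's details (see the delegate callbacks below)
    print(participant)
}
```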
To run the example project, clone the repo and run pod install
from the Example directory first.
For a more advanced sample built with SwiftUI, see the FullExample application.
| | Minimum iOS version |
|---|---|
| ScreenMeetSDK | iOS 12.0 |
| Example | iOS 13.0 |
ScreenMeetSDK is available through CocoaPods. To install it, simply add the following line to your Podfile:
pod 'ScreenMeetSDK'
Bitcode must also be disabled. This can be done manually in Xcode, or by adding the following lines at the end of your Podfile:
post_install do |installer|
  installer.pods_project.targets.each do |target|
    target.build_configurations.each do |config|
      config.build_settings['ENABLE_BITCODE'] = 'NO'
    end
  end
end
Set your event handler
ScreenMeet.delegate = yourSMDelegate
where `yourSMDelegate` is your implementation of the `ScreenMeetDelegate`
protocol (a minimal conforming implementation is sketched after the protocol listing below)
/// on Audio stream created
func onLocalAudioCreated()
/// on Local Video stream created
/// - Parameter videoTrack: Can be used to preview local video. See `RTCVideoTrack`
func onLocalVideoCreated(_ videoTrack: RTCVideoTrack)
/// on Local Video stream stopped
func onLocalVideoStopped()
/// on Local Audio stream stopped
func onLocalAudioStopped()
/// On participant joins call.
/// - Parameter participant: Participant details. See `SMParticipant`
func onParticipantJoined(_ participant: SMParticipant)
/// On receiving video stream from participant.
/// - Parameter participant: Participant details. See `SMParticipant`
/// - Parameter remoteVideoTrack: Can be used to preview participant video stream. See `RTCVideoTrack`
func onParticipantVideoTrackCreated(_ participant: SMParticipant, _ remoteVideoTrack: RTCVideoTrack)
/// On receiving audio stream from participant.
/// - Parameter participant: Participant details. See `SMParticipant`
/// - Parameter remoteAudioTrack: Remote participant audio stream. See `RTCAudioTrack`
func onParticipantAudioTrackCreated(_ participant: SMParticipant, _ remoteAudioTrack: RTCAudioTrack)
/// On participant left call.
/// - Parameter participant: Participant details. See `SMParticipant`
func onParticipantLeft(_ participant: SMParticipant)
/// When a participant's state has changed, for example the participant muted, paused, or resumed video
/// - Parameter participant: Participant details. See `SMParticipant`
func onParticipantMediaStateChanged(_ participant: SMParticipant)
/// When the active speaker has changed.
/// - Parameter participant: Participant details. See `SMParticipant`
func onActiveSpeakerChanged(_ participant: SMParticipant, _ remoteVideoTrack: RTCVideoTrack)
/// Occurs when approval for a feature (remote control or laser pointer) is requested from you
///
/// - Parameters:
///   - feature: Feature being requested. Contains details about the type of the feature and the participant who requested it
///   - decisionHandler: The callback to call once the request is accepted or denied
///   - granted: The decision for the request
func onFeatureRequest(_ feature: SMFeature, _ decisionHandler: @escaping (_ granted: Bool) -> Void)
/// Occurs when a previous request is rejected
///
/// - Parameters:
///   - feature: Feature request that has been rejected. Contains details about the type of the feature and the participant who requested it
func onFeatureRequestRejected(feature: SMFeature)
/// Occurs when a feature has stopped
///
/// - Parameters:
///   - feature: Feature that has been stopped
func onFeatureStopped(feature: SMFeature)
/// Occurs when a feature you approved (remote control, laser pointer) starts its activity
///
/// - Parameters:
///   - feature: Feature that has started
func onFeatureStarted(feature: SMFeature)
/// Occurs during a remote control session when an agent triggers an event. Can be a mouse or a keyboard event
///
/// - Parameters:
/// - event: Remote control event. See `SMRemoteControlEvent`
func onRemoteControlEvent(_ event: SMRemoteControlEvent)
/// Root view controller to be remote controlled (allowing the viewer to perform touches on your view controller(s)). It should be the root (bottom-most superview) view of the entire window
var rootViewController: UIViewController? { get }
/// When an error occurs
/// - Parameter error: See `SMError`
func onError(_ error: SMError)
/// On connection state change
/// - Parameter newState: The new session state. See `SMConnectionState`
func onConnectionStateChanged(_ newState: SMConnectionState)
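For reference, a minimal conforming implementation based on the listing above might look like the sketch below. The `WebRTC` import and the `RTCMTLVideoView` renderer are assumptions about the WebRTC dependency that provides `RTCVideoTrack`/`RTCAudioTrack`; wire up the views however fits your app:

```swift
import UIKit
import ScreenMeetSDK
import WebRTC // assumed: RTCVideoTrack/RTCAudioTrack come from the SDK's WebRTC dependency

class CallViewController: UIViewController, ScreenMeetDelegate {

    // Metal-backed WebRTC renderer used to preview the local video track (an assumption of this sketch)
    private let localVideoView = RTCMTLVideoView(frame: .zero)

    override func viewDidLoad() {
        super.viewDidLoad()
        localVideoView.frame = view.bounds
        view.addSubview(localVideoView)
        ScreenMeet.delegate = self
    }

    // MARK: Local media
    func onLocalAudioCreated() { print("local audio started") }
    func onLocalVideoCreated(_ videoTrack: RTCVideoTrack) {
        videoTrack.add(localVideoView) // render the local preview
    }
    func onLocalVideoStopped() { print("local video stopped") }
    func onLocalAudioStopped() { print("local audio stopped") }

    // MARK: Participants
    func onParticipantJoined(_ participant: SMParticipant) { print("joined: \(participant)") }
    func onParticipantVideoTrackCreated(_ participant: SMParticipant, _ remoteVideoTrack: RTCVideoTrack) {
        // Attach remoteVideoTrack to a renderer dedicated to this participant here
    }
    func onParticipantAudioTrackCreated(_ participant: SMParticipant, _ remoteAudioTrack: RTCAudioTrack) { }
    func onParticipantLeft(_ participant: SMParticipant) { print("left: \(participant)") }
    func onParticipantMediaStateChanged(_ participant: SMParticipant) { }
    func onActiveSpeakerChanged(_ participant: SMParticipant, _ remoteVideoTrack: RTCVideoTrack) { }

    // MARK: Features (remote control, laser pointer)
    func onFeatureRequest(_ feature: SMFeature, _ decisionHandler: @escaping (Bool) -> Void) {
        decisionHandler(true) // present your own confirmation UI here instead of auto-approving
    }
    func onFeatureRequestRejected(feature: SMFeature) { }
    func onFeatureStopped(feature: SMFeature) { }
    func onFeatureStarted(feature: SMFeature) { }
    func onRemoteControlEvent(_ event: SMRemoteControlEvent) { }

    // View hierarchy that may be remote controlled
    var rootViewController: UIViewController? { view.window?.rootViewController }

    // MARK: Session
    func onError(_ error: SMError) { print("ScreenMeet error: \(error)") }
    func onConnectionStateChanged(_ newState: SMConnectionState) { print("connection state: \(newState)") }
}
```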
ScreenMeet Live requires an initial configuration to join a session.
// Create the config object
let config = SMSessionConfig()
To start working with the SDK, an organizationKey (mobile key) is required.
// Set the organization mobile key
ScreenMeet.shared.config.organizationKey = yourMobileAPIKey // provided by ScreenMeet
Represents the severity and importance of log message output.
config.loggingLevel = .debug
Possible values:
public enum LogLevel {
    /// Information that may be helpful, but is not essential, for troubleshooting errors
    case info
    /// Verbose information that may be useful during development or while troubleshooting a specific problem
    case debug
    /// Designates error events that might still allow the application to continue running
    case error
}
Set custom endpoint URL
config.endpoint = yourEndpointURL
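Putting the configuration steps together, a typical setup before joining might look like the sketch below. It assumes the shared `ScreenMeet.config` object from the join example above exposes the same `loggingLevel` and `endpoint` properties as `SMSessionConfig`; `yourMobileAPIKey` and `yourEndpointURL` are placeholders provided by ScreenMeet:

```swift
ScreenMeet.config.organizationKey = yourMobileAPIKey // provided by ScreenMeet
ScreenMeet.config.loggingLevel = .debug              // .info, .debug or .error
ScreenMeet.config.endpoint = yourEndpointURL         // only needed when using a custom endpoint

ScreenMeet.connect("OdeWGubyvsUh") { error in
    if let error = error {
        print("Failed to join the session: \(error)")
    } else {
        print("Session started")
    }
}
```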
ScreenMeetLiveSDK is available under the MIT license. See the LICENSE file for more info.