diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs
index a58a97e8..5fa2c9ab 100644
--- a/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs
+++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/CameraAndMicrophone.cs
@@ -42,7 +42,7 @@ public void SetupCameraInput()
             var cameraDevice = WebCamTexture.devices.First();
 
             // Use device name to create a new WebCamTexture instance
-            var activeCamera = new WebCamTexture(cameraDevice.name);
+            var activeCamera = new WebCamTexture(cameraDevice.name, 1920, 1080, 24);
 
             // Call Play() in order to start capturing the video
             activeCamera.Play();
diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs b/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs
new file mode 100644
index 00000000..0cfd59e4
--- /dev/null
+++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs
@@ -0,0 +1,24 @@
+using System.Threading.Tasks;
+using StreamVideo.Core;
+
+namespace DocsCodeSamples._03_guides
+{
+    internal class VideoOptimization
+    {
+        public async Task ControlParticipantVideoResolution()
+        {
+            var callType = StreamCallType.Default; // Call type affects default permissions
+            var callId = "my-call-id";
+
+            // Notice that we pass the create argument as true - this will create the call if it doesn't already exist
+            var streamCall = await _client.JoinCallAsync(callType, callId, create: true, ring: false, notify: false);
+
+            foreach (var participant in streamCall.Participants)
+            {
+                participant.UpdateRequestedVideoResolution(new VideoResolution(1280, 720));
+            }
+        }
+
+        private IStreamVideoClient _client;
+    }
+}
\ No newline at end of file
diff --git a/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs.meta b/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs.meta
new file mode 100644
index 00000000..7c8349e0
--- /dev/null
+++ b/Packages/StreamVideo/DocsCodeSamples/03-guides/VideoOptimization.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: bfe61e5fda7a4ce8aeb36f02bb78b3f1
+timeCreated: 1709052970
\ No newline at end of file
diff --git a/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx b/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx
index 0de3f780..f70911fb 100644
--- a/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx
+++ b/docusaurus/docs/Unity/03-guides/04-camera-and-microphone.mdx
@@ -84,7 +84,7 @@ They way you start streaming video from a camera device is by creating a `WebCam
 var cameraDevice = WebCamTexture.devices.First();
 
 // Use device name to create a new WebCamTexture instance
-var activeCamera = new WebCamTexture(cameraDevice.name);
+var activeCamera = new WebCamTexture(cameraDevice.name, 1920, 1080, 24);
 
 // Call Play() in order to start capturing the video
 activeCamera.Play();
@@ -93,9 +93,17 @@ activeCamera.Play();
 _client.SetCameraInputSource(activeCamera);
 ```
 
+The video resolution and FPS parameters you set on your `WebCamTexture` instance are used as the video publishing settings. In the above example, the video will be published at up to 1080p (1920x1080) resolution and 24 frames per second.
+
+:::note
+
+The Stream service dynamically adjusts the video resolution and FPS based on current network conditions. The goal is to ensure a smooth video experience without stuttering. The settings you provide are the maximum that will be used when network conditions allow it.
+
+:::
+
 #### Change camera device during the call
 
-The most efficient way to change the camera device is to update the `deviceName` property` on the instance of `WebCamTexture` that was previously set as an input source via `_client.SetCameraInputSource`:
+The most efficient way to change the camera device is to update the `deviceName` property on the instance of `WebCamTexture` that was previously set as an input source via `_client.SetCameraInputSource`:
 
 ```csharp
 _activeCamera.Stop();
diff --git a/docusaurus/docs/Unity/03-guides/10-custom-data.mdx b/docusaurus/docs/Unity/03-guides/10-custom-data.mdx
index fe58a9ce..d085f204 100644
--- a/docusaurus/docs/Unity/03-guides/10-custom-data.mdx
+++ b/docusaurus/docs/Unity/03-guides/10-custom-data.mdx
@@ -1,6 +1,6 @@
 ---
 title: Custom data
-description: How to pin participants
+description: How to set and retrieve custom data
 ---
 
 Custom data is additional information that you can add to the call or participants. It is a dictionary of key-value pairs that you can use to store any types of objects you need.
diff --git a/docusaurus/docs/Unity/03-guides/11-video-optimization.mdx b/docusaurus/docs/Unity/03-guides/11-video-optimization.mdx
new file mode 100644
index 00000000..ed2340fc
--- /dev/null
+++ b/docusaurus/docs/Unity/03-guides/11-video-optimization.mdx
@@ -0,0 +1,36 @@
+---
+title: Video Optimization
+description: Best practices for video optimization
+---
+
+Many factors affect the smoothness of video transmission; one of the most important is how well the available network bandwidth is used. Network bandwidth represents the maximum data capacity that a user can send and receive through the network. Given video's substantial size, providing a good real-time video experience requires transferring enough data to maintain an appropriate number of frames per second (FPS). However, sending more data than the connection can handle results in video stuttering.
+
+## Dynamic resolution scaling
+
+The Stream service automatically adjusts the video resolution and framerate delivered to each call participant based on current network conditions to ensure smooth video transmission. In extreme cases where a participant's bandwidth capacity is insufficient for both video and audio, the Stream service may temporarily pause video to preserve audio streaming quality.
+
+Stream media servers actively monitor network conditions and optimize the data delivered to each participant. However, there are certain actions you can take to make this process as efficient as possible.
+
+## Control requested video resolution per participant
+
+A practical approach is to request video tracks at the resolution that matches your actual rendering needs. This minimizes UI scaling and optimizes bandwidth usage.
+
+By default, video is delivered at 1080p (1920x1080) resolution. While this ensures high-quality video, it may be excessive depending on your UI layout. For example, if your application displays participants in a small grid, where each participant's video occupies a maximum of 480x270 pixels, you would be receiving roughly sixteen times more pixels than your UI can display. This excess data is simply down-scaled by the UI and needlessly consumes bandwidth.
+
+To request a specific video resolution for each call participant, use the `UpdateRequestedVideoResolution` method on the `IStreamVideoCallParticipant` object as shown below:
+```csharp
+participant.UpdateRequestedVideoResolution(new VideoResolution(1280, 720));
+```
+Invoke this method whenever the rendered resolution changes.
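+
+As an illustrative sketch, you could derive the requested resolution from the size of the UI element that renders the participant's video and re-apply it whenever the layout changes. In the snippet below, `_participantImage` is a hypothetical `UnityEngine.UI.RawImage` used by your own view (it is not part of the SDK), and `streamCall` is the call object you joined, as in the SDK code samples:
+
+```csharp
+// Hypothetical RawImage that your UI uses to display a participant's video
+var rect = _participantImage.rectTransform.rect;
+
+// Request a resolution matching the rendered tile size (rounded to whole pixels)
+var requestedResolution = new VideoResolution(Mathf.RoundToInt(rect.width), Mathf.RoundToInt(rect.height));
+
+foreach (var participant in streamCall.Participants)
+{
+    participant.UpdateRequestedVideoResolution(requestedResolution);
+}
+```
+
+This keeps the requested track close to what the UI can actually display. If your layout switches, for example, from a grid view to a full-screen speaker view, simply recompute and re-apply the resolution.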
+
+## Video frame rate recommendations
+
+The ideal frames per second (FPS) for video transmission varies by use case. Below are general recommendations:
+
+| Use case | FPS (frames per second) | Comment |
+| --- | --- | --- |
+| Video Calling & Broadcasting | 30 | Typically, 30 FPS is sufficient for standard video calls and broadcasting. |
+| Screen-sharing | 10-15 | A lower FPS is usually adequate, but a high resolution is recommended to prevent artifacts and ensure clarity. |
+| E-Sports streaming | 50-60 | Fast-paced action requires a higher frame rate for smooth playback. |
+
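+For camera publishing, the capture frame rate is set through the `WebCamTexture` constructor described in the Camera & Microphone guide. A minimal sketch for a standard video call, reusing the `_client` instance from the examples above, could look like this (the requested values are upper bounds that the Stream service may lower under constrained network conditions):
+
+```csharp
+var cameraDevice = WebCamTexture.devices.First();
+
+// 30 FPS is typically enough for standard video calling and broadcasting;
+// fast-paced content such as e-sports benefits from 50-60 FPS
+var activeCamera = new WebCamTexture(cameraDevice.name, 1920, 1080, 30);
+activeCamera.Play();
+
+// Set the capturing WebCamTexture as the video input source
+_client.SetCameraInputSource(activeCamera);
+```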