I have a remote server that stores VOD (video on demand) content, and I use Shaka Player on the client side. I encoded the video as low-latency DASH using Shaka Streamer, and I want Shaka Player to use HTTP range requests on the video parts. I also want the server to use HTTP/2 and push the next fragments before the client asks for them (it seems the player does not request segments ahead of time, and the round-trip latency kills the experience).
Example of the expected behavior:
a. client -> server: GET videoA1, Range: 0-300
b. server -> client: videoA1, bytes 0-300, response 206
c. server -> client: push videoA1, bytes 300-600, response 206
d. server -> client: push videoA1, bytes 600-900, response 206
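To illustrate, this is roughly the kind of ranged request I would expect, shown with a plain fetch from the browser console (the segment name videoA1.m4s is just a placeholder, not a real file on my server):

// Placeholder sketch: ask for only the first 300 bytes of a segment.
// Nginx serves static files with Range support by default, so a 206 is expected.
const response = await fetch('http://172.27.17.50/shaka/videoA1.m4s', {
  headers: {'Range': 'bytes=0-299'},
});
console.log(response.status);                        // expected: 206
// Reading Content-Range cross-origin may need Access-Control-Expose-Headers.
console.log(response.headers.get('Content-Range'));  // e.g. "bytes 0-299/1234567"

If this returns 200 with the full body, the problem is on the server side; if it returns 206, the issue is purely on the player/manifest side.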
As a first step, I am just trying to make the player use HTTP range requests (I will add the HTTP/2 part once that works). Currently, the player requests each whole segment and receives it with a 200 response, and since the server is some distance away, this hurts the playback experience.
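To see what the player actually sends, my plan is to register a request filter on Shaka's networking engine and log the segment requests (a debugging sketch, using the player instance created in the script below):

// Debugging sketch: log every segment request the player issues,
// so I can check whether a Range header is attached.
player.getNetworkingEngine().registerRequestFilter((type, request) => {
  if (type === shaka.net.NetworkingEngine.RequestType.SEGMENT) {
    console.log('segment request:', request.uris[0],
                'Range:', request.headers['Range'] || '(no Range header)');
  }
});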
This is my Shaka Player script:
const manifestUri = 'http://172.27.17.50/shaka/dash.mpd';

function initApp() {
  // Install built-in polyfills to patch browser incompatibilities.
  shaka.polyfill.installAll();

  // Check to see if the browser supports the basic APIs Shaka needs.
  if (shaka.Player.isBrowserSupported()) {
    // Everything looks good!
    initPlayer();
  } else {
    // This browser does not have the minimum set of APIs we need.
    console.error('Browser not supported!');
  }
}

async function initPlayer() {
  // Create a Player instance.
  const video = document.getElementById('video');
  const player = new shaka.Player(video);

  // Attach player to the window to make it easy to access in the JS console.
  window.player = player;

  // Listen for error events.
  player.addEventListener('error', onErrorEvent);

  // Try to load a manifest.
  // This is an asynchronous process.
  try {
    await player.load(manifestUri);
    // This runs if the asynchronous load is successful.
    console.log('The video has now been loaded!');
  } catch (e) {
    // onError is executed if the asynchronous load fails.
    onError(e);
  }
}

function onErrorEvent(event) {
  // Extract the shaka.util.Error object from the event.
  onError(event.detail);
}

function onError(error) {
  // Log the error.
  console.error('Error code', error.code, 'object', error);
}

document.addEventListener('DOMContentLoaded', initApp);
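Since the content is packaged as low-latency DASH, I assume the player also needs its low-latency mode turned on before load() (a sketch of what I would add inside initPlayer; I have not confirmed whether this affects range requests):

// Sketch: enable low-latency streaming mode before loading the manifest.
player.configure({
  streaming: {
    lowLatencyMode: true,
  },
});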
This is my Nginx config:
worker_processes auto;
pid /run/nginx.pid;

events {
    use epoll;
}

http {
    default_type application/octet-stream;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;

    open_file_cache max=1000 inactive=5m;
    open_file_cache_valid 2m;
    open_file_cache_min_uses 1;
    open_file_cache_errors on;

    aio on;

    server {
        listen 80;
        server_name localhost;
        root /usr/share/lhls/;

        location / {
            autoindex on;
            root /usr/share/lhls;
            add_header X-requests '$uri';
            add_header Access-Control-Allow-Headers '*';
            add_header Access-Control-Allow-Origin '*';
            add_header Access-Control-Allow-Methods '*';
        }
    }
}
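For the later HTTP/2 step, my understanding is that browsers only speak HTTP/2 over TLS, so the server block would need to change along these lines (certificate paths are placeholders and I have not tested this; I also have not confirmed that nginx's http2_push directive can push byte-range responses):

server {
    # Sketch: browsers require TLS for HTTP/2, so listen on 443 with ssl and http2.
    listen 443 ssl http2;
    server_name localhost;

    # Placeholder certificate paths.
    ssl_certificate     /etc/nginx/certs/server.crt;
    ssl_certificate_key /etc/nginx/certs/server.key;

    root /usr/share/lhls;

    location / {
        add_header Access-Control-Allow-Origin '*';
    }
}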
These are my Shaka Streamer configuration files.
InputFile.yaml:
input:
  - name: my_video.mp4
    media_type: video
DashConfig.yaml:
streaming_mode: vod
resolutions:
  - 1080p
channel_layouts:
  - stereo
  - surround
audio_codecs:
  - aac
video_codecs:
  - h264
manifest_format:
  - dash
segment_size: 2
low_latency_dash_mode: True
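My understanding is that Shaka Player only issues Range requests when the manifest addresses segments by byte range (SegmentBase/indexRange) rather than by template and number, so as a sanity check I plan to look at what Streamer actually generated, e.g. from the browser console:

// Sketch: fetch the generated MPD and check which segment addressing it uses.
const res = await fetch(manifestUri);
const mpd = await res.text();
console.log('uses SegmentBase:    ', mpd.includes('SegmentBase'));
console.log('uses SegmentTemplate:', mpd.includes('SegmentTemplate'));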
What should I do to make the player use Range requests on the video parts and make the user's experience smoother?