I took a look at your project, and 'far as I can see, you’re using HLS.js to load one of Mux’s test livestreams, right? In that case, where does the nginx server come in?
I tried to replicate your code in mine, using the same test-stream URL, and it works fine on desktop, but running it on mobile gives me a CORS error:
Uncaught SecurityError: Failed to execute 'texImage2D' on 'WebGL2RenderingContext': The video element contains cross-origin data, and may not be loaded.
This happens moments after the scene is loaded, before I’ve even tapped the screen to begin playback.
When I comment out this.videoTexture.upload()
in update()
, the error is gone. However, that just leaves me with a blank screen and audio. Not exactly the goal here.
Did you change something in your project, perhaps related to CORS? (Sorry, I’m new to PlayCanvas.)
My script is down below. Applying the texture to the material is done in another script, as in PlayCanvas’ tutorial.
var VideoTexture = pc.createScript('videoTexture');
VideoTexture.attributes.add('videoSrc', {
title: 'Video Source URL',
description: 'Livestream URL.',
type: 'string',
default: 'https://test-streams.mux.dev/x36xhzz/x36xhzz.m3u8'
});
VideoTexture.attributes.add('playEvent', {
title: 'Play Event',
description: 'Event that is fired as soon as the video texture is ready to play.',
type: 'string',
default: ''
});
// initialize code called once per entity
VideoTexture.prototype.initialize = function() {
var app = this.app;
// Create HTML Video Element to play the video
var video = document.createElement('video');
// needed because the video is being hosted on a different server url
video.crossOrigin = "anonymous";
// muted attribute is required for videos to autoplay
video.muted = true;
// critical for iOS or the video won't initially play, and will go fullscreen when playing
video.playsInline = true;
video.autoplay = false;
video.loop = true;
// iOS video texture playback requires that you add the video to the DOMParser
// with at least 1x1 as the video's dimensions
var style = video.style;
style.width = '1px';
style.height = '1px';
style.position = 'absolute';
style.opacity = '0';
style.zIndex = '-1000';
style.pointerEvents = 'none';
document.body.appendChild(video);
// Create a texture to hold the video frame data
this.videoTexture = new pc.Texture(app.graphicsDevice, {
format: pc.PIXELFORMAT_R8_G8_B8,
minFilter: pc.FILTER_LINEAR,
magFilter: pc.FILTER_LINEAR,
addressU: pc.ADDRESS_CLAMP_TO_EDGE,
addressV: pc.ADDRESS_CLAMP_TO_EDGE,
mipmaps: true
});
this.app.mouse.on(pc.EVENT_MOUSEDOWN, () => {
video.muted = false;
video.play();
});
if (pc.platform.mobile) {
this.app.touch.on(pc.EVENT_TOUCHSTART, () => {
video.muted = false;
video.play();
});
}
video.addEventListener('canplay', function (e) {
this.videoTexture.setSource(video);
app.fire(this.playEvent, this.videoTexture);
}.bind(this));
if (video.canPlayType('application/vnd.apple.mpegurl')) {
console.log("Using native HLS support.");
video.src = this.videoSrc;
} else if (Hls.isSupported()) {
var hls = new Hls();
hls.loadSource(this.videoSrc);
hls.attachMedia(video);
}
};
// update code called every frame
VideoTexture.prototype.update = function(dt) {
this.videoTexture.upload();
};
I figured it out!
I swapped my if-else statements so that, like in your code, it checks for HLS.js support first instead of native support, and it works on my Android phone now. Edit: Works on my iPad’s Safari, too.