Does anyone know how to apply the above code in playcanvas?
Logic 2)
When playing the video, take the video texture and split it into two equal parts, then apply one to the diffuseMap/emissiveMap and the other to the opacityMap. But for this 2nd logic, I don’t know how to split the texture into two parts.
Can anyone help me with any of the logic solutions?
If I play two videos (Diffuse+Alpha) separately it will not sync properly.
To your first question this post may be of help it contains a public project that is still live and playbacks an alpha masked video. Beware though I remember there were issues on iOS:
// Registers a script that plays a video whose frames stack color data (top
// half) over a grayscale alpha mask (bottom half), composites the two halves
// into one RGBA texture, and applies it to a UI image element each frame.
var AlphaMaskVideoPlayer = pc.createScript('alphaMaskVideoPlayer');

// Entity whose element component receives the final composited texture.
AlphaMaskVideoPlayer.attributes.add('alphaImage', {
    type: 'entity',
    description: 'Final Alpha image should apply on this'
});

AlphaMaskVideoPlayer.attributes.add('videoAsset', {
    type: 'asset',
    // BUGFIX: was 'audio' — the editor asset picker must filter for video assets.
    assetType: 'video',
    description: 'Apply an alpha mask to a video. Each frame of the video has color information on top and alpha information at the bottom.'
});

// Pixel dimensions of the source video (full stacked frame, not half).
AlphaMaskVideoPlayer.attributes.add('videoWidthHeight', {
    type: 'vec2',
    description: 'Video resolution'
});

AlphaMaskVideoPlayer.attributes.add('isMute', {
    type: 'boolean',
    // BUGFIX: was misspelled 'defalut', so the default was never applied.
    default: true,
    description: 'Mute Sound when video start play'
});

// When true, the video is streamed from 'video_url' instead of 'videoAsset'.
AlphaMaskVideoPlayer.attributes.add('playVideoByURL', {
    type: 'boolean',
    description: 'Pass external video url'
});
AlphaMaskVideoPlayer.attributes.add('video_url', {
    type: 'string'
});
/**
 * Script entry point: resets playback flags, then builds the destination
 * texture, the hidden <video> element, and the helper canvases/image used
 * for per-frame compositing.
 */
AlphaMaskVideoPlayer.prototype.initialize = function () {
    // 'upload' toggles each tick to halve the processing rate;
    // 'isPlaying' gates the update loop until the video is ready.
    this.upload = false;
    this.isPlaying = false;

    this.createTexture();
    this.createVideoPlayer();
    this.makeAllHtmlElements();
};
/**
 * Creates a hidden, looping <video> element (1x1 px, fully transparent)
 * whose frames are later drawn into a canvas for per-pixel processing.
 * Flags the player as ready on the first 'canplay' event.
 */
AlphaMaskVideoPlayer.prototype.createVideoPlayer = function () {
    var video = document.createElement('video');
    if (this.playVideoByURL) {
        video.src = this.video_url;
    } else {
        video.src = this.videoAsset.getFileUrl();
    }
    video.type = "video/mp4";
    video.crossOrigin = 'anonymous';
    video.loop = true;
    video.preload = "auto";
    // critical for iOS or the video won't initially play, and will go fullscreen when playing
    video.playsInline = true;
    video.muted = this.isMute;

    // Keep the element in the DOM (some browsers require it) but invisible.
    var videoStyle = video.style;
    videoStyle.width = '1px';
    videoStyle.height = '1px';
    videoStyle.position = 'absolute';
    videoStyle.opacity = '0';
    videoStyle.zIndex = '1';
    videoStyle.pointerEvents = 'none';

    document.body.appendChild(video);
    this.video = video;

    // BUGFIX: play() returns a promise in modern browsers; autoplay policies
    // (especially iOS) can reject it. Log instead of leaving an unhandled
    // promise rejection.
    var playPromise = video.play();
    if (playPromise && typeof playPromise.catch === 'function') {
        playPromise.catch(function (err) {
            console.warn('Video autoplay was blocked:', err);
        });
    }

    var self = this;
    // Called when enough of the video has loaded to start playback.
    video.addEventListener('canplay', function () {
        // BUGFIX: the original tested `this.upload`, but inside this listener
        // `this` is the <video> element, not the script instance, so the guard
        // never actually checked script state. Guard on isPlaying instead
        // (upload toggles every tick, so it is not a reliable "ready" flag).
        if (!self.isPlaying) {
            console.log("Can start playing video");
            self.upload = true;
            self.isPlaying = true;
        }
    });
};
/**
 * Allocates the destination texture that receives each composited frame.
 * Mipmaps are disabled because the contents are replaced every frame.
 */
AlphaMaskVideoPlayer.prototype.createTexture = function () {
    var device = this.app.graphicsDevice;
    var texture = new pc.Texture(device, { mipmaps: false });

    // Linear filtering with clamped UVs — standard setup for video textures.
    texture.minFilter = pc.FILTER_LINEAR;
    texture.magFilter = pc.FILTER_LINEAR;
    texture.addressU = pc.ADDRESS_CLAMP_TO_EDGE;
    texture.addressV = pc.ADDRESS_CLAMP_TO_EDGE;

    this._texture2D = texture;
};
/**
 * Builds the hidden DOM helpers used for frame processing:
 *  - _image:        <img> that receives the base64 PNG of each processed frame
 *  - _buffer:       2d context of a full-height canvas the raw frame is drawn into
 *  - _output:       2d context of a half-height canvas holding the composited RGBA frame
 *  - _outputCanvas: the output canvas itself (source for toDataURL)
 */
AlphaMaskVideoPlayer.prototype.makeAllHtmlElements = function () {
    var frameWidth = this.videoWidthHeight.x;
    var frameHeight = this.videoWidthHeight.y;

    // Hidden image that carries each processed frame into the texture.
    var baseImage = document.createElement("IMG");
    baseImage.style.width = '1px';
    baseImage.style.height = '1px';
    baseImage.style.opacity = '0';
    document.body.appendChild(baseImage);
    this._image = baseImage;

    // Full-height scratch canvas: color on top, alpha mask below.
    var bufferCanvas = document.createElement("CANVAS");
    bufferCanvas.width = frameWidth;
    bufferCanvas.height = frameHeight;
    bufferCanvas.style.display = "none";

    // Half-height output canvas for the final RGBA frame.
    var outputCanvas = document.createElement("CANVAS");
    outputCanvas.width = frameWidth;
    outputCanvas.height = frameHeight / 2;
    outputCanvas.style.opacity = '0';

    this._buffer = bufferCanvas.getContext('2d');
    this._output = outputCanvas.getContext('2d');
    this._outputCanvas = outputCanvas;

    document.body.appendChild(bufferCanvas);
    document.body.appendChild(outputCanvas);
};
/**
 * Per-frame tick. Processes a new video frame on every other update to
 * halve the (expensive) readback/compositing cost.
 * @param {number} dt - Elapsed time since the last update, in seconds.
 */
AlphaMaskVideoPlayer.prototype.update = function (dt) {
    if (!this.isPlaying) {
        return;
    }
    // Flip the flag each tick so processFrame runs at half the frame rate.
    this.upload = !this.upload;
    if (this.upload) {
        this.processFrame();
    }
};
/**
 * Copies the current video frame into the buffer canvas, moves the green
 * channel of the bottom half (the mask) into the alpha channel of the top
 * half (the color), and hands the composited frame off as a PNG data URL.
 */
AlphaMaskVideoPlayer.prototype.processFrame = function () {
    var width = this.videoWidthHeight.x;
    var halfHeight = this.videoWidthHeight.y / 2;

    this._buffer.drawImage(this.video, 0, 0);

    // Top half of the frame: color information.
    // this can be done without alphaData, except in Firefox which doesn't like it when image is bigger than the canvas
    let image = this._buffer.getImageData(0, 0, width, halfHeight);
    let imageData = image.data;
    // Bottom half of the frame: grayscale alpha mask.
    let alphaData = this._buffer.getImageData(0, halfHeight, width, halfHeight).data;

    // RGBA layout: i walks the alpha bytes; i - 1 is the mask's green channel.
    for (var i = 3, len = imageData.length; i < len; i = i + 4) {
        imageData[i] = alphaData[i - 1];
    }

    // BUGFIX: the dirty-rect height passed to putImageData was the full video
    // height, but the composited image (and the output canvas) are half-height.
    this._output.putImageData(image, 0, 0, 0, 0, width, halfHeight);

    this.ShowShareImage(this._outputCanvas.toDataURL());
};
/**
 * Loads a base64-encoded frame into the hidden <img> and, once decoded,
 * uploads it into the PlayCanvas texture displayed by the alphaImage element.
 * @param {string} base64Data - PNG data URL of the composited frame.
 */
AlphaMaskVideoPlayer.prototype.ShowShareImage = function (base64Data) {
    // BUGFIX: crossOrigin and onload must be assigned BEFORE src — otherwise
    // a cached image can finish loading before the handler is attached, and
    // the CORS mode is not applied to the request already in flight.
    this._image.crossOrigin = 'anonymous';
    this._image.onload = () => {
        this._texture2D.setSource(this._image);
        this.alphaImage.element.texture = this._texture2D;
    };
    this._image.src = base64Data;
};
In this tutorial I explain how to make and added new version alpha video script
This script uses the shader so it performs better than my previous version.
ok, great … and thanks for the rapid effort
Unfortunately the/my issue turns in the direction of very ‘cutting edge’ video-encoding exports.
→ the problem becomes quite exotic in a negative way, as it is a
“Special way/issue of/by setting alpha layer in Blender 3.3”
If you can do something, in relation to this:
then great.
Otherwise I will (at least momentarily) turn to the developers of Blender to find out what they can do to ‘the layers coding-wise’ in order for this to happen / be executable on PlayCanvas-/browser-level … and thus hear if they can make amends in the Blender 3.4 version (PS edit: The video is made with their new ‘shadow caustics’ option)
NB: This is how a still from the Windowsplayer-mp4-executing video looks like
ok, have done accordingly … still not sure where to set the settings for the alpha channel in After Effects? (have googled that mp4 does not have an alpha channel [on its own])
Tried everything else now - still it leads me towards the Blender exporter … both Blender 2.83 and 3.3 has issues (regardless if the .mp4-file is exported with Shadow Caustics or not)
Can You please put up some screen dumps on how you export in AE? (maybe I can then successfully export as mp4 in AE, but by imported stills [where these single-images were exported as stills in Blender [AVI JPEG]])
[that will work for me, but all-in-all; I will adjust the total issue towards the Blender devs, to see if they can include color data in the ‘floating transparent’ mask]