World position in post effect?

Is there any way to get the world position of a pixel in a post effect?
I feel like some combination of depth pass, camera matrix and screen space pixel position should be able to do it, but I’m not sure how to put it all together.
Or is there a simpler way? I’m a bit spoiled by Unreal where you have easy access to all sorts of things in the post-processing pass!

Hi @steve_wk,

Yes it’s possible in a number of ways; usually we start from the depth buffer and reconstruct the view space position. Here is how it’s being calculated in the SSAO post effect:

From there, to get the world position you have to multiply that view space position by the inverse of the camera’s view matrix (i.e. the camera’s world transform) — note that the inverse *projection* matrix only takes you from clip space back to view space.

Sorry I don’t have an exact example on that but if you search for generic OpenGL/GLSL shaders on how to reconstruct world position from depth, you will find a lot of resources.

Hope that helps.

2 Likes

Thanks so much for the pointers. I believe I’m getting close to the solution, but something is not right.

Here is my project: PlayCanvas | HTML5 Game Engine

As a test I’m just trying to tint everything with a negative world-space X coordinate red. What I would expect is that half of the box would appear tinted red constantly, but as it is, the tint seems to move around in relation to the camera.

Any idea what I’m doing wrong here?

Hey, the project is private!

1 Like

Oops! Sorry, made it public now!

1 Like

Still bashing my head against this one, but feel like I’m fumbling around in the dark a bit. Any thoughts on what I’m doing wrong now you’ve seen the project?

Hey, I think you are doing a matrix multiplication that isn’t required. To get the world position you only need to multiply with the inverse projection matrix, like this:

``````// Invert a clone so the camera's own projection matrix is left untouched.
// (clone() already returns a fresh Mat4, so no extra copy is needed.)
var f = this.camera.camera.projectionMatrix.clone();
f.invert();
``````

Edit: actually I take it back, it’s still in clip space / view space. That needs some further research sorry

Here is an improved version of the code — still not fully correct, but hopefully of more help:

``````// --------------- POST EFFECT DEFINITION --------------- //
/**
 * @class
 * @name BloomEffect
 * @classdesc Debug post effect that tints pixels red when their reconstructed
 * world position has a negative X coordinate. (The name and JSDoc shell were
 * inherited from the bloom template this was derived from.)
 * @description Creates a new instance of the post effect.
 * @augments PostEffect
 * @param {GraphicsDevice} graphicsDevice - The graphics device of the application.
 * @property {number} bloomThreshold Only pixels brighter than this threshold will be processed. Ranges from 0 to 1.
 * @property {number} blurAmount Controls the amount of blurring.
 * @property {number} bloomIntensity The intensity of the effect.
 */
function BloomEffect(graphicsDevice) {
    pc.PostEffect.call(this, graphicsDevice);

    // Ask the post-effect queue to render scene depth so the fragment shader
    // can call getLinearScreenDepth().
    this.needsDepthBuffer = true;

    var attributes = {
        aPosition: pc.SEMANTIC_POSITION
    };

    // Standard fullscreen-quad vertex shader: pass clip-space position through
    // and derive UVs from it ([-1, 1] -> [0, 1]).
    var passThroughVert = [
        "attribute vec2 aPosition;",
        "",
        "varying vec2 vUv0;",
        "",
        "void main(void)",
        "{",
        "    gl_Position = vec4(aPosition, 0.0, 1.0);",
        "    vUv0 = (aPosition + 1.0) * 0.5;",
        "}"
    ].join("\n");

    // NOTE(review): the thread itself flags this math as not fully correct:
    // vec4 * mat4 is a row-vector multiply in GLSL (usually you want
    // invViewProj * vec4), and multiplying by matrix_view takes world -> view,
    // not view -> world. Kept byte-identical here pending a verified fix.
    var worldPositionFrag = [
        "precision " + graphicsDevice.precision + " float;",
        "",
        "varying vec2 vUv0;",
        "uniform mat4 invViewProj;",
        "",
        "uniform float uAspect;",
        "",
        "uniform sampler2D uBaseTexture;",
        "",
        "highp vec3 computeViewSpacePositionFromDepth(highp vec2 uv, highp float linearDepth) {",
        "    return vec3((0.5 - uv) * vec2(uAspect, 1.0) * linearDepth, linearDepth);",
        "}",
        "void main(void)",
        "{",
        "    highp vec2 uv = vUv0;",
        "    highp float depth = getLinearScreenDepth(vUv0);",
        "    highp vec3 origin = computeViewSpacePositionFromDepth(uv, depth);",
        "    highp vec4 result = vec4(origin, 1.0) * invViewProj;",
        "    highp vec3 viewPos = result.xyz/result.w;",
        "    highp vec3 worldPosition = (matrix_view * vec4(viewPos, 1.0)).xyz;",
        "    if(worldPosition.x < 0.0)",
        "    {",
        "        gl_FragColor = texture2D(uBaseTexture, vUv0) * vec4(1.0, 0.5, 0.5, 1.0);",
        "    } else {",
        "    gl_FragColor = texture2D(uBaseTexture, vUv0);",
        "    }",
        "}"
    ].join("\n");

    // NOTE(review): the original paste was truncated here -- only the trailing
    // "attributes: attributes, });" survived. Reconstructed from the standard
    // PlayCanvas post-effect pattern; the screenDepthPS chunk is prepended so
    // getLinearScreenDepth() resolves. Verify against the original project.
    this.shader = new pc.Shader(graphicsDevice, {
        vshader: passThroughVert,
        fshader: pc.shaderChunks.screenDepthPS + "\n" + worldPositionFrag,
        attributes: attributes
    });

    this.targets = [];
}

// Classic prototypal inheritance: BloomEffect extends pc.PostEffect, restoring
// the constructor reference that Object.create() clobbers.
BloomEffect.prototype = Object.create(pc.PostEffect.prototype);
BloomEffect.prototype.constructor = BloomEffect;

// Cleanup hook -- intentionally empty; this effect allocates no render
// targets of its own to release.
BloomEffect.prototype._destroy = function () {

};

// Resize hook -- intentionally a no-op; there are no size-dependent targets
// to rebuild when the input render target changes dimensions.
BloomEffect.prototype._resize = function (target) {
};

Object.assign(BloomEffect.prototype, {
    /**
     * Uploads the per-frame uniforms consumed by the fragment shader.
     *
     * @param {RenderTarget} inputTarget - Source target holding the scene color.
     * @param {RenderTarget} outputTarget - Destination target for the effect.
     * @param {Vec4} rect - Normalized viewport rectangle.
     */
    render: function (inputTarget, outputTarget, rect) {

        this._resize(inputTarget);

        var device = this.device;
        var scope = device.scope;

        // NOTE(review): despite the uniform name, only the inverse *projection*
        // matrix is uploaded here -- the view matrix is never folded in. TODO
        // confirm whether the shader actually wants inverse(projection * view).
        // clone() returns a fresh Mat4, so we can invert it in place without
        // touching the camera's own matrix.
        var invProj = this.camera.camera.projectionMatrix.clone();
        invProj.invert();

        scope.resolve("invViewProj").setValue(invProj.data);
        scope.resolve("uAspect").setValue(inputTarget.width / inputTarget.height);
        scope.resolve("uBaseTexture").setValue(inputTarget.colorBuffer);

        // NOTE(review): no fullscreen-quad draw call appears here -- presumably
        // a pc.drawFullscreenQuad(device, outputTarget, this.vertexBuffer,
        // this.shader, rect) line was lost in the paste; without it the effect
        // draws nothing. Not reconstructed because this.vertexBuffer is not
        // visibly created either -- verify against the original project.
    }
});

// ----------------- SCRIPT DEFINITION ------------------ //
var Bloom = pc.createScript('bloom');

// NOTE(review): reconstructed -- the paste truncated this call, leaving only
// the trailing "type/title" fragment. Exposes the camera entity whose
// projection matrix the effect reads in render().
Bloom.attributes.add('camera', {
    type: 'entity',
    title: 'Camera'
});

Bloom.prototype.initialize = function () {
this.effect = new BloomEffect(this.app.graphicsDevice);

this.effect.camera = this.camera;

var queue = this.entity.camera.postEffects;

this.on('attr', function (name, value) {
this.effect[name] = value;
}, this);

this.on('state', function (enabled) {
if (enabled) {