Can't define custom VertexFormat

I’m porting some code from a Unity project over to PlayCanvas that generates procedural meshes using a custom vertex specification. The vertex specification in Unity is:

VertexAttributeDescriptor[] layout = new[] {
    new VertexAttributeDescriptor(VertexAttribute.Position, VertexAttributeFormat.SNorm16, 2, 0),
    new VertexAttributeDescriptor(VertexAttribute.TexCoord0, VertexAttributeFormat.UInt8, 4, 0),
    new VertexAttributeDescriptor(VertexAttribute.TexCoord1, VertexAttributeFormat.UNorm16, 2, 0)
};

My corresponding PlayCanvas VertexFormat is:

var vertexFormat = new pc.VertexFormat(graphicsDevice, [
    { semantic: pc.SEMANTIC_POSITION, components: 2, type: pc.TYPE_INT16, normalize: true },
    { semantic: pc.SEMANTIC_TEXCOORD0, components: 4, type: pc.TYPE_UINT8, normalize: false },
    { semantic: pc.SEMANTIC_TEXCOORD1, components: 2, type: pc.TYPE_UINT16, normalize: true }
]);

The following is my code to load the vertex data (from a binary blob containing the raw vertex buffer data):

MeshLoader.prototype.initialize = function() {
  var app = this.app;
  var graphicsDevice = app.graphicsDevice;

  var vertexFormat = new pc.VertexFormat(graphicsDevice, [
    { semantic: pc.SEMANTIC_POSITION, components: 2, type: pc.TYPE_INT16, normalize: true },
    { semantic: pc.SEMANTIC_TEXCOORD0, components: 4, type: pc.TYPE_UINT8, normalize: false },
    { semantic: pc.SEMANTIC_TEXCOORD1, components: 2, type: pc.TYPE_UINT16, normalize: true }
  ]);

  var vertexData = this.data.resource;
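  // 12 bytes per vertex: 2 x int16 position + 4 x uint8 (TEXCOORD0) + 2 x uint16 (TEXCOORD1)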
  var numVertices = vertexData.byteLength / 12;
  var vertexBuffer = new pc.VertexBuffer(graphicsDevice, vertexFormat, numVertices, pc.BUFFER_STATIC, vertexData);

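  // every 4 consecutive vertices form a quad tile; each tile becomes two triangles (6 indices)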
  var numTiles = numVertices / 4;
  var indexBuffer;
  if(numVertices < 65535) {
    let indices = new Uint16Array(numTiles * 6);
    for (let i = 0; i < numTiles; i++) {
      indices[i * 6 + 0] = i * 4 + 0;
      indices[i * 6 + 1] = i * 4 + 1;
      indices[i * 6 + 2] = i * 4 + 2;
      indices[i * 6 + 3] = i * 4 + 0;
      indices[i * 6 + 4] = i * 4 + 2;
      indices[i * 6 + 5] = i * 4 + 3;
    }
    indexBuffer = new pc.IndexBuffer(graphicsDevice, pc.INDEXFORMAT_UINT16,
                                     numTiles * 6,
                                     pc.BUFFER_STATIC,
                                     indices);
  } else {
    let indices = new Uint32Array(numTiles * 6);
    for (let i = 0; i < numTiles; i++) {
      indices[i * 6 + 0] = i * 4 + 0;
      indices[i * 6 + 1] = i * 4 + 1;
      indices[i * 6 + 2] = i * 4 + 2;
      indices[i * 6 + 3] = i * 4 + 0;
      indices[i * 6 + 4] = i * 4 + 2;
      indices[i * 6 + 5] = i * 4 + 3;
    }
    indexBuffer = new pc.IndexBuffer(graphicsDevice, pc.INDEXFORMAT_UINT32,
                                     numTiles * 6,
                                     pc.BUFFER_STATIC,
                                     indices);
  }

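  // wrap the buffers in a mesh with a single indexed triangle-list primitive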
  var mesh = new pc.Mesh(graphicsDevice);
  mesh.vertexBuffer = vertexBuffer;
  mesh.indexBuffer[0] = indexBuffer;
  mesh.primitive[0].type = pc.PRIMITIVE_TRIANGLES;
  mesh.primitive[0].base = 0;
  mesh.primitive[0].count = indexBuffer.getNumIndices();
  mesh.primitive[0].indexed = true;

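  // map the shader attribute names to the vertex format semantics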
  var vertexShader = this.app.assets.find('CustomVS');
  var fragmentShader = this.app.assets.find('CustomFS');
  var shaderDefinition = {
      attributes: {
          vertex: pc.SEMANTIC_POSITION,
          depthAndMasks: pc.SEMANTIC_TEXCOORD0,
          texcoord: pc.SEMANTIC_TEXCOORD1
      },
      vshader: "#version 300 es\n" + vertexShader.resource,
      fshader: "#version 300 es\n" + "precision " + graphicsDevice.precision + " float;\n" + fragmentShader.resource
  };

  var shader = new pc.Shader(graphicsDevice, shaderDefinition);

  this.material = new pc.Material();
  this.material.setShader(shader);

  var meshInstance = new pc.MeshInstance(mesh, this.material, this.entity);

  this.model = new pc.Model();
  this.model.graph = this.entity;
  this.model.meshInstances = [ meshInstance ];
  app.scene.addModel(this.model);
};

My vertex shader has the following attributes and uniforms:

in vec2 vertex;
in uvec4 depthAndMasks;
in vec2 texcoord;

uniform mat4 matrix_model;
uniform mat4 matrix_viewProjection;

centroid out vec2 uv;

This looks correct to me, since the corresponding Unity shader takes:

struct appdata_t {
    float2 vertex : POSITION;
    uint4 depthAndMasks : TEXCOORD0;
    float2 texcoord : TEXCOORD1;
};

However, when running, I’m getting the following GL error:

GL_INVALID_OPERATION: Vertex shader input type does not match the type of the bound vertex attribute.

What am I missing?

Calling @mvaligursky.

It looks like the problem is the ‘uvec4’ - how can I specify that the vertex attribute is read as an integer format rather than a floating point format?

Integer-type vertex attributes are not supported on WebGL1 and are WebGL2 only … so perhaps the best option would be to find an alternative way to store the data?

I have not personally tried uvec4 on WebGL2 yet, so without deeper investigation I’m not sure what the problem is here.
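
For example, one alternative that should work without engine changes (just a sketch, I haven’t tested it): keep the vertex format exactly as it is (the UINT8 attribute is already non-normalized), but declare the attribute as a float vec4 in the shader and convert it back to integers there. vertexAttribPointer passes non-normalized integer data through as the same numeric values, so 0..255 survives the conversion exactly. Something like this, where the body of main() is just a stand-in for your actual shader logic:

var vshaderWorkaround = [
    "#version 300 es",
    "in vec2 vertex;",
    "in vec4 depthAndMasks;   // was uvec4 - read as float instead",
    "in vec2 texcoord;",
    "",
    "uniform mat4 matrix_model;",
    "uniform mat4 matrix_viewProjection;",
    "",
    "centroid out vec2 uv;",
    "",
    "void main() {",
    "    // recover the original integer values (0..255 are exact as floats)",
    "    uvec4 masks = uvec4(depthAndMasks);",
    "    uv = texcoord;",
    "    gl_Position = matrix_viewProjection * matrix_model * vec4(vertex, 0.0, 1.0);",
    "}"
].join("\n");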

I think to properly support this use case, the engine must call WebGL2RenderingContext.vertexAttribIPointer() instead of WebGLRenderingContext.vertexAttribPointer().
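
At the raw WebGL2 level the difference is roughly this (sketch only; 'location', the stride and the offset are made-up values for the TEXCOORD0 attribute of the 12-byte format above, not what the engine actually does today):

// what happens now: the attribute data is converted to float,
// which does not match a uvec4/ivec4 shader input
// gl.vertexAttribPointer(location, 4, gl.UNSIGNED_BYTE, false, 12, 4);

// what an integer shader input needs: the data stays integral
gl.vertexAttribIPointer(location, 4, gl.UNSIGNED_BYTE, 12, 4);
gl.enableVertexAttribArray(location);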

I think you’re right on this one … I’ve created an issue to track this. Thanks for the report, much appreciated.
