19 Hello USDZ

Rendering an animated USDZ file is only slightly more difficult than rendering a single triangle with the Tellusim Core SDK. When dealing with skinned-mesh animation, we must pass joint transformations to the Pipeline. Our pipeline binding model follows a simple rule: Uniform and Storage buffers occupy the first binding set, while Textures and Surfaces occupy the second binding set. The subsequent sets are used by Samplers, Tracings, Texel Buffers, and Texture Tables, in that order. For instance, a single Uniform buffer would use (binding = 0, set = 0), and a Texture would use (binding = 0, set = 1) if there is at least one Uniform or Storage buffer; otherwise, the Texture binding would be (binding = 0, set = 0).
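
To illustrate the rule, a hypothetical pair of GLSL declarations for the two cases might look like this:

    // case 1: one Uniform buffer plus one Texture
    layout(binding = 0, set = 0) uniform Parameters { vec4 color; };
    layout(binding = 0, set = 1) uniform texture2D in_texture;

    // case 2 (a different pipeline): with no buffers, the Texture falls back to the first set
    // layout(binding = 0, set = 0) uniform texture2D in_texture;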

    // vertex layout
    struct Vertex {
        Vector3f position;
        Vector3f normal;
        Vector4f tangent;
        Vector2f texcoord;
        Vector4f weights;
        Vector4u joints;
    };
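
As noted below, this layout adds up to 80 bytes per vertex; assuming the math types are tightly packed, a compile-time assertion can document that assumption:

    // the packed vertex layout is assumed to be 80 bytes
    static_assert(sizeof(Vertex) == 80, "unexpected Vertex size");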

    // create pipeline
    Pipeline pipeline = device.createPipeline();
    pipeline.setSamplerMask(0, Shader::MaskFragment);
    pipeline.setTextureMasks(0, 3, Shader::MaskFragment);
    pipeline.setUniformMasks(0, 2, Shader::MaskVertex);
    pipeline.addAttribute(Pipeline::AttributePosition, FormatRGBf32, 0, offsetof(Vertex, position), sizeof(Vertex));
    pipeline.addAttribute(Pipeline::AttributeNormal, FormatRGBf32, 0, offsetof(Vertex, normal), sizeof(Vertex));
    pipeline.addAttribute(Pipeline::AttributeTangent, FormatRGBAf32, 0, offsetof(Vertex, tangent), sizeof(Vertex));
    pipeline.addAttribute(Pipeline::AttributeTexCoord, FormatRGf32, 0, offsetof(Vertex, texcoord), sizeof(Vertex));
    pipeline.addAttribute(Pipeline::AttributeWeights, FormatRGBAf32, 0, offsetof(Vertex, weights), sizeof(Vertex));
    pipeline.addAttribute(Pipeline::AttributeJoints, FormatRGBAu32, 0, offsetof(Vertex, joints), sizeof(Vertex));
    pipeline.setColorFormat(window.getColorFormat());
    pipeline.setDepthFormat(window.getDepthFormat());
    pipeline.setDepthFunc(Pipeline::DepthFuncLess);
    if(!pipeline.loadShaderGLSL(Shader::TypeVertex, "main.shader", "VERTEX_SHADER=1")) return 1;
    if(!pipeline.loadShaderGLSL(Shader::TypeFragment, "main.shader", "FRAGMENT_SHADER=1")) return 1;
    if(!pipeline.create()) return 1;

The Pipeline has two Uniform buffers accessible from the Vertex shader (binding = 0, 1; set = 0), three Textures accessible from the Fragment shader (binding = 0, 1, 2; set = 1), and a Sampler (binding = 0; set = 2). The Vertex contains the position, tangent basis, texture coordinate, joint weights, and joint indices. There is no vertex size optimization, so the Vertex size is 80 bytes. This time we will load the shaders from a file, passing macro definitions to the preprocessor to select which shader stage to compile. The corresponding Vertex and Fragment shader input signatures are as follows:

    layout(location = 0) in vec4 in_position;
    layout(location = 1) in vec3 in_normal;
    layout(location = 2) in vec4 in_tangent;
    layout(location = 3) in vec2 in_texcoord;
    layout(location = 4) in vec4 in_weights;
    layout(location = 5) in uvec4 in_joints;

    layout(row_major, binding = 0) uniform CommonParameters {
        mat4 projection;
        mat4 modelview;
        vec4 camera;
    };

    layout(binding = 1) uniform JointParameters {
        vec4 joints[192];
    };

    layout(binding = 0, set = 1) uniform texture2D in_normal_texture;
    layout(binding = 1, set = 1) uniform texture2D in_diffuse_texture;
    layout(binding = 2, set = 1) uniform texture2D in_roughness_texture;
    layout(binding = 0, set = 2) uniform sampler in_sampler;
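
The joints array packs each 3x4 joint matrix as three consecutive vec4 rows, so 192 vectors cover up to 64 joints. A rough sketch of how the vertex stage might consume this data (not the exact shader from the sample) could look like this; the guard macro matches the VERTEX_SHADER=1 definition passed to loadShaderGLSL:

    #if VERTEX_SHADER
        void main() {
            // each joint occupies three vec4 rows of a row-major 3x4 matrix
            uvec4 index = in_joints * 3u;

            // blend the matrix rows with the four vertex weights
            vec4 row_0 = joints[index.x + 0u] * in_weights.x + joints[index.y + 0u] * in_weights.y + joints[index.z + 0u] * in_weights.z + joints[index.w + 0u] * in_weights.w;
            vec4 row_1 = joints[index.x + 1u] * in_weights.x + joints[index.y + 1u] * in_weights.y + joints[index.z + 1u] * in_weights.z + joints[index.w + 1u] * in_weights.w;
            vec4 row_2 = joints[index.x + 2u] * in_weights.x + joints[index.y + 2u] * in_weights.y + joints[index.z + 2u] * in_weights.z + joints[index.w + 2u] * in_weights.w;

            // transform the position by the blended 3x4 matrix
            vec4 position = vec4(dot(row_0, in_position), dot(row_1, in_position), dot(row_2, in_position), 1.0);
            gl_Position = projection * (modelview * position);
        }
    #endif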

To load a USDZ (or glTF, FBX, DAE, FLT, OBJ, LWO, 3DS, PLY, STL, MESH) scene, we need only a few lines of code:

    // load mesh
    Mesh mesh;
    if(!mesh.load("model.usdz")) return 1;
    if(!mesh.getNumGeometries()) return 1;
    if(!mesh.getNumAnimations()) return 1;
    mesh.setBasis(Mesh::BasisZUpRight);
    mesh.createTangents();

    // create model
    MeshModel model;
    if(!model.create(device, pipeline, mesh)) return 1;

The Mesh class represents a mesh-related scene graph with its own MeshNode, MeshGeometry, MeshJoint, MeshMaterial, MeshAnimation, Camera, and Light interfaces. The mesh basis is set to have the Z axis up. If tangent vectors do not exist, they are created. The MeshModel interface automatically converts the input Mesh into the specified Pipeline layout, including format conversion and index optimization. Multiple attributes of the same type can be present for multiple texture coordinates, colors, or morphing. Once the geometry is uploaded to the GPU, textures are created from the MeshMaterial.
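
For example, a second texture-coordinate channel would be just one more attribute of the same type in the pipeline layout (texcoord_1 here is a hypothetical extra Vertex member):

    // hypothetical second UV channel added to the Vertex struct above
    pipeline.addAttribute(Pipeline::AttributeTexCoord, FormatRGf32, 0, offsetof(Vertex, texcoord_1), sizeof(Vertex));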

    static Texture create_texture(const Device &device, const MeshMaterial &material, const char *type) {

        // find material parameter
        uint32_t index = material.findParameter(type);
        if(index == Maxu32 || !material.hasParameterFlag(index, MeshMaterial::FlagBlob)) return Texture::null;

        // load image
        Image image;
        Blob blob = material.getParameterBlob(index);
        if(!image.load(blob)) return Texture::null;

        // create texture
        return device.createTexture(image, Texture::FlagMipmaps);
    }

In the case of the USDZ format, embedded images are stored inside the MeshMaterial. To process them, we follow a simple set of steps. First, we find a parameter by its type. Then, we check whether there is a Blob associated with the parameter. If there is, we load the Image from the Blob. Finally, we create a Texture with Mipmap generation.

To iterate over all the materials of our mesh, we need two loops, because there can be multiple Geometries and multiple Materials per Geometry:

    Array<Texture> normal_textures;
    Array<Texture> diffuse_textures;
    Array<Texture> roughness_textures;
    for(const MeshGeometry &geometry : mesh.getGeometries()) {
        for(const MeshMaterial &material : geometry.getMaterials()) {
            normal_textures.append(create_texture(device, material, MeshMaterial::TypeNormal));
            diffuse_textures.append(create_texture(device, material, MeshMaterial::TypeDiffuse));
            roughness_textures.append(create_texture(device, material, MeshMaterial::TypeRoughness));
        }
    }

And finally, here is the Mesh rendering code with animation and joint transformations:

    // common parameters
    struct CommonParameters {
        Matrix4x4f projection;
        Matrix4x4f modelview;
        Vector4f camera;
    };

    // set pipeline
    command.setPipeline(pipeline);

    // set sampler
    command.setSampler(0, sampler);

    // set model buffers
    model.setBuffers(command);

    // set common parameters
    CommonParameters common_parameters;
    common_parameters.camera = Vector4f(0.0f, -180.0f, 180.0f, 0.0f);
    common_parameters.projection = Matrix4x4f::perspective(60.0f, (float32_t)window.getWidth() / window.getHeight(), 0.1f, 1000.0f);
    common_parameters.modelview = Matrix4x4f::lookAt(common_parameters.camera.xyz, Vector3f(0.0f, 0.0f, 80.0f), Vector3f(0.0f, 0.0f, 1.0f));
    if(target.isFlipped()) common_parameters.projection = Matrix4x4f::scale(1.0f, -1.0f, 1.0f) * common_parameters.projection;
    command.setUniform(0, common_parameters);

    // mesh animation
    float64_t time = Time::seconds();
    MeshAnimation animation = mesh.getAnimation(0);
    animation.setTime(time, Matrix4x3d::rotateZ(Tellusim::sin(time) * 30.0));

    // draw geometries
    uint32_t texture_index = 0;
    Vector4f joint_parameters[192];
    for(const MeshGeometry &geometry : mesh.getGeometries()) {

        // joint transforms
        for(uint32_t i = 0, j = 0; i < geometry.getNumJoints(); i++, j += 3) {
            const MeshJoint &joint = geometry.getJoint(i);
            Matrix4x3f transform = Matrix4x3f(animation.getGlobalTransform(joint)) * joint.getITransform() * geometry.getTransform();
            joint_parameters[j + 0] = transform.row_0;
            joint_parameters[j + 1] = transform.row_1;
            joint_parameters[j + 2] = transform.row_2;
        }
        command.setUniform(1, joint_parameters);

        // draw materials
        for(const MeshMaterial &material : geometry.getMaterials()) {
            command.setTexture(0, normal_textures[texture_index]);
            command.setTexture(1, diffuse_textures[texture_index]);
            command.setTexture(2, roughness_textures[texture_index]);
            model.draw(command, geometry.getIndex(), material.getIndex());
            texture_index++;
        }
    }
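
All of these per-frame commands are recorded inside the window main loop. Here is a condensed sketch of that loop, following the usual SDK sample pattern; the window, device, and target objects are assumed to be created as in the single-triangle example:

    // main loop: record and submit the commands above every frame
    window.run([&]() -> bool {
        Window::update();
        if(!window.render()) return false;

        // window target
        target.begin();
        {
            // create a command list and record the rendering code from this tutorial
            Command command = device.createCommand(target);
            // ... setPipeline(), setUniform(), setTexture(), and draw calls go here ...
        }
        target.end();

        // present the frame and check the device status
        if(!window.present()) return false;
        if(!device.check()) return false;
        return true;
    });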

Uniform parameters are automatically uploaded to the GPU from CPU structures. On some APIs, the render Target can be flipped vertically, so we must flip the projection matrix in that case. After that, we render all Geometries and Materials of our Mesh. And here is the result (the image is clickable and opens a WebGL build).


Veribot model by Katarzyna Kowalczuk
