Pipeline Planning

For this collection we have 3 different trait types. We can combine our separate meshes into a unified mesh for each trait type, and apply the appropriate material. We end up having 1 mesh object for each trait in our outliner.

In order to export unique avatars we will want to only enable the meshes that make up a specific token. We can create a python script in Blender that reads in the JSON we created earlier and enable and disable visibility to export specific token glbs.

The easiest way to do this would be to enforce a naming convention when creating meshes in the outliner something like:

{trait_type}_{trait_name}

i.e.

head_smiley_face

So the general steps would be:

  • Read in collection metadata

  • Loop through each token

    • hide all meshes

    • enable meshes from token metadata

    • export GLB from Blender.

    • run the exported GLB through the Avatar Tools CLI

Installing Avatar Tools CLI

  • open up a terminal and change the directory to the folder you just extracted i.e. cd /path/to/avatar-tools-main

  • add node_bin to PATH

    • Windows set PATH=..\node_bin;%PATH%

    • Mac/Linux export PATH=../node_bin:$PATH

  • run npm install

  • run npm run build --workspace packages --workspace clis --workspace tools

Below is a very basic script that can be run within Blender to export all avatars for a collection. There is plenty of room for improvement to fit specific needs and workflows but this should give a foundational look at how to create a simple process for generating all unique avatars.

import os
import sys
import subprocess
import json
from pathlib import Path
import bpy

# set these folders/files relative to this blender file
WORKING_DIRECTORY = Path(bpy.data.filepath).parent  # folder containing the currently-open .blend file
# collection metadata JSON; presumably a list of token records with "id" and "attributes" — confirm against generator
COLLECTION_METADATA = Path(WORKING_DIRECTORY, "boxie_metadata.json").as_posix()
# destination folder for the exported per-token .glb files
OUTPUT_DIRECTORY = Path(WORKING_DIRECTORY, "glb_export").as_posix()

def get_node_binary_dir():
    """Return the POSIX-style path to the bundled Node.js binaries."""
    return (WORKING_DIRECTORY / "node_bin").as_posix()
def get_avatar_tool_dir():
    """Return the POSIX-style path to the extracted Avatar Tools checkout."""
    return (WORKING_DIRECTORY / "avatar-tools-main").as_posix()
def tokenize_trait_names(token_data):
    """Build mesh names for a token following the {trait_type}_{trait_name}
    naming convention (spaces in the trait value become underscores).

    ``token_data`` is one token's metadata dict; missing/empty "attributes"
    yields an empty list.
    """
    return [
        f"{attribute['trait_type']}_{attribute['value'].replace(' ', '_')}"
        for attribute in token_data.get("attributes", [])
    ]
        

def solo_meshes_by_names(mesh_names):
    """Show only the mesh objects whose names appear in ``mesh_names``;
    every other mesh in the file is hidden in the viewport."""
    wanted = set(mesh_names)
    for scene_object in bpy.data.objects:
        if scene_object.type == "MESH":
            scene_object.hide_viewport = scene_object.name not in wanted

def export_glb(output_path):
    """Export the currently-visible meshes to ``output_path`` as a GLB.

    Exports to a temporary sibling file first, runs it through the Avatar
    Tools conform step (``conform_glb``), then deletes the temporary file.

    Raises whatever ``conform_glb`` raises on a failed conversion; the
    temporary file is cleaned up either way.
    """
    output_name = Path(output_path).stem
    output_dir = Path(output_path).parent
    temp_output_path = Path(output_dir, output_name + "_tmp").with_suffix(".glb").as_posix()

    # use_visible=True restricts the export to the meshes left visible by
    # solo_meshes_by_names()
    bpy.ops.export_scene.gltf(
        filepath=temp_output_path,
        export_format="GLB",
        export_skins=True,
        export_current_frame=True,
        export_animations=False,
        export_normals=True,
        export_tangents=True,
        export_image_format="AUTO",
        export_jpeg_quality=100,
        use_visible=True,
        # export_draco_mesh_compression_enable=True,
    )
    try:
        # overwrite any previous export for this token
        if Path(output_path).exists():
            Path(output_path).unlink()
        conform_glb(temp_output_path, output_path)
    finally:
        # remove the temp file even when conform_glb() raises, so failed
        # runs do not leave *_tmp.glb files behind (original leaked them)
        if Path(temp_output_path).exists():
            Path(temp_output_path).unlink()
    
def conform_glb(input_path, output_path, skip_transparent_check=True):
    """Run the Avatar Tools CLI "convert" step on ``input_path``, writing
    the conformed GLB to ``output_path``.

    ``skip_transparent_check`` passes --skip-remove-transparency-from-materials
    through to the CLI.

    Raises Exception (including the tool's output) when npm exits non-zero.
    """
    # make the bundled node binaries resolvable by npm's child processes
    node_bin = get_node_binary_dir()
    env = os.environ.copy()
    env["PATH"] = f"{node_bin}{os.pathsep}{env['PATH']}"

    # npm is a .cmd shim on Windows
    npm = "npm.cmd" if sys.platform == "win32" else "npm"
    args = [npm, "run", "convert", "--", "-i", input_path, "-o", output_path]
    if skip_transparent_check:
        args.append("--skip-remove-transparency-from-materials")

    # capture output so a failure message can be surfaced; the original
    # read results.stderr/stdout without capturing, so both were always None
    results = subprocess.run(
        args, cwd=get_avatar_tool_dir(), env=env, capture_output=True, text=True
    )
    print(results)
    if results.returncode:
        output = results.stderr or results.stdout
        raise Exception(f"Error Conforming Mesh: {output}")
    

def export_from_token_data(token_data, output_path):
    """Hide everything except this token's trait meshes, then export the
    result to ``output_path`` as a conformed GLB."""
    visible_meshes = tokenize_trait_names(token_data)
    solo_meshes_by_names(visible_meshes)
    export_glb(output_path)

def show_all_meshes():
    """Restore viewport visibility for every mesh object in the file."""
    for scene_object in bpy.data.objects:
        if scene_object.type == "MESH":
            scene_object.hide_viewport = False

def export_collection(data_path, output_directory):
    """Export one GLB per token described in the collection metadata JSON.

    ``data_path`` is the collection metadata file (a JSON list of token
    records with at least an "id" and "attributes"); ``output_directory``
    receives one ``<id>.glb`` per token. All meshes are re-shown when the
    run finishes, even if an export fails partway through.

    Raises Exception when ``data_path`` is not an existing file.
    """
    # load collection data from JSON; is_file() already implies exists()
    if not Path(data_path).is_file():
        raise Exception("Invalid Collection Data")

    with open(data_path, "r", encoding="utf-8") as f:
        collection_data = json.load(f)

    # create directory if it does not exist
    output_dir_path = Path(output_directory)
    output_dir_path.mkdir(exist_ok=True, parents=True)

    try:
        for token_data in collection_data:
            # build glb export path: <output_directory>/<token id>.glb
            output_path = Path(output_dir_path, str(token_data["id"]) + ".glb")
            output_path_str = output_path.as_posix()

            print("Exporting", token_data.get("name", token_data.get("id")), "to", output_path_str)
            export_from_token_data(token_data, output_path_str)
    finally:
        # leave the scene fully visible even when an export raises
        # (the original skipped this on failure)
        show_all_meshes()

# entry point when run from Blender's Text Editor / --python flag
if __name__ == "__main__":
    export_collection(COLLECTION_METADATA, OUTPUT_DIRECTORY) 
    
    

Last updated