Skip to content

asset_conversion_utils

convert_scene_urdf_to_json(urdf, json_path)

Converts a scene from a URDF file to a JSON file.

This function loads the scene described by the URDF file into the OmniGibson simulator, plays the simulation, and saves the scene to a JSON file. After saving, it removes the "init_info" from the JSON file and saves it again.

Parameters:

Name Type Description Default
urdf str

The file path to the URDF file describing the scene.

required
json_path str

The file path where the JSON file will be saved.

required
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def convert_scene_urdf_to_json(urdf, json_path):
    """
    Converts a scene described by a URDF file into an OmniGibson scene JSON file.

    Loads the URDF-defined scene into the simulator, starts playing (a save only
    captures valid state while playing), and dumps the scene to @json_path. The
    saved JSON is then re-read, stripped of its "init_info" entry, and written
    back out in place.

    Args:
        urdf (str): The file path to the URDF file describing the scene.
        json_path (str): The file path where the JSON file will be saved.
    """
    # Populate the simulator with the objects defined in the URDF
    _load_scene_from_urdf(urdf=urdf)

    # The simulator must be playing before a save can capture the scene state
    og.sim.play()

    # Make sure the destination directory exists, then save
    Path(os.path.dirname(json_path)).mkdir(parents=True, exist_ok=True)
    og.sim.save(json_paths=[json_path])

    # Re-read the saved JSON, drop the unneeded init_info entry, and rewrite it
    scene_info = json.loads(Path(json_path).read_text())
    del scene_info["init_info"]

    with open(json_path, "w+") as f:
        json.dump(scene_info, f, cls=_TorchEncoder, indent=4)

copy_urdf_to_dataset(urdf_path, category, mdl, urdf_dep_paths=None, dataset_root=gm.CUSTOM_DATASET_PATH, suffix='original', overwrite=False)

Copies a URDF file and its dependencies to a structured dataset directory.

Parameters:

Name Type Description Default
urdf_path str

Path to the source URDF file.

required
category str

Category name for organizing the model in the dataset.

required
mdl str

Model identifier/name.

required
urdf_dep_paths list

List of relative paths to URDF dependencies. If None, dependencies will be automatically detected. Defaults to None.

None
dataset_root str

Root directory of the dataset. Defaults to gm.CUSTOM_DATASET_PATH.

CUSTOM_DATASET_PATH
suffix str

Suffix to append to the model name in the new URDF. Defaults to "original".

'original'
overwrite bool

Whether to overwrite existing directories. If False, raises an assertion error if target directory exists. Defaults to False.

False

Returns:

Type Description
str

Path to the newly created URDF file in the dataset.

Raises:

Type Description
AssertionError

If the target directory already exists and overwrite is False.

Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def copy_urdf_to_dataset(
    urdf_path,
    category,
    mdl,
    urdf_dep_paths=None,
    dataset_root=gm.CUSTOM_DATASET_PATH,
    suffix="original",
    overwrite=False,
):
    """
    Copies a URDF file and its dependency directories into a structured dataset directory.

    Args:
        urdf_path (str): Path to the source URDF file.
        category (str): Category name for organizing the model in the dataset.
        mdl (str): Model identifier/name.
        urdf_dep_paths (None or list of str): Relative (to the URDF) paths of dependency
            directories to copy. If None, only directories referenced by mesh filenames
            in the URDF are detected and copied. Defaults to None.
        dataset_root (str): Root directory of the dataset. Defaults to gm.CUSTOM_DATASET_PATH.
        suffix (str): Suffix appended to the model name in the new URDF. Defaults to "original".
        overwrite (bool): Whether existing directories may be reused/overwritten. If False,
            an assertion error is raised if the target directory exists. Defaults to False.

    Returns:
        str: Path to the newly created URDF file in the dataset.

    Raises:
        AssertionError: If the target directory already exists and @overwrite is False.
    """
    # Lay out the target directory for this object
    obj_dir = pathlib.Path(dataset_root) / "objects" / category / mdl / "urdf"
    if not overwrite:
        assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!"
    obj_dir.mkdir(parents=True, exist_ok=True)

    # Directory holding the original URDF and its assets
    src_dir = pathlib.Path(os.path.dirname(urdf_path))

    # Parse the source URDF so we can discover referenced mesh directories
    xml_root = ET.parse(urdf_path).getroot()

    # Collect the top-level directories (relative to the URDF) that hold mesh assets,
    # seeded with any explicitly requested dependency paths
    dep_dirs = set(urdf_dep_paths) if urdf_dep_paths is not None else set()
    for geom_kind in ("visual", "collision"):
        for mesh in xml_root.findall(f"link/{geom_kind}/geometry/mesh"):
            dep_dirs.add(mesh.attrib["filename"].split("/")[0])

    # Mirror each dependency directory into the new object directory
    for dep in dep_dirs:
        shutil.copytree(src_dir / dep, obj_dir / dep, dirs_exist_ok=overwrite)

    # Write the URDF into its new home and hand back its path
    return _save_xmltree_as_urdf(
        root_element=xml_root,
        name=f"{mdl}_{suffix}",
        dirpath=obj_dir,
        unique_urdf=False,
    )

find_all_prim_children_with_type(prim_type, root_prim)

Recursively searches children of @root_prim to find all instances of prim that satisfy type @prim_type

Parameters:

Name Type Description Default
prim_type str

Type of the prim to search

required
root_prim Prim

Root prim to search

required

Returns:

Type Description
list of Usd.Prim

All found prims whose prim type includes @prim_type

Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def find_all_prim_children_with_type(prim_type, root_prim):
    """
    Recursively searches children of @root_prim to find all instances of prim that satisfy type @prim_type

    Args:
        prim_type (str): Type of the prim to search
        root_prim (Usd.Prim): Root prim to search

    Returns:
        list of Usd.Prim: All found prims whose prim type includes @prim_type
    """
    # Iterative depth-first pre-order walk (same visit order as the recursive form);
    # children are pushed reversed so the leftmost child is popped first
    matches = []
    stack = list(reversed(root_prim.GetChildren()))
    while stack:
        prim = stack.pop()
        if prim_type in prim.GetTypeName():
            matches.append(prim)
        stack.extend(reversed(prim.GetChildren()))

    return matches

generate_collision_meshes(trimesh_mesh, method='coacd', hull_count=32, discard_not_volume=True, error_handling=False)

Generates a set of collision meshes from a trimesh mesh using CoACD.

Parameters:

Name Type Description Default
trimesh_mesh Trimesh

The trimesh mesh to generate the collision mesh from.

required
method str

Method to generate collision meshes. Valid options are {"coacd", "convex"}

'coacd'
hull_count int

If @method="coacd", this sets the max number of hulls to generate

32
discard_not_volume bool

If @method="coacd" and set to True, this discards any generated hulls that are not proper volumes

True
error_handling

If True, runs CoACD in a subprocess via coacd_runner.py and falls back to the convex hull if CoACD crashes with an assertion fault.

False

Returns:

Type Description
List[Trimesh]

The collision meshes.

Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def _coacd_hulls_from_result(result, discard_not_volume):
    """
    Converts raw CoACD output into trimesh hulls.

    Args:
        result: Iterable of (vertices, faces) pairs as returned by CoACD
        discard_not_volume (bool): If True, drops any generated hull that is not a proper volume

    Returns:
        2-tuple:
            - list of trimesh.Trimesh: Retained hulls
            - float: Summed volume of the retained hulls' convex hulls
    """
    hulls = []
    total_vol = 0.0
    for vs, fs in result:
        hull = trimesh.Trimesh(vertices=vs, faces=fs, process=False)
        if discard_not_volume and not hull.is_volume:
            continue
        hulls.append(hull)
        total_vol += hull.convex_hull.volume
    return hulls, total_vol


def generate_collision_meshes(
    trimesh_mesh, method="coacd", hull_count=32, discard_not_volume=True, error_handling=False
):
    """
    Generates a set of collision meshes from a trimesh mesh using CoACD.

    Args:
        trimesh_mesh (trimesh.Trimesh): The trimesh mesh to generate the collision mesh from.
        method (str): Method to generate collision meshes. Valid options are {"coacd", "convex"}
        hull_count (int): If @method="coacd", this sets the max number of hulls to generate
        discard_not_volume (bool): If @method="coacd" and set to True, this discards any generated hulls
            that are not proper volumes
        error_handling (bool): If True, runs CoACD in a subprocess (coacd_runner.py) and falls back
            to the convex hull if CoACD crashes (e.g. with an assertion fault)

    Returns:
        List[trimesh.Trimesh]: The collision meshes.
    """
    # If the mesh is convex or the mesh is a proper volume and similar to its convex hull, simply return that directly
    if trimesh_mesh.is_convex or (
        trimesh_mesh.is_volume and (trimesh_mesh.volume / trimesh_mesh.convex_hull.volume) > 0.90
    ):
        hulls = [trimesh_mesh.convex_hull]

    elif method == "coacd":
        if error_handling:
            # Run CoACD in an isolated subprocess so a CoACD assertion fault cannot
            # take down this process; fall back to the convex hull on any failure
            import subprocess
            import sys
            import tempfile
            import pickle
            import os

            # Serialize the inputs for the subprocess
            with tempfile.NamedTemporaryFile(suffix=".pkl", delete=False) as f:
                data_path = f.name
                pickle.dump((trimesh_mesh.vertices, trimesh_mesh.faces, hull_count), f)

            # NOTE(review): mktemp only reserves a name (racy by design); the subprocess
            # creates the file on success, which is what the exists() check below relies on
            result_path = tempfile.mktemp(suffix=".pkl")

            # Run subprocess with clean file paths
            success = (
                subprocess.call(
                    [sys.executable, os.path.join(os.path.dirname(__file__), "coacd_runner.py"), data_path, result_path]
                )
                == 0
            )

            # Process results or fallback
            if success and os.path.exists(result_path):
                with open(result_path, "rb") as f:
                    result = pickle.load(f)

                hulls, coacd_vol = _coacd_hulls_from_result(result, discard_not_volume)

                # Check if we found any valid hulls
                if len(hulls) == 0:
                    print("No valid collision meshes generated, falling back to convex hull")
                    hulls = [trimesh_mesh.convex_hull]
                else:
                    # If CoACD's total volume is within ~5% of the convex hull's,
                    # the decomposition adds nothing -- keep the single hull
                    vol_ratio = coacd_vol / trimesh_mesh.convex_hull.volume
                    if 0.95 < vol_ratio < 1.05:
                        print("MINIMAL CHANGE -- USING CONVEX HULL INSTEAD")
                        hulls = [trimesh_mesh.convex_hull]
            else:
                print("CoACD processing failed, falling back to convex hull")
                hulls = [trimesh_mesh.convex_hull]

            # Clean up temp files
            for path in [data_path, result_path]:
                if os.path.exists(path):
                    os.remove(path)
        else:
            try:
                import coacd
            except ImportError:
                raise ImportError("Please install the `coacd` package to use this function.")

            # Get the vertices and faces
            coacd_mesh = coacd.Mesh(trimesh_mesh.vertices, trimesh_mesh.faces)

            # Run CoACD with the hull count
            result = coacd.run_coacd(
                coacd_mesh,
                max_convex_hull=hull_count,
                max_ch_vertex=60,
            )

            # Convert the returned vertices and faces to trimesh meshes,
            # discarding non-volumes if required
            hulls, coacd_vol = _coacd_hulls_from_result(result, discard_not_volume)

            # Assert that we got _some_ collision meshes
            assert len(hulls) > 0, "No collision meshes generated!"

            # Compare coacd's generation compared to the original mesh's convex hull
            # If the difference is small (<10% volume difference), simply keep the convex hull
            vol_ratio = coacd_vol / trimesh_mesh.convex_hull.volume
            if 0.95 < vol_ratio < 1.05:
                print("MINIMAL CHANGE -- USING CONVEX HULL INSTEAD")
                hulls = [trimesh_mesh.convex_hull]

    elif method == "convex":
        hulls = [trimesh_mesh.convex_hull]

    else:
        raise ValueError(f"Invalid collision mesh generation method specified: {method}")

    # Sanity check all convex hulls
    # For whatever reason, some convex hulls are not true volumes, so we take the convex hull again
    # See https://github.com/mikedh/trimesh/issues/535
    hulls = [hull.convex_hull if not hull.is_volume else hull for hull in hulls]

    # For each hull, simplify so that the complexity is guaranteed to be Omniverse-GPU compatible
    # See https://docs.omniverse.nvidia.com/extensions/latest/ext_physics/rigid-bodies.html#collision-settings
    simplified_hulls = [simplify_convex_hull(hull) for hull in hulls]

    return simplified_hulls

generate_urdf_for_mesh(asset_path, obj_dir, category, mdl, collision_method=None, hull_count=32, up_axis='z', scale=1.0, check_scale=False, rescale=False, dataset_root=None, overwrite=False, n_submesh=10)

Generate URDF file for either single mesh or articulated files. Each submesh in articulated files (glb, gltf) will be extracted as a separate link.

Parameters:

Name Type Description Default
asset_path

Path to the input mesh file (.obj, .glb, .gltf)

required
obj_dir

Output directory

required
category

Category name for the object

required
mdl

Model name

required
collision_method

Method for generating collision meshes ("convex", "coacd", or None)

None
hull_count

Maximum number of convex hulls for COACD method

32
up_axis

Up axis for the model ("y" or "z")

'z'
scale

User choice scale, will be overwritten if check_scale and rescale

1.0
check_scale

Whether to check mesh size based on heuristic

False
rescale

Whether to rescale mesh if check_scale

False
dataset_root

Root directory for the dataset

None
overwrite

Whether to overwrite existing files

False
n_submesh

If submesh number is more than n_submesh, will not convert and skip

10
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
(Source viewer line-number gutter removed: this function spans lines 2130–2570 of asset_conversion_utils.py.)
def generate_urdf_for_mesh(
    asset_path,
    obj_dir,
    category,
    mdl,
    collision_method=None,
    hull_count=32,
    up_axis="z",
    scale=1.0,
    check_scale=False,
    rescale=False,
    dataset_root=None,
    overwrite=False,
    n_submesh=10,
):
    """
    Generate URDF file for either single mesh or articulated files.
    Each submesh in articulated files (glb, gltf) will be extracted as a separate link.

    Args:
        asset_path: Path to the input mesh file (.obj, .glb, .gltf)
        obj_dir: Output directory
        category: Category name for the object
        mdl: Model name
        collision_method: Method for generating collision meshes ("convex", "coacd", or None)
        hull_count: Maximum number of convex hulls for COACD method
        up_axis: Up axis for the model ("y" or "z")
        scale: User choice scale, will be overwritten if check_scale and rescale
        check_scale: Whether to check mesh size based on heuristic
        rescale: Whether to rescale mesh if check_scale
        dataset_root: Root directory for the dataset
        overwrite: Whether to overwrite existing files
        n_submesh: If submesh number is more than n_submesh, will not convert and skip
    """

    # Validate file format
    valid_formats = trimesh.available_formats()
    mesh_format = pathlib.Path(asset_path).suffix[1:]  # Remove the dot
    assert mesh_format in valid_formats, f"Invalid mesh format: {mesh_format}. Valid formats: {valid_formats}"
    assert mesh_format in [
        "obj",
        "glb",
        "gltf",
    ], "Not obj, glb, gltf file, can only deal with these file types"

    # Convert obj_dir to Path object
    if isinstance(obj_dir, str):
        obj_dir = pathlib.Path(obj_dir)

    # Create directory structure
    if not overwrite:
        assert not obj_dir.exists(), f"Object directory {obj_dir} already exists!"
    obj_dir.mkdir(parents=True, exist_ok=True)

    obj_name = "_".join([category, mdl])

    # Dictionary to store links with their visual and collision meshes
    links = {}

    # Load and process based on file type
    if mesh_format == "obj":
        # Handle single mesh files with original loading method
        visual_mesh = trimesh.load(asset_path, force="mesh", process=False)
        if isinstance(visual_mesh, list):
            visual_mesh = visual_mesh[0]  # Take first mesh if multiple

        # Generate collision meshes if requested
        collision_meshes = []
        if collision_method is not None:
            collision_meshes = generate_collision_meshes(
                visual_mesh, method=collision_method, hull_count=hull_count, error_handling=True
            )

        # Add to links dictionary as a single link named "base_link"
        links["base_link"] = {"visual_mesh": visual_mesh, "collision_meshes": collision_meshes, "transform": th.eye(4)}

    elif mesh_format in ["glb", "gltf"]:
        # Handle articulated files
        scene = trimesh.load(asset_path)
        # Count geometries (submeshes)
        submesh_count = len(scene.geometry)
        if submesh_count > n_submesh:
            print(f"❌ Submesh count: {submesh_count} > {n_submesh}, skipping")
            return None

        # Get transforms from graph and extract each geometry as a separate link
        link_index = 0
        for node_name in scene.graph.nodes_geometry:
            geometry_name = scene.graph[node_name][1]
            if not isinstance(geometry_name, str):
                print(f"Warning: Skipping node {node_name} with non-string geometry name: {geometry_name}")
                continue

            # Get the geometry and transform
            geometry = scene.geometry[geometry_name]

            transform, _ = scene.graph.get(frame_to=node_name, frame_from=scene.graph.base_frame)
            transform_tensor = th.from_numpy(transform.copy()).float()

            # Process the geometry based on its type
            if isinstance(geometry, trimesh.Trimesh):
                # Create a link name based on the node name or index
                link_name = f"link_{link_index}"
                if node_name and isinstance(node_name, str):
                    # Clean up node name to make it a valid link name
                    link_name = "link_" + "".join(c if c.isalnum() or c == "_" else "_" for c in node_name)

                # Create a copy of the geometry
                visual_mesh = geometry.copy()

                # Generate collision meshes if requested
                collision_meshes = []
                if collision_method is not None:
                    # Create collision meshes based on the original geometry
                    # (not transformed yet - we'll handle transforms at the URDF level)
                    collision_meshes = generate_collision_meshes(
                        geometry,
                        method=collision_method,
                        hull_count=hull_count,
                        discard_not_volume=True,
                        error_handling=True,
                    )

                # Add to links dictionary with original transform
                links[link_name] = {
                    "visual_mesh": visual_mesh,
                    "collision_meshes": collision_meshes,
                    "transform": transform_tensor,
                    "node_name": node_name,
                }
                link_index += 1

            elif isinstance(geometry, (list, tuple)):
                # Handle cases where geometry is a list of meshes
                for i, submesh in enumerate(geometry):
                    if isinstance(submesh, trimesh.Trimesh):
                        # Create a link name
                        link_name = f"link_{link_index}"
                        if node_name and isinstance(node_name, str):
                            link_name = f"link_{node_name}_{i}"

                        # Create a copy of the submesh
                        visual_mesh = submesh.copy()

                        # Generate collision meshes if requested
                        collision_meshes = []

                        if collision_method is not None:
                            # Create collision meshes based on the original geometry
                            collision_meshes = generate_collision_meshes(
                                submesh,
                                method=collision_method,
                                hull_count=hull_count,
                                discard_not_volume=True,
                                error_handling=True,
                            )

                        # Add to links dictionary with original transform
                        links[link_name] = {
                            "visual_mesh": visual_mesh,
                            "collision_meshes": collision_meshes,
                            "transform": transform_tensor,
                            "node_name": f"{node_name}_{i}",
                        }
                        link_index += 1

        if not links:
            print("Warning: No valid meshes found in the scene!")
            print("Scene contents:")
            print(f"Geometries: {scene.geometry}")
            print(f"Graph: {scene.graph}")
            raise ValueError("No valid meshes found in the input file")
    else:
        raise ValueError(f"Unsupported file format: {mesh_format}")

    # Handle rotation for up_axis if needed
    if up_axis == "y":
        rotation_matrix = trimesh.transformations.rotation_matrix(math.pi / 2, [1, 0, 0])
        rotation_tensor = th.from_numpy(rotation_matrix).float()

        for link_name, link_data in links.items():
            # Update the transform - we'll apply the actual transforms later
            link_data["transform"] = th.matmul(rotation_tensor, link_data["transform"])

    # Compute new scale if check_scale = True
    new_scale = 1.0

    if check_scale:
        if links:
            # Find the link with the biggest bounding box
            max_bbox_size = [0, 0, 0]
            max_bbox_link = None

            for link_name, link_data in links.items():
                # Apply the transform to get the correct size
                temp_mesh = link_data["visual_mesh"].copy()
                temp_mesh.apply_transform(link_data["transform"].numpy())
                bbox_size = temp_mesh.bounding_box.extents

                # Check if this link has a bigger dimension than the current max
                if any(s > max_s for s, max_s in zip(bbox_size, max_bbox_size)):
                    max_bbox_size = bbox_size
                    max_bbox_link = link_name

            click.echo(f"Largest visual mesh bounding box size: {max_bbox_size} (link: {max_bbox_link})")

            # Check if any dimension is too large (> 100)
            if any(size > 5.0 for size in max_bbox_size):
                if any(size > 50.0 for size in max_bbox_size):
                    if any(size > 500.0 for size in max_bbox_size):
                        new_scale = 0.001
                    else:
                        new_scale = 0.01
                else:
                    new_scale = 0.1

                click.echo(
                    "Warning: The bounding box sounds a bit large. "
                    "We just wanted to confirm this is intentional. You can skip this check by passing check_scale = False."
                )

            # Check if any dimension is too small (< 0.01)
            elif all(size < 0.005 for size in max_bbox_size):
                new_scale = 1000.0
                click.echo(
                    "Warning: The bounding box sounds a bit small. "
                    "We just wanted to confirm this is intentional. You can skip this check by passing check_scale = False."
                )

            else:
                click.echo("Size is reasonable, no scaling")

        else:
            click.echo("Warning: No links found in the file!")
            return None

    # Rescale mesh if rescale= True, else scale based on function input scale
    if rescale:
        click.echo(f"Original scale {scale} be overwrtten to {new_scale}")
        scale = new_scale

    if scale != 1.0:
        click.echo(f"Adjusting scale to {scale}")
        scale_transform = trimesh.transformations.scale_matrix(scale)
        scale_tensor = th.from_numpy(scale_transform).float()

        for link_name, link_data in links.items():
            # Update the transform - we'll apply the actual transforms later
            link_data["transform"] = th.matmul(scale_tensor, link_data["transform"])

    # Create temporary directory for processing
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir_path = pathlib.Path(temp_dir)

        # Create directory structure for the output
        obj_link_mesh_folder = obj_dir / "shape"
        obj_link_mesh_folder.mkdir(exist_ok=True)
        obj_link_visual_mesh_folder = obj_link_mesh_folder / "visual"
        obj_link_visual_mesh_folder.mkdir(exist_ok=True)
        obj_link_collision_mesh_folder = obj_link_mesh_folder / "collision"
        obj_link_collision_mesh_folder.mkdir(exist_ok=True)
        obj_link_material_folder = obj_dir / "material"
        obj_link_material_folder.mkdir(exist_ok=True)

        # Dictionary to store information for URDF generation
        urdf_links = {}

        # Process each link
        for link_name, link_data in links.items():
            visual_mesh = link_data["visual_mesh"].copy()  # Create a copy to avoid modifying original
            collision_meshes = [mesh.copy() for mesh in link_data["collision_meshes"]]  # Copy all collision meshes
            transform = link_data["transform"]

            # Apply transform to visual mesh before exporting
            visual_mesh.apply_transform(transform.numpy())

            # Export the transformed mesh
            visual_filename = f"{obj_name}_{link_name}.obj"
            visual_temp_path = temp_dir_path / visual_filename
            visual_mesh.export(visual_temp_path, file_type="obj")

            # Check for material files
            material_files = [x for x in temp_dir_path.iterdir() if x.suffix == ".mtl"]
            material_filename = None

            if material_files:
                # Process material file if exists
                material_file = material_files[0]
                material_filename = f"{obj_name}_{link_name}.mtl"

                # Process MTL file (similar to original code)
                with open(visual_temp_path, "r") as f:
                    new_lines = []
                    for line in f.readlines():
                        if f"mtllib {material_file.name}" in line:
                            line = f"mtllib {material_filename}\n"
                        new_lines.append(line)

                with open(visual_temp_path, "w") as f:
                    for line in new_lines:
                        f.write(line)

                # Process texture references in MTL file
                with open(material_file, "r") as f:
                    new_lines = []
                    for line in f.readlines():
                        if "map_" in line:
                            parts = line.split(" ", 1)
                            if len(parts) > 1:
                                map_kind, texture_filename = parts
                                texture_filename = texture_filename.strip()
                                map_kind = map_kind.strip().replace("map_", "")
                                new_filename = f"../../material/{obj_name}_{link_name}_{map_kind}.png"

                                # Copy texture file
                                texture_from_path = temp_dir_path / texture_filename
                                if texture_from_path.exists():
                                    texture_to_path = (
                                        obj_link_material_folder / f"{obj_name}_{link_name}_{map_kind}.png"
                                    )
                                    if not overwrite and texture_to_path.exists():
                                        print(f"Warning: Texture file {texture_to_path} already exists!")
                                    else:
                                        shutil.copy2(texture_from_path, texture_to_path)

                                # Update line
                                line = f"{parts[0]} {new_filename}\n"
                        new_lines.append(line)

                # Write updated MTL file
                with open(obj_link_visual_mesh_folder / material_filename, "w") as f:
                    for line in new_lines:
                        f.write(line)

            # Copy visual mesh to final location
            visual_final_path = obj_link_visual_mesh_folder / visual_filename
            shutil.copy2(visual_temp_path, visual_final_path)

            # Process collision meshes
            collision_info = []
            for i, collision_mesh in enumerate(collision_meshes):
                # Apply transform to collision mesh before exporting
                collision_mesh.apply_transform(transform.numpy())

                # Export collision mesh filename
                collision_filename = visual_filename.replace(".obj", f"_collision_{i}.obj")

                # Scale collision mesh to unit bbox if needed
                bounding_box = collision_mesh.bounding_box.extents
                if all(x > 0 for x in bounding_box):
                    collision_scale = 1.0 / bounding_box
                    collision_scale_matrix = th.eye(4)
                    collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale))

                    # Create a copy to avoid modifying the original
                    scaled_collision_mesh = collision_mesh.copy()
                    scaled_collision_mesh.apply_transform(collision_scale_matrix.numpy())

                    # Export collision mesh
                    collision_path = obj_link_collision_mesh_folder / collision_filename
                    scaled_collision_mesh.export(collision_path, file_type="obj")

                    # Since we've already applied the transform, scale includes only the sizing adjustment
                    collision_info.append({"filename": collision_filename, "scale": 1.0 / collision_scale})
                else:
                    print(f"Warning: Skipping collision mesh with invalid bounding box: {bounding_box}")

            # Store information for URDF generation - now without transform since it's been applied
            urdf_links[link_name] = {
                "visual_filename": visual_filename,
                "collision_info": collision_info,
                "transform": th.eye(4),  # Identity transform since we've already applied it to the meshes
            }

    if mesh_format == "obj":
        # Change the link name from "base_link" to "obj_link"
        if "base_link" in urdf_links:
            urdf_links["obj_link"] = urdf_links.pop("base_link")

    # Generate URDF XML
    tree_root = ET.Element("robot")
    tree_root.attrib = {"name": mdl}

    # Create a base_link as the root
    base_link = ET.SubElement(tree_root, "link")
    base_link.attrib = {"name": "base_link"}

    # Add all other links and joints to connect them to the base_link
    for link_name, link_info in urdf_links.items():
        # Create link element
        link_xml = ET.SubElement(tree_root, "link")
        link_xml.attrib = {"name": link_name}

        # Add visual geometry
        visual_xml = ET.SubElement(link_xml, "visual")
        visual_origin_xml = ET.SubElement(visual_xml, "origin")
        visual_origin_xml.attrib = {"xyz": "0 0 0", "rpy": "0 0 0"}  # Zero transform since already applied
        visual_geometry_xml = ET.SubElement(visual_xml, "geometry")
        visual_mesh_xml = ET.SubElement(visual_geometry_xml, "mesh")
        visual_mesh_xml.attrib = {
            "filename": os.path.join("shape", "visual", link_info["visual_filename"]).replace("\\", "/"),
            "scale": "1 1 1",  # Using 1.0 scale since transform already applied
        }

        # Add collision geometries
        for i, collision in enumerate(link_info["collision_info"]):
            collision_xml = ET.SubElement(link_xml, "collision")
            collision_xml.attrib = {"name": f"{link_name}_collision_{i}"}
            collision_origin_xml = ET.SubElement(collision_xml, "origin")
            collision_origin_xml.attrib = {"xyz": "0 0 0", "rpy": "0 0 0"}  # Zero transform since already applied
            collision_geometry_xml = ET.SubElement(collision_xml, "geometry")
            collision_mesh_xml = ET.SubElement(collision_geometry_xml, "mesh")
            collision_mesh_xml.attrib = {
                "filename": os.path.join("shape", "collision", collision["filename"]).replace("\\", "/"),
                "scale": " ".join(str(item) for item in collision["scale"]),
            }

        # Create a joint to connect this link to the base_link
        joint_xml = ET.SubElement(tree_root, "joint")
        joint_xml.attrib = {"name": f"{link_name}_joint", "type": "fixed"}

        # Set parent and child links
        parent_xml = ET.SubElement(joint_xml, "parent")
        parent_xml.attrib = {"link": "base_link"}
        child_xml = ET.SubElement(joint_xml, "child")
        child_xml.attrib = {"link": link_name}

        # Set origin for the joint with zeros since transform was applied to meshes
        joint_origin_xml = ET.SubElement(joint_xml, "origin")
        joint_origin_xml.attrib = {"xyz": "0 0 0", "rpy": "0 0 0"}

    # Save URDF file
    xmlstr = minidom.parseString(ET.tostring(tree_root)).toprettyxml(indent="   ")
    xmlio = io.StringIO(xmlstr)
    tree = ET.parse(xmlio)

    urdf_path = obj_dir / f"{mdl}.urdf"
    with open(urdf_path, "wb") as f:
        tree.write(f, xml_declaration=True)

    return str(urdf_path)

get_collision_approximation_for_urdf(urdf_path, collision_method='coacd', hull_count=32, coacd_links=None, convex_links=None, no_decompose_links=None, visual_only_links=None, ignore_links=None)

Computes collision approximation for all collision meshes (which are assumed to be non-convex) in the given URDF.

NOTE: This is an in-place operation! It will overwrite @urdf_path

Parameters:

Name Type Description Default
urdf_path str

Absolute path to the URDF to decompose

required
collision_method str

Default collision method to use. Valid options are: {"coacd", "convex"}

'coacd'
hull_count int

Maximum number of convex hulls to decompose individual visual meshes into. Only relevant if @collision_method is "coacd"

32
coacd_links None or list of str

If specified, links that should use CoACD to decompose collision meshes

None
convex_links None or list of str

If specified, links that should use convex hull to decompose collision meshes

None
no_decompose_links None or list of str

If specified, links that should not have any special collision decomposition applied. This will only use the convex hull

None
visual_only_links None or list of str

If specified, link names corresponding to links that should have no collision associated with them (so any pre-existing collisions will be removed!)

None
ignore_links None or list of str

If specified, link names corresponding to links that should be skipped during collision generation process

None
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def get_collision_approximation_for_urdf(
    urdf_path,
    collision_method="coacd",
    hull_count=32,
    coacd_links=None,
    convex_links=None,
    no_decompose_links=None,
    visual_only_links=None,
    ignore_links=None,
):
    """
    Computes collision approximation for all collision meshes (which are assumed to be non-convex) in
    the given URDF, generating one or more convex collision meshes per visual mesh.

    NOTE: This is an in-place operation! It will overwrite @urdf_path. Generated collision OBJ files
    are written under "meshes/collision" relative to the URDF's directory.

    Args:
        urdf_path (str): Absolute path to the URDF to decompose
        collision_method (str): Default collision method to use. Valid options are: {"coacd", "convex"}
        hull_count (int): Maximum number of convex hulls to decompose individual visual meshes into.
            Only relevant if @collision_method is "coacd"
        coacd_links (None or list of str): If specified, links that should use CoACD to decompose collision meshes
        convex_links (None or list of str): If specified, links that should use convex hull to decompose collision meshes
        no_decompose_links (None or list of str): If specified, links that should not have any special collision
            decomposition applied; their pre-existing collision meshes are kept as-is
        visual_only_links (None or list of str): If specified, link names corresponding to links that should have
            no collision associated with them (so any pre-existing collisions will be removed!)
        ignore_links (None or list of str): If specified, link names corresponding to links that should be skipped
            during collision generation process
    """
    # Load URDF
    urdf_dir = os.path.dirname(urdf_path)
    tree = ET.parse(urdf_path)
    root = tree.getroot()

    # Next, iterate over each visual mesh and define collision meshes for them
    # Normalize all per-link override lists into sets for O(1) membership tests
    coacd_links = set() if coacd_links is None else set(coacd_links)
    convex_links = set() if convex_links is None else set(convex_links)
    no_decompose_links = set() if no_decompose_links is None else set(no_decompose_links)
    visual_only_links = set() if visual_only_links is None else set(visual_only_links)
    ignore_links = set() if ignore_links is None else set(ignore_links)
    # Relative path goes into the URDF's <mesh filename>; absolute folder receives the OBJ exports
    col_mesh_rel_folder = "meshes/collision"
    col_mesh_folder = pathlib.Path(urdf_dir) / col_mesh_rel_folder
    col_mesh_folder.mkdir(exist_ok=True, parents=True)
    for link in root.findall("link"):
        link_name = link.attrib["name"]
        old_cols = link.findall("collision")
        # Completely skip this link if this a link to explicitly skip or we have no collision tags
        if link_name in ignore_links or len(old_cols) == 0:
            continue

        print(f"Generating collision approximation for link {link_name}...")
        generated_new_col = False
        idx = 0  # Running counter so collision filenames stay unique across all visuals of this link
        if link_name not in visual_only_links:
            for vis in link.findall("visual"):
                # Get origin (re-used verbatim on the generated <collision> elements below)
                origin = vis.find("origin")
                # Check all geometries
                geoms = vis.findall("geometry/*")
                # We should only have a single geom, so assert here
                assert len(geoms) == 1
                # Check whether we actually need to generate a collision approximation
                # No need if the geom type is not a mesh (i.e.: it's a primitive -- so we assume if a collision is already
                # specified, it's that same primitive)
                geom = geoms[0]
                if geom.tag != "mesh":
                    continue
                # Mesh filenames in the URDF are relative to the URDF's own directory
                mesh_path = os.path.join(os.path.dirname(urdf_path), geom.attrib["filename"])
                tm = trimesh.load(mesh_path, force="mesh", process=False)

                # Per-link overrides take precedence over the global default @collision_method
                if link_name in coacd_links:
                    method = "coacd"
                elif link_name in convex_links:
                    method = "convex"
                elif link_name in no_decompose_links:
                    # Output will just be ignored, so skip
                    continue
                else:
                    method = collision_method
                collision_meshes = generate_collision_meshes(
                    trimesh_mesh=tm,
                    method=method,
                    hull_count=hull_count,
                )
                # Save and merge precomputed collision mesh
                collision_filenames_and_scales = []
                for i, collision_mesh in enumerate(collision_meshes):
                    processed_collision_mesh = collision_mesh.copy()
                    # Pre-populate the trimesh vertex-normal cache so normals get exported with the OBJ
                    processed_collision_mesh._cache.cache["vertex_normals"] = processed_collision_mesh.vertex_normals
                    collision_filename = f"{link_name}_col_{idx}.obj"

                    # OmniGibson requires unit-bbox collision meshes, so here we do that scaling
                    bounding_box = processed_collision_mesh.bounding_box.extents
                    assert all(
                        x > 0 for x in bounding_box
                    ), f"Bounding box extents are not all positive: {bounding_box}"
                    collision_scale = 1.0 / bounding_box
                    collision_scale_matrix = th.eye(4)
                    collision_scale_matrix[:3, :3] = th.diag(th.as_tensor(collision_scale))
                    processed_collision_mesh.apply_transform(collision_scale_matrix.numpy())
                    processed_collision_mesh.export(col_mesh_folder / collision_filename, file_type="obj")
                    # Record the inverse scale so the URDF <mesh scale> attribute restores the original size
                    collision_filenames_and_scales.append((collision_filename, 1 / collision_scale))

                    idx += 1

                # Append one <collision> element per generated mesh onto this link
                for collision_filename, collision_scale in collision_filenames_and_scales:
                    collision_xml = ET.SubElement(link, "collision")
                    collision_xml.attrib = {"name": collision_filename.replace(".obj", "")}
                    # Add origin info if defined
                    if origin is not None:
                        collision_xml.append(deepcopy(origin))
                    collision_geometry_xml = ET.SubElement(collision_xml, "geometry")
                    collision_mesh_xml = ET.SubElement(collision_geometry_xml, "mesh")
                    collision_mesh_xml.attrib = {
                        "filename": os.path.join(col_mesh_rel_folder, collision_filename),
                        "scale": " ".join([str(item) for item in collision_scale]),
                    }

                if link_name not in no_decompose_links:
                    generated_new_col = True

        # If we generated a new set of collision meshes, remove the old ones
        # (visual-only links always have their pre-existing collisions stripped)
        if generated_new_col or link_name in visual_only_links:
            for col in old_cols:
                link.remove(col)

    # Save the URDF file (overwrites the input in place)
    _save_xmltree_as_urdf(
        root_element=root,
        name=os.path.splitext(os.path.basename(urdf_path))[0],
        dirpath=os.path.dirname(urdf_path),
        unique_urdf=False,
    )

import_obj_metadata(usd_path, obj_category, obj_model, dataset_root, import_render_channels=False)

Imports metadata for a given object model from the dataset. This metadata consists of information that is NOT included in the URDF file and is instead included in the various JSON files shipped in iGibson and OmniGibson datasets.

Parameters:

Name Type Description Default
usd_path str

Path to USD file

required
obj_category str

The category of the object.

required
obj_model str

The model name of the object.

required
dataset_root str

The root directory of the dataset.

required
import_render_channels bool

Flag to import rendering channels. Defaults to False.

False

Raises:

Type Description
ValueError

If the bounding box size is not found in the metadata.

Returns:

Type Description
Usd.Prim

The root prim of the processed object on the current stage.

Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def import_obj_metadata(usd_path, obj_category, obj_model, dataset_root, import_render_channels=False):
    """
    Imports metadata for a given object model from the dataset. This metadata consists of information
    that is NOT included in the URDF file and instead included in the various JSON files shipped in
    iGibson and OmniGibson datasets.

    Args:
        usd_path (str): Path to USD file
        obj_category (str): The category of the object.
        obj_model (str): The model name of the object.
        dataset_root (str): The root directory of the dataset.
        import_render_channels (bool, optional): Flag to import rendering channels. Defaults to False.

    Raises:
        ValueError: If the bounding box size is not found in the metadata.

    Returns:
        Usd.Prim: Root prim of the object on the currently-opened stage
    """
    # Check if filepath exists
    model_root_path = f"{dataset_root}/objects/{obj_category}/{obj_model}"
    # Use lazy %-style formatting: passing extra positional args to log.debug without
    # format specifiers in the message triggers a logging formatting error
    log.debug("Loading %s for metadata import.", usd_path)

    # Load model
    lazy.isaacsim.core.utils.stage.open_stage(usd_path)
    stage = lazy.isaacsim.core.utils.stage.get_current_stage()
    prim = stage.GetDefaultPrim()

    # Gather whichever auxiliary JSON metadata files ship alongside this model
    data = dict()
    for data_group in {"metadata", "mvbb_meta", "material_groups", "heights_per_link"}:
        data_path = f"{model_root_path}/misc/{data_group}.json"
        if exists(data_path):
            # Load data
            with open(data_path, "r") as f:
                data[data_group] = json.load(f)

    # If metadata.json was missing entirely, fall back to an empty dict so the checks
    # below raise the documented ValueError rather than an opaque KeyError
    if "metadata" not in data:
        data["metadata"] = dict()

    # If certain metadata doesn't exist, populate with some core info
    if "base_link_offset" not in data["metadata"]:
        data["metadata"]["base_link_offset"] = [0, 0, 0]
    if "bbox_size" not in data["metadata"]:
        raise ValueError("We cannot work without a bbox size.")

    # Pop bb and base link offset and meta links info
    base_link_offset = data["metadata"].pop("base_link_offset")
    default_bb = data["metadata"].pop("bbox_size")

    # Manually modify material groups info: on disk it is stored as a 2-element list
    # [groups, links]; convert to a keyed dict
    if "material_groups" in data:
        data["material_groups"] = {
            "groups": data["material_groups"][0],
            "links": data["material_groups"][1],
        }

    # Manually modify metadata: convert [id, value] pairs into an id-keyed dict
    if "openable_joint_ids" in data["metadata"]:
        data["metadata"]["openable_joint_ids"] = {
            str(pair[0]): pair[1] for pair in data["metadata"]["openable_joint_ids"]
        }

    # Grab light info if any
    meta_links = data["metadata"].get("meta_links", dict())

    log.debug("Process meta links")

    # Convert primitive meta links
    for link_name, link_metadata in meta_links.items():
        for meta_link_type, meta_link_infos in link_metadata.items():
            _generate_meshes_for_primitive_meta_links(stage, obj_model, link_name, meta_link_type, meta_link_infos)

    # Get all meta links, set them to guide purpose, and add some metadata
    # Here we want to include every link that has the meta__ prefix.
    # This includes meta links that get added into the URDF in earlier
    # stages.
    meta_link_prims = [
        p for p in prim.GetChildren() if p.GetName().startswith("meta__") and p.GetName().endswith("_link")
    ]
    for meta_prim in meta_link_prims:
        # Get meta link information; names look like meta__<link>_<type>_<id>_<subid>_link
        unparsed_meta = meta_prim.GetName()[6:-5]  # remove meta__ and _link
        meta_parts = unparsed_meta.rsplit("_", 3)
        assert len(meta_parts) == 4, f"Invalid meta link name: {unparsed_meta}"
        link_name, meta_link_type, link_id, link_sub_id = meta_parts

        # Add the is_meta_link, meta_link_type, and meta_link_id attributes
        meta_prim.CreateAttribute("ig:isMetaLink", lazy.pxr.Sdf.ValueTypeNames.Bool)
        meta_prim.GetAttribute("ig:isMetaLink").Set(True)
        meta_prim.CreateAttribute("ig:metaLinkType", lazy.pxr.Sdf.ValueTypeNames.String)
        meta_prim.GetAttribute("ig:metaLinkType").Set(meta_link_type)
        meta_prim.CreateAttribute("ig:metaLinkId", lazy.pxr.Sdf.ValueTypeNames.String)
        meta_prim.GetAttribute("ig:metaLinkId").Set(link_id)
        meta_prim.CreateAttribute("ig:metaLinkSubId", lazy.pxr.Sdf.ValueTypeNames.Int)
        meta_prim.GetAttribute("ig:metaLinkSubId").Set(int(link_sub_id))

        # Set the purpose of the visual meshes to be guide (rendered in-editor but not in final renders)
        visual_prim = meta_prim.GetChild("visuals")
        if visual_prim.IsValid():
            # If it's an imageable, set the purpose to guide
            if visual_prim.GetTypeName() == "Mesh":
                purpose_attr = lazy.pxr.UsdGeom.Imageable(visual_prim).CreatePurposeAttr()
                purpose_attr.Set(lazy.pxr.UsdGeom.Tokens.guide)
            for visual_mesh in visual_prim.GetChildren():
                if visual_mesh.GetTypeName() == "Mesh":
                    purpose_attr = lazy.pxr.UsdGeom.Imageable(visual_mesh).CreatePurposeAttr()
                    purpose_attr.Set(lazy.pxr.UsdGeom.Tokens.guide)

    log.debug("Done processing meta links")

    # Iterate over dict and replace any lists of dicts as dicts of dicts (with each dict being indexed by an integer)
    data = _recursively_replace_list_of_dict(data)

    log.debug("Done recursively replacing")

    # Create attributes for bb, offset, category, model and store values
    prim.CreateAttribute("ig:nativeBB", lazy.pxr.Sdf.ValueTypeNames.Vector3f)
    prim.CreateAttribute("ig:offsetBaseLink", lazy.pxr.Sdf.ValueTypeNames.Vector3f)
    prim.CreateAttribute("ig:category", lazy.pxr.Sdf.ValueTypeNames.String)
    prim.CreateAttribute("ig:model", lazy.pxr.Sdf.ValueTypeNames.String)
    prim.GetAttribute("ig:nativeBB").Set(lazy.pxr.Gf.Vec3f(*default_bb))
    prim.GetAttribute("ig:offsetBaseLink").Set(lazy.pxr.Gf.Vec3f(*base_link_offset))
    prim.GetAttribute("ig:category").Set(obj_category)
    prim.GetAttribute("ig:model").Set(obj_model)

    log.debug(f"data: {data}")

    # Store remaining data as metadata
    prim.SetCustomData(data)

    # Add material channels
    if import_render_channels:
        _import_rendering_channels(
            obj_prim=prim,
            obj_category=obj_category,
            obj_model=obj_model,
            model_root_path=model_root_path,
            usd_path=usd_path,
            dataset_root=dataset_root,
        )
    # Process glass-tagged links; use .get() since older metadata files may lack link_tags
    for link, link_tags in data["metadata"].get("link_tags", dict()).items():
        if "glass" in link_tags:
            _process_glass_link(prim.GetChild(link))

    # Rename model to be named <model> if not already named that
    old_prim_path = prim.GetPrimPath().pathString
    if old_prim_path.split("/")[-1] != obj_model:
        new_prim_path = "/".join(old_prim_path.split("/")[:-1]) + f"/{obj_model}"
        lazy.omni.kit.commands.execute("MovePrim", path_from=old_prim_path, path_to=new_prim_path)
        prim = stage.GetDefaultPrim()

    # Hacky way to avoid new prim being created at /World
    class DummyScene:
        prim_path = ""

    og.sim.render()

    # Rewrite material asset paths relative to the USD's directory so the asset is relocatable
    mat_prims = find_all_prim_children_with_type(prim_type="Material", root_prim=prim)
    for i, mat_prim in enumerate(mat_prims):
        mat = MaterialPrim(mat_prim.GetPrimPath().pathString, f"mat{i}")
        mat.load(DummyScene)
        mat.shader_update_asset_paths_with_root_path(root_path=os.path.dirname(usd_path), relative=True)

    # Save stage
    stage.Save()

    # Return the root prim
    return prim

import_obj_urdf(urdf_path, obj_category, obj_model, dataset_root=gm.CUSTOM_DATASET_PATH, use_omni_convex_decomp=False, use_usda=False, merge_fixed_joints=False)

Imports an object from a URDF file into the current stage.

Parameters:

Name Type Description Default
urdf_path str

Path to URDF file to import

required
obj_category str

The category of the object.

required
obj_model str

The model name of the object.

required
dataset_root str

The root directory of the dataset.

CUSTOM_DATASET_PATH
use_omni_convex_decomp bool

Whether to use omniverse's built-in convex decomposer for collision meshes

False
use_usda bool

If set, will write files to .usda files instead of .usd (bigger memory footprint, but human-readable)

False
merge_fixed_joints bool

whether to merge fixed joints or not

False

Returns:

Type Description
2 - tuple
  • str: Absolute path to post-processed URDF file used to generate USD
  • str: Absolute path to the imported USD file
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def import_obj_urdf(
    urdf_path,
    obj_category,
    obj_model,
    dataset_root=gm.CUSTOM_DATASET_PATH,
    use_omni_convex_decomp=False,
    use_usda=False,
    merge_fixed_joints=False,
):
    """
    Imports an object described by a URDF file into the currently-active USD stage.

    Args:
        urdf_path (str): Path to URDF file to import
        obj_category (str): The category of the object.
        obj_model (str): The model name of the object.
        dataset_root (str): The root directory of the dataset.
        use_omni_convex_decomp (bool): Whether to use omniverse's built-in convex decomposer
            for collision meshes
        use_usda (bool): If set, will write files to .usda files instead of .usd
            (bigger memory footprint, but human-readable)
        merge_fixed_joints (bool): whether to merge fixed joints or not

    Returns:
        2-tuple:
            - str: Absolute path to post-processed URDF file used to generate USD
            - str: Absolute path to the imported USD file
    """
    # Inject meta links into the URDF before conversion
    urdf_path = _add_meta_links_to_urdf(
        urdf_path=urdf_path, obj_category=obj_category, obj_model=obj_model, dataset_root=dataset_root
    )

    # Build the importer configuration
    import_config = _create_urdf_import_config(
        use_convex_decomposition=use_omni_convex_decomp,
        merge_fixed_joints=merge_fixed_joints,
    )

    # Destination USD path inside the dataset tree
    usd_ext = "usda" if use_usda else "usd"
    usd_path = f"{dataset_root}/objects/{obj_category}/{obj_model}/usd/{obj_model}.{usd_ext}"

    # Optionally split collision meshes into separate bodies first
    if _SPLIT_COLLISION_MESHES:
        log.debug(f"Converting collision meshes from {obj_category}, {obj_model}...")
        urdf_path = _split_all_objs_in_urdf(urdf_fpath=urdf_path, name_suffix="split")

    # Run omniverse's URDF importer to produce the USD
    log.debug(f"Importing {obj_category}, {obj_model} into path {usd_path}...")
    lazy.omni.kit.commands.execute(
        "URDFParseAndImportFile",
        urdf_path=urdf_path,
        import_config=import_config,
        dest_path=usd_path,
    )
    log.debug(f"Imported {obj_category}, {obj_model}")

    return urdf_path, usd_path

import_og_asset_from_urdf(category, model, urdf_path=None, urdf_dep_paths=None, collision_method='coacd', coacd_links=None, convex_links=None, no_decompose_links=None, visual_only_links=None, merge_fixed_joints=False, dataset_root=gm.CUSTOM_DATASET_PATH, hull_count=32, overwrite=False, use_usda=False)

Imports an asset from URDF format into OmniGibson-compatible USD format. This will write the new USD (and copy the URDF if it does not already exist within @dataset_root) to @dataset_root

Parameters:

Name Type Description Default
category str

Category to assign to imported asset

required
model str

Model name to assign to imported asset

required
urdf_path None or str

If specified, external URDF that should be copied into the dataset first before converting into USD format. Otherwise, assumes that the urdf file already exists within @dataset_root dir

None
urdf_dep_paths None or list of str

If specified, relative paths to the @urdf_path directory that should be copied over to the custom dataset, e.g., relevant material directories

None
collision_method None or str

If specified, collision decomposition method to use to generate OmniGibson-compatible collision meshes. Valid options are {"coacd", "convex"}

'coacd'
coacd_links None or list of str

If specified, links that should use CoACD to decompose collision meshes

None
convex_links None or list of str

If specified, links that should use convex hull to decompose collision meshes

None
no_decompose_links None or list of str

If specified, links that should not have any special collision decomposition applied. This will only use the convex hull

None
visual_only_links None or list of str

If specified, links that should have no colliders associated with them

None
merge_fixed_joints bool

Whether to merge fixed joints or not

False
dataset_root str

Dataset root directory to use for writing imported USD file. Default is custom dataset path set from the global macros

CUSTOM_DATASET_PATH
hull_count int

Maximum number of convex hulls to decompose individual visual meshes into. Only relevant if @collision_method is "coacd"

32
overwrite bool

If set, will overwrite any pre-existing files

False
use_usda bool

If set, will write files to .usda files instead of .usd (bigger memory footprint, but human-readable)

False

Returns:

Type Description
3 - tuple
  • str: Absolute path to post-processed URDF file
  • str: Absolute path to generated USD file
  • Usd.Prim: Generated root USD prim (currently on active stage)
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def import_og_asset_from_urdf(
    category,
    model,
    urdf_path=None,
    urdf_dep_paths=None,
    collision_method="coacd",
    coacd_links=None,
    convex_links=None,
    no_decompose_links=None,
    visual_only_links=None,
    merge_fixed_joints=False,
    dataset_root=gm.CUSTOM_DATASET_PATH,
    hull_count=32,
    overwrite=False,
    use_usda=False,
):
    """
    Imports an asset from URDF format into OmniGibson-compatible USD format, writing the new USD
    (and copying the URDF if it does not already exist within @dataset_root) to @dataset_root.

    Args:
        category (str): Category to assign to imported asset
        model (str): Model name to assign to imported asset
        urdf_path (None or str): If specified, external URDF that should be copied into the dataset first
            before converting into USD format. Otherwise, assumes that the urdf file already exists
            within @dataset_root dir
        urdf_dep_paths (None or list of str): If specified, relative paths to the @urdf_path directory
            that should be copied over to the custom dataset, e.g., relevant material directories
        collision_method (None or str): If specified, collision decomposition method to use to generate
            OmniGibson-compatible collision meshes. Valid options are {"coacd", "convex"}
        coacd_links (None or list of str): If specified, links that should use CoACD to decompose collision meshes
        convex_links (None or list of str): If specified, links that should use convex hull to decompose collision meshes
        no_decompose_links (None or list of str): If specified, links that should not have any special collision
            decomposition applied. This will only use the convex hull
        visual_only_links (None or list of str): If specified, links that should have no colliders associated with them
        merge_fixed_joints (bool): Whether to merge fixed joints or not
        dataset_root (str): Dataset root directory to use for writing imported USD file. Default is custom
            dataset path set from the global macros
        hull_count (int): Maximum number of convex hulls to decompose individual visual meshes into.
            Only relevant if @collision_method is "coacd"
        overwrite (bool): If set, will overwrite any pre-existing files
        use_usda (bool): If set, will write files to .usda files instead of .usd
            (bigger memory footprint, but human-readable)

    Returns:
        3-tuple:
            - str: Absolute path to post-processed URDF file
            - str: Absolute path to generated USD file
            - Usd.Prim: Generated root USD prim (currently on active stage)
    """
    # Stage 1: make sure the source URDF lives inside the dataset tree
    if urdf_path is None:
        # No external URDF given -- it must already exist at the canonical dataset location:
        # <dataset_root>/objects/<category>/<model>/urdf/<model>_original.urdf
        urdf_path = os.path.join(dataset_root, "objects", category, model, "urdf", f"{model}_original.urdf")
        assert os.path.exists(urdf_path), f"Expected urdf at dataset location {urdf_path}, but none was found!"
    else:
        print(f"Copying URDF to dataset root {dataset_root}...")
        urdf_path = copy_urdf_to_dataset(
            urdf_path=urdf_path,
            category=category,
            mdl=model,
            urdf_dep_paths=urdf_dep_paths,
            dataset_root=dataset_root,
            suffix="original",
            overwrite=overwrite,
        )

    # Stage 2: sanitize the asset (all scaling must be positive)
    obj_dir = os.path.join(dataset_root, "objects", category, model)
    urdf_path = make_asset_positive(urdf_fpath=urdf_path)

    # Stage 3: optionally decompose collision meshes in place
    if collision_method is not None:
        print("Generating collision approximation for URDF...")
        get_collision_approximation_for_urdf(
            urdf_path=urdf_path,
            collision_method=collision_method,
            hull_count=hull_count,
            coacd_links=coacd_links,
            convex_links=convex_links,
            no_decompose_links=no_decompose_links,
            visual_only_links=visual_only_links,
        )

    # Stage 4: compute and store object metadata (bbox, base link offset, etc.)
    print("Recording object metadata from URDF...")
    record_obj_metadata_from_urdf(
        urdf_path=urdf_path,
        obj_dir=obj_dir,
        joint_setting="zero",
        overwrite=overwrite,
    )

    # Stage 5: convert to USD (requires a running, scene-less simulator instance)
    print("Converting obj URDF to USD...")
    og.launch()
    assert len(og.sim.scenes) == 0
    urdf_path, usd_path = import_obj_urdf(
        urdf_path=urdf_path,
        obj_category=category,
        obj_model=model,
        dataset_root=dataset_root,
        use_omni_convex_decomp=False,  # collision meshes were pre-decomposed above, so skip omni's decomposer
        use_usda=use_usda,
        merge_fixed_joints=merge_fixed_joints,
    )

    # Keep a copy of the meta-links URDF under the object model's plain name
    shutil.copy2(urdf_path, os.path.join(obj_dir, "urdf", f"{model}.urdf"))

    # Stage 6: attach the dataset metadata onto the generated USD
    prim = import_obj_metadata(
        usd_path=usd_path,
        obj_category=category,
        obj_model=model,
        dataset_root=dataset_root,
        import_render_channels=False,  # TODO: enable once material import is robust across source formats
    )
    print(
        f"\nConversion complete! Object has been successfully imported into OmniGibson-compatible USD, located at:\n\n{usd_path}\n"
    )

    return urdf_path, usd_path, prim

record_obj_metadata_from_urdf(urdf_path, obj_dir, joint_setting='zero', overwrite=False)

Records object metadata and writes it to misc/metadata.json within the object directory.

Parameters:

Name Type Description Default
urdf_path str

Path to object URDF

required
obj_dir str

Absolute path to the object's root directory

required
joint_setting str

Setting for joints when calculating canonical metadata. Valid options are {"low", "zero", "high"} (i.e.: lower joint limit, all 0 values, or upper joint limit)

'zero'
overwrite bool

Whether to overwrite any pre-existing data

False
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def record_obj_metadata_from_urdf(urdf_path, obj_dir, joint_setting="zero", overwrite=False):
    """
    Computes canonical object metadata from a URDF and writes it to misc/metadata.json
    within the object directory.

    Args:
        urdf_path (str): Path to object URDF
        obj_dir (str): Absolute path to the object's root directory
        joint_setting (str): Setting for joints when calculating canonical metadata. Valid options
            are {"low", "zero", "high"} (i.e.: lower joint limit, all 0 values, or upper joint limit)
        overwrite (bool): Whether to overwrite any pre-existing data
    """
    # Parse the URDF so we can pose its kinematic tree
    robot = URDF.load(urdf_path)

    # Map each valid setting onto a callable extracting the desired joint value
    joint_value_getters = {
        "zero": lambda jnt: 0.0,
        "low": lambda jnt: jnt.limit.lower,
        "high": lambda jnt: jnt.limit.upper,
    }
    if joint_setting not in joint_value_getters:
        raise ValueError(f"Got invalid joint_setting: {joint_setting}! Valid options are ['low', 'zero', 'high']")
    get_joint_value = joint_value_getters[joint_setting]

    # Run FK with every movable (prismatic / revolute) joint at the requested configuration
    cfg = {jnt.name: get_joint_value(jnt) for jnt in robot.joints if jnt.joint_type in ("prismatic", "revolute")}
    posed_meshes = robot.visual_trimesh_fk(cfg=cfg)

    # Aggregate all posed visual meshes into a single trimesh scene
    combined_scene = trimesh.Scene()
    for visual_mesh, pose in posed_meshes.items():
        combined_scene.add_geometry(geometry=visual_mesh, transform=pose)

    # Since the robot root is placed at the origin, the overall AABB centroid is
    # exactly the offset from the root link to the bounding-box center, and the
    # AABB extents give the canonical bounding-box size
    aabb = combined_scene.bounding_box

    # Assemble and persist the metadata; mkdir raises FileExistsError when misc/
    # already exists and @overwrite is False, preventing accidental clobbering
    metadata = {
        "meta_links": {},
        "link_tags": {},
        "object_parts": [],
        "base_link_offset": aabb.centroid.tolist(),
        "bbox_size": aabb.extents.tolist(),
        "orientations": [],
    }
    out_dir = pathlib.Path(obj_dir) / "misc"
    out_dir.mkdir(exist_ok=overwrite)
    with open(out_dir / "metadata.json", "w") as f:
        json.dump(metadata, f)

simplify_convex_hull(tm, max_vertices=60, max_faces=128)

Simplifies a convex hull mesh by using quadric edge collapse to reduce the number of faces

Parameters:

Name Type Description Default
tm Trimesh

Trimesh mesh to simplify. Should be a convex hull

required
max_vertices int

Maximum number of vertices to generate

60
Source code in OmniGibson/omnigibson/utils/asset_conversion_utils.py
def simplify_convex_hull(tm, max_vertices=60, max_faces=128):
    """
    Simplifies a convex hull mesh by using quadric edge collapse to reduce the number of faces

    Args:
        tm (Trimesh): Trimesh mesh to simplify. Should be a convex hull
        max_vertices (int): Maximum number of vertices allowed in the output mesh
        max_faces (int): Initial face-count target for the decimation filter; it is
            lowered on each pass until the vertex budget is satisfied

    Returns:
        Trimesh: Convex hull of the decimated mesh, or @tm unchanged when it already
            has no more than @max_vertices vertices
    """
    # Mesh is already within the vertex budget -- nothing to do
    if len(tm.vertices) <= max_vertices:
        return tm

    # Decimate with pymeshlab, tightening the face target until few enough vertices remain
    mesh_set = pymeshlab.MeshSet()
    mesh_set.add_mesh(
        pymeshlab.Mesh(vertex_matrix=tm.vertices, face_matrix=tm.faces, v_normals_matrix=tm.vertex_normals)
    )
    face_target = max_faces
    while len(mesh_set.current_mesh().vertex_matrix()) > max_vertices:
        mesh_set.apply_filter("meshing_decimation_quadric_edge_collapse", targetfacenum=face_target)
        face_target -= 2

    # Rebuild a Trimesh from the decimated data and re-take the convex hull so the
    # simplified result is guaranteed to remain convex
    reduced = mesh_set.current_mesh()
    return trimesh.Trimesh(
        vertices=reduced.vertex_matrix(),
        faces=reduced.face_matrix(),
        vertex_normals=reduced.vertex_normal_matrix(),
    ).convex_hull