diff --git a/.gitignore b/.gitignore
index 68bc17f..6ac860f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,16 @@
+# Project-specific
+assets/train_assembly
+assets/test_assembly
+plan_sequence/generator/network/*.pt
+*.sdf
+*.obj
+*.dae
+*.zip
+
+.vscode
+.DS_Store
+__MACOSX
+
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
diff --git a/README.md b/README.md
index bbafe94..e1fb3b5 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,199 @@
-# ASAP
\ No newline at end of file
+# ASAP
+
+This repository contains the official code and dataset of [ASAP: Automated Sequence Planning for Complex Robotic Assembly with Physical Feasibility (ICRA 2024)](http://asap.csail.mit.edu).
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+**Authors**: Yunsheng Tian, Karl D.D. Willis, Bassel Al Omari, Jieliang Luo, Pingchuan Ma, Yichen Li, Farhad Javid, Edward Gu, Joshua Jacob, Shinjiro Sueda, Hui Li, Sachin Chitta, Wojciech Matusik
+
+**Summary**: The automated assembly of complex products requires a system that can automatically plan a physically feasible sequence of actions for assembling many parts together. In this paper, we present ASAP, a physics-based planning approach for automatically generating such a sequence for general-shaped assemblies. ASAP accounts for gravity to design a sequence where each sub-assembly is physically stable with a limited number of parts being held and a support surface. We apply efficient tree search algorithms to reduce the combinatorial complexity of determining such an assembly sequence. The search can be guided by either geometric heuristics or graph neural networks trained on data with simulation labels. Finally, we show the superior performance of ASAP at generating physically realistic assembly sequence plans on a large dataset of hundreds of complex product assemblies. We further demonstrate the applicability of ASAP on both simulation and real-world robotic setups.
+
+## Installation
+
+### 1. Clone repository
+
+```
+git clone --recurse-submodules git@github.com:yunshengtian/ASAP.git
+```
+
+### 2. Python environment
+
+```
+conda env create -f environment.yml
+conda activate asap
+```
+
+or
+
+```
+pip install numpy networkx matplotlib scipy pyglet rtree sortedcontainers tqdm trimesh torch torch_geometric torch_sparse torch_scatter seaborn ikpy pyquaternion
+```
+
+### 3. Python binding of simulation
+
+```
+cd simulation
+python setup.py install
+```
+
+To test if the installation steps are successful, run:
+
+```
+python test_sim/test_simple_sim.py --model box/box_stack --steps 2000
+```
+
+Then the simulation viewer should appear. [Here](https://github.com/yunshengtian/Assemble-Them-All?tab=readme-ov-file#simulation-viewer) are some tips on interacting with the viewer.
+Additionally, press `V` for outputting the camera parameters (lookat and pos).
+
+We also provide a beam assembly under ``assets/beam_assembly`` folder. To visualize the simulation of that, run:
+
+```
+python test_sim/test_multi_sim.py --dir beam_assembly --id original --gravity 9.8 --steps 2000 --friction 0.5 --camera-pos 3.15 -1.24 1.6 --camera-lookat 2.59 -0.55 1.16
+```
+
+### 4. Assembly dataset (optional)
+
+Download the training set and test set:
+
+| Training set (1906 assemblies) | Test set (240 assemblies) |
+| :--------------------------------------: | :------------------------------: |
+|  |  |
+| [Link (591MB)](https://people.csail.mit.edu/yunsheng/ASAP/dataset_2404/training_assembly.zip) | [Link (124MB)](https://people.csail.mit.edu/yunsheng/ASAP/dataset_2404/test_assembly.zip) |
+
+For point-based SDF collision check to work more accurately, we highly recommend subdividing the assembly meshes to have denser contact points by running ``assets/subdivide_batch.py``. For example, to subdivide the dataset saved in ``assets/test_assembly`` and export to ``assets/test_assembly_dense``:
+
+```
+python assets/subdivide_batch.py --source-dir assets/test_assembly --target-dir assets/test_assembly_dense --num-proc NUM_PROCESSES
+```
+
+## Experiments
+
+### Sequence planning
+
+Use the following command to run sequence planning on the beam assembly we provided:
+
+```
+python plan_sequence/run_seq_plan.py --dir beam_assembly --id original --planner dfs --generator heur-out --max-gripper 2 --base-part 6 --log-dir logs/beam_seq --early-term
+```
+
+Important arguments include (see the complete list in `plan_sequence/run_seq_plan.py`):
+
+- `dir`: assembly directory (relative to `assets/`)
+- `id`: assembly id
+- `planner`: name of the node selection algorithm (i.e., tree search planner) (see `plan_sequence/planner/__init__.py` for supported options)
+- `generator`: name of the part selection algorithm (i.e., part generator) (see `plan_sequence/generator/__init__.py` for supported options)
+- `seed`: random seed
+- `budget`: maximum number of feasibility evaluations
+- `max-gripper`: number of available grippers (for assembling and holding parts)
+- `max-pose`: number of pose candidates to search from during pose selection
+- `pose-reuse`: number of poses to be reused from the parent node for pose selection
+- `early-term`: early termination once a feasible plan is found (rather than waiting for the whole tree to be fully expanded)
+- `timeout`: timeout in seconds for the whole sequence planning
+- `base-part`: id of the base part (if exists) as the first part that stays in place (reorientation will not be allowed then)
+- `log-dir`: log directory for storing all the planning outputs
+- `plan-grasp`: whether to plan gripper grasps
+- `plan-arm`: whether to plan arm motions
+
+### Log folder structure
+
+If `log-dir` is specified in the above command, the log files will be saved in this directory: `{log-dir}/{planner}-{generator}/s{seed}/{id}/`.
+
+There are three files generated:
+1. `setup.json` that stores the arguments used for experiments;
+2. `stats.json` that stores the high-level planning results;
+3. `tree.pkl` that stores the explored disassembly tree with all necessary information on edges/nodes.
+
+### Generating results from log
+
+We separate the planning and result generation for flexibility considerations. Suppose you have run the above command for planning, then use the following command to generate planned results:
+
+```
+python plan_sequence/play_logged_plan.py --log-dir logs/beam_seq/dfs-heur-out/s0/original/ --assembly-dir assets/beam_assembly/original --result-dir results/beam_seq/ --save-all --camera-pos 3.15 -1.24 1.6 --camera-lookat 2.59 -0.55 1.16
+```
+
+Important arguments include (see the complete list in `plan_sequence/play_logged_plan.py`):
+
+- `log-dir`: input log directory
+- `assembly-dir`: input assembly directory (absolute path)
+- `result-dir`: output result directory
+- `save-mesh`: whether to output meshes in the result folder (not necessarily needed, same as meshes in assembly dir)
+- `save-pose`: whether to output (reoriented) pose of every assembly step
+- `save-part`: whether to output parts to be held
+- `save-record`: whether to output rendered videos
+- `save-all`: whether to output everything above
+- `reverse`: whether to reverse the rendering (to be assembly instead of disassembly)
+- `show-fix`: whether to show fixed parts in rendering (in grey)
+- `show-grasp`: whether to show gripper grasp in rendering
+- `show-arm`: whether to show arm motion in rendering
+
+If `save-all` is specified, the results will be saved in `result-dir` with the following structure.
+Assume there are `N` parts, `N-1` assembly steps, `T` time steps in each assembly step,
+N part ids are `{p0}, {p1}, ... {pN-1}`, and N-1 ordered part ids following the disassembly order are `{p'0}, {p'1}, ..., {p'N-2}`:
+
+```
+mesh/ --- meshes of individual parts
+ ├── part{p0}.obj
+ ├── ...
+ └── part{pN-1}.obj
+part_fix/ --- parts to be held in every assembly step
+ ├── 0_{p'0}.json
+ ├── ...
+ └── N-2_{p'N-2}.json
+path/ --- geometric assembly paths in every time step in every assembly step (4x4 transformation matrices)
+ └── 0_{p'0}/
+ └── 0/
+ ├── part{p0}.npy
+ ├── ...
+ └── part{pN-1}.npy
+ ├── ...
+ └── {T-1}/
+ ├── ...
+ └── N-2_{p'N-2}/
+pose/ --- global pose of the whole (sub)assembly in every assembly step (4x4 transformation matrix)
+ ├── 0_{p'0}.npy
+ ├── ...
+ └── N-2_{p'N-2}.npy
+record/ --- (dis)assembly animations in every assembly step
+ ├── 0_{p'0}.gif
+ ├── ...
+ └── N-2_{p'N-2}.gif
+```
+
+After the animations are generated, you can use `plan_sequence/combine_animation.py` to concatenate all videos into a single one.
+
+### Batch sequence planning
+
+Use `plan_sequence/run_seq_plan_batch.py` to run batch sequence planning for all assemblies in the assembly directory (with similar arguments as shown above for the serial script). The log folders will be saved in this directory: `{log-dir}/g{max-gripper}/{planner}-{generator}/s{seed}/`.
+
+To check success rates quantitatively, run:
+```
+python plan_sequence/check_success_rate_batch.py --log-dir {log-dir}/g{max-gripper}
+```
+
+## Contact
+
+Please feel free to contact yunsheng@csail.mit.edu or create a GitHub issue for any questions about the code or dataset.
+
+## Citation
+
+If you find our paper, code, or dataset useful, please consider citing:
+
+```
+@article{tian2023asap,
+ title={ASAP: Automated Sequence Planning for Complex Robotic Assembly with Physical Feasibility},
+ author={Tian, Yunsheng and Willis, Karl DD and Omari, Bassel Al and Luo, Jieliang and Ma, Pingchuan and Li, Yichen and Javid, Farhad and Gu, Edward and Jacob, Joshua and Sueda, Shinjiro and others},
+ journal={arXiv preprint arXiv:2309.16909},
+ year={2023}
+}
+```
\ No newline at end of file
diff --git a/assets/beam_assembly/original/config.json b/assets/beam_assembly/original/config.json
new file mode 100644
index 0000000..6fa7c8d
--- /dev/null
+++ b/assets/beam_assembly/original/config.json
@@ -0,0 +1,22 @@
+{
+ "0": {
+ "initial_state": [6.8913, 8.6769, 0.762, 0, 0, 0],
+ "final_state": [12.245, 3.8152, 7.0889, 0, 0, 1.570796]
+ },
+ "1": {
+ "initial_state": [6.8833, 13.2202, 0.762, 0, 0, 0],
+ "final_state": [21.135, 3.8152, 7.0889, 0, 0, 1.570796]
+ },
+ "2": {
+ "initial_state": [17.0764, 8.7173, 0.635, -1.570796, 0, -1.570796],
+ "final_state": [12.2424, 3.8152, 3.8614, 0, 0, -1.570796]
+ },
+ "3": {
+ "initial_state": [17.047, 13.1969, 0.635, -1.570796, 0, -1.570796],
+ "final_state": [21.1376, 3.8152, 3.8614, 0, 0, -1.570796]
+ },
+ "6": {
+ "initial_state": [16.69, 3.8152, 0.635, 0, 0, 1.570796],
+ "final_state": [16.69, 3.8152, 0.6321, 0, 0, 1.570796]
+ }
+}
\ No newline at end of file
diff --git a/assets/beam_assembly/original/id_map.json b/assets/beam_assembly/original/id_map.json
new file mode 100644
index 0000000..d9343c5
--- /dev/null
+++ b/assets/beam_assembly/original/id_map.json
@@ -0,0 +1 @@
+{"0": "Table_Base - Feet-1.obj", "1": "Table_Base - Feet-2.obj", "2": "Table_Base - Upright-1.obj", "3": "Table_Base - Upright-2.obj", "4": "Table_Base - Supports-1.obj", "5": "Table_Base - Supports-2.obj", "6": "Table_Base - Top-1.obj"}
\ No newline at end of file
diff --git a/assets/beam_assembly/original/robot.json b/assets/beam_assembly/original/robot.json
new file mode 100644
index 0000000..30955ac
--- /dev/null
+++ b/assets/beam_assembly/original/robot.json
@@ -0,0 +1,34 @@
+{
+ "arm": {
+ "base_pos": [-21.25, -30.0, 0.0],
+ "base_angle": 0.0,
+ "scale": 1.0,
+ "rest_q": [0.63879051, -0.41713369, 0.28274334, 0.65100781, 0.13089969, 1.0559242, 0.82030475]
+ },
+ "gripper": {
+ "type": "robotiq-140",
+ "scale": 1.0
+ },
+ "grasp": {
+ "0": {
+ "antipodals": [[0.0, 0.635, 1.073], [0.0, -0.635, 1.073]],
+ "base_direction": [0.0, 0.0, 1.0]
+ },
+ "1": {
+ "antipodals": [[0.0, 0.635, 1.073], [0.0, -0.635, 1.073]],
+ "base_direction": [0.0, 0.0, 1.0]
+ },
+ "2": {
+ "antipodals": [[-0.635, -1.2, 1.905], [0.635, -1.2, 1.905]],
+ "base_direction": [0.0, -1.0, 0.0]
+ },
+ "3": {
+ "antipodals": [[-0.635, -1.2, 1.905], [0.635, -1.2, 1.905]],
+ "base_direction": [0.0, -1.0, 0.0]
+ },
+ "6": {
+ "antipodals": [[-0.889, 0.0, 1.2], [0.889, 0.0, 1.2]],
+ "base_direction": [0.0, 0.0, 1.0]
+ }
+ }
+}
\ No newline at end of file
diff --git a/assets/box/box_stack.xml b/assets/box/box_stack.xml
new file mode 100644
index 0000000..04046a2
--- /dev/null
+++ b/assets/box/box_stack.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assets/color.py b/assets/color.py
new file mode 100644
index 0000000..7457e0b
--- /dev/null
+++ b/assets/color.py
@@ -0,0 +1,26 @@
+'''
+Predefined colors for assembly meshes
+'''
+
+import numpy as np
+
+
+def get_color(part_ids, normalize=True):
+ color_map = {}
+ if len(part_ids) <= 2:
+ colors = np.array([
+ [107, 166, 161, 255],
+ [209, 184, 148, 255],
+ ], dtype=int)
+ else:
+ colors = np.array([
+ [210, 87, 89, 255],
+ [237, 204, 73, 255],
+ [60, 167, 221, 255],
+ [190, 126, 208, 255],
+ [108, 192, 90, 255],
+ ], dtype=int)
+ if normalize: colors = colors.astype(float) / 255.0
+ for i, part_id in enumerate(part_ids):
+ color_map[part_id] = colors[i % len(colors)]
+ return color_map
diff --git a/assets/load.py b/assets/load.py
new file mode 100644
index 0000000..1728d80
--- /dev/null
+++ b/assets/load.py
@@ -0,0 +1,183 @@
+'''
+Load assembly meshes and transform
+'''
+import os
+import sys
+
+project_base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
+sys.path.append(project_base_dir)
+
+import numpy as np
+import json
+import trimesh
+from scipy.spatial.transform import Rotation as R
+
+from assets.color import get_color
+from assets.transform import get_transform_matrix, q_to_pos_quat
+
+
+def load_config(obj_dir):
+ '''
+ Load config from dir (initial/final states)
+ '''
+ config_path = os.path.join(obj_dir, 'config.json')
+ if not os.path.exists(config_path):
+ return None
+
+ with open(config_path, 'r') as fp:
+ config = json.load(fp)
+
+ return config
+
+
+def load_pos_quat_dict(obj_dir, transform='final'):
+ config = load_config(obj_dir)
+ pos_dict, quat_dict = {}, {}
+ part_ids = load_part_ids(obj_dir)
+ for part_id in part_ids:
+ if config is None or part_id not in config:
+ pos_dict[part_id], quat_dict[part_id] = np.array([0., 0., 0.]), np.array([1., 0., 0., 0.])
+ else:
+ part_cfg = config[part_id]
+ if transform == 'final':
+ state = part_cfg['final_state']
+ elif transform == 'initial':
+ state = part_cfg['initial_state'] if 'initial_state' in part_cfg else None
+ else:
+ raise Exception(f'Unknown transform type: {transform}')
+ if state is not None:
+ pos_dict[part_id], quat_dict[part_id] = q_to_pos_quat(state)
+ else:
+ pos_dict[part_id], quat_dict[part_id] = np.array([0., 0., 0.]), np.array([1., 0., 0., 0.])
+ return pos_dict, quat_dict
+
+
+def load_part_ids(obj_dir):
+ part_ids = []
+ for obj_name in os.listdir(obj_dir):
+ if obj_name.endswith('.obj'):
+ part_id = obj_name.replace('.obj', '')
+ part_ids.append(part_id)
+ part_ids.sort()
+ return part_ids
+
+
+def load_assembly(obj_dir, transform='final'):
+ '''
+ Load the entire assembly from dir
+ transform: 'final', 'initial' or 'none'
+ '''
+ obj_ids = load_part_ids(obj_dir)
+ obj_ids = sorted(obj_ids)
+ color_map = get_color(obj_ids, normalize=False)
+
+ assembly = {}
+ config = load_config(obj_dir)
+
+ for obj_id in obj_ids:
+ obj_name = f'{obj_id}.obj'
+ obj_path = os.path.join(obj_dir, obj_name)
+ mesh = trimesh.load_mesh(obj_path, process=False, maintain_order=True)
+ mesh.visual.face_colors = color_map[obj_id]
+
+ assembly[obj_id] = {
+ 'mesh': mesh,
+ 'name': obj_name,
+ 'transform': transform,
+ }
+
+ if config is not None and obj_id in config:
+ if transform == 'final':
+ mat = get_transform_matrix(config[obj_id]['final_state'])
+ mesh.apply_transform(mat)
+ elif transform == 'initial':
+ mat = get_transform_matrix(config[obj_id]['initial_state'])
+ mesh.apply_transform(mat)
+ elif transform == 'none':
+ pass
+ else:
+ raise Exception(f'Unknown transform type: {transform}')
+
+ assembly[obj_id]['initial_state'] = config[obj_id]['initial_state'] if 'initial_state' in config[obj_id] else None
+ assembly[obj_id]['final_state'] = config[obj_id]['final_state']
+ else:
+ assembly[obj_id]['initial_state'] = None
+ assembly[obj_id]['final_state'] = None
+
+ return assembly
+
+
+def load_assembly_all_transformed(obj_dir):
+ '''
+ Load the entire assembly from dir with all transforms applied
+ '''
+ obj_ids = load_part_ids(obj_dir)
+ obj_ids = sorted(obj_ids)
+ color_map = get_color(obj_ids, normalize=False)
+
+ assembly = {}
+ config = load_config(obj_dir)
+
+ for obj_id in obj_ids:
+ obj_name = f'{obj_id}.obj'
+ obj_path = os.path.join(obj_dir, obj_name)
+ mesh = trimesh.load_mesh(obj_path, process=False, maintain_order=True)
+ mesh.visual.face_colors = color_map[obj_id]
+
+ mesh_none = mesh.copy()
+ mesh_final = mesh.copy()
+ mesh_initial = None
+ if config is not None and obj_id in config:
+ mat_final = get_transform_matrix(config[obj_id]['final_state'])
+ mat_initial = get_transform_matrix(config[obj_id]['initial_state']) if 'initial_state' in config[obj_id] else None
+ mesh_final.apply_transform(mat_final)
+ if mat_initial is not None:
+ mesh_initial = mesh.copy()
+ mesh_initial.apply_transform(mat_initial)
+
+ assembly[obj_id] = {
+ 'name': obj_name,
+ 'mesh': mesh_none,
+ 'mesh_final': mesh_final,
+ 'mesh_initial': mesh_initial,
+ }
+ if config is not None and obj_id in config:
+ assembly[obj_id]['initial_state'] = config[obj_id]['initial_state'] if 'initial_state' in config[obj_id] else None
+ assembly[obj_id]['final_state'] = config[obj_id]['final_state']
+
+ return assembly
+
+
+def load_paths(path_dir):
+ '''
+ Load motion of assembly meshes at every time step
+ '''
+ paths = {}
+ for step in os.listdir(path_dir):
+ obj_id = step.split('_')[1]
+ step_dir = os.path.join(path_dir, step)
+ if os.path.isdir(step_dir):
+ path = []
+ frame_files = []
+ for frame_file in os.listdir(step_dir):
+ if frame_file.endswith('.npy'):
+ frame_files.append(frame_file)
+ frame_files.sort(key=lambda x: int(x.replace('.npy', '')))
+ for frame_file in frame_files:
+ frame_path = os.path.join(step_dir, frame_file)
+ frame_transform = np.load(frame_path)
+ path.append(frame_transform)
+ paths[obj_id] = path
+ return paths
+
+
+if __name__ == '__main__':
+ from argparse import ArgumentParser
+
+ parser = ArgumentParser()
+ parser.add_argument('--dir', type=str, required=True)
+ args = parser.parse_args()
+
+ assembly = load_assembly(args.dir)
+ meshes = [assembly[obj_id]['mesh'] for obj_id in assembly]
+ trimesh.Scene(meshes).show()
diff --git a/assets/make_sdf.py b/assets/make_sdf.py
new file mode 100644
index 0000000..47203bd
--- /dev/null
+++ b/assets/make_sdf.py
@@ -0,0 +1,65 @@
+import os
+import sys
+
+project_base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
+sys.path.append(project_base_dir)
+
+import redmax_py as redmax
+import json
+from argparse import ArgumentParser
+
+from assets.load import load_pos_quat_dict, load_part_ids
+from assets.save import clear_saved_sdfs
+
+
+def arr_to_str(arr):
+ return ' '.join([str(x) for x in arr])
+
+
+def get_xml_string(assembly_dir, part_ids, sdf_dx):
+ pos_dict, quat_dict = load_pos_quat_dict(assembly_dir)
+ joint_type = 'fixed'
+ string = f'''
+
+
+'''
+ for part_id in part_ids:
+ string += f'''
+
+
+
+
+
+
+'''
+ string += f'''
+
+'''
+ return string
+
+
+def make_sdf(asset_folder, assembly_dir, sdf_dx):
+ part_ids = load_part_ids(assembly_dir)
+ model_string = get_xml_string(
+ assembly_dir=assembly_dir,
+ part_ids=part_ids,
+ sdf_dx=sdf_dx,
+ )
+ redmax.Simulation(model_string, asset_folder)
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--id', type=str, required=True, help='assembly id (e.g. 00000)')
+ parser.add_argument('--dir', type=str, default='multi_assembly', help='directory storing all assemblies')
+ parser.add_argument('--sdf-dx', type=float, default=0.05, help='grid resolution of SDF')
+ parser.add_argument('--clear', default=False, action='store_true')
+ args = parser.parse_args()
+
+ asset_folder = os.path.join(project_base_dir, './assets')
+ assembly_dir = os.path.join(asset_folder, args.dir, args.id)
+
+ if args.clear:
+ clear_saved_sdfs(assembly_dir)
+ else:
+ make_sdf(asset_folder, assembly_dir, args.sdf_dx)
diff --git a/assets/make_sdf_batch.py b/assets/make_sdf_batch.py
new file mode 100644
index 0000000..4b7c0ee
--- /dev/null
+++ b/assets/make_sdf_batch.py
@@ -0,0 +1,35 @@
+import os
+import sys
+
+project_base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
+sys.path.append(project_base_dir)
+
+from argparse import ArgumentParser
+from make_sdf import make_sdf
+
+from utils.parallel import parallel_execute
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--dir', type=str, default='multi_assembly', help='directory storing all assemblies')
+ parser.add_argument('--sdf-dx', type=float, default=0.05, help='grid resolution of SDF')
+ parser.add_argument('--num-proc', type=int, default=8)
+ args = parser.parse_args()
+
+ asset_folder = os.path.join(project_base_dir, './assets')
+ assemblies_dir = os.path.join(asset_folder, args.dir)
+ assembly_ids = []
+ for assembly_id in os.listdir(assemblies_dir):
+ assembly_dir = os.path.join(assemblies_dir, assembly_id)
+ if os.path.isdir(assembly_dir):
+ assembly_ids.append(assembly_id)
+ assembly_ids.sort()
+
+ worker_args = []
+ for assembly_id in assembly_ids:
+ assembly_dir = os.path.join(assemblies_dir, assembly_id)
+ worker_args.append([asset_folder, assembly_dir, args.sdf_dx])
+
+ for _ in parallel_execute(make_sdf, worker_args, args.num_proc, show_progress=True):
+ pass
diff --git a/assets/mesh_distance.py b/assets/mesh_distance.py
new file mode 100644
index 0000000..0e54e0b
--- /dev/null
+++ b/assets/mesh_distance.py
@@ -0,0 +1,94 @@
+'''
+Compute the minimum distance between meshes at certain states
+'''
+
+import os
+import sys
+
+project_base_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
+sys.path.append(project_base_dir)
+
+import numpy as np
+from assets.transform import get_transform_matrix, transform_pts_by_matrix
+
+
+def compute_all_mesh_distance(meshes, states, verbose=False):
+ '''
+ Compute the minimum distance between meshes at certain states
+ '''
+ assert len(meshes) == len(states)
+
+ mats, inv_mats = [], []
+ for i in range(len(meshes)):
+ mat = get_transform_matrix(states[i])
+ mats.append(mat)
+ inv_mats.append(np.linalg.inv(mat))
+
+ d = np.inf
+ for i in range(len(meshes)):
+ for j in range(i + 1, len(meshes)):
+ v_i_trans = transform_pts_by_matrix(meshes[i].vertices.T, inv_mats[j].dot(mats[i]))
+ v_j_trans = transform_pts_by_matrix(meshes[j].vertices.T, inv_mats[i].dot(mats[j]))
+ d_ij = meshes[i].min_distance(v_j_trans.T)
+ d_ji = meshes[j].min_distance(v_i_trans.T)
+ d = np.min([d, d_ij, d_ji])
+ if verbose:
+ print(f'mesh distance between {i} {j}:', min([d_ij, d_ji]))
+ if verbose:
+ print('mesh distance minimum:', d)
+
+ return d
+
+
+def compute_move_mesh_distance(move_mesh, still_meshes, state):
+ '''
+ Compute the minimum distance between meshes at certain states
+ '''
+ move_mat = get_transform_matrix(state)
+ move_inv_mat = np.linalg.inv(move_mat)
+
+ v_m_trans = transform_pts_by_matrix(move_mesh.vertices.T, move_mat).T
+
+ d = np.inf
+ for i in range(len(still_meshes)):
+ v_s_trans = transform_pts_by_matrix(still_meshes[i].vertices.T, move_inv_mat).T
+ d_ms = move_mesh.min_distance(v_s_trans)
+ d_sm = still_meshes[i].min_distance(v_m_trans)
+ d = np.min([d, d_ms, d_sm])
+
+ return d
+
+
+def compute_move_mesh_distance_from_mat(move_mesh, still_meshes, move_mat):
+
+ move_inv_mat = np.linalg.inv(move_mat)
+
+ v_m_trans = transform_pts_by_matrix(move_mesh.vertices.T, move_mat).T
+
+ d = np.inf
+ for i in range(len(still_meshes)):
+ v_s_trans = transform_pts_by_matrix(still_meshes[i].vertices.T, move_inv_mat).T
+ d_ms = move_mesh.min_distance(v_s_trans)
+ d_sm = still_meshes[i].min_distance(v_m_trans)
+ d = np.min([d, d_ms, d_sm])
+
+ return d
+
+
+def compute_ground_distance(move_mesh, state):
+
+ move_mat = get_transform_matrix(state)
+ v_m = transform_pts_by_matrix(move_mesh.vertices.T, move_mat)
+
+ d = np.min(v_m[:, 2])
+
+ return d
+
+
+def compute_ground_distance_from_mat(move_mesh, move_mat):
+
+ v_m = transform_pts_by_matrix(move_mesh.vertices.T, move_mat)
+
+ d = np.min(v_m[:, 2])
+
+ return d
diff --git a/assets/obj_utils/concat_obj.py b/assets/obj_utils/concat_obj.py
new file mode 100644
index 0000000..0c2959c
--- /dev/null
+++ b/assets/obj_utils/concat_obj.py
@@ -0,0 +1,36 @@
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--batch', action='store_true', default=False)
+ args = parser.parse_args()
+
+ if args.batch:
+ for obj_file in os.listdir(args.input_path):
+ obj_file = obj_file.lower()
+ if not obj_file.endswith('.obj'):
+ continue
+ input_path = os.path.join(args.input_path, obj_file)
+ output_path = os.path.join(args.output_path, os.path.basename(obj_file))
+ os.makedirs(args.output_path, exist_ok=True)
+ mesh = trimesh.load(input_path)
+ if type(mesh) == trimesh.Scene:
+ meshes = list(mesh.geometry.values())
+ mesh = trimesh.util.concatenate(meshes)
+ else:
+ print('Not a scene, no need to concatenate.')
+ mesh.export(output_path)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.obj')
+ mesh = trimesh.load(args.input_path)
+ if type(mesh) == trimesh.Scene:
+ meshes = list(mesh.geometry.values())
+ mesh = trimesh.util.concatenate(meshes)
+ else:
+ print('Not a scene, no need to concatenate.')
+ mesh.export(args.output_path)
diff --git a/assets/obj_utils/convexify_obj.py b/assets/obj_utils/convexify_obj.py
new file mode 100644
index 0000000..bde17d5
--- /dev/null
+++ b/assets/obj_utils/convexify_obj.py
@@ -0,0 +1,31 @@
+import numpy as np
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+def convexity_obj(input_path, output_path):
+ obj = trimesh.exchange.obj.export_obj(trimesh.load(input_path).convex_hull, header='')
+ with open(output_path, 'w') as fp:
+ fp.write(obj)
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--batch', default=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.batch:
+ os.makedirs(args.output_path, exist_ok=True)
+ for obj_file in os.listdir(args.input_path):
+ obj_file = obj_file.lower()
+ if not obj_file.lower().endswith('.obj'):
+ continue
+ input_path = os.path.join(args.input_path, obj_file)
+ output_path = os.path.join(args.output_path, obj_file)
+ convexity_obj(input_path, output_path)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.obj')
+ convexity_obj(args.input_path, args.output_path)
diff --git a/assets/obj_utils/dae_to_obj.py b/assets/obj_utils/dae_to_obj.py
new file mode 100644
index 0000000..88d6c61
--- /dev/null
+++ b/assets/obj_utils/dae_to_obj.py
@@ -0,0 +1,40 @@
+import numpy as np
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+def dae_to_obj(input_path, output_path, material):
+ if material:
+ mtl_path = output_path.replace('.obj', '.mtl')
+ obj, mtl = trimesh.exchange.obj.export_obj(trimesh.load(input_path), include_color=True, include_texture=True, return_texture=True, header='', mtl_name=mtl_path)
+ with open(output_path, 'w') as fp:
+ fp.write(obj)
+ for mtl_path, mtl_data in mtl.items():
+ with open(mtl_path, 'wb') as fp:
+ fp.write(mtl_data)
+ else:
+ obj = trimesh.exchange.obj.export_obj(trimesh.load(input_path), header='')
+ with open(output_path, 'w') as fp:
+ fp.write(obj)
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--batch', default=False, action='store_true')
+ parser.add_argument('--material', default=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.batch:
+ for dae_file in os.listdir(args.input_path):
+ dae_file = dae_file.lower()
+ if not dae_file.lower().endswith('.dae'):
+ continue
+ input_path = os.path.join(args.input_path, dae_file)
+ output_path = os.path.join(args.output_path, os.path.basename(dae_file).replace('.dae', '.obj'))
+ dae_to_obj(input_path, output_path, args.material)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.dae')
+ dae_to_obj(args.input_path, args.output_path, args.material)
diff --git a/assets/obj_utils/render_obj.py b/assets/obj_utils/render_obj.py
new file mode 100644
index 0000000..4d98646
--- /dev/null
+++ b/assets/obj_utils/render_obj.py
@@ -0,0 +1,13 @@
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--obj-path', type=str, required=True)
+ args = parser.parse_args()
+
+ assert os.path.isfile(args.obj_path) and args.obj_path.endswith('.obj')
+ mesh = trimesh.load(args.obj_path)
+ mesh.show()
diff --git a/assets/obj_utils/scale_obj.py b/assets/obj_utils/scale_obj.py
new file mode 100644
index 0000000..301aa3f
--- /dev/null
+++ b/assets/obj_utils/scale_obj.py
@@ -0,0 +1,31 @@
+import numpy as np
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+def scale_obj(input_path, output_path, scale):
+ mesh = trimesh.load(input_path)
+ mesh.apply_scale(scale)
+ mesh.export(output_path)
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--scale', type=float, required=True)
+ parser.add_argument('--batch', default=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.batch:
+ os.makedirs(args.output_path, exist_ok=True)
+ for obj_file in os.listdir(args.input_path):
+ obj_file = obj_file.lower()
+ if not obj_file.lower().endswith('.obj'):
+ continue
+ input_path = os.path.join(args.input_path, obj_file)
+ output_path = os.path.join(args.output_path, obj_file)
+ scale_obj(input_path, output_path, args.scale)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.obj')
+ scale_obj(args.input_path, args.output_path, args.scale)
diff --git a/assets/obj_utils/stl_to_obj.py b/assets/obj_utils/stl_to_obj.py
new file mode 100644
index 0000000..f7cb1d0
--- /dev/null
+++ b/assets/obj_utils/stl_to_obj.py
@@ -0,0 +1,41 @@
+import numpy as np
+import stl
+from stl import mesh
+import pywavefront
+import glob
+import os
+from argparse import ArgumentParser
+
+
+def stl_to_obj(stl_file, obj_file):
+ # Load the STL files and add the vectors to the plot
+ stl_mesh = mesh.Mesh.from_file(stl_file)
+ with open(obj_file, 'w') as file:
+ for i, facet in enumerate(stl_mesh.vectors):
+ # Write vertices
+ for vertex in facet:
+ file.write('v {0} {1} {2}\n'.format(vertex[0], vertex[1], vertex[2]))
+ # Write faces
+ file.write('f {0} {1} {2}\n'.format(i*3+1, i*3+2, i*3+3))
+ print(f"{stl_file} has been converted to {obj_file}")
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--batch', default=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.batch:
+ for stl_file in os.listdir(args.input_path):
+ stl_file = stl_file.lower()
+ if not stl_file.endswith('.stl'):
+ continue
+ input_path = os.path.join(args.input_path, stl_file)
+ output_path = os.path.join(args.output_path, os.path.basename(stl_file).replace('.stl', '.obj'))
+ os.makedirs(args.output_path, exist_ok=True)
+ stl_to_obj(input_path, output_path)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.stl')
+ stl_to_obj(args.input_path, args.output_path)
diff --git a/assets/obj_utils/translate_obj.py b/assets/obj_utils/translate_obj.py
new file mode 100644
index 0000000..b41c400
--- /dev/null
+++ b/assets/obj_utils/translate_obj.py
@@ -0,0 +1,31 @@
+import numpy as np
+import trimesh
+import os
+from argparse import ArgumentParser
+
+
+def translate_obj(input_path, output_path, translation):
+ mesh = trimesh.load(input_path)
+ mesh.apply_translation(translation)
+ mesh.export(output_path)
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument('--input-path', type=str, required=True)
+ parser.add_argument('--output-path', type=str, required=True)
+ parser.add_argument('--translation', type=float, nargs='+', required=True)
+ parser.add_argument('--batch', default=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.batch:
+ os.makedirs(args.output_path, exist_ok=True)
+ for obj_file in os.listdir(args.input_path):
+ obj_file = obj_file.lower()
+ if not obj_file.lower().endswith('.obj'):
+ continue
+ input_path = os.path.join(args.input_path, obj_file)
+ output_path = os.path.join(args.output_path, obj_file)
+ translate_obj(input_path, output_path, args.translation)
+ else:
+ assert os.path.isfile(args.input_path) and args.input_path.endswith('.obj')
+ translate_obj(args.input_path, args.output_path, args.translation)
diff --git a/assets/panda/panda.xml b/assets/panda/panda.xml
new file mode 100644
index 0000000..c5bdcc7
--- /dev/null
+++ b/assets/panda/panda.xml
@@ -0,0 +1,74 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+