Commit c3715e9: added n_layers param

rogeriobonatti committed Jun 5, 2022
1 parent be1eb0a

Showing 2 changed files with 11 additions and 4 deletions.
7 changes: 6 additions & 1 deletion mushr_rhc_ros/launch/sim/sim_server_eval.launch
@@ -6,8 +6,12 @@
  <!-- logging output paths -->
  <arg name="out_path" default="/home/rb/hackathon_data_premium/e2e_eval/model_test" />

+ <!-- number of layers for model -->
+ <arg name="n_layers" default="12" />
+
  <!-- action model -->
- <arg name="model_path_act" default="/home/rb/hackathon_data_premium/aml_outputs/log_output/hvd_test_16/GPTcorl_scratch_trainm_e2e_statet_pointnet_traini_1_nla_12_nhe_8_statel_0.01_2022-06-02_1654131996.2524076_2022-06-02_1654131996.2524228/model/epoch30.pth.tar" />
+ <!-- <arg name="model_path_act" default="/home/rb/hackathon_data_premium/aml_outputs/log_output/hvd_test_16/GPTcorl_scratch_trainm_e2e_statet_pointnet_traini_1_nla_12_nhe_8_statel_0.01_2022-06-02_1654131996.2524076_2022-06-02_1654131996.2524228/model/epoch30.pth.tar" /> -->
+ <arg name="model_path_act" default="/home/rb/hackathon_data_premium/aml_outputs/log_output/model_sizes_0/GPTcorl_scratch_trainm_e2e_statet_pointnet_traini_0_nla_6_nhe_8_statel_0.01_2022-06-03_1654253046.3212142_2022-06-03_1654253046.3212266/model/epoch30.pth.tar" />

  <!-- map model -->
  <arg name="use_map" default="false" />
@@ -35,6 +39,7 @@
<param name="deployment_map" value="$(arg deployment_map)" />

<param name="out_path" value="$(arg out_path)" />
<param name="n_layers" type="int" value="$(arg n_layers) " />
<param name="model_path_act" value="$(arg model_path_act)" />
<param name="model_path_map" value="$(arg model_path_map)" />
<param name="model_path_loc" value="$(arg model_path_loc)" />
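The new n_layers launch argument is forwarded to the node as the private integer parameter ~n_layers, and the default can be overridden at launch time (e.g. roslaunch mushr_rhc_ros sim_server_eval.launch n_layers:=6, assuming the package name matches the directory above). As a minimal, hypothetical sketch of that flow (not part of this commit; the node name is made up), a rospy node would read the value like this:

#!/usr/bin/env python
# Hypothetical standalone node, mirroring the rospy.get_param pattern used in
# rhcnode_network_pcl_new.py below; it only reads and logs the new parameter.
import rospy

if __name__ == "__main__":
    rospy.init_node("n_layers_probe")  # made-up node name
    # <param name="n_layers" type="int" ...> is set in the node's private
    # namespace, hence the "~" prefix; 12 matches the launch-file default.
    n_layers = rospy.get_param("~n_layers", 12)
    rospy.loginfo("running with a %d-layer GPT backbone", n_layers)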
8 changes: 5 additions & 3 deletions mushr_rhc_ros/src/rhcnode_network_pcl_new.py
@@ -108,12 +108,14 @@ def __init__(self, dtype, params, logger, name):
  saved_model_path_action = rospy.get_param("~model_path_act", 'default_value')
  self.out_path = rospy.get_param("~out_path", 'default_value')

+ self.n_layers = rospy.get_param("~n_layers", 12)
+
  vocab_size = 100
  block_size = self.clip_len * 2
  max_timestep = self.clip_len

  mconf = GPTConfig(block_size, max_timestep,
- n_layer=12, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
+ n_layer=self.n_layers, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
  state_tokenizer='pointnet', pretrained_encoder_path='',
  loss='MSE', train_mode='e2e', pretrained_model_path='',
  map_decoder='deconv', map_recon_dim=64, freeze_core=False,
@@ -160,7 +162,7 @@ def __init__(self, dtype, params, logger, name):
  saved_map_model_path = rospy.get_param("~model_path_map", '')

  mconf_map = GPTConfig(block_size, max_timestep,
- n_layer=12, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
+ n_layer=self.n_layers, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
  state_tokenizer='pointnet', pretrained_encoder_path='',
  loss='MSE', train_mode='map', pretrained_model_path='',
  map_decoder='deconv', map_recon_dim=64, freeze_core=False,
@@ -191,7 +193,7 @@ def __init__(self, dtype, params, logger, name):
  saved_loc_model_path = rospy.get_param("~model_path_loc", '')

  mconf_loc = GPTConfig(block_size, max_timestep,
- n_layer=12, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
+ n_layer=self.n_layers, n_head=8, n_embd=128, model_type='GPT', use_pred_state=True,
  state_tokenizer='pointnet', pretrained_encoder_path='',
  loss='MSE', train_mode='loc', pretrained_model_path='',
  map_decoder='deconv', map_recon_dim=64, freeze_core=False,
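With this change the transformer depth for the action, map, and localization configs comes from the single ~n_layers parameter instead of three hard-coded n_layer=12 literals, so the value only needs to agree with the loaded checkpoints in one place. The checkpoint directory names above appear to encode that depth as an nla_<N> token (nla_12, nla_6); as a purely hypothetical illustration (this helper does not exist in the repo), the value could even be inferred from the model path rather than set by hand:

# Hypothetical helper, not part of the commit: pull <N> out of the "nla_<N>"
# token that these training runs appear to embed in their output directory names.
import re

def infer_n_layers(checkpoint_path, fallback=12):
    """Return N from an 'nla_N' token in the path, or fallback if absent."""
    match = re.search(r"nla_(\d+)", checkpoint_path)
    return int(match.group(1)) if match else fallback

if __name__ == "__main__":
    # The new default model_path_act from the launch file above.
    path = "/home/rb/hackathon_data_premium/aml_outputs/log_output/model_sizes_0/GPTcorl_scratch_trainm_e2e_statet_pointnet_traini_0_nla_6_nhe_8_statel_0.01_2022-06-03_1654253046.3212142_2022-06-03_1654253046.3212266/model/epoch30.pth.tar"
    print(infer_n_layers(path))  # -> 6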
