diff --git a/batch_train.sh b/batch_train.sh
new file mode 100644
index 0000000..a243720
--- /dev/null
+++ b/batch_train.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+python3 train.py --batch_size=32 --lr=0.00001 --optimizer adam --dataset ../DualBiSeNet/data_raw_bev_mask/ --train --embedding --weighted --num_epoch 300 --validation_step 5 --telegram --patience 2 --patience_start 50 --dataloader generatedDataset --lossfunction MSE --teacher_path ./trainedmodels/teacher/teacher_model_27e66514.pth
diff --git a/teacher_train.py b/teacher_train.py
index 2e1fd2c..611ad2a 100644
--- a/teacher_train.py
+++ b/teacher_train.py
@@ -454,7 +454,7 @@ def train(args, model, optimizer, dataloader_train, dataloader_val, dataset_trai
             wandb.log({"Val/loss": loss_val,
                        "Val/Acc": acc_val,
                        "random_rate": random_rate,
-                       "conf-matrix_{}_{}".format(wandb.run.name, epoch): wandb.Image(plt)}, step=epoch)
+                       "conf-matrix_{}_{}".format(wandb.run.id, epoch): wandb.Image(plt)}, step=epoch)
             if args.nowandb:
                 if args.triplet:
                     print('Saving model: ',
@@ -472,17 +472,17 @@ def train(args, model, optimizer, dataloader_train, dataloader_val, dataset_trai
             else:
                 if args.triplet:
                     print('Saving model: ',
-                          os.path.join(args.save_model_path, 'teacher_model_{}.pth'.format(wandb.run.name)))
+                          os.path.join(args.save_model_path, 'teacher_model_{}.pth'.format(wandb.run.id)))
                     torch.save(bestModel, os.path.join(args.save_model_path,
-                                                       'teacher_model_{}.pth'.format(wandb.run.name)))
-                    savepath = os.path.join(args.save_model_path, 'teacher_model_{}.pth'.format(wandb.run.name))
+                                                       'teacher_model_{}.pth'.format(wandb.run.id)))
+                    savepath = os.path.join(args.save_model_path, 'teacher_model_{}.pth'.format(wandb.run.id))
                 else:
                     print('Saving model: ',
-                          os.path.join(args.save_model_path, 'teacher_model_class_{}.pth'.format(wandb.run.name)))
+                          os.path.join(args.save_model_path, 'teacher_model_class_{}.pth'.format(wandb.run.id)))
                     torch.save(bestModel, os.path.join(args.save_model_path,
-                                                       'teacher_model_class_{}.pth'.format(wandb.run.name)))
+                                                       'teacher_model_class_{}.pth'.format(wandb.run.id)))
                     savepath = os.path.join(args.save_model_path,
-                                            'teacher_model_class_{}.pth'.format(wandb.run.name))
+                                            'teacher_model_class_{}.pth'.format(wandb.run.id))

         elif epoch < args.patience_start:
             patience = 0
@@ -533,7 +533,7 @@ def train(args, model, optimizer, dataloader_train, dataloader_val, dataset_trai
                              'in https://docs.wandb.com/sweeps/configuration#command')
     parser.add_argument('--telegram', action='store_true', help='Send info through Telegram')
-    parser.add_argument('--triplet', type=bool, default=True, help='Triplet Loss')
+    parser.add_argument('--triplet', action='store_true', help='Triplet Loss')
     parser.add_argument('--swap', action='store_true', help='Triplet Loss swap')
     parser.add_argument('--margin', type=float, default=2.0, help='margin in triplet')
     parser.add_argument('--no_noise', action='store_true', help='In case you want to disable the noise injection in '
diff --git a/train.py b/train.py
index 24d2f7d..4d4ee9c 100644
--- a/train.py
+++ b/train.py
@@ -952,7 +952,7 @@ def main(args, model=None):
     if args.wandb_group_id:
         group_id = args.wandb_group_id
     else:
-        group_id = 'Kitti2011_Homography'
+        group_id = 'Kitti2011_mask'

     print(args)
     warnings.filterwarnings("ignore")
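
Reviewer note, not part of the patch: the rename from `wandb.run.name` to `wandb.run.id` in the checkpoint filenames and in the logged confusion-matrix key swaps the human-readable run label (which W&B can regenerate and the user can edit) for the run's short stable identifier, so saved teachers are keyed uniquely per run. It also matches the checkpoint that batch_train.sh loads, `teacher_model_27e66514.pth`, whose suffix has the shape of a run id. A minimal sketch of the resulting naming, with the directory and id taken from this diff:

```python
import os

# Directory and id taken from batch_train.sh's --teacher_path; any other run
# would substitute its own wandb.run.id as the suffix.
save_model_path = './trainedmodels/teacher'
run_id = '27e66514'

savepath = os.path.join(save_model_path, 'teacher_model_{}.pth'.format(run_id))
print(savepath)  # ./trainedmodels/teacher/teacher_model_27e66514.pth
```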
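
Side note on the `--triplet` change: `argparse` passes the raw command-line string to `type=bool`, and every non-empty string (including `'False'`) is truthy, so the old definition could never be switched off from the command line; `action='store_true'` gives a conventional off-by-default flag. This also flips the default from always-on to opt-in. A runnable sketch of the difference (the `--triplet_old` flag name is illustrative only):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--triplet_old', type=bool, default=True)  # pre-patch style
parser.add_argument('--triplet', action='store_true')          # post-patch style

args = parser.parse_args(['--triplet_old', 'False'])
print(args.triplet_old)  # True: bool('False') is True, so the flag cannot be disabled
print(args.triplet)      # False: store_true stays False unless --triplet is passed
```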