
Commit

revert bundle tutorial
Signed-off-by: YunLiu <[email protected]>
KumoLiu committed Jan 18, 2024
1 parent 01673c4 commit 98447af
Showing 5 changed files with 296 additions and 264 deletions.
6 changes: 3 additions & 3 deletions bundle/01_bundle_intro.ipynb
@@ -402,9 +402,9 @@
"datadicts: '$[{i: (i * i)} for i in range(10)]' # create a fake dataset as a list of dicts\n",
"\n",
"test_dataset: # creates an instance of an object because _target_ is present\n",
" _target_: Dataset # name of type to create is monai.data.Dataset (loaded implicitly from MONAI)\n",
" data: '@datadicts' # argument data provided by a definition\n",
" transform: '$None' # argument transform provided by a Python expression\n",
" _target_: Dataset # name of type to create is monai.data.Dataset (loaded implicitly from MONAI)\n",
" data: '@datadicts' # argument data provided by a definition\n",
" transform: '$None' # argument transform provided by a Python expression\n",
"\n",
"test:\n",
"- '$print(\"Dataset\", @test_dataset)'\n",
96 changes: 49 additions & 47 deletions bundle/02_mednist_classification.ipynb
@@ -294,25 +294,25 @@
"\n",
"# define the network separately, don't need to refer to MONAI types by name or import MONAI\n",
"network_def:\n",
" _target_: densenet121\n",
" spatial_dims: 2\n",
" in_channels: 1\n",
" out_channels: 6\n",
" _target_: densenet121\n",
" spatial_dims: 2\n",
" in_channels: 1\n",
" out_channels: 6\n",
"\n",
"# define the network to be the given definition moved to the device\n",
"net: '$@network_def.to(@device)'\n",
"\n",
"# define a transform sequence by instantiating a Compose instance with a transform sequence\n",
"transform:\n",
" _target_: Compose\n",
" transforms:\n",
" - _target_: LoadImaged\n",
" keys: 'image'\n",
" image_only: true\n",
" - _target_: EnsureChannelFirstd\n",
" keys: 'image'\n",
" - _target_: ScaleIntensityd\n",
" keys: 'image'"
" _target_: Compose\n",
" transforms:\n",
" - _target_: LoadImaged\n",
" keys: 'image'\n",
" image_only: true\n",
" - _target_: EnsureChannelFirstd\n",
" keys: 'image'\n",
" - _target_: ScaleIntensityd\n",
" keys: 'image'"
]
},
{
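As a rough guide (again, not part of the diff), the network_def, net, and transform entries above correspond to building the same MONAI objects directly; the device definition is assumed to live elsewhere in the config:

import torch
from monai.networks.nets import densenet121
from monai.transforms import Compose, EnsureChannelFirstd, LoadImaged, ScaleIntensityd

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")  # assumed '@device'

# network_def, then net: '$@network_def.to(@device)'
net = densenet121(spatial_dims=2, in_channels=1, out_channels=6).to(device)

# the Compose transform sequence
transform = Compose([
    LoadImaged(keys="image", image_only=True),
    EnsureChannelFirstd(keys="image"),
    ScaleIntensityd(keys="image"),
])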
@@ -356,32 +356,32 @@
"max_epochs: 25\n",
"\n",
"dataset:\n",
" _target_: MedNISTDataset\n",
" root_dir: '@root_dir'\n",
" transform: '@transform'\n",
" section: training\n",
" download: true\n",
" _target_: MedNISTDataset\n",
" root_dir: '@root_dir'\n",
" transform: '@transform'\n",
" section: training\n",
" download: true\n",
"\n",
"train_dl:\n",
" _target_: DataLoader\n",
" dataset: '@dataset'\n",
" batch_size: 512\n",
" shuffle: true\n",
" num_workers: 4\n",
" _target_: DataLoader\n",
" dataset: '@dataset'\n",
" batch_size: 512\n",
" shuffle: true\n",
" num_workers: 4\n",
"\n",
"trainer:\n",
" _target_: SupervisedTrainer\n",
" device: '@device'\n",
" max_epochs: '@max_epochs'\n",
" train_data_loader: '@train_dl'\n",
" network: '@net'\n",
" optimizer: \n",
" _target_: torch.optim.Adam\n",
" params: '[email protected]()'\n",
" lr: 0.00001 # learning rate set slow so that you can see network improvement over epochs\n",
" loss_function: \n",
" _target_: torch.nn.CrossEntropyLoss\n",
" inferer: \n",
" _target_: SupervisedTrainer\n",
" device: '@device'\n",
" max_epochs: '@max_epochs'\n",
" train_data_loader: '@train_dl'\n",
" network: '@net'\n",
" optimizer: \n",
" _target_: torch.optim.Adam\n",
" params: '[email protected]()'\n",
" lr: 0.00001 # learning rate set slow so that you can see network improvement over epochs\n",
" loss_function: \n",
" _target_: torch.nn.CrossEntropyLoss\n",
" inferer: \n",
" _target_: SimpleInferer\n",
"\n",
"train:\n",
@@ -519,6 +519,7 @@
"source": [
"%%writefile MedNISTClassifier/scripts/__init__.py\n",
"\n",
"from monai.networks.utils import eval_mode\n",
"\n",
"def evaluate(net, dataloader, class_names, device):\n",
" with eval_mode(net):\n",
@@ -527,7 +527,7 @@
" prob = result.detach().to(\"cpu\")[0]\n",
" pred = class_names[prob.argmax()]\n",
" gt = item[\"class_name\"][0]\n",
" print(f\"Prediction: {pred}. Ground-truth: {gt}\")"
" print(f\"Prediction: {pred}. Ground-truth: {gt}\")\n"
]
},
{
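The evaluate helper is only partly visible in this hunk; the hidden lines presumably iterate the dataloader and run the network. A hedged reconstruction for readability; only the lines shown in the diff are confirmed:

from monai.networks.utils import eval_mode

def evaluate(net, dataloader, class_names, device):
    # eval_mode puts the network in evaluation mode and restores its previous state on exit
    with eval_mode(net):
        for item in dataloader:                      # assumed: iterate the test DataLoader
            result = net(item["image"].to(device))   # assumed: forward pass on the image batch
            prob = result.detach().to("cpu")[0]
            pred = class_names[prob.argmax()]
            gt = item["class_name"][0]
            print(f"Prediction: {pred}. Ground-truth: {gt}")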
@@ -556,6 +557,7 @@
],
"source": [
"%%writefile MedNISTClassifier/configs/evaluate.yaml\n",
"\n",
"imports: \n",
"- $import scripts\n",
"\n",
@@ -564,23 +566,23 @@
"ckpt_file: \"\"\n",
"\n",
"testdata:\n",
" _target_: MedNISTDataset\n",
" root_dir: '@root_dir'\n",
" transform: '@transform'\n",
" section: test\n",
" download: false\n",
" runtime_cache: true\n",
" _target_: MedNISTDataset\n",
" root_dir: '@root_dir'\n",
" transform: '@transform'\n",
" section: test\n",
" download: false\n",
" runtime_cache: true\n",
"\n",
"eval_dl:\n",
" _target_: DataLoader\n",
" dataset: '$@testdata[:@max_items_to_print]'\n",
" batch_size: 1\n",
" num_workers: 0\n",
" _target_: DataLoader\n",
" dataset: '$@testdata[:@max_items_to_print]'\n",
" batch_size: 1\n",
" num_workers: 0\n",
"\n",
"# loads the weights from the given file (which needs to be set on the command line) then calls \"evaluate\"\n",
"evaluate:\n",
"- '[email protected]_state_dict(torch.load(@ckpt_file))'\n",
"- '$scripts.evaluate(@net, @eval_dl, @class_names, @device)'"
"- '$scripts.evaluate(@net, @eval_dl, @class_names, @device)'\n"
]
},
{
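With both configs in place, evaluation can be launched through the bundle runner. A sketch assuming the earlier training config was saved as MedNISTClassifier/configs/common.yaml and a checkpoint exists at model.pt (both names illustrative):

from monai.bundle import run

# Runs the 'evaluate' expression list; extra keyword arguments such as ckpt_file
# override the corresponding entries in the config, mirroring command-line overrides.
run(
    "evaluate",
    config_file=["MedNISTClassifier/configs/common.yaml",
                 "MedNISTClassifier/configs/evaluate.yaml"],
    ckpt_file="model.pt",
)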