feat(modules): add config_activation option in {conv1d,dense}_block
Additional changes:
* Update doctests
* Update docs
adosar committed Jan 3, 2025
1 parent 9aa015d commit 8cdf4c6
Showing 2 changed files with 54 additions and 13 deletions.
2 changes: 2 additions & 0 deletions docs/source/changelog.rst
@@ -6,6 +6,8 @@ Version 2.0.0

.. versionadded:: 2.0.0

* Option ``config_activation`` for :func:`~.conv1d_block` and
:func:`~.dense_block`.
* Support to ``.load_from_checkpoint`` without arguments for
:class:`~.PCDLit` and :class:`~.PCDDataModule`.
* Support for unlabeled data in :class:`~.PCDDataset` and
65 changes: 52 additions & 13 deletions src/aidsorb/modules.py
@@ -32,9 +32,10 @@

import torch
from torch import nn
from ._torch_utils import get_activation


def conv1d_block(in_channels, out_channels, **kwargs):
def conv1d_block(in_channels, out_channels, config_activation=None, **kwargs):
r"""
Return a 1D convolutional block.
@@ -43,14 +44,20 @@ def conv1d_block(in_channels, out_channels, **kwargs):
block = nn.Sequential(
conv_layer,
nn.BatchNorm1d(out_channels),
nn.ReLU(),
activation_fn
)
Parameters
----------
in_channels : int or None
If ``None``, the ``conv_layer`` is lazy initialized.
out_channels : int
config_activation : dict, default=None
Dictionary for configuring the activation function. If ``None``, the
:class:`~torch.nn.modules.activation.ReLU` activation is used.
* ``'name'``: the activation's class name (:class:`str`)
* ``'hparams'``: the activation's hyperparameters (:class:`dict`)
**kwargs
Valid keyword arguments for :class:`~torch.nn.Conv1d`.
@@ -60,15 +67,28 @@
Examples
--------
>>> inp, out = 4, 128
>>> x = torch.randn(32, 4, 100) # Shape (B, C_in, N).
>>> block = conv1d_block(4, 128, kernel_size=1)
>>> block(x).shape # Shape (B, C_out, N).
>>> config_afn = {'name': 'LeakyReLU', 'hparams': {'negative_slope': 0.5}}
>>> # Default activation function (ReLU).
>>> block = conv1d_block(inp, out, kernel_size=1)
>>> block(x).shape
torch.Size([32, 128, 100])
>>> block[2]
ReLU()
>>> # Custom activation function.
>>> block = conv1d_block(inp, out, config_afn, kernel_size=1)
>>> block(x).shape
torch.Size([32, 128, 100])
>>> block[2]
LeakyReLU(negative_slope=0.5)
>>> # Lazy initialized.
>>> block = conv1d_block(None, 16, kernel_size=1)
>>> block = conv1d_block(None, out, kernel_size=1)
>>> block(x).shape
torch.Size([32, 16, 100])
torch.Size([32, 128, 100])
"""
if in_channels is not None:
conv_layer = nn.Conv1d(in_channels, out_channels, **kwargs)
@@ -78,13 +98,13 @@ def conv1d_block(in_channels, out_channels, **kwargs):
block = nn.Sequential(
conv_layer,
nn.BatchNorm1d(out_channels),
nn.ReLU(),
get_activation(config_activation)
)

return block


def dense_block(in_features, out_features, **kwargs):
def dense_block(in_features, out_features, config_activation=None, **kwargs):
r"""
Return a dense block.
@@ -93,14 +113,20 @@ def dense_block(in_features, out_features, **kwargs):
block = nn.Sequential(
linear_layer,
nn.BatchNorm1d(out_features),
nn.ReLU(),
activation_fn,
)
Parameters
----------
in_features : int or None
If ``None``, the ``linear_layer`` is lazy initialized.
out_features : int
config_activation : dict, default=None
Dictionary for configuring the activation function. If ``None``, the
:class:`~torch.nn.modules.activation.ReLU` activation is used.
* ``'name'``: the activation's class name (:class:`str`)
* ``'hparams'``: the activation's hyperparameters (:class:`dict`)
**kwargs
Valid keyword arguments for :class:`~torch.nn.Linear`.
@@ -110,10 +136,23 @@ def dense_block(in_features, out_features, **kwargs):
Examples
--------
>>> x = torch.randn(64, 3) # Shape (B, in_features).
>>> block = dense_block(3, 10)
>>> block(x).shape # Shape (B, out_features).
>>> inp, out = 3, 10
>>> x = torch.randn(64, inp) # Shape (B, in_features).
>>> config_afn = {'name': 'SELU', 'hparams': {}}
>>> # Default activation function (ReLU).
>>> block = dense_block(inp, out)
>>> block(x).shape
torch.Size([64, 10])
>>> block[2]
ReLU()
>>> # Custom activation function.
>>> block = dense_block(inp, out, config_afn)
>>> block(x).shape
torch.Size([64, 10])
>>> block[2]
SELU()
>>> # Lazy initialized.
>>> block = dense_block(None, 16)
@@ -128,7 +167,7 @@ def dense_block(in_features, out_features, **kwargs):
block = nn.Sequential(
linear_layer,
nn.BatchNorm1d(out_features),
nn.ReLU(),
get_activation(config_activation)
)

return block
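
The `get_activation` helper imported from `._torch_utils` is not part of this diff. Judging from the docstrings above, it resolves the `'name'` key against `torch.nn` and instantiates it with `'hparams'`, falling back to `ReLU` when the config is `None`. A minimal sketch of that assumed behavior (not the actual `aidsorb._torch_utils` implementation):

from torch import nn

def get_activation(config_activation=None):
    # Hypothetical stand-in for aidsorb._torch_utils.get_activation,
    # based only on the documented contract in the docstrings above.
    if config_activation is None:
        return nn.ReLU()  # Default activation.
    name = config_activation['name']                # e.g. 'LeakyReLU'
    hparams = config_activation.get('hparams', {})  # e.g. {'negative_slope': 0.5}
    return getattr(nn, name)(**hparams)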

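For illustration, a short usage sketch of the new option outside the doctests, sharing one activation config between a convolutional block and a dense head (the composition below is hypothetical and not part of the package):

import torch
from aidsorb.modules import conv1d_block, dense_block

config_afn = {'name': 'LeakyReLU', 'hparams': {'negative_slope': 0.1}}

# Reuse the same activation config across both block types.
conv = conv1d_block(4, 64, config_afn, kernel_size=1)
head = dense_block(64, 10, config_afn)

x = torch.randn(8, 4, 100)    # (B, C_in, N)
feats = conv(x).mean(dim=-1)  # Global average pooling -> (B, 64)
out = head(feats)             # (B, 10)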