diff --git a/luxonis_train/models/predefined_models/ddrnet_segmentation_model.py b/luxonis_train/models/predefined_models/ddrnet_segmentation_model.py
index 03daea1e..a3fb60da 100644
--- a/luxonis_train/models/predefined_models/ddrnet_segmentation_model.py
+++ b/luxonis_train/models/predefined_models/ddrnet_segmentation_model.py
@@ -1,5 +1,4 @@
 from dataclasses import dataclass, field
-from typing import Literal
 
 from luxonis_train.utils.config import (
     LossModuleConfig,
diff --git a/luxonis_train/nodes/backbones/ddrnet/__init__.py b/luxonis_train/nodes/backbones/ddrnet/__init__.py
index ef2f869e..8ecc5814 100644
--- a/luxonis_train/nodes/backbones/ddrnet/__init__.py
+++ b/luxonis_train/nodes/backbones/ddrnet/__init__.py
@@ -1,3 +1,3 @@
 from .ddrnet import DDRNet
 
-__all__ = ["DDRNet"]
\ No newline at end of file
+__all__ = ["DDRNet"]
diff --git a/luxonis_train/nodes/backbones/ddrnet/blocks.py b/luxonis_train/nodes/backbones/ddrnet/blocks.py
index 07eca586..87f54118 100644
--- a/luxonis_train/nodes/backbones/ddrnet/blocks.py
+++ b/luxonis_train/nodes/backbones/ddrnet/blocks.py
@@ -5,14 +5,12 @@ Paper: U{https://arxiv.org/pdf/2101.06085.pdf}
 @license: U{https://github.com/Deci-AI/super-gradients/blob/master/LICENSE.md}
 """
 
-from abc import ABC
-from typing import Dict, Type
+from typing import Type
 
 import torch
 from torch import Tensor, nn
 from torch.nn import functional as F
 
-from luxonis_train.nodes.base_node import BaseNode
 from luxonis_train.nodes.blocks import ConvModule
 
 
@@ -371,6 +369,7 @@ def forward(self, x: Tensor, output_height: int, output_width: int) -> Tensor:
         """
         return F.interpolate(x, size=[output_height, output_width], mode=self.mode)
 
+
 class BasicDDRBackBone(nn.Module):
     def __init__(
         self,
@@ -505,6 +504,7 @@ def get_backbone_output_number_of_channels(self) -> dict[str, int]:
 
         return output_shapes
 
+
 def _make_layer(
     block: Type[nn.Module],
     in_planes: int,
@@ -567,10 +567,8 @@ def drop_path(x: Tensor, drop_prob: float = 0.0, scale_by_keep: bool = True) ->
     @return: Tensor with dropped paths based on the provided drop probability.
     """
     keep_prob = 1 - drop_prob
-    shape = (x.shape[0],) + (1,) * (
-        x.ndim - 1
-    )
+    shape = (x.shape[0],) + (1,) * (x.ndim - 1)
     random_tensor = x.new_empty(shape).bernoulli_(keep_prob)
     if keep_prob > 0.0 and scale_by_keep:
         random_tensor.div_(keep_prob)
-    return x * random_tensor
\ No newline at end of file
+    return x * random_tensor
diff --git a/luxonis_train/nodes/backbones/ddrnet/ddrnet.py b/luxonis_train/nodes/backbones/ddrnet/ddrnet.py
index 1d8ddbf3..00e7cae9 100644
--- a/luxonis_train/nodes/backbones/ddrnet/ddrnet.py
+++ b/luxonis_train/nodes/backbones/ddrnet/ddrnet.py
@@ -5,18 +5,21 @@ Paper: U{https://arxiv.org/pdf/2101.06085.pdf}
 @license: U{https://github.com/Deci-AI/super-gradients/blob/master/LICENSE.md}
 """
 
-from abc import ABC
 from typing import Dict, Type
 
-import torch
 from torch import Tensor, nn
-from torch.nn import functional as F
 
 from luxonis_train.nodes.base_node import BaseNode
 from luxonis_train.nodes.blocks import ConvModule
-from luxonis_train.nodes.heads import DDRNetSegmentationHead
 
-from .blocks import BasicResNetBlock, Bottleneck, UpscaleOnline, BasicDDRBackBone, DAPPM, _make_layer
+from .blocks import (
+    DAPPM,
+    BasicDDRBackBone,
+    BasicResNetBlock,
+    Bottleneck,
+    UpscaleOnline,
+    _make_layer,
+)
 
 
 class DDRNet(BaseNode[Tensor, list[Tensor]]):
@@ -113,7 +116,7 @@ def __init__(
         self.backbone_layers, self.additional_layers = self.layers[:4], self.layers[4:]
         self.input_channels = input_channels
 
-        self._backbone: DDRBackBoneBase = BasicDDRBackBone(
+        self._backbone = BasicDDRBackBone(
             block=self.block,
             width=self.planes,
             layers=self.backbone_layers,