Make activation handler guess the layout based on tensor rank if missing
iksnagreb committed Jan 21, 2025
1 parent a8bcfcb commit c2905f7
Showing 1 changed file with 31 additions and 7 deletions.
38 changes: 31 additions & 7 deletions src/finn/transformation/qonnx/qonnx_activation_handlers.py
@@ -402,12 +402,24 @@ def _calculate_thresholds(self):
                     else:
                         thresholds[c][t] = step / selu_scale
 
-        # First try to consider the tensor layout of the input for determining
-        # the number of output channels
+        # Get the shape of the input (should also be the output) tensor
+        # Note: Querying the input is more safe as we do not want to
+        # propagate shapes backwards by accident.
+        shape = self._model.get_tensor_shape(self._q_node.input[0])  # noqa
+        # First try to consider the tensor layout of the input for
+        # determining the number of output channels
         layout = self._model.get_tensor_layout(self._q_node.input[0])
-        # If there is a layout annotation, use this to determine the index of
-        # the channel dimension
-        if layout is not None and "C" in layout:
+        # If there is no layout annotation, guess based on rank of the
+        # tensor
+        # TODO: No support for Rank >= 5
+        if layout is None and len(shape) < 5:
+            # Maps tensor rank to layout annotation
+            rank_to_layout = {0: None, 1: "C", 2: "NC", 3: "NWC", 4: "NCHW"}
+            # Lookup the layout required by this input shape
+            layout = rank_to_layout[len(shape)]
+        # If there is a layout annotation, use this to determine the index
+        # of the channel dimension
+        if layout is not None and "C" in layout:  # noqa: Duplicate
             # Lookup the index in list
             cdim = layout.index("C")
         # If no layout has been annotated or there is no channel dimension, fall
@@ -570,12 +582,24 @@ def _calculate_thresholds(self):
             for t in range(num_thresholds):
                 thresholds[c][t] = min_threshold[c] + step[c] * t
 
+        # Get the shape of the input (should also be the output) tensor
+        # Note: Querying the input is more safe as we do not want to
+        # propagate shapes backwards by accident.
+        shape = self._model.get_tensor_shape(self._q_node.input[0])
         # First try to consider the tensor layout of the input for
         # determining the number of output channels
-        layout = self._model.get_tensor_layout(self._q_node.input[0])
+        layout = self._model.get_tensor_layout(self._q_node.input[0])  # noqa
+        # If there is no layout annotation, guess based on rank of the
+        # tensor
+        # TODO: No support for Rank >= 5
+        if layout is None and len(shape) < 5:
+            # Maps tensor rank to layout annotation
+            rank_to_layout = {0: None, 1: "C", 2: "NC", 3: "NWC", 4: "NCHW"}
+            # Lookup the layout required by this input shape
+            layout = rank_to_layout[len(shape)]
         # If there is a layout annotation, use this to determine the index
         # of the channel dimension
-        if layout is not None and "C" in layout:
+        if layout is not None and "C" in layout:  # noqa: Duplicate
             # Lookup the index in list
             cdim = layout.index("C")
         # If no layout has been annotated or there is no channel dimension,
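For reference, the rank-based guess that both hunks introduce can be read as a small standalone helper. The following is a minimal sketch for illustration only: guess_channel_dim is a hypothetical name, and the actual logic lives inline in the two _calculate_thresholds methods, which additionally fall back to a default channel assumption when no channel dimension can be determined.

def guess_channel_dim(shape, layout=None):
    """Return the index of the channel dimension of a tensor, guessing
    the layout from the tensor rank if no annotation is available."""
    # Guess the layout based on the rank if it has not been annotated
    # (ranks >= 5 are not supported, mirroring the TODO in the commit)
    if layout is None and len(shape) < 5:
        # Maps tensor rank to layout annotation
        rank_to_layout = {0: None, 1: "C", 2: "NC", 3: "NWC", 4: "NCHW"}
        layout = rank_to_layout[len(shape)]
    # If a layout is known and contains a channel dimension, look it up
    if layout is not None and "C" in layout:
        return layout.index("C")
    # No layout annotation or no channel dimension present
    return None

# A rank-4 tensor without annotation is assumed to be NCHW: channels at 1
assert guess_channel_dim((1, 64, 32, 32)) == 1
# A rank-2 tensor is assumed to be NC: channels again at index 1
assert guess_channel_dim((1, 10)) == 1
# An explicit layout annotation always takes precedence over the guess
assert guess_channel_dim((1, 32, 32, 64), layout="NHWC") == 3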
