Skip to content

Commit

Permalink
Move common validators to NnxTest
Browse files Browse the repository at this point in the history
  • Loading branch information
lukamac committed Jan 19, 2024
1 parent 2947229 commit c7301cc
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 76 deletions.
40 changes: 3 additions & 37 deletions test/Ne16TestConf.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from typing import List, Union, Optional
from Ne16 import Ne16
from NnxTestClasses import NnxTestConf
from TestClasses import implies, KernelShape, Padding, Stride, IntegerType
from TestClasses import implies, KernelShape, Stride, IntegerType
from pydantic import field_validator, model_validator


Expand Down Expand Up @@ -89,52 +89,18 @@ def check_valid_out_channel_with_stride_2x2(self) -> Ne16TestConf:
return self

@model_validator(mode="after") # type: ignore
def check_valid_depthwise(self) -> Ne16TestConf:
def check_valid_depthwise_kernel_shape(self) -> Ne16TestConf:
assert implies(
self.depthwise, self.kernel_shape == KernelShape(height=3, width=3)
), f"Depthwise supported only on 3x3 kernel shape. Given kernel shape {self.kernel_shape}."
assert implies(self.depthwise, self.in_channel == self.out_channel), (
f"Input and output channel should be the same in a depthwise layer. "
f"input channel: {self.in_channel}, output channel: {self.out_channel}"
)
return self

@model_validator(mode="after") # type: ignore
def check_valid_padding_with_kernel_shape_1x1(self) -> Ne16TestConf:
assert implies(
self.kernel_shape == KernelShape(height=1, width=1),
self.padding == Padding(top=0, bottom=0, left=0, right=0),
), f"No padding on 1x1 kernel. Given padding {self.padding}"
return self

@field_validator("has_norm_quant")
@classmethod
def check_valid_has_norm_quant(cls, v: bool) -> bool:
assert v == True, f"Untested without has_norm_quant."
return v

@model_validator(mode="after") # type: ignore
def check_valid_norm_quant_types_when_has_norm_qunat(self) -> Ne16TestConf:
if self.has_norm_quant:
assert self.scale_type is not None, "Scale type was not provided."
if self.has_bias:
assert self.bias_type is not None, "Bias type was not provided."
return self

@model_validator(mode="after") # type: ignore
def check_valid_out_type_with_flags(self) -> Ne16TestConf:
def check_valid_out_type_with_norm_quant(self) -> Ne16TestConf:
assert implies(
not self.has_norm_quant, self.out_type == Ne16.ACCUMULATOR_TYPE
), (
f"Without quantization, the output type has to be equal to the "
f"accumulator type {Ne16.ACCUMULATOR_TYPE}. Given output type {self.out_type}"
)
assert implies(
self.has_norm_quant,
(self.has_relu and not self.out_type._signed)
or (not self.has_relu and self.out_type._signed),
), (
f"Output type has to be unsigned when there is relu, otherwise signed. "
f"Given output type {self.out_type} and has_relu {self.has_relu}"
)
return self
40 changes: 3 additions & 37 deletions test/NeurekaTestConf.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from Neureka import Neureka
from typing import List, Union, Optional
from NnxTestClasses import NnxTestConf
from TestClasses import implies, KernelShape, Padding, Stride, IntegerType
from TestClasses import implies, KernelShape, Stride, IntegerType
from pydantic import field_validator, model_validator


Expand Down Expand Up @@ -80,52 +80,18 @@ def check_valid_bias_type(cls, v: Optional[IntegerType]) -> Optional[IntegerType
return v

@model_validator(mode="after") # type: ignore
def check_valid_depthwise(self) -> NeurekaTestConf:
def check_valid_depthwise_kernel_shape(self) -> NeurekaTestConf:
assert implies(
self.depthwise, self.kernel_shape == KernelShape(height=3, width=3)
), f"Depthwise supported only on 3x3 kernel shape. Given kernel shape {self.kernel_shape}."
assert implies(self.depthwise, self.in_channel == self.out_channel), (
f"Input and output channel should be the same in a depthwise layer. "
f"input channel: {self.in_channel}, output channel: {self.out_channel}"
)
return self

@model_validator(mode="after") # type: ignore
def check_valid_padding_with_kernel_shape_1x1(self) -> NeurekaTestConf:
assert implies(
self.kernel_shape == KernelShape(height=1, width=1),
self.padding == Padding(top=0, bottom=0, left=0, right=0),
), f"No padding on 1x1 kernel. Given padding {self.padding}"
return self

@field_validator("has_norm_quant")
@classmethod
def check_valid_has_norm_quant(cls, v: bool) -> bool:
assert v == True, f"Untested without has_norm_quant."
return v

@model_validator(mode="after") # type: ignore
def check_valid_norm_quant_types_when_has_norm_qunat(self) -> NeurekaTestConf:
if self.has_norm_quant:
assert self.scale_type is not None, "Scale type was not provided."
if self.has_bias:
assert self.bias_type is not None, "Bias type was not provided."
return self

@model_validator(mode="after") # type: ignore
def check_valid_out_type_with_flags(self) -> NeurekaTestConf:
def check_valid_out_type_with_norm_quant(self) -> NeurekaTestConf:
assert implies(
not self.has_norm_quant, self.out_type == Neureka.ACCUMULATOR_TYPE
), (
f"Without quantization, the output type has to be equal to the "
f"accumulator type {Neureka.ACCUMULATOR_TYPE}. Given output type {self.out_type}"
)
assert implies(
self.has_norm_quant,
(self.has_relu and not self.out_type._signed)
or (not self.has_relu and self.out_type._signed),
), (
f"Output type has to be unsigned when there is relu, otherwise signed. "
f"Given output type {self.out_type} and has_relu {self.has_relu}"
)
return self
42 changes: 40 additions & 2 deletions test/NnxTestClasses.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@
import torch.nn.functional as F
import os
from HeaderWriter import HeaderWriter
from TestClasses import IntegerType, Stride, Padding, KernelShape, implies
from pydantic import BaseModel, PositiveInt
from TestClasses import IntegerType, Stride, Padding, KernelShape, implies, xor
from pydantic import BaseModel, PositiveInt, field_validator


class NnxTestConf(BaseModel):
Expand All @@ -46,6 +46,44 @@ class NnxTestConf(BaseModel):
has_bias: bool
has_relu: bool

@model_validator(mode="after")  # type: ignore
def check_valid_depthwise_channels(self) -> NnxTestConf:
    """A depthwise layer maps each input channel to one output channel, so the counts must match."""
    # NOTE(review): confirm `model_validator` is imported from pydantic in this
    # module — the visible diff only adds `field_validator` to the import line.
    if self.depthwise:
        assert self.in_channel == self.out_channel, (
            f"Input and output channel should be the same in a depthwise layer. "
            f"input channel: {self.in_channel}, output channel: {self.out_channel}"
        )
    return self

@model_validator(mode="after")  # type: ignore
def check_valid_padding_with_kernel_shape_1x1(self) -> NnxTestConf:
    """A 1x1 kernel admits no padding."""
    is_1x1_kernel = self.kernel_shape == KernelShape(height=1, width=1)
    has_no_padding = self.padding == Padding(top=0, bottom=0, left=0, right=0)
    assert implies(
        is_1x1_kernel, has_no_padding
    ), f"No padding on 1x1 kernel. Given padding {self.padding}"
    return self

@field_validator("has_norm_quant")
@classmethod
def check_valid_has_norm_quant(cls, v: bool) -> bool:
    """Reject configurations that disable normalization/quantization (untested path)."""
    # Idiom fix: truthiness test instead of `v == True` (ruff E712), and the
    # message had a stray f-prefix despite containing no placeholders.
    assert v, "Untested without has_norm_quant."
    return v

@model_validator(mode="after")  # type: ignore
def check_valid_norm_quant_types_when_has_norm_qunat(self) -> NnxTestConf:
    """With norm/quant enabled, a scale type is required, and a bias type too when there is a bias."""
    # NOTE(review): "qunat" in the validator name is a typo for "quant"; left
    # as-is here since renaming the method is out of scope for a restyle.
    if not self.has_norm_quant:
        return self
    assert self.scale_type is not None, "Scale type was not provided."
    if self.has_bias:
        assert self.bias_type is not None, "Bias type was not provided."
    return self

@model_validator(mode="after")  # type: ignore
def check_valid_out_type_with_relu(self) -> NnxTestConf:
    """Output type must be unsigned exactly when the layer applies a ReLU."""
    # BUG FIX: the check was `xor(self.has_relu, not self.out_type._signed)`,
    # which is the NEGATION of the intended condition — for the valid
    # combination has_relu=True with an unsigned out_type, xor(True, True) is
    # False and the assert fired. The pre-refactor form in Ne16TestConf /
    # NeurekaTestConf was `(has_relu and not signed) or (not has_relu and
    # signed)`, i.e. xor(has_relu, signed).
    assert xor(self.has_relu, self.out_type._signed), (
        f"Output type has to be unsigned when there is relu, otherwise signed. "
        f"Given output type {self.out_type} and has_relu {self.has_relu}"
    )
    return self


class NnxTest:
_CONF_NAME = "conf.json"
Expand Down
4 changes: 4 additions & 0 deletions test/TestClasses.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ def implies(a: bool, b: bool):
return (not a) or b


def xor(a: bool, b: bool) -> bool:
    """Return True iff exactly one of *a* and *b* is true.

    Coerces both arguments with bool() so that truthy non-bool values
    (e.g. non-zero ints) behave consistently — the previous form
    `(a and not b) or (not a and b)` returned False for xor(1, 2).
    """
    return bool(a) != bool(b)


class KernelShape(BaseModel):
height: PositiveInt
width: PositiveInt
Expand Down

0 comments on commit c7301cc

Please sign in to comment.