Reorg for converters in hardtanh (FX Converter Refactor [5/N]) by apbose · Pull Request #1901 · pytorch/TensorRT

```diff
--- py/torch_tensorrt/fx/converters/aten_ops_converters.py	2023-05-11 23:31:10.187297 +0000
+++ py/torch_tensorrt/fx/converters/aten_ops_converters.py	2023-05-11 23:31:26.881529 +0000
@@ -209,17 +209,11 @@
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
     return activation.hardtanh(

 @tensorrt_converter(torch.ops.aten.linear)
 def aten_ops_linear(
--- py/torch_tensorrt/fx/converters/impl/activation.py	2023-05-11 23:31:10.187297 +0000
+++ py/torch_tensorrt/fx/converters/impl/activation.py	2023-05-11 23:31:27.090627 +0000
@@ -93,11 +93,11 @@
         source_ir,
         name,
         operation_type,
         input_val,
         alpha,

 def relu(
--- py/torch_tensorrt/fx/converters/acc_ops_converters.py	2023-05-11 23:31:10.187297 +0000
+++ py/torch_tensorrt/fx/converters/acc_ops_converters.py	2023-05-11 23:31:28.868334 +0000
@@ -3596,11 +3596,11 @@
         target,
         SourceIR.ATEN,
         name,
         kwargs["input"],
         kwargs["min_val"],

 @tensorrt_converter(acc_ops.interpolate)
 def acc_ops_interpolate(
```
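The pattern visible across these three files is the point of this refactor series: the per-frontend converters in `aten_ops_converters.py` and `acc_ops_converters.py` shrink to thin shims that delegate to a shared implementation in `impl/activation.py`. Below is a minimal sketch of that delegation for hardtanh. The names `activation.hardtanh`, `SourceIR`, and the `kwargs["input"]`/`kwargs["min_val"]` arguments appear in the diff above; `convert_activation`, its exact signature, and the mapping of hardtanh onto TensorRT's CLIP activation (alpha = lower bound, beta = upper bound) are assumptions for illustration, not the PR's exact code.

```python
from enum import Enum, auto

import tensorrt as trt


class SourceIR(Enum):
    # Stand-in for the repo's SourceIR enum, which records which frontend
    # (ATen, acc_ops, ...) a converter call originated from.
    ATEN = auto()
    ACC = auto()


def convert_activation(network, target, source_ir, name, operation_type,
                       input_val, alpha=None, beta=None):
    # Hypothetical shared helper (impl/activation.py): add one TensorRT
    # IActivationLayer and forward the optional alpha/beta parameters.
    layer = network.add_activation(input_val, operation_type)
    if alpha is not None:
        layer.alpha = alpha
    if beta is not None:
        layer.beta = beta
    layer.name = f"{source_ir.name.lower()}.{name}"
    return layer.get_output(0)


def hardtanh(network, target, source_ir, name, input_val, alpha, beta):
    # hardtanh(x) = clamp(x, alpha, beta); TensorRT can express this as a
    # CLIP activation whose alpha/beta fields carry the bounds (assumed).
    return convert_activation(
        network, target, source_ir, name,
        trt.ActivationType.CLIP, input_val, alpha=alpha, beta=beta,
    )


def aten_ops_hardtanh(network, target, args, kwargs, name):
    # With the shared impl in place, the per-frontend converter is a shim:
    # unpack the op's kwargs and delegate, tagging the originating IR.
    return hardtanh(
        network, target, SourceIR.ATEN, name,
        kwargs["input"], kwargs["min_val"], kwargs["max_val"],
    )
```

An acc_ops shim would look the same apart from passing its own `SourceIR` tag and reading the bounds from the acc-op's kwargs; keeping the TensorRT layer construction in one place is what lets both frontends share tests and bug fixes.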