Reorg for converters leaky_relu (FX Converter Refactor [6/N]) <Target: converter_reorg_proto> by apbose · Pull Request #1902 · pytorch/TensorRT (original) (raw)

--- py/torch_tensorrt/fx/converters/aten_ops_converters.py 2023-05-10 05:50:01.758314 +0000 +++ py/torch_tensorrt/fx/converters/aten_ops_converters.py 2023-05-10 05:50:17.483655 +0000 @@ -214,28 +214,20 @@ "other": args[1], } return acc_ops_converters.acc_ops_fmod(network, target, None, kwargs_new, name)

@tensorrt_converter(torch.ops.aten.linear) def aten_ops_linear( network: TRTNetwork, --- py/torch_tensorrt/fx/converters/impl/activation.py 2023-05-10 05:50:01.758314 +0000 +++ py/torch_tensorrt/fx/converters/impl/activation.py 2023-05-10 05:50:17.702542 +0000 @@ -96,16 +96,18 @@ network: TRTNetwork, target: Target, source_ir: Optional[SourceIR], name: str, input_val: TRTTensor,

): operation_type = trt.ActivationType.LEAKY_RELU

def leaky_relu_dyn_range_fn(dyn_range):

--- py/torch_tensorrt/fx/converters/nn_ops_converters.py 2023-05-10 05:50:01.758314 +0000 +++ py/torch_tensorrt/fx/converters/nn_ops_converters.py 2023-05-10 05:50:17.768262 +0000 @@ -34,7 +34,7 @@ network=network, target="torch.nn.functional.leaky_relu", source_ir=SourceIR.NN, name=layer_name, input_val=kwargs["input"],

\ No newline at end of file

--- py/torch_tensorrt/fx/converters/acc_ops_converters.py 2023-05-10 05:50:01.758314 +0000 +++ py/torch_tensorrt/fx/converters/acc_ops_converters.py 2023-05-10 05:50:19.367247 +0000 @@ -1023,16 +1023,11 @@ kwargs: Dict[str, Argument], name: str, ) -> Union[TRTTensor, Sequence[TRTTensor]]:

return activation.leaky_relu(

@tensorrt_converter(acc_ops.elu) def acc_ops_elu( --- py/torch_tensorrt/fx/test/converters/aten_op/test_leaky_relu_aten.py 2023-05-10 05:50:01.762314 +0000 +++ py/torch_tensorrt/fx/test/converters/aten_op/test_leaky_relu_aten.py 2023-05-10 05:50:19.646364 +0000 @@ -48,6 +48,6 @@ TestModule(), input_specs, expected_ops={torch.ops.aten.leaky_relu.default} )

if __name__ == "__main__":

\ No newline at end of file