Reorg for converters leaky_relu (FX Converter Refactor [6/N]) <Target: converter_reorg_proto> by apbose · Pull Request #1902 · pytorch/TensorRT
--- py/torch_tensorrt/fx/converters/aten_ops_converters.py	2023-05-10 05:50:01.758314 +0000
+++ py/torch_tensorrt/fx/converters/aten_ops_converters.py	2023-05-10 05:50:17.483655 +0000
@@ -214,28 +214,20 @@
         "other": args[1],
     }
     return acc_ops_converters.acc_ops_fmod(network, target, None, kwargs_new, name)
 
 
 @tensorrt_converter(torch.ops.aten.leaky_relu.default)
 def aten_ops_leaky_relu(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return activation.leaky_relu(
-        network,
-        target,
-        SourceIR.ATEN,
-        name,
-        args[0],
-        args[1]
-    )
+    return activation.leaky_relu(network, target, SourceIR.ATEN, name, args[0], args[1])
 
 
 @tensorrt_converter(torch.ops.aten.linear)
 def aten_ops_linear(
     network: TRTNetwork,
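For context on the reorg: each frontend (ATen here, acc and nn below) keeps a thin registered converter that delegates to the shared implementation in impl/activation.py. Here is a minimal sketch of that dispatch pattern; the signatures are simplified stand-ins for illustration, not the real torch_tensorrt API, which passes TensorRT network objects and builds layers via convert_activation.

```python
# Sketch of the shared-implementation pattern this PR applies to leaky_relu.
# SourceIR and the converter names mirror the diff; the bodies are stand-ins.
from enum import Enum, auto


class SourceIR(Enum):
    ACC = auto()
    ATEN = auto()
    NN = auto()


def leaky_relu_impl(network, target, source_ir, name, input_val, alpha):
    # Stand-in for impl/activation.py's leaky_relu: the real function builds
    # a trt.ActivationType.LEAKY_RELU layer; here we just record the caller.
    return f"[{source_ir.name}] {name}: leaky_relu(alpha={alpha})"


def aten_ops_leaky_relu(network, target, args, kwargs, name):
    # ATen hands the operands over positionally: (input, negative_slope).
    return leaky_relu_impl(network, target, SourceIR.ATEN, name, args[0], args[1])


def acc_ops_leaky_relu(network, target, args, kwargs, name):
    # acc hands the same operands over as keywords.
    return leaky_relu_impl(
        network, target, SourceIR.ACC, name, kwargs["input"], kwargs["negative_slope"]
    )


print(aten_ops_leaky_relu(None, None, ("x", 0.01), {}, "leaky_relu_1"))
print(acc_ops_leaky_relu(None, None, (), {"input": "x", "negative_slope": 0.01}, "lr_2"))
```

The point of centralizing the layer construction is that a fix to the activation logic lands in one place for all three frontends.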
--- py/torch_tensorrt/fx/converters/impl/activation.py	2023-05-10 05:50:01.758314 +0000
+++ py/torch_tensorrt/fx/converters/impl/activation.py	2023-05-10 05:50:17.702542 +0000
@@ -96,16 +96,18 @@
     network: TRTNetwork,
     target: Target,
     source_ir: Optional[SourceIR],
     name: str,
     input_val: TRTTensor,
-    alpha: Optional[Any]
+    alpha: Optional[Any],
 ):
     operation_type = trt.ActivationType.LEAKY_RELU
 
     def leaky_relu_dyn_range_fn(dyn_range):
-        return (max(0, dyn_range[0]) + alpha * min(0, dyn_range[0])), (max(0, dyn_range[1]) + alpha * min(0, dyn_range[1]))
+        return (max(0, dyn_range[0]) + alpha * min(0, dyn_range[0])), (
+            max(0, dyn_range[1]) + alpha * min(0, dyn_range[1])
+        )
 
     return convert_activation(
         network,
         target,
         source_ir,
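The leaky_relu_dyn_range_fn in this hunk maps each dynamic-range bound b to LeakyReLU(b) = max(0, b) + alpha * min(0, b); since LeakyReLU is monotonically increasing for alpha > 0, applying it to the two endpoints yields the output range. A standalone check of that rule (the alpha value is illustrative):

```python
# Standalone check of the dynamic-range rule used in leaky_relu_dyn_range_fn.
alpha = 0.01  # illustrative negative_slope


def leaky_relu_dyn_range(dyn_range):
    lo, hi = dyn_range
    # Each bound b maps to LeakyReLU(b) = max(0, b) + alpha * min(0, b).
    return (
        max(0, lo) + alpha * min(0, lo),
        max(0, hi) + alpha * min(0, hi),
    )


print(leaky_relu_dyn_range((-2.0, 3.0)))  # -> (-0.02, 3.0)
```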
--- py/torch_tensorrt/fx/converters/nn_ops_converters.py	2023-05-10 05:50:01.758314 +0000
+++ py/torch_tensorrt/fx/converters/nn_ops_converters.py	2023-05-10 05:50:17.768262 +0000
@@ -34,7 +34,7 @@
         network=network,
         target="torch.nn.functional.leaky_relu",
         source_ir=SourceIR.NN,
         name=layer_name,
         input_val=kwargs["input"],
-        alpha=kwargs["negative_slope"]
-    )
\ No newline at end of file
+        alpha=kwargs["negative_slope"],
+    )
--- py/torch_tensorrt/fx/converters/acc_ops_converters.py	2023-05-10 05:50:01.758314 +0000
+++ py/torch_tensorrt/fx/converters/acc_ops_converters.py	2023-05-10 05:50:19.367247 +0000
@@ -1023,16 +1023,11 @@
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return activation.leaky_relu(
-        network,
-        target,
-        SourceIR.ACC,
-        name,
-        kwargs["input"],
-        kwargs["negative_slope"]
-    )
+    return activation.leaky_relu(
+        network, target, SourceIR.ACC, name, kwargs["input"], kwargs["negative_slope"]
+    )
 
 
 @tensorrt_converter(acc_ops.elu)
 def acc_ops_elu(
--- py/torch_tensorrt/fx/test/converters/aten_op/test_leaky_relu_aten.py	2023-05-10 05:50:01.762314 +0000
+++ py/torch_tensorrt/fx/test/converters/aten_op/test_leaky_relu_aten.py	2023-05-10 05:50:19.646364 +0000
@@ -48,6 +48,6 @@
         TestModule(), input_specs, expected_ops={torch.ops.aten.leaky_relu.default}
     )
 
 
 if __name__ == "__main__":
-    run_tests()
\ No newline at end of file
+    run_tests()
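The test file keeps the standard PyTorch entry point (run_tests, which these fx tests import from torch.testing._internal.common_utils). As a self-contained analogue of the property the harness exercises on TestModule, here is a minimal sketch comparing leaky_relu against the same closed form the dynamic-range function uses; the TensorRT-specific pieces visible in the diff (input_specs, expected_ops) are deliberately omitted.

```python
# Minimal, self-contained analogue of the leaky_relu test: compares
# torch.nn.functional.leaky_relu against its closed form. This is an
# illustration, not the real harness from test_leaky_relu_aten.py.
import torch
from torch.testing._internal.common_utils import TestCase, run_tests


class TestLeakyReLUFormula(TestCase):
    def test_matches_closed_form(self):
        x = torch.linspace(-2.0, 2.0, steps=9)
        alpha = 0.05
        # Same per-element form that leaky_relu_dyn_range_fn applies per bound.
        expected = torch.where(x >= 0, x, alpha * x)
        actual = torch.nn.functional.leaky_relu(x, negative_slope=alpha)
        self.assertTrue(torch.allclose(actual, expected))


if __name__ == "__main__":
    run_tests()
```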