From d8be9b1c52ad9d9ee2f1ab35840aab0e382f57b9 Mon Sep 17 00:00:00 2001
From: Jorge Pineda <32918197+jorgep31415@users.noreply.github.com>
Date: Fri, 23 Aug 2024 11:50:35 -0700
Subject: [PATCH] [ET-VK] Set export log level INFO

Differential Revision: D61723563
Pull Request resolved: https://github.com/pytorch/executorch/pull/4870
---
 backends/vulkan/partitioner/vulkan_partitioner.py     | 9 ++++++---
 backends/vulkan/serialization/vulkan_graph_builder.py | 7 +++++--
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/backends/vulkan/partitioner/vulkan_partitioner.py b/backends/vulkan/partitioner/vulkan_partitioner.py
index 4d24877b63..c4fbaabdbc 100644
--- a/backends/vulkan/partitioner/vulkan_partitioner.py
+++ b/backends/vulkan/partitioner/vulkan_partitioner.py
@@ -38,6 +38,9 @@
     torch.ops.aten.upsample_nearest2d.vec,
 ]
 
+logger: logging.Logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
 
 class VulkanSupportedOperators(OperatorSupportBase):
     _ops: OpList = enumerate_supported_ops()
@@ -110,7 +113,7 @@ def is_node_supported(
     ) -> bool:
         r = self._is_node_supported(submodules, node)
         if not r and node.op == "call_function":
-            logging.info(f"Skipping node in Vulkan partitioning: {node.format_node()}")
+            logger.info(f"Skipping node in Vulkan partitioning: {node.format_node()}")
         return r
 
     def _is_node_supported(
@@ -179,9 +182,9 @@ def partition(self, exported_program: ExportedProgram) -> PartitionResult:
 
         pl = len(partition_list)
         if pl == 0:
-            logging.warning("No Vulkan subgraphs can be partitioned!")
+            logger.warning("No Vulkan subgraphs can be partitioned!")
         else:
-            logging.info(f"Found {pl} Vulkan subgraphs to be partitioned.")
+            logger.info(f"Found {pl} Vulkan subgraphs to be partitioned.")
 
         tag_constant_data(exported_program)
 
diff --git a/backends/vulkan/serialization/vulkan_graph_builder.py b/backends/vulkan/serialization/vulkan_graph_builder.py
index da40f0a720..fcbf3edb7e 100644
--- a/backends/vulkan/serialization/vulkan_graph_builder.py
+++ b/backends/vulkan/serialization/vulkan_graph_builder.py
@@ -24,6 +24,9 @@
     Node, NoneType, _ScalarType, TensorSpec, List[_ScalarType], List[Node], str
 ]
 
+logger: logging.Logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
 
 class VkGraphBuilder:
     def __init__(
@@ -351,9 +354,9 @@ def build_graph(self) -> vk_graph_schema.VkGraph:
             self.process_node(node, call_node_debug_hdl)
             call_node_debug_hdl += 1
 
-        logging.info("Operators included in this Vulkan partition: ")
+        logger.info("Operators included in this Vulkan partition: ")
         for op in self.seen_ops:
-            logging.info(f"    {op.__name__}")
+            logger.info(f"    {op.__name__}")
 
         return vk_graph_schema.VkGraph(
             version="0",
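
Note (not part of the patch): the sketch below is a minimal, standalone illustration of the
module-level named-logger pattern the diff switches to, and of how a caller can still quiet a
single module without touching the root logger. The message text, the "3", and the basicConfig
call are illustrative, not taken from the ExecuTorch sources.

    import logging

    # Same pattern as the patch: a per-module logger pinned to INFO so that
    # partitioning/export messages show up by default.
    logger: logging.Logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)

    if __name__ == "__main__":
        # A handler is still required for records to be emitted; basicConfig
        # attaches one to the root logger, which named loggers propagate to.
        logging.basicConfig(format="%(name)s %(levelname)s: %(message)s")

        logger.info("Found 3 Vulkan subgraphs to be partitioned.")  # emitted

        # Callers can raise the threshold for just this module to silence it.
        logging.getLogger(__name__).setLevel(logging.WARNING)
        logger.info("Skipping node in Vulkan partitioning: ...")  # suppressed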