From e33c35c0c5ceade11bdb9f8efa4744fb32dbbda5 Mon Sep 17 00:00:00 2001
From: Philip Colangelo
Date: Fri, 6 Dec 2024 13:15:53 -0500
Subject: [PATCH 01/13] Initial commit for feature request #2. - Offers
solution for steps 1-3
---
examples/analysis.py | 4 +
src/digest/gui_config.yaml | 2 +-
src/digest/main.py | 286 ++++++-
src/digest/model_class/digest_model.py | 170 +++++
src/digest/model_class/digest_onnx_model.py | 668 ++++++++++++++++
src/digest/modelsummary.py | 4 +-
src/digest/multi_model_analysis.py | 15 +-
src/digest/multi_model_selection_page.py | 11 +-
src/digest/node_summary.py | 10 +-
src/digest/thread.py | 6 +-
src/digest/ui/mainwindow.ui | 2 +-
src/digest/ui/mainwindow_ui.py | 696 ++++++++++-------
src/utils/onnx_utils.py | 794 +-------------------
test/test_gui.py | 2 +
test/test_reports.py | 10 +-
15 files changed, 1604 insertions(+), 1076 deletions(-)
create mode 100644 src/digest/model_class/digest_model.py
create mode 100644 src/digest/model_class/digest_onnx_model.py
diff --git a/examples/analysis.py b/examples/analysis.py
index e9b9c63..a0bc277 100644
--- a/examples/analysis.py
+++ b/examples/analysis.py
@@ -90,6 +90,10 @@ def main(onnx_files: str, output_dir: str):
summary_filepath = os.path.join(output_dir, f"{model_name}_summary.txt")
digest_model.save_txt_report(summary_filepath)
+ # Model summary yaml report
+ summary_filepath = os.path.join(output_dir, f"{model_name}_summary.yaml")
+ digest_model.save_yaml_report(summary_filepath)
+
# Save csv containing node-level information
nodes_filepath = os.path.join(output_dir, f"{model_name}_nodes.csv")
digest_model.save_nodes_csv_report(nodes_filepath)
diff --git a/src/digest/gui_config.yaml b/src/digest/gui_config.yaml
index baffd47..dbd1c08 100644
--- a/src/digest/gui_config.yaml
+++ b/src/digest/gui_config.yaml
@@ -2,4 +2,4 @@
# For EXE releases we can block certain features e.g. to customers
modules:
- huggingface: false
\ No newline at end of file
+ huggingface: true
\ No newline at end of file
diff --git a/src/digest/main.py b/src/digest/main.py
index 08c401a..01dc01c 100644
--- a/src/digest/main.py
+++ b/src/digest/main.py
@@ -44,6 +44,8 @@
from digest.modelsummary import modelSummary
from digest.node_summary import NodeSummary
from digest.qt_utils import apply_dark_style_sheet
+from digest.model_class.digest_onnx_model import DigestOnnxModel
+from digest.model_class.digest_model import save_node_type_counts_csv_report
from utils import onnx_utils
GUI_CONFIG = os.path.join(os.path.dirname(__file__), "gui_config.yaml")
@@ -161,7 +163,7 @@ def __init__(self, model_file: Optional[str] = None):
self.status_dialog = None
self.err_open_dialog = None
self.temp_dir = tempfile.TemporaryDirectory()
- self.digest_models: Dict[str, onnx_utils.DigestOnnxModel] = {}
+ self.digest_models: Dict[str, DigestOnnxModel] = {}
# QThread containers
self.model_nodes_stats_thread: Dict[str, StatsThread] = {}
@@ -243,10 +245,10 @@ def uncheck_ingest_buttons(self):
def tab_focused(self, index):
widget = self.ui.tabWidget.widget(index)
if isinstance(widget, modelSummary):
- model_id = widget.digest_model.unique_id
+ unique_id = widget.digest_model.unique_id
if (
- self.stats_save_button_flag[model_id]
- and self.similarity_save_button_flag[model_id]
+ self.stats_save_button_flag[unique_id]
+ and self.similarity_save_button_flag[unique_id]
):
self.ui.saveBtn.setEnabled(True)
else:
@@ -273,17 +275,28 @@ def closeTab(self, index):
def openFile(self):
filename, _ = QFileDialog.getOpenFileName(
- self, "Open File", "", "ONNX Files (*.onnx)"
+ self, "Open File", "", "ONNX and Report Files (*.onnx *.yaml)"
)
- if (
- filename and os.path.splitext(filename)[-1] == ".onnx"
- ): # Only if user selects a file and clicks OK
+ if not filename:
+ return
+
+ file_ext = os.path.splitext(filename)[-1]
+
+ if file_ext == ".onnx":
self.load_onnx(filename)
+ elif file_ext == ".yaml":
+ self.load_report(filename)
+ else:
+ bad_ext_dialog = StatusDialog(
+ f"Digest does not support files with the extension {file_ext}",
+ parent=self,
+ )
+ bad_ext_dialog.show()
def update_flops_label(
self,
- digest_model: onnx_utils.DigestOnnxModel,
+ digest_model: DigestOnnxModel,
unique_id: str,
):
self.digest_models[unique_id].model_flops = digest_model.model_flops
@@ -432,7 +445,7 @@ def load_onnx(self, filepath: str):
basename = os.path.splitext(os.path.basename(filepath))
model_name = basename[0]
- digest_model = onnx_utils.DigestOnnxModel(
+ digest_model = DigestOnnxModel(
onnx_model=model, model_name=model_name, save_proto=False
)
model_id = digest_model.unique_id
@@ -652,6 +665,251 @@ def load_onnx(self, filepath: str):
except FileNotFoundError as e:
print(f"File not found: {e.filename}")
+ def load_report(self, filepath: str):
+
+        # Ensure the filepath follows a standard format:
+ filepath = os.path.normpath(filepath)
+
+ if not os.path.exists(filepath):
+ return
+
+ # Every time a report is loaded we should emulate a model summary button click
+ self.summary_clicked()
+
+ # Before opening the file, check to see if it is already opened.
+ for index in range(self.ui.tabWidget.count()):
+ widget = self.ui.tabWidget.widget(index)
+ if isinstance(widget, modelSummary) and filepath == widget.file:
+ self.ui.tabWidget.setCurrentIndex(index)
+ return
+
+ try:
+
+ progress = ProgressDialog("Loading Digest Report File...", 8, self)
+ QApplication.processEvents() # Process pending events
+
+ with open(filepath, "r", encoding="utf-8") as yaml_f:
+ report_data = yaml.safe_load(yaml_f)
+ model_name = report_data["model_name"]
+
+ model_id = digest_model.unique_id
+
+ # There is no sense in offering to save the report
+ self.stats_save_button_flag[model_id] = False
+ self.similarity_save_button_flag[model_id] = False
+
+ self.digest_models[model_id] = digest_model
+
+ # We must set the proto for the model_summary freeze_inputs
+ self.digest_models[model_id].model_proto = opt_model
+
+ model_summary = modelSummary(self.digest_models[model_id])
+ model_summary.freeze_inputs.complete_signal.connect(self.load_onnx)
+
+ dynamic_input_dims = onnx_utils.get_dynamic_input_dims(opt_model)
+ if dynamic_input_dims:
+ model_summary.ui.freezeButton.setVisible(True)
+ model_summary.ui.warningLabel.setText(
+ "⚠️ Some model details are unavailable due to dynamic input dimensions. "
+ "See section Input Tensor(s) Information below for more details."
+ )
+ model_summary.ui.warningLabel.show()
+
+ elif not opt_passed:
+ model_summary.ui.warningLabel.setText(
+ "⚠️ The model could not be optimized either due to an ONNX Runtime "
+ "session error or it did not pass the ONNX checker."
+ )
+ model_summary.ui.warningLabel.show()
+
+ progress.step()
+            progress.setLabelText("Checking for Dynamic Inputs")
+
+ self.ui.tabWidget.addTab(model_summary, "")
+ model_summary.ui.flops.setText("Loading...")
+
+ # Hide some of the components
+ model_summary.ui.similarityCorrelation.hide()
+ model_summary.ui.similarityCorrelationStatic.hide()
+
+ model_summary.file = filepath
+ model_summary.setObjectName(model_name)
+ model_summary.ui.modelName.setText(model_name)
+ model_summary.ui.modelFilename.setText(filepath)
+ model_summary.ui.generatedDate.setText(datetime.now().strftime("%B %d, %Y"))
+
+ self.digest_models[model_id].model_name = model_name
+ self.digest_models[model_id].filepath = filepath
+
+ self.digest_models[model_id].model_inputs = (
+ onnx_utils.get_model_input_shapes_types(opt_model)
+ )
+ self.digest_models[model_id].model_outputs = (
+ onnx_utils.get_model_output_shapes_types(opt_model)
+ )
+
+ progress.step()
+ progress.setLabelText("Calculating Parameter Count")
+
+ parameter_count = onnx_utils.get_parameter_count(opt_model)
+ model_summary.ui.parameters.setText(format(parameter_count, ","))
+
+ # Kick off model stats thread
+ self.model_nodes_stats_thread[model_id] = StatsThread()
+ self.model_nodes_stats_thread[model_id].completed.connect(
+ self.update_flops_label
+ )
+
+ self.model_nodes_stats_thread[model_id].model = opt_model
+ self.model_nodes_stats_thread[model_id].tab_name = model_name
+ self.model_nodes_stats_thread[model_id].unique_id = model_id
+ self.model_nodes_stats_thread[model_id].start()
+
+ progress.step()
+ progress.setLabelText("Calculating Node Type Counts")
+
+ node_type_counts = onnx_utils.get_node_type_counts(opt_model)
+ if len(node_type_counts) < 15:
+ bar_spacing = 40
+ else:
+ bar_spacing = 20
+ model_summary.ui.opHistogramChart.bar_spacing = bar_spacing
+ model_summary.ui.opHistogramChart.set_data(node_type_counts)
+ model_summary.ui.nodes.setText(str(sum(node_type_counts.values())))
+ self.digest_models[model_id].node_type_counts = node_type_counts
+
+ progress.step()
+ progress.setLabelText("Gathering Model Inputs and Outputs")
+
+ # Inputs Table
+ model_summary.ui.inputsTable.setRowCount(
+ len(self.digest_models[model_id].model_inputs)
+ )
+
+ for row_idx, (input_name, input_info) in enumerate(
+ self.digest_models[model_id].model_inputs.items()
+ ):
+ model_summary.ui.inputsTable.setItem(
+ row_idx, 0, QTableWidgetItem(input_name)
+ )
+ model_summary.ui.inputsTable.setItem(
+ row_idx, 1, QTableWidgetItem(str(input_info.shape))
+ )
+ model_summary.ui.inputsTable.setItem(
+ row_idx, 2, QTableWidgetItem(str(input_info.dtype))
+ )
+ model_summary.ui.inputsTable.setItem(
+ row_idx, 3, QTableWidgetItem(str(input_info.size_kbytes))
+ )
+
+ model_summary.ui.inputsTable.resizeColumnsToContents()
+ model_summary.ui.inputsTable.resizeRowsToContents()
+
+ # Outputs Table
+ model_summary.ui.outputsTable.setRowCount(
+ len(self.digest_models[model_id].model_outputs)
+ )
+ for row_idx, (output_name, output_info) in enumerate(
+ self.digest_models[model_id].model_outputs.items()
+ ):
+ model_summary.ui.outputsTable.setItem(
+ row_idx, 0, QTableWidgetItem(output_name)
+ )
+ model_summary.ui.outputsTable.setItem(
+ row_idx, 1, QTableWidgetItem(str(output_info.shape))
+ )
+ model_summary.ui.outputsTable.setItem(
+ row_idx, 2, QTableWidgetItem(str(output_info.dtype))
+ )
+ model_summary.ui.outputsTable.setItem(
+ row_idx, 3, QTableWidgetItem(str(output_info.size_kbytes))
+ )
+
+ model_summary.ui.outputsTable.resizeColumnsToContents()
+ model_summary.ui.outputsTable.resizeRowsToContents()
+
+ progress.step()
+ progress.setLabelText("Gathering Model Proto Data")
+
+ # ModelProto Info
+ model_summary.ui.modelProtoTable.setItem(
+ 0, 1, QTableWidgetItem(str(opt_model.model_version))
+ )
+ self.digest_models[model_id].model_version = opt_model.model_version
+
+ model_summary.ui.modelProtoTable.setItem(
+ 1, 1, QTableWidgetItem(str(opt_model.graph.name))
+ )
+ self.digest_models[model_id].graph_name = opt_model.graph.name
+
+ producer_txt = f"{opt_model.producer_name} {opt_model.producer_version}"
+ model_summary.ui.modelProtoTable.setItem(
+ 2, 1, QTableWidgetItem(producer_txt)
+ )
+ self.digest_models[model_id].producer_name = opt_model.producer_name
+ self.digest_models[model_id].producer_version = opt_model.producer_version
+
+ model_summary.ui.modelProtoTable.setItem(
+ 3, 1, QTableWidgetItem(str(opt_model.ir_version))
+ )
+ self.digest_models[model_id].ir_version = opt_model.ir_version
+
+ for imp in opt_model.opset_import:
+ row_idx = model_summary.ui.importsTable.rowCount()
+ model_summary.ui.importsTable.insertRow(row_idx)
+ if imp.domain == "" or imp.domain == "ai.onnx":
+ model_summary.ui.opsetVersion.setText(str(imp.version))
+ domain = "ai.onnx"
+ self.digest_models[model_id].opset = imp.version
+ else:
+ domain = imp.domain
+ model_summary.ui.importsTable.setItem(
+ row_idx, 0, QTableWidgetItem(str(domain))
+ )
+ model_summary.ui.importsTable.setItem(
+ row_idx, 1, QTableWidgetItem(str(imp.version))
+ )
+ row_idx += 1
+
+ self.digest_models[model_id].imports[imp.domain] = imp.version
+
+ progress.step()
+ progress.setLabelText("Wrapping Up Model Analysis")
+
+ model_summary.ui.importsTable.resizeColumnsToContents()
+ model_summary.ui.modelProtoTable.resizeColumnsToContents()
+ model_summary.setObjectName(model_name)
+ new_tab_idx = self.ui.tabWidget.count() - 1
+ self.ui.tabWidget.setTabText(new_tab_idx, "".join(model_name))
+ self.ui.tabWidget.setCurrentIndex(new_tab_idx)
+ self.ui.stackedWidget.setCurrentIndex(self.Page.SUMMARY)
+ self.ui.singleModelWidget.show()
+ progress.step()
+
+ movie = QMovie(":/assets/gifs/load.gif")
+ model_summary.ui.similarityImg.setMovie(movie)
+ movie.start()
+
+ # Start similarity Analysis
+ # Note: Should only be started after the model tab has been created
+ png_tmp_path = os.path.join(self.temp_dir.name, model_id)
+ os.makedirs(png_tmp_path, exist_ok=True)
+ self.model_similarity_thread[model_id] = SimilarityThread()
+ self.model_similarity_thread[model_id].completed_successfully.connect(
+ self.update_similarity_widget
+ )
+ self.model_similarity_thread[model_id].model_filepath = filepath
+ self.model_similarity_thread[model_id].png_filepath = os.path.join(
+ png_tmp_path, f"heatmap_{model_name}.png"
+ )
+ self.model_similarity_thread[model_id].model_id = model_id
+ self.model_similarity_thread[model_id].start()
+
+ progress.close()
+
+ except FileNotFoundError as e:
+ print(f"File not found: {e.filename}")
+
def dragEnterEvent(self, event: QDragEnterEvent):
if event.mimeData().hasUrls():
event.acceptProposedAction()
@@ -740,9 +998,7 @@ def save_reports(self):
)
node_counter = digest_model.get_node_type_counts()
if node_counter:
- onnx_utils.save_node_type_counts_csv_report(
- node_counter, node_type_filepath
- )
+ save_node_type_counts_csv_report(node_counter, node_type_filepath)
# Save the similarity image
similarity_png = self.model_similarity_report[digest_model.unique_id].grab()
@@ -754,6 +1010,10 @@ def save_reports(self):
txt_report_filepath = os.path.join(save_directory, f"{model_name}_report.txt")
digest_model.save_txt_report(txt_report_filepath)
+ # Save the yaml report
+ yaml_report_filepath = os.path.join(save_directory, f"{model_name}_report.yaml")
+ digest_model.save_yaml_report(yaml_report_filepath)
+
# Save the node list
nodes_report_filepath = os.path.join(save_directory, f"{model_name}_nodes.csv")
self.save_nodes_csv(nodes_report_filepath, False)
diff --git a/src/digest/model_class/digest_model.py b/src/digest/model_class/digest_model.py
new file mode 100644
index 0000000..130503c
--- /dev/null
+++ b/src/digest/model_class/digest_model.py
@@ -0,0 +1,170 @@
+# Copyright(C) 2024 Advanced Micro Devices, Inc. All rights reserved.
+
+import os
+import csv
+from collections import Counter, OrderedDict, defaultdict
+from typing import List, Dict, Optional, Any, Union
+
+
+class NodeParsingException(Exception):
+ pass
+
+
+# The classes are for type aliasing. Once python 3.10 is the minimum we can switch to TypeAlias
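+# Illustrative sketch (not part of this change): on Python 3.10+ these could become plain
+# aliases, e.g. `NodeTypeCounts: TypeAlias = dict[str, int]`, though NodeShapeCounts would
+# then need its Counter default factory supplied at construction time.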
+class NodeShapeCounts(defaultdict[str, Counter]):
+ def __init__(self):
+ super().__init__(Counter) # Initialize with the Counter factory
+
+
+class NodeTypeCounts(Dict[str, int]):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+
+class TensorInfo:
+ "Used to store node input and output tensor information"
+
+ def __init__(self) -> None:
+ self.dtype: Optional[str] = None
+ self.dtype_bytes: Optional[int] = None
+ self.size_kbytes: Optional[float] = None
+ self.shape: List[Union[int, str]] = []
+
+
+class TensorData(OrderedDict[str, TensorInfo]):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+
+class NodeInfo:
+ def __init__(self) -> None:
+ self.flops: Optional[int] = None
+ self.parameters: int = 0
+ self.node_type: Optional[str] = None
+ self.attributes: OrderedDict[str, Any] = OrderedDict()
+ # We use an ordered dictionary because the order in which
+        # the inputs and outputs are listed in the node matters.
+ self.inputs = TensorData()
+ self.outputs = TensorData()
+
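+    # Convenience accessors: TensorData preserves insertion order, so index i
+    # corresponds to the node's i-th input/output as listed in the ONNX graph.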
+ def get_input(self, index: int) -> TensorInfo:
+ return list(self.inputs.values())[index]
+
+ def get_output(self, index: int) -> TensorInfo:
+ return list(self.outputs.values())[index]
+
+ def __str__(self):
+ """Provides a human-readable string representation of NodeInfo."""
+ output = [
+ f"Node Type: {self.node_type}",
+ f"FLOPs: {self.flops if self.flops is not None else 'N/A'}",
+ f"Parameters: {self.parameters}",
+ ]
+
+ if self.attributes:
+ output.append("Attributes:")
+ for key, value in self.attributes.items():
+ output.append(f" - {key}: {value}")
+
+ if self.inputs:
+ output.append("Inputs:")
+ for name, tensor in self.inputs.items():
+ output.append(f" - {name}: {tensor}")
+
+ if self.outputs:
+ output.append("Outputs:")
+ for name, tensor in self.outputs.items():
+ output.append(f" - {name}: {tensor}")
+
+ return "\n".join(output)
+
+
+# The classes are for type aliasing. Once python 3.10 is the minimum we can switch to TypeAlias
+class NodeData(OrderedDict[str, NodeInfo]):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+
+def save_nodes_csv_report(node_data: NodeData, filepath: str) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
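+    # Nodes differ in their number of inputs and outputs, so per-tensor column headers
+    # are collected dynamically as rows are built and appended to the base fieldnames.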
+ flattened_data = []
+ fieldnames = ["Node Name", "Node Type", "Parameters", "FLOPs", "Attributes"]
+ input_fieldnames = []
+ output_fieldnames = []
+ for name, node_info in node_data.items():
+ row = OrderedDict()
+ row["Node Name"] = name
+ row["Node Type"] = str(node_info.node_type)
+ row["Parameters"] = str(node_info.parameters)
+ row["FLOPs"] = str(node_info.flops)
+ if node_info.attributes:
+ row["Attributes"] = str({k: v for k, v in node_info.attributes.items()})
+ else:
+ row["Attributes"] = ""
+
+ for i, (input_name, input_info) in enumerate(node_info.inputs.items()):
+ column_name = f"Input{i+1} (Shape, Dtype, Size (kB))"
+ row[column_name] = (
+ f"{input_name} ({input_info.shape}, {input_info.dtype}, {input_info.size_kbytes})"
+ )
+
+ # Dynamically add input column names to fieldnames if not already present
+ if column_name not in input_fieldnames:
+ input_fieldnames.append(column_name)
+
+ for i, (output_name, output_info) in enumerate(node_info.outputs.items()):
+ column_name = f"Output{i+1} (Shape, Dtype, Size (kB))"
+ row[column_name] = (
+ f"{output_name} ({output_info.shape}, "
+ f"{output_info.dtype}, {output_info.size_kbytes})"
+ )
+
+            # Dynamically add output column names to fieldnames if not already present
+ if column_name not in output_fieldnames:
+ output_fieldnames.append(column_name)
+
+ flattened_data.append(row)
+
+ fieldnames = fieldnames + input_fieldnames + output_fieldnames
+ with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
+ writer = csv.DictWriter(csvfile, fieldnames=fieldnames, lineterminator="\n")
+ writer.writeheader()
+ writer.writerows(flattened_data)
+
+
+def save_node_type_counts_csv_report(node_data: NodeTypeCounts, filepath: str) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
+ header = ["Node Type", "Count"]
+
+ with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
+ writer = csv.writer(csvfile, lineterminator="\n")
+ writer.writerow(header)
+ for node_type, node_count in node_data.items():
+ writer.writerow([node_type, node_count])
+
+
+def save_node_shape_counts_csv_report(
+ node_data: NodeShapeCounts, filepath: str
+) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
+ header = ["Node Type", "Input Tensors Shapes", "Count"]
+
+ with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
+ writer = csv.writer(csvfile, dialect="excel", lineterminator="\n")
+ writer.writerow(header)
+ for node_type, node_info in node_data.items():
+ info_iter = iter(node_info.items())
+ for shape, count in info_iter:
+ writer.writerow([node_type, shape, count])
diff --git a/src/digest/model_class/digest_onnx_model.py b/src/digest/model_class/digest_onnx_model.py
new file mode 100644
index 0000000..c96a228
--- /dev/null
+++ b/src/digest/model_class/digest_onnx_model.py
@@ -0,0 +1,668 @@
+# Copyright(C) 2024 Advanced Micro Devices, Inc. All rights reserved.
+
+import os
+from uuid import uuid4
+from typing import List, Dict, Optional, Tuple, Union, cast
+from datetime import datetime
+import numpy as np
+import onnx
+import yaml
+from prettytable import PrettyTable
+from digest.model_class.digest_model import (
+ NodeTypeCounts,
+ NodeData,
+ NodeShapeCounts,
+ NodeInfo,
+ TensorData,
+ TensorInfo,
+ save_nodes_csv_report,
+)
+import utils.onnx_utils as onnx_utils
+
+
+class DigestOnnxModel:
+ def __init__(
+ self,
+ onnx_model: onnx.ModelProto,
+ onnx_filepath: Optional[str] = None,
+ model_name: Optional[str] = None,
+ save_proto: bool = True,
+ ) -> None:
+ # Public members exposed to the API
+ self.unique_id: str = str(uuid4())
+ self.filepath: Optional[str] = onnx_filepath
+ self.model_proto: Optional[onnx.ModelProto] = onnx_model if save_proto else None
+ self.model_name: Optional[str] = model_name
+ self.model_version: Optional[int] = None
+ self.graph_name: Optional[str] = None
+ self.producer_name: Optional[str] = None
+ self.producer_version: Optional[str] = None
+ self.ir_version: Optional[int] = None
+ self.opset: Optional[int] = None
+ self.imports: Dict[str, int] = {}
+ self.node_type_counts: NodeTypeCounts = NodeTypeCounts()
+ self.model_flops: Optional[int] = None
+ self.model_parameters: int = 0
+ self.node_type_flops: Dict[str, int] = {}
+ self.node_type_parameters: Dict[str, int] = {}
+ self.per_node_info = NodeData()
+ self.model_inputs = TensorData()
+ self.model_outputs = TensorData()
+
+ # Private members not intended to be exposed
+ self.input_tensors_: Dict[str, onnx.ValueInfoProto] = {}
+ self.output_tensors_: Dict[str, onnx.ValueInfoProto] = {}
+ self.value_tensors_: Dict[str, onnx.ValueInfoProto] = {}
+ self.init_tensors_: Dict[str, onnx.TensorProto] = {}
+
+ self.update_state(onnx_model)
+
+ def update_state(self, model_proto: onnx.ModelProto) -> None:
+ self.model_version = model_proto.model_version
+ self.graph_name = model_proto.graph.name
+ self.producer_name = model_proto.producer_name
+ self.producer_version = model_proto.producer_version
+ self.ir_version = model_proto.ir_version
+ self.opset = onnx_utils.get_opset(model_proto)
+ self.imports = {
+ import_.domain: import_.version for import_ in model_proto.opset_import
+ }
+
+ self.model_inputs = onnx_utils.get_model_input_shapes_types(model_proto)
+ self.model_outputs = onnx_utils.get_model_output_shapes_types(model_proto)
+
+ self.node_type_counts = onnx_utils.get_node_type_counts(model_proto)
+ self.parse_model_nodes(model_proto)
+
+ def get_node_tensor_info_(
+ self, onnx_node: onnx.NodeProto
+ ) -> Tuple[TensorData, TensorData]:
+ """
+ This function is set to private because it is not intended to be used
+ outside of the DigestOnnxModel class.
+ """
+
+ input_tensor_info = TensorData()
+ for node_input in onnx_node.input:
+ input_tensor_info[node_input] = TensorInfo()
+ if (
+ node_input in self.input_tensors_
+ or node_input in self.value_tensors_
+ or node_input in self.output_tensors_
+ ):
+ tensor = (
+ self.input_tensors_.get(node_input)
+ or self.value_tensors_.get(node_input)
+ or self.output_tensors_.get(node_input)
+ )
+ if tensor:
+ for dim in tensor.type.tensor_type.shape.dim:
+ if dim.HasField("dim_value"):
+ input_tensor_info[node_input].shape.append(dim.dim_value)
+ elif dim.HasField("dim_param"):
+ input_tensor_info[node_input].shape.append(dim.dim_param)
+
+ dtype_str, dtype_bytes = onnx_utils.tensor_type_to_str_and_size(
+ tensor.type.tensor_type.elem_type
+ )
+ elif node_input in self.init_tensors_:
+ input_tensor_info[node_input].shape.extend(
+ [dim for dim in self.init_tensors_[node_input].dims]
+ )
+ dtype_str, dtype_bytes = onnx_utils.tensor_type_to_str_and_size(
+ self.init_tensors_[node_input].data_type
+ )
+ else:
+ dtype_str = None
+ dtype_bytes = None
+
+ input_tensor_info[node_input].dtype = dtype_str
+ input_tensor_info[node_input].dtype_bytes = dtype_bytes
+
+ if (
+ all(isinstance(s, int) for s in input_tensor_info[node_input].shape)
+ and dtype_bytes
+ ):
+ tensor_size = float(
+ np.prod(np.array(input_tensor_info[node_input].shape))
+ )
+ input_tensor_info[node_input].size_kbytes = (
+ tensor_size * float(dtype_bytes) / 1024.0
+ )
+
+ output_tensor_info = TensorData()
+ for node_output in onnx_node.output:
+ output_tensor_info[node_output] = TensorInfo()
+ if (
+ node_output in self.input_tensors_
+ or node_output in self.value_tensors_
+ or node_output in self.output_tensors_
+ ):
+ tensor = (
+ self.input_tensors_.get(node_output)
+ or self.value_tensors_.get(node_output)
+ or self.output_tensors_.get(node_output)
+ )
+ if tensor:
+ output_tensor_info[node_output].shape.extend(
+ [
+ int(dim.dim_value)
+ for dim in tensor.type.tensor_type.shape.dim
+ ]
+ )
+ dtype_str, dtype_bytes = onnx_utils.tensor_type_to_str_and_size(
+ tensor.type.tensor_type.elem_type
+ )
+ elif node_output in self.init_tensors_:
+ output_tensor_info[node_output].shape.extend(
+ [dim for dim in self.init_tensors_[node_output].dims]
+ )
+ dtype_str, dtype_bytes = onnx_utils.tensor_type_to_str_and_size(
+ self.init_tensors_[node_output].data_type
+ )
+
+ else:
+ dtype_str = None
+ dtype_bytes = None
+
+ output_tensor_info[node_output].dtype = dtype_str
+ output_tensor_info[node_output].dtype_bytes = dtype_bytes
+
+ if (
+ all(isinstance(s, int) for s in output_tensor_info[node_output].shape)
+ and dtype_bytes
+ ):
+ tensor_size = float(
+ np.prod(np.array(output_tensor_info[node_output].shape))
+ )
+ output_tensor_info[node_output].size_kbytes = (
+ tensor_size * float(dtype_bytes) / 1024.0
+ )
+
+ return input_tensor_info, output_tensor_info
+
+ def parse_model_nodes(self, onnx_model: onnx.ModelProto) -> None:
+ """
+        Calculate the total number of FLOPs in the ONNX model.
+        A FLOP is defined as one floating-point operation, which distinguishes
+        it from multiply-accumulates (MACs), where FLOPs == 2 * MACs.
+ """
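+        # Worked example of the convention above: a (M, K) x (K, N) MatMul performs
+        # M*K*N multiply-accumulates, i.e. 2*M*K*N FLOPs under this definition.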
+
+        # Initialize to zero so we can accumulate. Set to None during the
+ # model FLOPs calculation if it errors out.
+ self.model_flops = 0
+
+ # Check to see if the model inputs have any dynamic shapes
+ if onnx_utils.get_dynamic_input_dims(onnx_model):
+ self.model_flops = None
+
+ try:
+ onnx_model, _ = onnx_utils.optimize_onnx_model(onnx_model)
+
+ onnx_model = onnx.shape_inference.infer_shapes(
+ onnx_model, strict_mode=True, data_prop=True
+ )
+ except Exception as e: # pylint: disable=broad-except
+ print(f"ONNX utils: {str(e)}")
+ self.model_flops = None
+
+        # If the ONNX model contains one of the following unsupported ops, the total
+        # model FLOP count is set to None since it is expected to be incorrect
+ unsupported_ops = [
+ "Einsum",
+ "RNN",
+ "GRU",
+ "DeformConv",
+ ]
+
+ if not self.input_tensors_:
+ self.input_tensors_ = {
+ tensor.name: tensor for tensor in onnx_model.graph.input
+ }
+
+ if not self.output_tensors_:
+ self.output_tensors_ = {
+ tensor.name: tensor for tensor in onnx_model.graph.output
+ }
+
+ if not self.value_tensors_:
+ self.value_tensors_ = {
+ tensor.name: tensor for tensor in onnx_model.graph.value_info
+ }
+
+ if not self.init_tensors_:
+ self.init_tensors_ = {
+ tensor.name: tensor for tensor in onnx_model.graph.initializer
+ }
+
+ for node in onnx_model.graph.node: # pylint: disable=E1101
+
+ node_info = NodeInfo()
+
+ # TODO: I have encountered models containing nodes with no name. It would be a good idea
+ # to have this type of model info fed back to the user through a warnings section.
+ if not node.name:
+ node.name = f"{node.op_type}_{len(self.per_node_info)}"
+
+ node_info.node_type = node.op_type
+ input_tensor_info, output_tensor_info = self.get_node_tensor_info_(node)
+ node_info.inputs = input_tensor_info
+ node_info.outputs = output_tensor_info
+
+ # Check if this node has parameters through the init tensors
+ for input_name, input_tensor in node_info.inputs.items():
+ if input_name in self.init_tensors_:
+ if all(isinstance(dim, int) for dim in input_tensor.shape):
+ input_parameters = int(np.prod(np.array(input_tensor.shape)))
+ node_info.parameters += input_parameters
+ self.model_parameters += input_parameters
+ self.node_type_parameters[node.op_type] = (
+ self.node_type_parameters.get(node.op_type, 0)
+ + input_parameters
+ )
+ else:
+ print(f"Tensor with params has unknown shape: {input_name}")
+
+ for attribute in node.attribute:
+ node_info.attributes.update(onnx_utils.attribute_to_dict(attribute))
+
+ # if node.name in self.per_node_info:
+ # print(f"Node name {node.name} is a duplicate.")
+
+ self.per_node_info[node.name] = node_info
+
+ if node.op_type in unsupported_ops:
+ self.model_flops = None
+ node_info.flops = None
+
+ try:
+
+ if (
+ node.op_type == "MatMul"
+ or node.op_type == "MatMulInteger"
+ or node.op_type == "QLinearMatMul"
+ ):
+
+ input_a = node_info.get_input(0).shape
+ if node.op_type == "QLinearMatMul":
+ input_b = node_info.get_input(3).shape
+ else:
+ input_b = node_info.get_input(1).shape
+
+ if not all(
+ isinstance(dim, int) for dim in input_a
+ ) or not isinstance(input_b[-1], int):
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
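+                    # For A of shape (..., K) and B of shape (..., K, N):
+                    # FLOPs = 2 * prod(A shape) * N (one multiply and one add per MAC).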
+ node_info.flops = int(
+ 2 * np.prod(np.array(input_a), dtype=np.int64) * input_b[-1]
+ )
+
+ elif (
+ node.op_type == "Mul"
+ or node.op_type == "Div"
+ or node.op_type == "Add"
+ ):
+ input_a = node_info.get_input(0).shape
+ input_b = node_info.get_input(1).shape
+
+ if not all(isinstance(dim, int) for dim in input_a) or not all(
+ isinstance(dim, int) for dim in input_b
+ ):
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
+ node_info.flops = int(
+ np.prod(np.array(input_a), dtype=np.int64)
+ ) + int(np.prod(np.array(input_b), dtype=np.int64))
+
+ elif node.op_type == "Gemm" or node.op_type == "QGemm":
+ x_shape = node_info.get_input(0).shape
+ if node.op_type == "Gemm":
+ w_shape = node_info.get_input(1).shape
+ else:
+ w_shape = node_info.get_input(3).shape
+
+ if not all(isinstance(dim, int) for dim in x_shape) or not all(
+ isinstance(dim, int) for dim in w_shape
+ ):
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
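+                    # Resolve (M, K, N) from the A and B shapes, honoring the transA/transB
+                    # attributes; GEMM FLOPs = 2*M*K*N plus one add per output for the bias.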
+ mm_dims = [
+ (
+ x_shape[0]
+ if not node_info.attributes.get("transA", 0)
+ else x_shape[1]
+ ),
+ (
+ x_shape[1]
+ if not node_info.attributes.get("transA", 0)
+ else x_shape[0]
+ ),
+ (
+ w_shape[1]
+ if not node_info.attributes.get("transB", 0)
+ else w_shape[0]
+ ),
+ ]
+
+ node_info.flops = int(
+ 2 * np.prod(np.array(mm_dims), dtype=np.int64)
+ )
+
+                    if len(node_info.inputs) > 2: # if there is a bias input
+ bias_shape = node_info.get_input(2).shape
+ node_info.flops += int(np.prod(np.array(bias_shape)))
+
+ elif (
+ node.op_type == "Conv"
+ or node.op_type == "ConvInteger"
+ or node.op_type == "QLinearConv"
+ or node.op_type == "ConvTranspose"
+ ):
+ # N, C, d1, ..., dn
+ x_shape = node_info.get_input(0).shape
+
+ # M, C/group, k1, ..., kn. Note C and M are swapped for ConvTranspose
+ if node.op_type == "QLinearConv":
+ w_shape = node_info.get_input(3).shape
+ else:
+ w_shape = node_info.get_input(1).shape
+
+ if not all(isinstance(dim, int) for dim in x_shape):
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
+ x_shape_ints = cast(List[int], x_shape)
+ w_shape_ints = cast(List[int], w_shape)
+
+ has_bias = False # Note, ConvInteger has no bias
+ if node.op_type == "Conv" and len(node_info.inputs) == 3:
+ has_bias = True
+ elif node.op_type == "QLinearConv" and len(node_info.inputs) == 9:
+ has_bias = True
+
+ num_dims = len(x_shape_ints) - 2
+ strides = node_info.attributes.get(
+ "strides", [1] * num_dims
+ ) # type: List[int]
+ dilation = node_info.attributes.get(
+ "dilations", [1] * num_dims
+ ) # type: List[int]
+ kernel_shape = w_shape_ints[2:]
+ batch_size = x_shape_ints[0]
+ out_channels = w_shape_ints[0]
+ out_dims = [batch_size, out_channels]
+ output_shape = node_info.attributes.get(
+ "output_shape", []
+ ) # type: List[int]
+
+ # If output_shape is given then we do not need to compute it ourselves
+ # The output_shape attribute does not include batch_size or channels and
+ # is only valid for ConvTranspose
+ if output_shape:
+ out_dims.extend(output_shape)
+ else:
+ auto_pad = node_info.attributes.get(
+ "auto_pad", "NOTSET".encode()
+ ).decode()
+ # SAME expects padding so that the output_shape = CEIL(input_shape / stride)
+ if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
+ out_dims.extend(
+ [x * s for x, s in zip(x_shape_ints[2:], strides)]
+ )
+ else:
+ # NOTSET means just use pads attribute
+ if auto_pad == "NOTSET":
+ pads = node_info.attributes.get(
+ "pads", [0] * num_dims * 2
+ )
+ # VALID essentially means no padding
+ elif auto_pad == "VALID":
+ pads = [0] * num_dims * 2
+
+ for i in range(num_dims):
+ dim_in = x_shape_ints[i + 2] # type: int
+
+ if node.op_type == "ConvTranspose":
+ out_dim = (
+ strides[i] * (dim_in - 1)
+ + ((kernel_shape[i] - 1) * dilation[i] + 1)
+ - pads[i]
+ - pads[i + num_dims]
+ )
+ else:
+ out_dim = (
+ dim_in
+ + pads[i]
+ + pads[i + num_dims]
+ - dilation[i] * (kernel_shape[i] - 1)
+ - 1
+ ) // strides[i] + 1
+
+ out_dims.append(out_dim)
+
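+                    # Each output element costs one MAC per kernel element per input channel
+                    # in its group (2 FLOPs each), plus one add per output element for bias.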
+ kernel_flops = int(
+ np.prod(np.array(kernel_shape)) * w_shape_ints[1]
+ )
+ output_points = int(np.prod(np.array(out_dims)))
+ bias_ops = output_points if has_bias else int(0)
+ node_info.flops = 2 * kernel_flops * output_points + bias_ops
+
+ elif node.op_type == "LSTM" or node.op_type == "DynamicQuantizeLSTM":
+
+ x_shape = node_info.get_input(
+ 0
+ ).shape # seq_length, batch_size, input_dim
+
+ if not all(isinstance(dim, int) for dim in x_shape):
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
+ x_shape_ints = cast(List[int], x_shape)
+ hidden_size = node_info.attributes["hidden_size"]
+ direction = (
+ 2
+ if node_info.attributes.get("direction")
+ == "bidirectional".encode()
+ else 1
+ )
+
+ has_bias = True if len(node_info.inputs) >= 4 else False
+ if has_bias:
+ bias_shape = node_info.get_input(3).shape
+ if isinstance(bias_shape[1], int):
+ bias_ops = bias_shape[1]
+ else:
+ bias_ops = 0
+ else:
+ bias_ops = 0
+ # seq_length, batch_size, input_dim = x_shape
+ if not isinstance(bias_ops, int):
+ bias_ops = int(0)
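+                    # Four gates per LSTM cell; each gate does an input projection
+                    # (2*input_dim*hidden) and a recurrent projection (2*hidden*hidden),
+                    # scaled by batch size, sequence length, and number of directions.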
+ num_gates = int(4)
+ gate_input_flops = int(2 * x_shape_ints[2] * hidden_size)
+ gate_hid_flops = int(2 * hidden_size * hidden_size)
+ unit_flops = (
+ num_gates * (gate_input_flops + gate_hid_flops) + bias_ops
+ )
+ node_info.flops = (
+ x_shape_ints[1] * x_shape_ints[0] * direction * unit_flops
+ )
+ # In this case we just hit an op that doesn't have FLOPs
+ else:
+ node_info.flops = None
+
+ except IndexError as err:
+ print(f"Error parsing node {node.name}: {err}")
+ node_info.flops = None
+ self.model_flops = None
+ continue
+
+ # Update the model level flops count
+ if node_info.flops is not None and self.model_flops is not None:
+ self.model_flops += node_info.flops
+
+ # Update the node type flops count
+ self.node_type_flops[node.op_type] = (
+ self.node_type_flops.get(node.op_type, 0) + node_info.flops
+ )
+
+ def save_txt_report(self, filepath: str) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
+ report_date = datetime.now().strftime("%B %d, %Y")
+
+ with open(filepath, "w", encoding="utf-8") as f_p:
+ f_p.write(f"Report created on {report_date}\n")
+ if self.filepath:
+ f_p.write(f"ONNX file: {self.filepath}\n")
+ f_p.write(f"Name of the model: {self.model_name}\n")
+ f_p.write(f"Model version: {self.model_version}\n")
+ f_p.write(f"Name of the graph: {self.graph_name}\n")
+ f_p.write(f"Producer: {self.producer_name} {self.producer_version}\n")
+ f_p.write(f"Ir version: {self.ir_version}\n")
+ f_p.write(f"Opset: {self.opset}\n\n")
+ f_p.write("Import list\n")
+ for name, version in self.imports.items():
+ f_p.write(f"\t{name}: {version}\n")
+
+ f_p.write("\n")
+ f_p.write(f"Total graph nodes: {sum(self.node_type_counts.values())}\n")
+ f_p.write(f"Number of parameters: {self.model_parameters}\n")
+ if self.model_flops:
+ f_p.write(f"Number of FLOPs: {self.model_flops}\n")
+ f_p.write("\n")
+
+ table_op_intensity = PrettyTable()
+ table_op_intensity.field_names = ["Operation", "FLOPs", "Intensity (%)"]
+ for op_type, count in self.node_type_flops.items():
+ if count > 0:
+ table_op_intensity.add_row(
+ [
+ op_type,
+ count,
+ 100.0 * float(count) / float(self.model_flops),
+ ]
+ )
+
+ f_p.write("Op intensity:\n")
+ f_p.write(table_op_intensity.get_string())
+ f_p.write("\n\n")
+
+ node_counts_table = PrettyTable()
+ node_counts_table.field_names = ["Node", "Occurrences"]
+ for op, count in self.node_type_counts.items():
+ node_counts_table.add_row([op, count])
+ f_p.write("Nodes and their occurrences:\n")
+ f_p.write(node_counts_table.get_string())
+ f_p.write("\n\n")
+
+ input_table = PrettyTable()
+ input_table.field_names = [
+ "Input Name",
+ "Shape",
+ "Type",
+ "Tensor Size (KB)",
+ ]
+ for input_name, input_details in self.model_inputs.items():
+ if input_details.size_kbytes:
+ kbytes = f"{input_details.size_kbytes:.2f}"
+ else:
+ kbytes = ""
+
+ input_table.add_row(
+ [
+ input_name,
+ input_details.shape,
+ input_details.dtype,
+ kbytes,
+ ]
+ )
+ f_p.write("Input Tensor(s) Information:\n")
+ f_p.write(input_table.get_string())
+ f_p.write("\n\n")
+
+ output_table = PrettyTable()
+ output_table.field_names = [
+ "Output Name",
+ "Shape",
+ "Type",
+ "Tensor Size (KB)",
+ ]
+ for output_name, output_details in self.model_outputs.items():
+ if output_details.size_kbytes:
+ kbytes = f"{output_details.size_kbytes:.2f}"
+ else:
+ kbytes = ""
+
+ output_table.add_row(
+ [
+ output_name,
+ output_details.shape,
+ output_details.dtype,
+ kbytes,
+ ]
+ )
+ f_p.write("Output Tensor(s) Information:\n")
+ f_p.write(output_table.get_string())
+ f_p.write("\n\n")
+
+ def save_yaml_report(self, filepath: str) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
+ report_date = datetime.now().strftime("%B %d, %Y")
+
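+        # vars() flattens each TensorInfo into a plain dict so PyYAML can emit it
+        # without custom representers.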
+ input_tensors = dict({k: vars(v) for k, v in self.model_inputs.items()})
+ output_tensors = dict({k: vars(v) for k, v in self.model_outputs.items()})
+
+ yaml_data = {
+ "report_date": report_date,
+ "onnx_file": self.filepath,
+ "model_name": self.model_name,
+ "model_version": self.model_version,
+ "graph_name": self.graph_name,
+ "producer_name": self.producer_name,
+ "ir_version": self.ir_version,
+ "opset": self.opset,
+ "import_list": self.imports,
+ "graph_nodes": sum(self.node_type_counts.values()),
+ "model_parameters": self.model_parameters,
+ "model_flops": self.model_flops,
+ "operator_intensity": self.node_type_flops,
+ "node_histogram": dict(self.node_type_counts),
+ "input_tensors": input_tensors,
+ "output_tensors": output_tensors,
+ }
+
+ with open(filepath, "w", encoding="utf-8") as f_p:
+ yaml.dump(yaml_data, f_p, sort_keys=False)
+
+ def save_nodes_csv_report(self, filepath: str) -> None:
+ save_nodes_csv_report(self.per_node_info, filepath)
+
+ def get_node_type_counts(self) -> Union[NodeTypeCounts, None]:
+ if not self.node_type_counts and self.model_proto:
+ self.node_type_counts = onnx_utils.get_node_type_counts(self.model_proto)
+ return self.node_type_counts if self.node_type_counts else None
+
+ def get_node_shape_counts(self) -> NodeShapeCounts:
+ tensor_shape_counter = NodeShapeCounts()
+ for _, info in self.per_node_info.items():
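+            # Key each node type's counter by the tuple of its input shapes so nodes
+            # with identical input shapes are tallied together.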
+ shape_hash = tuple([tuple(v.shape) for _, v in info.inputs.items()])
+ if info.node_type:
+ tensor_shape_counter[info.node_type][shape_hash] += 1
+ return tensor_shape_counter
diff --git a/src/digest/modelsummary.py b/src/digest/modelsummary.py
index 1e3872e..5f732fe 100644
--- a/src/digest/modelsummary.py
+++ b/src/digest/modelsummary.py
@@ -14,14 +14,14 @@
from digest.freeze_inputs import FreezeInputs
from digest.popup_window import PopupWindow
from digest.qt_utils import apply_dark_style_sheet
-from utils import onnx_utils
+from digest.model_class.digest_onnx_model import DigestOnnxModel
ROOT_FOLDER = os.path.dirname(os.path.abspath(__file__))
class modelSummary(QWidget):
- def __init__(self, digest_model: onnx_utils.DigestOnnxModel, parent=None):
+ def __init__(self, digest_model: DigestOnnxModel, parent=None):
super().__init__(parent)
self.ui = Ui_modelSummary()
self.ui.setupUi(self)
diff --git a/src/digest/multi_model_analysis.py b/src/digest/multi_model_analysis.py
index d7f6bab..08e3ce6 100644
--- a/src/digest/multi_model_analysis.py
+++ b/src/digest/multi_model_analysis.py
@@ -11,6 +11,11 @@
from digest.ui.multimodelanalysis_ui import Ui_multiModelAnalysis
from digest.histogramchartwidget import StackedHistogramWidget
from digest.qt_utils import apply_dark_style_sheet
+from digest.model_class.digest_onnx_model import DigestOnnxModel
+from digest.model_class.digest_model import (
+ save_node_shape_counts_csv_report,
+ save_node_type_counts_csv_report,
+)
from utils import onnx_utils
ROOT_FOLDER = os.path.dirname(__file__)
@@ -21,7 +26,7 @@ class MultiModelAnalysis(QWidget):
def __init__(
self,
- model_list: List[onnx_utils.DigestOnnxModel],
+ model_list: List[DigestOnnxModel],
parent=None,
):
super().__init__(parent)
@@ -203,7 +208,7 @@ def save_reports(self):
node_type_counter = digest_model.get_node_type_counts()
if node_type_counter:
- onnx_utils.save_node_type_counts_csv_report(
+ save_node_type_counts_csv_report(
node_type_counter, node_type_filepath
)
@@ -212,7 +217,7 @@ def save_reports(self):
node_shape_filepath = os.path.join(
save_directory, f"{digest_model.model_name}_node_shape_counts.csv"
)
- onnx_utils.save_node_shape_counts_csv_report(
+ save_node_shape_counts_csv_report(
node_shape_counts, node_shape_filepath
)
@@ -234,14 +239,14 @@ def save_reports(self):
global_node_type_counter = onnx_utils.NodeTypeCounts(
self.global_node_type_counter.most_common()
)
- onnx_utils.save_node_type_counts_csv_report(
+ save_node_type_counts_csv_report(
global_node_type_counter, global_filepath
)
global_filepath = os.path.join(
save_directory, "global_node_shape_counts.csv"
)
- onnx_utils.save_node_shape_counts_csv_report(
+ save_node_shape_counts_csv_report(
self.global_node_shape_counter, global_filepath
)
diff --git a/src/digest/multi_model_selection_page.py b/src/digest/multi_model_selection_page.py
index d7b6a39..d24996b 100644
--- a/src/digest/multi_model_selection_page.py
+++ b/src/digest/multi_model_selection_page.py
@@ -23,6 +23,7 @@
from digest.multi_model_analysis import MultiModelAnalysis
from digest.qt_utils import apply_dark_style_sheet, prompt_user_ram_limit
from utils import onnx_utils
+from digest.model_class.digest_onnx_model import DigestOnnxModel
class AnalysisThread(QThread):
@@ -33,7 +34,7 @@ class AnalysisThread(QThread):
def __init__(self):
super().__init__()
- self.model_dict: Dict[str, Optional[onnx_utils.DigestOnnxModel]] = {}
+ self.model_dict: Dict[str, Optional[DigestOnnxModel]] = {}
self.user_canceled = False
def run(self):
@@ -49,7 +50,7 @@ def run(self):
continue
model_name = os.path.splitext(os.path.basename(file))[0]
model_proto = onnx_utils.load_onnx(file, False)
- self.model_dict[file] = onnx_utils.DigestOnnxModel(
+ self.model_dict[file] = DigestOnnxModel(
model_proto, onnx_filepath=file, model_name=model_name, save_proto=False
)
@@ -58,7 +59,7 @@ def run(self):
model_list = [
model
for model in self.model_dict.values()
- if isinstance(model, onnx_utils.DigestOnnxModel)
+ if isinstance(model, DigestOnnxModel)
]
self.completed.emit(model_list)
@@ -94,7 +95,7 @@ def __init__(
self.ui.openAnalysisBtn.clicked.connect(self.start_analysis)
- self.model_dict: Dict[str, Optional[onnx_utils.DigestOnnxModel]] = {}
+ self.model_dict: Dict[str, Optional[DigestOnnxModel]] = {}
self.analysis_thread: Optional[AnalysisThread] = None
self.progress: Optional[ProgressDialog] = None
@@ -289,7 +290,7 @@ def start_analysis(self):
self.analysis_thread.model_dict = self.model_dict
self.analysis_thread.start()
- def open_analysis(self, model_list: List[onnx_utils.DigestOnnxModel]):
+ def open_analysis(self, model_list: List[DigestOnnxModel]):
multi_model_analysis = MultiModelAnalysis(model_list)
self.analysis_window.setCentralWidget(multi_model_analysis)
self.analysis_window.setWindowIcon(QIcon(":/assets/images/digest_logo_500.jpg"))
diff --git a/src/digest/node_summary.py b/src/digest/node_summary.py
index 99eb35f..01aaf09 100644
--- a/src/digest/node_summary.py
+++ b/src/digest/node_summary.py
@@ -6,6 +6,10 @@
from PySide6.QtWidgets import QWidget, QTableWidgetItem, QFileDialog
from digest.ui.nodessummary_ui import Ui_nodesSummary
from digest.qt_utils import apply_dark_style_sheet
+from digest.model_class.digest_model import (
+ save_node_shape_counts_csv_report,
+ save_nodes_csv_report,
+)
from utils import onnx_utils
ROOT_FOLDER = os.path.dirname(__file__)
@@ -111,8 +115,6 @@ def save_csv_file(self):
self, "Save CSV", os.getcwd(), "CSV(*.csv)"
)
if filepath and self.ui.allNodesBtn.isChecked():
- onnx_utils.save_nodes_csv_report(self.node_data, filepath)
+ save_nodes_csv_report(self.node_data, filepath)
elif filepath and self.ui.shapeCountsBtn.isChecked():
- onnx_utils.save_node_shape_counts_csv_report(
- self.node_shape_counts, filepath
- )
+ save_node_shape_counts_csv_report(self.node_shape_counts, filepath)
diff --git a/src/digest/thread.py b/src/digest/thread.py
index 3e03732..ef18617 100644
--- a/src/digest/thread.py
+++ b/src/digest/thread.py
@@ -4,13 +4,13 @@
import os
from typing import Optional
from PySide6.QtCore import QThread, Signal
-from utils import onnx_utils
+from digest.model_class.digest_onnx_model import DigestOnnxModel
from digest.subgraph_analysis.find_match import find_match
class StatsThread(QThread):
- completed = Signal(onnx_utils.DigestOnnxModel, str)
+ completed = Signal(DigestOnnxModel, str)
def __init__(
self,
@@ -31,7 +31,7 @@ def run(self):
if not self.unique_id:
raise ValueError("You must specify a unique id.")
- digest_model = onnx_utils.DigestOnnxModel(self.model, save_proto=False)
+ digest_model = DigestOnnxModel(self.model, save_proto=False)
self.completed.emit(digest_model, self.unique_id)
diff --git a/src/digest/ui/mainwindow.ui b/src/digest/ui/mainwindow.ui
index 8643efa..e7e28f3 100644
--- a/src/digest/ui/mainwindow.ui
+++ b/src/digest/ui/mainwindow.ui
@@ -179,7 +179,7 @@
Qt::FocusPolicy::NoFocus
- <html><head/><body><p>Open a local model file (Ctrl-O)</p></body></html>
+ <html><head/><body><p>Open (Ctrl-O)</p></body></html>
QPushButton {
diff --git a/src/digest/ui/mainwindow_ui.py b/src/digest/ui/mainwindow_ui.py
index 9904c77..9e3fe35 100644
--- a/src/digest/ui/mainwindow_ui.py
+++ b/src/digest/ui/mainwindow_ui.py
@@ -8,72 +8,125 @@
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
-from PySide6.QtCore import (QCoreApplication, QDate, QDateTime, QLocale,
- QMetaObject, QObject, QPoint, QRect,
- QSize, QTime, QUrl, Qt)
-from PySide6.QtGui import (QBrush, QColor, QConicalGradient, QCursor,
- QFont, QFontDatabase, QGradient, QIcon,
- QImage, QKeySequence, QLinearGradient, QPainter,
- QPalette, QPixmap, QRadialGradient, QTransform)
-from PySide6.QtWidgets import (QApplication, QFrame, QHBoxLayout, QLabel,
- QMainWindow, QPushButton, QSizePolicy, QSpacerItem,
- QStackedWidget, QStatusBar, QTabWidget, QVBoxLayout,
- QWidget)
+from PySide6.QtCore import (
+ QCoreApplication,
+ QDate,
+ QDateTime,
+ QLocale,
+ QMetaObject,
+ QObject,
+ QPoint,
+ QRect,
+ QSize,
+ QTime,
+ QUrl,
+ Qt,
+)
+from PySide6.QtGui import (
+ QBrush,
+ QColor,
+ QConicalGradient,
+ QCursor,
+ QFont,
+ QFontDatabase,
+ QGradient,
+ QIcon,
+ QImage,
+ QKeySequence,
+ QLinearGradient,
+ QPainter,
+ QPalette,
+ QPixmap,
+ QRadialGradient,
+ QTransform,
+)
+from PySide6.QtWidgets import (
+ QApplication,
+ QFrame,
+ QHBoxLayout,
+ QLabel,
+ QMainWindow,
+ QPushButton,
+ QSizePolicy,
+ QSpacerItem,
+ QStackedWidget,
+ QStatusBar,
+ QTabWidget,
+ QVBoxLayout,
+ QWidget,
+)
import resource_rc
+
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
if not MainWindow.objectName():
- MainWindow.setObjectName(u"MainWindow")
+ MainWindow.setObjectName("MainWindow")
MainWindow.resize(864, 783)
icon = QIcon()
- icon.addFile(u":/assets/images/digest_logo_500.jpg", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon.addFile(
+ ":/assets/images/digest_logo_500.jpg",
+ QSize(),
+ QIcon.Mode.Normal,
+ QIcon.State.Off,
+ )
MainWindow.setWindowIcon(icon)
self.centralwidget = QWidget(MainWindow)
- self.centralwidget.setObjectName(u"centralwidget")
- self.centralwidget.setStyleSheet(u"")
+ self.centralwidget.setObjectName("centralwidget")
+ self.centralwidget.setStyleSheet("")
self.horizontalLayout_5 = QHBoxLayout(self.centralwidget)
- self.horizontalLayout_5.setObjectName(u"horizontalLayout_5")
+ self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.leftPanelWidget = QWidget(self.centralwidget)
- self.leftPanelWidget.setObjectName(u"leftPanelWidget")
- sizePolicy = QSizePolicy(QSizePolicy.Policy.Maximum, QSizePolicy.Policy.Preferred)
+ self.leftPanelWidget.setObjectName("leftPanelWidget")
+ sizePolicy = QSizePolicy(
+ QSizePolicy.Policy.Maximum, QSizePolicy.Policy.Preferred
+ )
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
- sizePolicy.setHeightForWidth(self.leftPanelWidget.sizePolicy().hasHeightForWidth())
+ sizePolicy.setHeightForWidth(
+ self.leftPanelWidget.sizePolicy().hasHeightForWidth()
+ )
self.leftPanelWidget.setSizePolicy(sizePolicy)
self.leftPanelWidget.setMinimumSize(QSize(85, 0))
self.leftPanelWidget.setMaximumSize(QSize(16777215, 16777215))
- self.leftPanelWidget.setStyleSheet(u"")
+ self.leftPanelWidget.setStyleSheet("")
self.verticalLayout_7 = QVBoxLayout(self.leftPanelWidget)
self.verticalLayout_7.setSpacing(0)
- self.verticalLayout_7.setObjectName(u"verticalLayout_7")
+ self.verticalLayout_7.setObjectName("verticalLayout_7")
self.verticalLayout_7.setContentsMargins(0, 0, 0, 0)
self.iconGroup = QWidget(self.leftPanelWidget)
- self.iconGroup.setObjectName(u"iconGroup")
+ self.iconGroup.setObjectName("iconGroup")
sizePolicy.setHeightForWidth(self.iconGroup.sizePolicy().hasHeightForWidth())
self.iconGroup.setSizePolicy(sizePolicy)
self.iconGroup.setMinimumSize(QSize(0, 0))
self.verticalLayout_8 = QVBoxLayout(self.iconGroup)
- self.verticalLayout_8.setObjectName(u"verticalLayout_8")
+ self.verticalLayout_8.setObjectName("verticalLayout_8")
self.verticalLayout_8.setContentsMargins(5, -1, 5, -1)
self.logoBtn = QPushButton(self.iconGroup)
- self.logoBtn.setObjectName(u"logoBtn")
+ self.logoBtn.setObjectName("logoBtn")
sizePolicy1 = QSizePolicy(QSizePolicy.Policy.Fixed, QSizePolicy.Policy.Fixed)
sizePolicy1.setHorizontalStretch(0)
sizePolicy1.setVerticalStretch(0)
sizePolicy1.setHeightForWidth(self.logoBtn.sizePolicy().hasHeightForWidth())
self.logoBtn.setSizePolicy(sizePolicy1)
self.logoBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
- self.logoBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.logoBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon1 = QIcon()
- icon1.addFile(u":/assets/images/remove_background_500_zoom.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon1.addFile(
+ ":/assets/images/remove_background_500_zoom.png",
+ QSize(),
+ QIcon.Mode.Normal,
+ QIcon.State.Off,
+ )
self.logoBtn.setIcon(icon1)
self.logoBtn.setIconSize(QSize(44, 44))
self.logoBtn.setCheckable(False)
@@ -82,40 +135,48 @@ def setupUi(self, MainWindow):
self.verticalLayout_8.addWidget(self.logoBtn, 0, Qt.AlignmentFlag.AlignHCenter)
self.ingestLine_2 = QFrame(self.iconGroup)
- self.ingestLine_2.setObjectName(u"ingestLine_2")
- sizePolicy1.setHeightForWidth(self.ingestLine_2.sizePolicy().hasHeightForWidth())
+ self.ingestLine_2.setObjectName("ingestLine_2")
+ sizePolicy1.setHeightForWidth(
+ self.ingestLine_2.sizePolicy().hasHeightForWidth()
+ )
self.ingestLine_2.setSizePolicy(sizePolicy1)
self.ingestLine_2.setMinimumSize(QSize(50, 0))
- self.ingestLine_2.setStyleSheet(u"color: rgb(100,100,100)")
+ self.ingestLine_2.setStyleSheet("color: rgb(100,100,100)")
self.ingestLine_2.setFrameShadow(QFrame.Shadow.Plain)
self.ingestLine_2.setLineWidth(2)
self.ingestLine_2.setFrameShape(QFrame.Shape.HLine)
- self.verticalLayout_8.addWidget(self.ingestLine_2, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.verticalLayout_8.addWidget(
+ self.ingestLine_2, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.ingestWidget = QWidget(self.iconGroup)
- self.ingestWidget.setObjectName(u"ingestWidget")
+ self.ingestWidget.setObjectName("ingestWidget")
sizePolicy.setHeightForWidth(self.ingestWidget.sizePolicy().hasHeightForWidth())
self.ingestWidget.setSizePolicy(sizePolicy)
self.ingestLayout = QVBoxLayout(self.ingestWidget)
self.ingestLayout.setSpacing(15)
- self.ingestLayout.setObjectName(u"ingestLayout")
+ self.ingestLayout.setObjectName("ingestLayout")
self.ingestLayout.setContentsMargins(-1, 10, -1, -1)
self.openFileBtn = QPushButton(self.ingestWidget)
- self.openFileBtn.setObjectName(u"openFileBtn")
+ self.openFileBtn.setObjectName("openFileBtn")
sizePolicy1.setHeightForWidth(self.openFileBtn.sizePolicy().hasHeightForWidth())
self.openFileBtn.setSizePolicy(sizePolicy1)
self.openFileBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.openFileBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.openFileBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}")
+ self.openFileBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}"
+ )
icon2 = QIcon()
- icon2.addFile(u":/assets/icons/file.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon2.addFile(
+ ":/assets/icons/file.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.openFileBtn.setIcon(icon2)
self.openFileBtn.setIconSize(QSize(34, 34))
self.openFileBtn.setCheckable(False)
@@ -124,178 +185,228 @@ def setupUi(self, MainWindow):
self.ingestLayout.addWidget(self.openFileBtn, 0, Qt.AlignmentFlag.AlignHCenter)
self.openFolderBtn = QPushButton(self.ingestWidget)
- self.openFolderBtn.setObjectName(u"openFolderBtn")
- sizePolicy1.setHeightForWidth(self.openFolderBtn.sizePolicy().hasHeightForWidth())
+ self.openFolderBtn.setObjectName("openFolderBtn")
+ sizePolicy1.setHeightForWidth(
+ self.openFolderBtn.sizePolicy().hasHeightForWidth()
+ )
self.openFolderBtn.setSizePolicy(sizePolicy1)
self.openFolderBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.openFolderBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.openFolderBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.openFolderBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon3 = QIcon()
- icon3.addFile(u":/assets/icons/models.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon3.addFile(
+ ":/assets/icons/models.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.openFolderBtn.setIcon(icon3)
self.openFolderBtn.setIconSize(QSize(34, 34))
self.openFolderBtn.setCheckable(True)
self.openFolderBtn.setAutoExclusive(False)
- self.ingestLayout.addWidget(self.openFolderBtn, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.ingestLayout.addWidget(
+ self.openFolderBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.huggingfaceBtn = QPushButton(self.ingestWidget)
- self.huggingfaceBtn.setObjectName(u"huggingfaceBtn")
- sizePolicy1.setHeightForWidth(self.huggingfaceBtn.sizePolicy().hasHeightForWidth())
+ self.huggingfaceBtn.setObjectName("huggingfaceBtn")
+ sizePolicy1.setHeightForWidth(
+ self.huggingfaceBtn.sizePolicy().hasHeightForWidth()
+ )
self.huggingfaceBtn.setSizePolicy(sizePolicy1)
self.huggingfaceBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.huggingfaceBtn.setFocusPolicy(Qt.FocusPolicy.ClickFocus)
- self.huggingfaceBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.huggingfaceBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon4 = QIcon()
- icon4.addFile(u":/assets/icons/huggingface.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon4.addFile(
+ ":/assets/icons/huggingface.png",
+ QSize(),
+ QIcon.Mode.Normal,
+ QIcon.State.Off,
+ )
self.huggingfaceBtn.setIcon(icon4)
self.huggingfaceBtn.setIconSize(QSize(36, 36))
self.huggingfaceBtn.setCheckable(True)
self.huggingfaceBtn.setAutoExclusive(False)
- self.ingestLayout.addWidget(self.huggingfaceBtn, 0, Qt.AlignmentFlag.AlignHCenter)
-
+ self.ingestLayout.addWidget(
+ self.huggingfaceBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
- self.verticalLayout_8.addWidget(self.ingestWidget, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.verticalLayout_8.addWidget(
+ self.ingestWidget, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.singleModelWidget = QWidget(self.iconGroup)
- self.singleModelWidget.setObjectName(u"singleModelWidget")
+ self.singleModelWidget.setObjectName("singleModelWidget")
self.singleModelToolsLayout = QVBoxLayout(self.singleModelWidget)
- self.singleModelToolsLayout.setObjectName(u"singleModelToolsLayout")
+ self.singleModelToolsLayout.setObjectName("singleModelToolsLayout")
self.ingestLine = QFrame(self.singleModelWidget)
- self.ingestLine.setObjectName(u"ingestLine")
+ self.ingestLine.setObjectName("ingestLine")
sizePolicy1.setHeightForWidth(self.ingestLine.sizePolicy().hasHeightForWidth())
self.ingestLine.setSizePolicy(sizePolicy1)
self.ingestLine.setMinimumSize(QSize(50, 0))
- self.ingestLine.setStyleSheet(u"color: rgb(100,100,100)")
+ self.ingestLine.setStyleSheet("color: rgb(100,100,100)")
self.ingestLine.setFrameShadow(QFrame.Shadow.Plain)
self.ingestLine.setLineWidth(2)
self.ingestLine.setFrameShape(QFrame.Shape.HLine)
- self.singleModelToolsLayout.addWidget(self.ingestLine, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.singleModelToolsLayout.addWidget(
+ self.ingestLine, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.summaryBtn = QPushButton(self.singleModelWidget)
- self.summaryBtn.setObjectName(u"summaryBtn")
+ self.summaryBtn.setObjectName("summaryBtn")
sizePolicy1.setHeightForWidth(self.summaryBtn.sizePolicy().hasHeightForWidth())
self.summaryBtn.setSizePolicy(sizePolicy1)
self.summaryBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.summaryBtn.setFocusPolicy(Qt.FocusPolicy.ClickFocus)
- self.summaryBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 1px solid rgba(60, 60, 60, 0.8);\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.summaryBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 1px solid rgba(60, 60, 60, 0.8);\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon5 = QIcon()
- icon5.addFile(u":/assets/icons/summary.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon5.addFile(
+ ":/assets/icons/summary.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.summaryBtn.setIcon(icon5)
self.summaryBtn.setIconSize(QSize(32, 32))
self.summaryBtn.setCheckable(True)
self.summaryBtn.setAutoExclusive(False)
- self.singleModelToolsLayout.addWidget(self.summaryBtn, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.singleModelToolsLayout.addWidget(
+ self.summaryBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.saveBtn = QPushButton(self.singleModelWidget)
- self.saveBtn.setObjectName(u"saveBtn")
+ self.saveBtn.setObjectName("saveBtn")
sizePolicy1.setHeightForWidth(self.saveBtn.sizePolicy().hasHeightForWidth())
self.saveBtn.setSizePolicy(sizePolicy1)
self.saveBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.saveBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.saveBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 1px solid rgba(60, 60, 60, 0.8);\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.saveBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 1px solid rgba(60, 60, 60, 0.8);\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon6 = QIcon()
- icon6.addFile(u":/assets/icons/save.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon6.addFile(
+ ":/assets/icons/save.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.saveBtn.setIcon(icon6)
self.saveBtn.setIconSize(QSize(32, 32))
self.saveBtn.setCheckable(False)
self.saveBtn.setAutoExclusive(False)
- self.singleModelToolsLayout.addWidget(self.saveBtn, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.singleModelToolsLayout.addWidget(
+ self.saveBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.nodesListBtn = QPushButton(self.singleModelWidget)
- self.nodesListBtn.setObjectName(u"nodesListBtn")
- sizePolicy1.setHeightForWidth(self.nodesListBtn.sizePolicy().hasHeightForWidth())
+ self.nodesListBtn.setObjectName("nodesListBtn")
+ sizePolicy1.setHeightForWidth(
+ self.nodesListBtn.sizePolicy().hasHeightForWidth()
+ )
self.nodesListBtn.setSizePolicy(sizePolicy1)
self.nodesListBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.nodesListBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.nodesListBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 1px solid rgba(60, 60, 60, 0.8);\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.nodesListBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 1px solid rgba(60, 60, 60, 0.8);\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon7 = QIcon()
- icon7.addFile(u":/assets/icons/node_list.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon7.addFile(
+ ":/assets/icons/node_list.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.nodesListBtn.setIcon(icon7)
self.nodesListBtn.setIconSize(QSize(32, 32))
self.nodesListBtn.setCheckable(False)
self.nodesListBtn.setAutoExclusive(False)
- self.singleModelToolsLayout.addWidget(self.nodesListBtn, 0, Qt.AlignmentFlag.AlignHCenter)
+ self.singleModelToolsLayout.addWidget(
+ self.nodesListBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.subgraphBtn = QPushButton(self.singleModelWidget)
- self.subgraphBtn.setObjectName(u"subgraphBtn")
+ self.subgraphBtn.setObjectName("subgraphBtn")
sizePolicy1.setHeightForWidth(self.subgraphBtn.sizePolicy().hasHeightForWidth())
self.subgraphBtn.setSizePolicy(sizePolicy1)
self.subgraphBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.subgraphBtn.setFocusPolicy(Qt.FocusPolicy.ClickFocus)
- self.subgraphBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 1px solid rgba(60, 60, 60, 0.8);\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.subgraphBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 1px solid rgba(60, 60, 60, 0.8);\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon8 = QIcon()
- icon8.addFile(u":/assets/icons/subgraph.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon8.addFile(
+ ":/assets/icons/subgraph.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.subgraphBtn.setIcon(icon8)
self.subgraphBtn.setIconSize(QSize(28, 28))
self.subgraphBtn.setCheckable(True)
self.subgraphBtn.setAutoExclusive(False)
- self.singleModelToolsLayout.addWidget(self.subgraphBtn, 0, Qt.AlignmentFlag.AlignHCenter)
-
+ self.singleModelToolsLayout.addWidget(
+ self.subgraphBtn, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.verticalLayout_8.addWidget(self.singleModelWidget)
- self.verticalSpacer_2 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
+ self.verticalSpacer_2 = QSpacerItem(
+ 20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
+ )
self.verticalLayout_8.addItem(self.verticalSpacer_2)
-
self.verticalLayout_7.addWidget(self.iconGroup)
- self.iconSpacer = QSpacerItem(10, 375, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
+ self.iconSpacer = QSpacerItem(
+ 10, 375, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
+ )
self.verticalLayout_7.addItem(self.iconSpacer)
self.bottomFrame = QFrame(self.leftPanelWidget)
- self.bottomFrame.setObjectName(u"bottomFrame")
+ self.bottomFrame.setObjectName("bottomFrame")
sizePolicy2 = QSizePolicy(QSizePolicy.Policy.Maximum, QSizePolicy.Policy.Fixed)
sizePolicy2.setHorizontalStretch(0)
sizePolicy2.setVerticalStretch(0)
@@ -306,24 +417,28 @@ def setupUi(self, MainWindow):
self.bottomFrame.setFrameShadow(QFrame.Shadow.Raised)
self.verticalLayout_6 = QVBoxLayout(self.bottomFrame)
self.verticalLayout_6.setSpacing(20)
- self.verticalLayout_6.setObjectName(u"verticalLayout_6")
+ self.verticalLayout_6.setObjectName("verticalLayout_6")
self.verticalLayout_6.setContentsMargins(8, -1, -1, -1)
self.infoBtn = QPushButton(self.bottomFrame)
- self.infoBtn.setObjectName(u"infoBtn")
+ self.infoBtn.setObjectName("infoBtn")
sizePolicy1.setHeightForWidth(self.infoBtn.sizePolicy().hasHeightForWidth())
self.infoBtn.setSizePolicy(sizePolicy1)
self.infoBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.infoBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.infoBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.infoBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon9 = QIcon()
- icon9.addFile(u":/assets/icons/info.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon9.addFile(
+ ":/assets/icons/info.png", QSize(), QIcon.Mode.Normal, QIcon.State.Off
+ )
self.infoBtn.setIcon(icon9)
self.infoBtn.setIconSize(QSize(24, 24))
self.infoBtn.setCheckable(False)
@@ -332,166 +447,201 @@ def setupUi(self, MainWindow):
self.verticalLayout_6.addWidget(self.infoBtn, 0, Qt.AlignmentFlag.AlignHCenter)
self.exitBtn = QPushButton(self.bottomFrame)
- self.exitBtn.setObjectName(u"exitBtn")
+ self.exitBtn.setObjectName("exitBtn")
sizePolicy1.setHeightForWidth(self.exitBtn.sizePolicy().hasHeightForWidth())
self.exitBtn.setSizePolicy(sizePolicy1)
self.exitBtn.setCursor(QCursor(Qt.CursorShape.PointingHandCursor))
self.exitBtn.setFocusPolicy(Qt.FocusPolicy.NoFocus)
- self.exitBtn.setStyleSheet(u"QPushButton {\n"
-" color: white;\n"
-" border: 0px;\n"
-" padding: 8px 8px;\n"
-" border-radius: 5px;\n"
-" margin-top: 5px;\n"
-"}\n"
-"")
+ self.exitBtn.setStyleSheet(
+ "QPushButton {\n"
+ " color: white;\n"
+ " border: 0px;\n"
+ " padding: 8px 8px;\n"
+ " border-radius: 5px;\n"
+ " margin-top: 5px;\n"
+ "}\n"
+ ""
+ )
icon10 = QIcon()
- icon10.addFile(u":/assets/icons/close-window-64.ico", QSize(), QIcon.Mode.Normal, QIcon.State.Off)
+ icon10.addFile(
+ ":/assets/icons/close-window-64.ico",
+ QSize(),
+ QIcon.Mode.Normal,
+ QIcon.State.Off,
+ )
self.exitBtn.setIcon(icon10)
self.exitBtn.setIconSize(QSize(24, 24))
self.verticalLayout_6.addWidget(self.exitBtn, 0, Qt.AlignmentFlag.AlignHCenter)
-
- self.verticalLayout_7.addWidget(self.bottomFrame, 0, Qt.AlignmentFlag.AlignHCenter)
-
+ self.verticalLayout_7.addWidget(
+ self.bottomFrame, 0, Qt.AlignmentFlag.AlignHCenter
+ )
self.horizontalLayout_5.addWidget(self.leftPanelWidget)
self.appContentArea = QWidget(self.centralwidget)
- self.appContentArea.setObjectName(u"appContentArea")
- sizePolicy3 = QSizePolicy(QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Preferred)
+ self.appContentArea.setObjectName("appContentArea")
+ sizePolicy3 = QSizePolicy(
+ QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Preferred
+ )
sizePolicy3.setHorizontalStretch(0)
sizePolicy3.setVerticalStretch(0)
- sizePolicy3.setHeightForWidth(self.appContentArea.sizePolicy().hasHeightForWidth())
+ sizePolicy3.setHeightForWidth(
+ self.appContentArea.sizePolicy().hasHeightForWidth()
+ )
self.appContentArea.setSizePolicy(sizePolicy3)
- self.appContentArea.setStyleSheet(u"")
+ self.appContentArea.setStyleSheet("")
self.verticalLayout_13 = QVBoxLayout(self.appContentArea)
self.verticalLayout_13.setSpacing(0)
- self.verticalLayout_13.setObjectName(u"verticalLayout_13")
+ self.verticalLayout_13.setObjectName("verticalLayout_13")
self.verticalLayout_13.setContentsMargins(5, 0, 0, 0)
self.appHeaderWidget = QWidget(self.appContentArea)
- self.appHeaderWidget.setObjectName(u"appHeaderWidget")
- self.appHeaderWidget.setStyleSheet(u"")
+ self.appHeaderWidget.setObjectName("appHeaderWidget")
+ self.appHeaderWidget.setStyleSheet("")
self.horizontalLayout = QHBoxLayout(self.appHeaderWidget)
- self.horizontalLayout.setObjectName(u"horizontalLayout")
+ self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout_13.addWidget(self.appHeaderWidget)
self.stackedWidget = QStackedWidget(self.appContentArea)
- self.stackedWidget.setObjectName(u"stackedWidget")
+ self.stackedWidget.setObjectName("stackedWidget")
self.stackedWidget.setAcceptDrops(True)
self.splashPage = QWidget()
- self.splashPage.setObjectName(u"splashPage")
- self.splashPage.setStyleSheet(u"")
+ self.splashPage.setObjectName("splashPage")
+ self.splashPage.setStyleSheet("")
self.verticalLayout_3 = QVBoxLayout(self.splashPage)
- self.verticalLayout_3.setObjectName(u"verticalLayout_3")
+ self.verticalLayout_3.setObjectName("verticalLayout_3")
self.splashVerticalWidget = QWidget(self.splashPage)
- self.splashVerticalWidget.setObjectName(u"splashVerticalWidget")
+ self.splashVerticalWidget.setObjectName("splashVerticalWidget")
self.splashVerticalWidget.setAcceptDrops(True)
- self.splashVerticalWidget.setStyleSheet(u"/*This setting with override the style sheet. If you intend on creating a different style for this such as a light theme then I recommend that you remove this style.*/\n"
-"QWidget{\n"
-" background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0 rgba(30, 30, 30, 255), stop:1 rgba(60, 60, 60, 255));\n"
-"}")
+ self.splashVerticalWidget.setStyleSheet(
+ "/*This setting with override the style sheet. If you intend on creating a different style for this such as a light theme then I recommend that you remove this style.*/\n"
+ "QWidget{\n"
+ " background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0 rgba(30, 30, 30, 255), stop:1 rgba(60, 60, 60, 255));\n"
+ "}"
+ )
self.verticalLayout = QVBoxLayout(self.splashVerticalWidget)
- self.verticalLayout.setObjectName(u"verticalLayout")
- self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
+ self.verticalLayout.setObjectName("verticalLayout")
+ self.verticalSpacer = QSpacerItem(
+ 20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
+ )
self.verticalLayout.addItem(self.verticalSpacer)
self.Logo = QLabel(self.splashVerticalWidget)
- self.Logo.setObjectName(u"Logo")
+ self.Logo.setObjectName("Logo")
sizePolicy3.setHeightForWidth(self.Logo.sizePolicy().hasHeightForWidth())
self.Logo.setSizePolicy(sizePolicy3)
font = QFont()
- font.setFamilies([u"Montserrat 13"])
+ font.setFamilies(["Montserrat 13"])
font.setBold(True)
font.setUnderline(True)
self.Logo.setFont(font)
- self.Logo.setStyleSheet(u"background: transparent")
- self.Logo.setPixmap(QPixmap(u":/assets/images/remove_background_200_zoom.png"))
+ self.Logo.setStyleSheet("background: transparent")
+ self.Logo.setPixmap(QPixmap(":/assets/images/remove_background_200_zoom.png"))
self.Logo.setScaledContents(False)
self.Logo.setMargin(0)
- self.verticalLayout.addWidget(self.Logo, 0, Qt.AlignmentFlag.AlignHCenter|Qt.AlignmentFlag.AlignBottom)
+ self.verticalLayout.addWidget(
+ self.Logo, 0, Qt.AlignmentFlag.AlignHCenter | Qt.AlignmentFlag.AlignBottom
+ )
self.subTitle = QLabel(self.splashVerticalWidget)
- self.subTitle.setObjectName(u"subTitle")
+ self.subTitle.setObjectName("subTitle")
font1 = QFont()
- font1.setFamilies([u"Montserrat"])
+ font1.setFamilies(["Montserrat"])
font1.setWeight(QFont.Thin)
font1.setKerning(True)
self.subTitle.setFont(font1)
self.subTitle.setAutoFillBackground(False)
- self.subTitle.setStyleSheet(u"QLabel {\n"
-" background-color: transparent;\n"
-" color: red;\n"
-" font-family: Montserrat;\n"
-" font-size: 24px;\n"
-" letter-spacing: 15px;\n"
-"}")
+ self.subTitle.setStyleSheet(
+ "QLabel {\n"
+ " background-color: transparent;\n"
+ " color: red;\n"
+ " font-family: Montserrat;\n"
+ " font-size: 24px;\n"
+ " letter-spacing: 15px;\n"
+ "}"
+ )
self.subTitle.setTextFormat(Qt.TextFormat.AutoText)
- self.verticalLayout.addWidget(self.subTitle, 0, Qt.AlignmentFlag.AlignHCenter|Qt.AlignmentFlag.AlignVCenter)
+ self.verticalLayout.addWidget(
+ self.subTitle,
+ 0,
+ Qt.AlignmentFlag.AlignHCenter | Qt.AlignmentFlag.AlignVCenter,
+ )
- self.verticalSpacer_4 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
+ self.verticalSpacer_4 = QSpacerItem(
+ 20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
+ )
self.verticalLayout.addItem(self.verticalSpacer_4)
-
self.verticalLayout_3.addWidget(self.splashVerticalWidget)
self.stackedWidget.addWidget(self.splashPage)
self.summaryPage = QWidget()
- self.summaryPage.setObjectName(u"summaryPage")
+ self.summaryPage.setObjectName("summaryPage")
self.verticalLayout_4 = QVBoxLayout(self.summaryPage)
- self.verticalLayout_4.setObjectName(u"verticalLayout_4")
+ self.verticalLayout_4.setObjectName("verticalLayout_4")
self.tabWidget = QTabWidget(self.summaryPage)
- self.tabWidget.setObjectName(u"tabWidget")
+ self.tabWidget.setObjectName("tabWidget")
sizePolicy3.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
self.tabWidget.setSizePolicy(sizePolicy3)
- self.tabWidget.setStyleSheet(u"")
+ self.tabWidget.setStyleSheet("")
self.tabWidget.setDocumentMode(False)
self.tabWidget.setTabsClosable(True)
self.tabWidget.setMovable(True)
self.tab = QWidget()
- self.tab.setObjectName(u"tab")
+ self.tab.setObjectName("tab")
self.tab.setEnabled(False)
- self.tab.setStyleSheet(u"")
+ self.tab.setStyleSheet("")
self.verticalLayout_2 = QVBoxLayout(self.tab)
- self.verticalLayout_2.setObjectName(u"verticalLayout_2")
+ self.verticalLayout_2.setObjectName("verticalLayout_2")
self.tabWidget.addTab(self.tab, "")
self.verticalLayout_4.addWidget(self.tabWidget)
self.stackedWidget.addWidget(self.summaryPage)
self.subgraphPage = QWidget()
- self.subgraphPage.setObjectName(u"subgraphPage")
+ self.subgraphPage.setObjectName("subgraphPage")
self.verticalLayout_37 = QVBoxLayout(self.subgraphPage)
- self.verticalLayout_37.setObjectName(u"verticalLayout_37")
+ self.verticalLayout_37.setObjectName("verticalLayout_37")
self.widget_2 = QWidget(self.subgraphPage)
- self.widget_2.setObjectName(u"widget_2")
+ self.widget_2.setObjectName("widget_2")
sizePolicy3.setHeightForWidth(self.widget_2.sizePolicy().hasHeightForWidth())
self.widget_2.setSizePolicy(sizePolicy3)
- self.widget_2.setStyleSheet(u"")
+ self.widget_2.setStyleSheet("")
self.verticalLayout_10 = QVBoxLayout(self.widget_2)
- self.verticalLayout_10.setObjectName(u"verticalLayout_10")
+ self.verticalLayout_10.setObjectName("verticalLayout_10")
self.subgraphIcon = QLabel(self.widget_2)
- self.subgraphIcon.setObjectName(u"subgraphIcon")
- sizePolicy4 = QSizePolicy(QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Minimum)
+ self.subgraphIcon.setObjectName("subgraphIcon")
+ sizePolicy4 = QSizePolicy(
+ QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Minimum
+ )
sizePolicy4.setHorizontalStretch(0)
sizePolicy4.setVerticalStretch(0)
- sizePolicy4.setHeightForWidth(self.subgraphIcon.sizePolicy().hasHeightForWidth())
+ sizePolicy4.setHeightForWidth(
+ self.subgraphIcon.sizePolicy().hasHeightForWidth()
+ )
self.subgraphIcon.setSizePolicy(sizePolicy4)
- self.subgraphIcon.setPixmap(QPixmap(u":/assets/icons/subgraph.png"))
+ self.subgraphIcon.setPixmap(QPixmap(":/assets/icons/subgraph.png"))
- self.verticalLayout_10.addWidget(self.subgraphIcon, 0, Qt.AlignmentFlag.AlignHCenter|Qt.AlignmentFlag.AlignBottom)
+ self.verticalLayout_10.addWidget(
+ self.subgraphIcon,
+ 0,
+ Qt.AlignmentFlag.AlignHCenter | Qt.AlignmentFlag.AlignBottom,
+ )
self.comingSoonLabel = QLabel(self.widget_2)
- self.comingSoonLabel.setObjectName(u"comingSoonLabel")
-
- self.verticalLayout_10.addWidget(self.comingSoonLabel, 0, Qt.AlignmentFlag.AlignHCenter|Qt.AlignmentFlag.AlignTop)
+ self.comingSoonLabel.setObjectName("comingSoonLabel")
+ self.verticalLayout_10.addWidget(
+ self.comingSoonLabel,
+ 0,
+ Qt.AlignmentFlag.AlignHCenter | Qt.AlignmentFlag.AlignTop,
+ )
self.verticalLayout_37.addWidget(self.widget_2)
@@ -499,12 +649,11 @@ def setupUi(self, MainWindow):
self.verticalLayout_13.addWidget(self.stackedWidget)
-
self.horizontalLayout_5.addWidget(self.appContentArea)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QStatusBar(MainWindow)
- self.statusbar.setObjectName(u"statusbar")
+ self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
@@ -513,63 +662,102 @@ def setupUi(self, MainWindow):
self.stackedWidget.setCurrentIndex(0)
self.tabWidget.setCurrentIndex(0)
-
QMetaObject.connectSlotsByName(MainWindow)
+
# setupUi
def retranslateUi(self, MainWindow):
- MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"DigestAI", None))
-#if QT_CONFIG(tooltip)
- self.openFileBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Open a local model file (Ctrl-O)", None))
-#endif // QT_CONFIG(tooltip)
+ MainWindow.setWindowTitle(
+ QCoreApplication.translate("MainWindow", "DigestAI", None)
+ )
+ # if QT_CONFIG(tooltip)
+ self.openFileBtn.setToolTip(
+ QCoreApplication.translate(
+ "MainWindow",
+ "Open (Ctrl-O)
",
+ None,
+ )
+ )
+ # endif // QT_CONFIG(tooltip)
self.openFileBtn.setText("")
-#if QT_CONFIG(shortcut)
- self.openFileBtn.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+O", None))
-#endif // QT_CONFIG(shortcut)
-#if QT_CONFIG(tooltip)
- self.openFolderBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Multi-Model Analysis", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(shortcut)
+ self.openFileBtn.setShortcut(
+ QCoreApplication.translate("MainWindow", "Ctrl+O", None)
+ )
+ # endif // QT_CONFIG(shortcut)
+ # if QT_CONFIG(tooltip)
+ self.openFolderBtn.setToolTip(
+ QCoreApplication.translate(
+ "MainWindow",
+ "Multi-Model Analysis
",
+ None,
+ )
+ )
+ # endif // QT_CONFIG(tooltip)
self.openFolderBtn.setText("")
-#if QT_CONFIG(tooltip)
- self.huggingfaceBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Huggingface", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
+ self.huggingfaceBtn.setToolTip(
+ QCoreApplication.translate("MainWindow", "Huggingface", None)
+ )
+ # endif // QT_CONFIG(tooltip)
self.huggingfaceBtn.setText("")
-#if QT_CONFIG(tooltip)
- self.summaryBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Summary", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
+ self.summaryBtn.setToolTip(
+ QCoreApplication.translate("MainWindow", "Summary", None)
+ )
+ # endif // QT_CONFIG(tooltip)
self.summaryBtn.setText("")
-#if QT_CONFIG(tooltip)
- self.saveBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Save Report (Ctrl-S)", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
+ self.saveBtn.setToolTip(
+ QCoreApplication.translate("MainWindow", "Save Report (Ctrl-S)", None)
+ )
+ # endif // QT_CONFIG(tooltip)
self.saveBtn.setText("")
-#if QT_CONFIG(shortcut)
- self.saveBtn.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+S", None))
-#endif // QT_CONFIG(shortcut)
-#if QT_CONFIG(tooltip)
- self.nodesListBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Node List", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(shortcut)
+ self.saveBtn.setShortcut(
+ QCoreApplication.translate("MainWindow", "Ctrl+S", None)
+ )
+ # endif // QT_CONFIG(shortcut)
+ # if QT_CONFIG(tooltip)
+ self.nodesListBtn.setToolTip(
+ QCoreApplication.translate("MainWindow", "Node List", None)
+ )
+ # endif // QT_CONFIG(tooltip)
self.nodesListBtn.setText("")
-#if QT_CONFIG(shortcut)
- self.nodesListBtn.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+S", None))
-#endif // QT_CONFIG(shortcut)
-#if QT_CONFIG(tooltip)
- self.subgraphBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Subgraph", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(shortcut)
+ self.nodesListBtn.setShortcut(
+ QCoreApplication.translate("MainWindow", "Ctrl+S", None)
+ )
+ # endif // QT_CONFIG(shortcut)
+ # if QT_CONFIG(tooltip)
+ self.subgraphBtn.setToolTip(
+ QCoreApplication.translate("MainWindow", "Subgraph", None)
+ )
+ # endif // QT_CONFIG(tooltip)
self.subgraphBtn.setText("")
-#if QT_CONFIG(tooltip)
- self.infoBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Info", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
+ self.infoBtn.setToolTip(QCoreApplication.translate("MainWindow", "Info", None))
+ # endif // QT_CONFIG(tooltip)
self.infoBtn.setText("")
-#if QT_CONFIG(tooltip)
- self.exitBtn.setToolTip(QCoreApplication.translate("MainWindow", u"Exit", None))
-#endif // QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
+ self.exitBtn.setToolTip(QCoreApplication.translate("MainWindow", "Exit", None))
+ # endif // QT_CONFIG(tooltip)
self.exitBtn.setText("")
self.Logo.setText("")
-#if QT_CONFIG(tooltip)
+ # if QT_CONFIG(tooltip)
self.tabWidget.setToolTip("")
-#endif // QT_CONFIG(tooltip)
- self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QCoreApplication.translate("MainWindow", u"Tab 1", None))
+ # endif // QT_CONFIG(tooltip)
+ self.tabWidget.setTabText(
+ self.tabWidget.indexOf(self.tab),
+ QCoreApplication.translate("MainWindow", "Tab 1", None),
+ )
self.subgraphIcon.setText("")
- self.comingSoonLabel.setText(QCoreApplication.translate("MainWindow", u"Coming soon...", None))
- # retranslateUi
+ self.comingSoonLabel.setText(
+ QCoreApplication.translate(
+ "MainWindow",
+ 'Coming soon...',
+ None,
+ )
+ )
+ # retranslateUi
diff --git a/src/utils/onnx_utils.py b/src/utils/onnx_utils.py
index d8a6894..4d4b293 100644
--- a/src/utils/onnx_utils.py
+++ b/src/utils/onnx_utils.py
@@ -1,95 +1,19 @@
# Copyright(C) 2024 Advanced Micro Devices, Inc. All rights reserved.
import os
-import csv
import tempfile
-from uuid import uuid4
-from collections import Counter, OrderedDict, defaultdict
-from typing import List, Dict, Optional, Any, Tuple, Union, cast
-from datetime import datetime
+from collections import Counter
+from typing import List, Optional, Tuple, Union
import numpy as np
import onnx
import onnxruntime as ort
-from prettytable import PrettyTable
-
-
-class NodeParsingException(Exception):
- pass
-
-
-# The classes are for type aliasing. Once python 3.10 is the minimum we can switch to TypeAlias
-class NodeShapeCounts(defaultdict[str, Counter]):
- def __init__(self):
- super().__init__(Counter) # Initialize with the Counter factory
-
-
-class NodeTypeCounts(Dict[str, int]):
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
-
-class TensorInfo:
- "Used to store node input and output tensor information"
-
- def __init__(self) -> None:
- self.dtype: Optional[str] = None
- self.dtype_bytes: Optional[int] = None
- self.size_kbytes: Optional[float] = None
- self.shape: List[Union[int, str]] = []
-
-
-class TensorData(OrderedDict[str, TensorInfo]):
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
-
-class NodeInfo:
- def __init__(self) -> None:
- self.flops: Optional[int] = None
- self.parameters: int = 0
- self.node_type: Optional[str] = None
- self.attributes: OrderedDict[str, Any] = OrderedDict()
- # We use an ordered dictionary because the order in which
- # the inputs and outputs are listed in the node matter.
- self.inputs = TensorData()
- self.outputs = TensorData()
-
- def get_input(self, index: int) -> TensorInfo:
- return list(self.inputs.values())[index]
-
- def get_output(self, index: int) -> TensorInfo:
- return list(self.outputs.values())[index]
-
- def __str__(self):
- """Provides a human-readable string representation of NodeInfo."""
- output = [
- f"Node Type: {self.node_type}",
- f"FLOPs: {self.flops if self.flops is not None else 'N/A'}",
- f"Parameters: {self.parameters}",
- ]
-
- if self.attributes:
- output.append("Attributes:")
- for key, value in self.attributes.items():
- output.append(f" - {key}: {value}")
-
- if self.inputs:
- output.append("Inputs:")
- for name, tensor in self.inputs.items():
- output.append(f" - {name}: {tensor}")
-
- if self.outputs:
- output.append("Outputs:")
- for name, tensor in self.outputs.items():
- output.append(f" - {name}: {tensor}")
-
- return "\n".join(output)
-
-
-# The classes are for type aliasing. Once python 3.10 is the minimum we can switch to TypeAlias
-class NodeData(OrderedDict[str, NodeInfo]):
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
+from digest.model_class.digest_model import (
+ NodeTypeCounts,
+ NodeData,
+ NodeShapeCounts,
+ TensorData,
+ TensorInfo,
+)
# Convert tensor type to human-readable string and size in bytes
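Note on the aliases re-imported above: the removed comment explains that NodeShapeCounts, NodeData, TensorData, and friends are subclasses only because typing.TypeAlias requires Python 3.10. A minimal sketch, not part of this patch, of what the 3.10+ alias form could look like; the make_node_shape_counts helper is a hypothetical name:

    # Sketch only - assumes Python >= 3.10; not code from this repository.
    from collections import Counter, defaultdict
    from typing import TypeAlias

    NodeTypeCounts: TypeAlias = dict[str, int]
    NodeShapeCounts: TypeAlias = defaultdict[str, Counter]

    def make_node_shape_counts() -> NodeShapeCounts:
        # The subclass version bakes the Counter factory into __init__;
        # a plain alias needs a small factory helper instead.
        return defaultdict(Counter)
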
@@ -117,706 +41,6 @@ def tensor_type_to_str_and_size(elem_type) -> Tuple[str, int]:
return type_mapping.get(elem_type, ("unknown", 0))
-class DigestOnnxModel:
- def __init__(
- self,
- onnx_model: onnx.ModelProto,
- onnx_filepath: Optional[str] = None,
- model_name: Optional[str] = None,
- save_proto: bool = True,
- ) -> None:
- # Public members exposed to the API
- self.unique_id: str = str(uuid4())
- self.filepath: Optional[str] = onnx_filepath
- self.model_proto: Optional[onnx.ModelProto] = onnx_model if save_proto else None
- self.model_name: Optional[str] = model_name
- self.model_version: Optional[int] = None
- self.graph_name: Optional[str] = None
- self.producer_name: Optional[str] = None
- self.producer_version: Optional[str] = None
- self.ir_version: Optional[int] = None
- self.opset: Optional[int] = None
- self.imports: Dict[str, int] = {}
- self.node_type_counts: NodeTypeCounts = NodeTypeCounts()
- self.model_flops: Optional[int] = None
- self.model_parameters: int = 0
- self.node_type_flops: Dict[str, int] = {}
- self.node_type_parameters: Dict[str, int] = {}
- self.per_node_info = NodeData()
- self.model_inputs = TensorData()
- self.model_outputs = TensorData()
-
- # Private members not intended to be exposed
- self.input_tensors_: Dict[str, onnx.ValueInfoProto] = {}
- self.output_tensors_: Dict[str, onnx.ValueInfoProto] = {}
- self.value_tensors_: Dict[str, onnx.ValueInfoProto] = {}
- self.init_tensors_: Dict[str, onnx.TensorProto] = {}
-
- self.update_state(onnx_model)
-
- def update_state(self, model_proto: onnx.ModelProto) -> None:
- self.model_version = model_proto.model_version
- self.graph_name = model_proto.graph.name
- self.producer_name = model_proto.producer_name
- self.producer_version = model_proto.producer_version
- self.ir_version = model_proto.ir_version
- self.opset = get_opset(model_proto)
- self.imports = {
- import_.domain: import_.version for import_ in model_proto.opset_import
- }
-
- self.model_inputs = get_model_input_shapes_types(model_proto)
- self.model_outputs = get_model_output_shapes_types(model_proto)
-
- self.node_type_counts = get_node_type_counts(model_proto)
- self.parse_model_nodes(model_proto)
-
- def get_node_tensor_info_(
- self, onnx_node: onnx.NodeProto
- ) -> Tuple[TensorData, TensorData]:
- """
- This function is set to private because it is not intended to be used
- outside of the DigestOnnxModel class.
- """
-
- input_tensor_info = TensorData()
- for node_input in onnx_node.input:
- input_tensor_info[node_input] = TensorInfo()
- if (
- node_input in self.input_tensors_
- or node_input in self.value_tensors_
- or node_input in self.output_tensors_
- ):
- tensor = (
- self.input_tensors_.get(node_input)
- or self.value_tensors_.get(node_input)
- or self.output_tensors_.get(node_input)
- )
- if tensor:
- for dim in tensor.type.tensor_type.shape.dim:
- if dim.HasField("dim_value"):
- input_tensor_info[node_input].shape.append(dim.dim_value)
- elif dim.HasField("dim_param"):
- input_tensor_info[node_input].shape.append(dim.dim_param)
-
- dtype_str, dtype_bytes = tensor_type_to_str_and_size(
- tensor.type.tensor_type.elem_type
- )
- elif node_input in self.init_tensors_:
- input_tensor_info[node_input].shape.extend(
- [dim for dim in self.init_tensors_[node_input].dims]
- )
- dtype_str, dtype_bytes = tensor_type_to_str_and_size(
- self.init_tensors_[node_input].data_type
- )
- else:
- dtype_str = None
- dtype_bytes = None
-
- input_tensor_info[node_input].dtype = dtype_str
- input_tensor_info[node_input].dtype_bytes = dtype_bytes
-
- if (
- all(isinstance(s, int) for s in input_tensor_info[node_input].shape)
- and dtype_bytes
- ):
- tensor_size = float(
- np.prod(np.array(input_tensor_info[node_input].shape))
- )
- input_tensor_info[node_input].size_kbytes = (
- tensor_size * float(dtype_bytes) / 1024.0
- )
-
- output_tensor_info = TensorData()
- for node_output in onnx_node.output:
- output_tensor_info[node_output] = TensorInfo()
- if (
- node_output in self.input_tensors_
- or node_output in self.value_tensors_
- or node_output in self.output_tensors_
- ):
- tensor = (
- self.input_tensors_.get(node_output)
- or self.value_tensors_.get(node_output)
- or self.output_tensors_.get(node_output)
- )
- if tensor:
- output_tensor_info[node_output].shape.extend(
- [
- int(dim.dim_value)
- for dim in tensor.type.tensor_type.shape.dim
- ]
- )
- dtype_str, dtype_bytes = tensor_type_to_str_and_size(
- tensor.type.tensor_type.elem_type
- )
- elif node_output in self.init_tensors_:
- output_tensor_info[node_output].shape.extend(
- [dim for dim in self.init_tensors_[node_output].dims]
- )
- dtype_str, dtype_bytes = tensor_type_to_str_and_size(
- self.init_tensors_[node_output].data_type
- )
-
- else:
- dtype_str = None
- dtype_bytes = None
-
- output_tensor_info[node_output].dtype = dtype_str
- output_tensor_info[node_output].dtype_bytes = dtype_bytes
-
- if (
- all(isinstance(s, int) for s in output_tensor_info[node_output].shape)
- and dtype_bytes
- ):
- tensor_size = float(
- np.prod(np.array(output_tensor_info[node_output].shape))
- )
- output_tensor_info[node_output].size_kbytes = (
- tensor_size * float(dtype_bytes) / 1024.0
- )
-
- return input_tensor_info, output_tensor_info
-
- def parse_model_nodes(self, onnx_model: onnx.ModelProto) -> None:
- """
- Calculate total number of FLOPs found in the onnx model.
- FLOP is defined as one floating-point operation. This distinguishes
- from multiply-accumulates (MACs) where FLOPs == 2 * MACs.
- """
-
- # Initialze to zero so we can accumulate. Set to None during the
- # model FLOPs calculation if it errors out.
- self.model_flops = 0
-
- # Check to see if the model inputs have any dynamic shapes
- if get_dynamic_input_dims(onnx_model):
- self.model_flops = None
-
- try:
- onnx_model, _ = optimize_onnx_model(onnx_model)
-
- onnx_model = onnx.shape_inference.infer_shapes(
- onnx_model, strict_mode=True, data_prop=True
- )
- except Exception as e: # pylint: disable=broad-except
- print(f"ONNX utils: {str(e)}")
- self.model_flops = None
-
- # If the ONNX model contains one of the following unsupported ops, then this
- # function will return None since the FLOP total is expected to be incorrect
- unsupported_ops = [
- "Einsum",
- "RNN",
- "GRU",
- "DeformConv",
- ]
-
- if not self.input_tensors_:
- self.input_tensors_ = {
- tensor.name: tensor for tensor in onnx_model.graph.input
- }
-
- if not self.output_tensors_:
- self.output_tensors_ = {
- tensor.name: tensor for tensor in onnx_model.graph.output
- }
-
- if not self.value_tensors_:
- self.value_tensors_ = {
- tensor.name: tensor for tensor in onnx_model.graph.value_info
- }
-
- if not self.init_tensors_:
- self.init_tensors_ = {
- tensor.name: tensor for tensor in onnx_model.graph.initializer
- }
-
- for node in onnx_model.graph.node: # pylint: disable=E1101
-
- node_info = NodeInfo()
-
- # TODO: I have encountered models containing nodes with no name. It would be a good idea
- # to have this type of model info fed back to the user through a warnings section.
- if not node.name:
- node.name = f"{node.op_type}_{len(self.per_node_info)}"
-
- node_info.node_type = node.op_type
- input_tensor_info, output_tensor_info = self.get_node_tensor_info_(node)
- node_info.inputs = input_tensor_info
- node_info.outputs = output_tensor_info
-
- # Check if this node has parameters through the init tensors
- for input_name, input_tensor in node_info.inputs.items():
- if input_name in self.init_tensors_:
- if all(isinstance(dim, int) for dim in input_tensor.shape):
- input_parameters = int(np.prod(np.array(input_tensor.shape)))
- node_info.parameters += input_parameters
- self.model_parameters += input_parameters
- self.node_type_parameters[node.op_type] = (
- self.node_type_parameters.get(node.op_type, 0)
- + input_parameters
- )
- else:
- print(f"Tensor with params has unknown shape: {input_name}")
-
- for attribute in node.attribute:
- node_info.attributes.update(attribute_to_dict(attribute))
-
- # if node.name in self.per_node_info:
- # print(f"Node name {node.name} is a duplicate.")
-
- self.per_node_info[node.name] = node_info
-
- if node.op_type in unsupported_ops:
- self.model_flops = None
- node_info.flops = None
-
- try:
-
- if (
- node.op_type == "MatMul"
- or node.op_type == "MatMulInteger"
- or node.op_type == "QLinearMatMul"
- ):
-
- input_a = node_info.get_input(0).shape
- if node.op_type == "QLinearMatMul":
- input_b = node_info.get_input(3).shape
- else:
- input_b = node_info.get_input(1).shape
-
- if not all(
- isinstance(dim, int) for dim in input_a
- ) or not isinstance(input_b[-1], int):
- node_info.flops = None
- self.model_flops = None
- continue
-
- node_info.flops = int(
- 2 * np.prod(np.array(input_a), dtype=np.int64) * input_b[-1]
- )
-
- elif (
- node.op_type == "Mul"
- or node.op_type == "Div"
- or node.op_type == "Add"
- ):
- input_a = node_info.get_input(0).shape
- input_b = node_info.get_input(1).shape
-
- if not all(isinstance(dim, int) for dim in input_a) or not all(
- isinstance(dim, int) for dim in input_b
- ):
- node_info.flops = None
- self.model_flops = None
- continue
-
- node_info.flops = int(
- np.prod(np.array(input_a), dtype=np.int64)
- ) + int(np.prod(np.array(input_b), dtype=np.int64))
-
- elif node.op_type == "Gemm" or node.op_type == "QGemm":
- x_shape = node_info.get_input(0).shape
- if node.op_type == "Gemm":
- w_shape = node_info.get_input(1).shape
- else:
- w_shape = node_info.get_input(3).shape
-
- if not all(isinstance(dim, int) for dim in x_shape) or not all(
- isinstance(dim, int) for dim in w_shape
- ):
- node_info.flops = None
- self.model_flops = None
- continue
-
- mm_dims = [
- (
- x_shape[0]
- if not node_info.attributes.get("transA", 0)
- else x_shape[1]
- ),
- (
- x_shape[1]
- if not node_info.attributes.get("transA", 0)
- else x_shape[0]
- ),
- (
- w_shape[1]
- if not node_info.attributes.get("transB", 0)
- else w_shape[0]
- ),
- ]
-
- node_info.flops = int(
- 2 * np.prod(np.array(mm_dims), dtype=np.int64)
- )
-
- if len(mm_dims) == 3: # if there is a bias input
- bias_shape = node_info.get_input(2).shape
- node_info.flops += int(np.prod(np.array(bias_shape)))
-
- elif (
- node.op_type == "Conv"
- or node.op_type == "ConvInteger"
- or node.op_type == "QLinearConv"
- or node.op_type == "ConvTranspose"
- ):
- # N, C, d1, ..., dn
- x_shape = node_info.get_input(0).shape
-
- # M, C/group, k1, ..., kn. Note C and M are swapped for ConvTranspose
- if node.op_type == "QLinearConv":
- w_shape = node_info.get_input(3).shape
- else:
- w_shape = node_info.get_input(1).shape
-
- if not all(isinstance(dim, int) for dim in x_shape):
- node_info.flops = None
- self.model_flops = None
- continue
-
- x_shape_ints = cast(List[int], x_shape)
- w_shape_ints = cast(List[int], w_shape)
-
- has_bias = False # Note, ConvInteger has no bias
- if node.op_type == "Conv" and len(node_info.inputs) == 3:
- has_bias = True
- elif node.op_type == "QLinearConv" and len(node_info.inputs) == 9:
- has_bias = True
-
- num_dims = len(x_shape_ints) - 2
- strides = node_info.attributes.get(
- "strides", [1] * num_dims
- ) # type: List[int]
- dilation = node_info.attributes.get(
- "dilations", [1] * num_dims
- ) # type: List[int]
- kernel_shape = w_shape_ints[2:]
- batch_size = x_shape_ints[0]
- out_channels = w_shape_ints[0]
- out_dims = [batch_size, out_channels]
- output_shape = node_info.attributes.get(
- "output_shape", []
- ) # type: List[int]
-
- # If output_shape is given then we do not need to compute it ourselves
- # The output_shape attribute does not include batch_size or channels and
- # is only valid for ConvTranspose
- if output_shape:
- out_dims.extend(output_shape)
- else:
- auto_pad = node_info.attributes.get(
- "auto_pad", "NOTSET".encode()
- ).decode()
- # SAME expects padding so that the output_shape = CEIL(input_shape / stride)
- if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
- out_dims.extend(
- [x * s for x, s in zip(x_shape_ints[2:], strides)]
- )
- else:
- # NOTSET means just use pads attribute
- if auto_pad == "NOTSET":
- pads = node_info.attributes.get(
- "pads", [0] * num_dims * 2
- )
- # VALID essentially means no padding
- elif auto_pad == "VALID":
- pads = [0] * num_dims * 2
-
- for i in range(num_dims):
- dim_in = x_shape_ints[i + 2] # type: int
-
- if node.op_type == "ConvTranspose":
- out_dim = (
- strides[i] * (dim_in - 1)
- + ((kernel_shape[i] - 1) * dilation[i] + 1)
- - pads[i]
- - pads[i + num_dims]
- )
- else:
- out_dim = (
- dim_in
- + pads[i]
- + pads[i + num_dims]
- - dilation[i] * (kernel_shape[i] - 1)
- - 1
- ) // strides[i] + 1
-
- out_dims.append(out_dim)
-
- kernel_flops = int(
- np.prod(np.array(kernel_shape)) * w_shape_ints[1]
- )
- output_points = int(np.prod(np.array(out_dims)))
- bias_ops = output_points if has_bias else int(0)
- node_info.flops = 2 * kernel_flops * output_points + bias_ops
-
- elif node.op_type == "LSTM" or node.op_type == "DynamicQuantizeLSTM":
-
- x_shape = node_info.get_input(
- 0
- ).shape # seq_length, batch_size, input_dim
-
- if not all(isinstance(dim, int) for dim in x_shape):
- node_info.flops = None
- self.model_flops = None
- continue
-
- x_shape_ints = cast(List[int], x_shape)
- hidden_size = node_info.attributes["hidden_size"]
- direction = (
- 2
- if node_info.attributes.get("direction")
- == "bidirectional".encode()
- else 1
- )
-
- has_bias = True if len(node_info.inputs) >= 4 else False
- if has_bias:
- bias_shape = node_info.get_input(3).shape
- if isinstance(bias_shape[1], int):
- bias_ops = bias_shape[1]
- else:
- bias_ops = 0
- else:
- bias_ops = 0
- # seq_length, batch_size, input_dim = x_shape
- if not isinstance(bias_ops, int):
- bias_ops = int(0)
- num_gates = int(4)
- gate_input_flops = int(2 * x_shape_ints[2] * hidden_size)
- gate_hid_flops = int(2 * hidden_size * hidden_size)
- unit_flops = (
- num_gates * (gate_input_flops + gate_hid_flops) + bias_ops
- )
- node_info.flops = (
- x_shape_ints[1] * x_shape_ints[0] * direction * unit_flops
- )
- # In this case we just hit an op that doesn't have FLOPs
- else:
- node_info.flops = None
-
- except IndexError as err:
- print(f"Error parsing node {node.name}: {err}")
- node_info.flops = None
- self.model_flops = None
- continue
-
- # Update the model level flops count
- if node_info.flops is not None and self.model_flops is not None:
- self.model_flops += node_info.flops
-
- # Update the node type flops count
- self.node_type_flops[node.op_type] = (
- self.node_type_flops.get(node.op_type, 0) + node_info.flops
- )
-
- def save_txt_report(self, filepath: str) -> None:
-
- parent_dir = os.path.dirname(os.path.abspath(filepath))
- if not os.path.exists(parent_dir):
- raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
-
- report_date = datetime.now().strftime("%B %d, %Y")
-
- with open(filepath, "w", encoding="utf-8") as f_p:
- f_p.write(f"Report created on {report_date}\n")
- if self.filepath:
- f_p.write(f"ONNX file: {self.filepath}\n")
- f_p.write(f"Name of the model: {self.model_name}\n")
- f_p.write(f"Model version: {self.model_version}\n")
- f_p.write(f"Name of the graph: {self.graph_name}\n")
- f_p.write(f"Producer: {self.producer_name} {self.producer_version}\n")
- f_p.write(f"Ir version: {self.ir_version}\n")
- f_p.write(f"Opset: {self.opset}\n\n")
- f_p.write("Import list\n")
- for name, version in self.imports.items():
- f_p.write(f"\t{name}: {version}\n")
-
- f_p.write("\n")
- f_p.write(f"Total graph nodes: {sum(self.node_type_counts.values())}\n")
- f_p.write(f"Number of parameters: {self.model_parameters}\n")
- if self.model_flops:
- f_p.write(f"Number of FLOPs: {self.model_flops}\n")
- f_p.write("\n")
-
- table_op_intensity = PrettyTable()
- table_op_intensity.field_names = ["Operation", "FLOPs", "Intensity (%)"]
- for op_type, count in self.node_type_flops.items():
- if count > 0:
- table_op_intensity.add_row(
- [
- op_type,
- count,
- 100.0 * float(count) / float(self.model_flops),
- ]
- )
-
- f_p.write("Op intensity:\n")
- f_p.write(table_op_intensity.get_string())
- f_p.write("\n\n")
-
- node_counts_table = PrettyTable()
- node_counts_table.field_names = ["Node", "Occurrences"]
- for op, count in self.node_type_counts.items():
- node_counts_table.add_row([op, count])
- f_p.write("Nodes and their occurrences:\n")
- f_p.write(node_counts_table.get_string())
- f_p.write("\n\n")
-
- input_table = PrettyTable()
- input_table.field_names = [
- "Input Name",
- "Shape",
- "Type",
- "Tensor Size (KB)",
- ]
- for input_name, input_details in self.model_inputs.items():
- if input_details.size_kbytes:
- kbytes = f"{input_details.size_kbytes:.2f}"
- else:
- kbytes = ""
-
- input_table.add_row(
- [
- input_name,
- input_details.shape,
- input_details.dtype,
- kbytes,
- ]
- )
- f_p.write("Input Tensor(s) Information:\n")
- f_p.write(input_table.get_string())
- f_p.write("\n\n")
-
- output_table = PrettyTable()
- output_table.field_names = [
- "Output Name",
- "Shape",
- "Type",
- "Tensor Size (KB)",
- ]
- for output_name, output_details in self.model_outputs.items():
- if output_details.size_kbytes:
- kbytes = f"{output_details.size_kbytes:.2f}"
- else:
- kbytes = ""
-
- output_table.add_row(
- [
- output_name,
- output_details.shape,
- output_details.dtype,
- kbytes,
- ]
- )
- f_p.write("Output Tensor(s) Information:\n")
- f_p.write(output_table.get_string())
- f_p.write("\n\n")
-
- def save_nodes_csv_report(self, filepath: str) -> None:
- save_nodes_csv_report(self.per_node_info, filepath)
-
- def get_node_type_counts(self) -> Union[NodeTypeCounts, None]:
- if not self.node_type_counts and self.model_proto:
- self.node_type_counts = get_node_type_counts(self.model_proto)
- return self.node_type_counts if self.node_type_counts else None
-
- def get_node_shape_counts(self) -> NodeShapeCounts:
- tensor_shape_counter = NodeShapeCounts()
- for _, info in self.per_node_info.items():
- shape_hash = tuple([tuple(v.shape) for _, v in info.inputs.items()])
- if info.node_type:
- tensor_shape_counter[info.node_type][shape_hash] += 1
- return tensor_shape_counter
-
-
-def save_nodes_csv_report(node_data: NodeData, filepath: str) -> None:
-
- parent_dir = os.path.dirname(os.path.abspath(filepath))
- if not os.path.exists(parent_dir):
- raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
-
- flattened_data = []
- fieldnames = ["Node Name", "Node Type", "Parameters", "FLOPs", "Attributes"]
- input_fieldnames = []
- output_fieldnames = []
- for name, node_info in node_data.items():
- row = OrderedDict()
- row["Node Name"] = name
- row["Node Type"] = str(node_info.node_type)
- row["Parameters"] = str(node_info.parameters)
- row["FLOPs"] = str(node_info.flops)
- if node_info.attributes:
- row["Attributes"] = str({k: v for k, v in node_info.attributes.items()})
- else:
- row["Attributes"] = ""
-
- for i, (input_name, input_info) in enumerate(node_info.inputs.items()):
- column_name = f"Input{i+1} (Shape, Dtype, Size (kB))"
- row[column_name] = (
- f"{input_name} ({input_info.shape}, {input_info.dtype}, {input_info.size_kbytes})"
- )
-
- # Dynamically add input column names to fieldnames if not already present
- if column_name not in input_fieldnames:
- input_fieldnames.append(column_name)
-
- for i, (output_name, output_info) in enumerate(node_info.outputs.items()):
- column_name = f"Output{i+1} (Shape, Dtype, Size (kB))"
- row[column_name] = (
- f"{output_name} ({output_info.shape}, "
- f"{output_info.dtype}, {output_info.size_kbytes})"
- )
-
- # Dynamically add input column names to fieldnames if not already present
- if column_name not in output_fieldnames:
- output_fieldnames.append(column_name)
-
- flattened_data.append(row)
-
- fieldnames = fieldnames + input_fieldnames + output_fieldnames
- with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
- writer = csv.DictWriter(csvfile, fieldnames=fieldnames, lineterminator="\n")
- writer.writeheader()
- writer.writerows(flattened_data)
-
-
-def save_node_type_counts_csv_report(node_data: NodeTypeCounts, filepath: str) -> None:
-
- parent_dir = os.path.dirname(os.path.abspath(filepath))
- if not os.path.exists(parent_dir):
- raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
-
- header = ["Node Type", "Count"]
-
- with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
- writer = csv.writer(csvfile, lineterminator="\n")
- writer.writerow(header)
- for node_type, node_count in node_data.items():
- writer.writerow([node_type, node_count])
-
-
-def save_node_shape_counts_csv_report(
- node_data: NodeShapeCounts, filepath: str
-) -> None:
-
- parent_dir = os.path.dirname(os.path.abspath(filepath))
- if not os.path.exists(parent_dir):
- raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
-
- header = ["Node Type", "Input Tensors Shapes", "Count"]
-
- with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
- writer = csv.writer(csvfile, dialect="excel", lineterminator="\n")
- writer.writerow(header)
- for node_type, node_info in node_data.items():
- info_iter = iter(node_info.items())
- for shape, count in info_iter:
- writer.writerow([node_type, shape, count])
-
-
def load_onnx(onnx_path: str, load_external_data: bool = True) -> onnx.ModelProto:
if os.path.exists(onnx_path):
return onnx.load(onnx_path, load_external_data=load_external_data)
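The parse_model_nodes logic removed above (it now lives in src/digest/model_class/digest_onnx_model.py per this patch) counts FLOPs rather than MACs, so each multiply-accumulate contributes two operations. A minimal sketch, not taken from the moved code, of the MatMul FLOP rule and the Conv output-dimension formula that branch uses, with made-up shapes:

    # Sketch only - mirrors the removed formulas for illustration.
    import numpy as np

    def matmul_flops(shape_a, shape_b):
        # 2 * (elements of A) * (last dim of B): one multiply plus one add
        # per MAC, hence FLOPs == 2 * MACs.
        return int(2 * np.prod(np.array(shape_a), dtype=np.int64) * shape_b[-1])

    def conv_out_dim(dim_in, kernel, stride=1, pad_begin=0, pad_end=0, dilation=1):
        # Spatial output size for explicit ("NOTSET") or zero ("VALID") padding.
        return (dim_in + pad_begin + pad_end - dilation * (kernel - 1) - 1) // stride + 1

    assert matmul_flops([1, 128, 768], [768, 3072]) == 2 * 128 * 768 * 3072
    assert conv_out_dim(224, kernel=7, stride=2, pad_begin=3, pad_end=3) == 112
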
diff --git a/test/test_gui.py b/test/test_gui.py
index 0e1d351..05239c6 100644
--- a/test/test_gui.py
+++ b/test/test_gui.py
@@ -147,6 +147,8 @@ def test_save_tables(self):
node_summary = node_window.main_window.centralWidget()
self.assertIsInstance(node_summary, NodeSummary)
+
+ # This check looks redundant, but it narrows the type to satisfy Pylance
if isinstance(node_summary, NodeSummary):
QTest.mouseClick(
node_summary.ui.saveCsvBtn, Qt.MouseButton.LeftButton
diff --git a/test/test_reports.py b/test/test_reports.py
index a16c4d8..01302a4 100644
--- a/test/test_reports.py
+++ b/test/test_reports.py
@@ -6,7 +6,8 @@
import unittest
import tempfile
import csv
-from utils.onnx_utils import DigestOnnxModel, load_onnx
+import utils.onnx_utils as onnx_utils
+from digest.model_class.digest_onnx_model import DigestOnnxModel
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_ONNX = os.path.join(TEST_DIR, "resnet18.onnx")
@@ -46,10 +47,13 @@ def compare_csv_files(self, file1, file2, skip_lines=0):
self.assertEqual(row1, row2, msg=f"Difference in row: {row1} vs {row2}")
def test_against_example_reports(self):
- model_proto = load_onnx(TEST_ONNX)
+ model_proto = onnx_utils.load_onnx(TEST_ONNX)
model_name = os.path.splitext(os.path.basename(TEST_ONNX))[0]
digest_model = DigestOnnxModel(
- model_proto, onnx_filepath=TEST_ONNX, model_name=model_name, save_proto=False,
+ model_proto,
+ onnx_filepath=TEST_ONNX,
+ model_name=model_name,
+ save_proto=False,
)
with tempfile.TemporaryDirectory() as tmpdir:
From 27f727ad8290d95673ba61a223057a138811267c Mon Sep 17 00:00:00 2001
From: Philip Colangelo
Date: Mon, 9 Dec 2024 16:48:01 -0500
Subject: [PATCH 02/13] Refined support for step 3 - added steps 4-6
---
examples/analysis.py | 26 +-
src/digest/main.py | 272 ++++++++----------
src/digest/model_class/digest_model.py | 81 +++++-
src/digest/model_class/digest_onnx_model.py | 116 ++++----
src/digest/model_class/digest_report_model.py | 135 +++++++++
src/digest/modelsummary.py | 37 ++-
src/digest/multi_model_analysis.py | 40 +--
src/digest/multi_model_selection_page.py | 41 ++-
src/digest/thread.py | 35 ++-
test/resnet18_reports/resnet18_heatmap.png | Bin 0 -> 103019 bytes
test/resnet18_reports/resnet18_histogram.png | Bin 0 -> 10616 bytes
.../resnet18_node_type_counts.csv | 8 +
.../resnet18_nodes.csv} | 0
.../resnet18_report.txt} | 11 +-
test/resnet18_reports/resnet18_report.yaml | 55 ++++
test/test_gui.py | 125 +++++---
test/test_reports.py | 42 ++-
17 files changed, 679 insertions(+), 345 deletions(-)
create mode 100644 src/digest/model_class/digest_report_model.py
create mode 100644 test/resnet18_reports/resnet18_heatmap.png
create mode 100644 test/resnet18_reports/resnet18_histogram.png
create mode 100644 test/resnet18_reports/resnet18_node_type_counts.csv
rename test/{resnet18_test_nodes.csv => resnet18_reports/resnet18_nodes.csv} (100%)
rename test/{resnet18_test_summary.txt => resnet18_reports/resnet18_report.txt} (86%)
create mode 100644 test/resnet18_reports/resnet18_report.yaml
diff --git a/examples/analysis.py b/examples/analysis.py
index a0bc277..0cd6344 100644
--- a/examples/analysis.py
+++ b/examples/analysis.py
@@ -6,14 +6,16 @@
import csv
from collections import Counter, defaultdict
from tqdm import tqdm
+from digest.model_class.digest_model import (
+ NodeShapeCounts,
+ NodeTypeCounts,
+ save_node_shape_counts_csv_report,
+ save_node_type_counts_csv_report,
+)
+from digest.model_class.digest_onnx_model import DigestOnnxModel
from utils.onnx_utils import (
get_dynamic_input_dims,
load_onnx,
- DigestOnnxModel,
- save_node_shape_counts_csv_report,
- save_node_type_counts_csv_report,
- NodeTypeCounts,
- NodeShapeCounts,
)
GLOBAL_MODEL_HEADERS = [
@@ -88,7 +90,7 @@ def main(onnx_files: str, output_dir: str):
# Model summary text report
summary_filepath = os.path.join(output_dir, f"{model_name}_summary.txt")
- digest_model.save_txt_report(summary_filepath)
+ digest_model.save_text_report(summary_filepath)
# Model summary yaml report
summary_filepath = os.path.join(output_dir, f"{model_name}_summary.yaml")
@@ -99,25 +101,23 @@ def main(onnx_files: str, output_dir: str):
digest_model.save_nodes_csv_report(nodes_filepath)
# Save csv containing node type counter
- node_type_counter = digest_model.get_node_type_counts()
node_type_filepath = os.path.join(
output_dir, f"{model_name}_node_type_counts.csv"
)
- if node_type_counter:
- save_node_type_counts_csv_report(node_type_counter, node_type_filepath)
+
+ digest_model.save_node_type_counts_csv_report(node_type_filepath)
# Update global data structure for node type counter
- global_node_type_counter.update(node_type_counter)
+ global_node_type_counter.update(digest_model.get_node_type_counts())
# Save csv containing node shape counts per op_type
- node_shape_counts = digest_model.get_node_shape_counts()
node_shape_filepath = os.path.join(
output_dir, f"{model_name}_node_shape_counts.csv"
)
- save_node_shape_counts_csv_report(node_shape_counts, node_shape_filepath)
+ digest_model.save_node_shape_counts_csv_report(node_shape_filepath)
# Update global data structure for node shape counter
- for node_type, shape_counts in node_shape_counts.items():
+ for node_type, shape_counts in digest_model.get_node_shape_counts().items():
global_node_shape_counter[node_type].update(shape_counts)
if len(onnx_file_list) > 1:
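With this hunk the per-model report helpers become methods on the digest model (and save_txt_report is renamed save_text_report), so examples/analysis.py no longer calls the free functions directly. A minimal usage sketch of the resulting call pattern; the file names are placeholders:

    # Sketch only - illustrates the method-based reporting API after this hunk.
    from digest.model_class.digest_onnx_model import DigestOnnxModel
    from utils.onnx_utils import load_onnx

    model_proto = load_onnx("resnet18.onnx")
    digest_model = DigestOnnxModel(model_proto, model_name="resnet18")

    digest_model.save_text_report("resnet18_report.txt")
    digest_model.save_nodes_csv_report("resnet18_nodes.csv")
    digest_model.save_node_type_counts_csv_report("resnet18_node_type_counts.csv")
    digest_model.save_node_shape_counts_csv_report("resnet18_node_shape_counts.csv")
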
diff --git a/src/digest/main.py b/src/digest/main.py
index 01dc01c..7e71b58 100644
--- a/src/digest/main.py
+++ b/src/digest/main.py
@@ -5,7 +5,7 @@
import sys
import argparse
from datetime import datetime
-from typing import Dict, Tuple, Optional
+from typing import Dict, Tuple, Optional, Union
import tempfile
from enum import IntEnum
import yaml
@@ -33,7 +33,7 @@
QMenu,
)
from PySide6.QtGui import QDragEnterEvent, QDropEvent, QPixmap, QMovie, QIcon, QFont
-from PySide6.QtCore import Qt, QDir
+from PySide6.QtCore import Qt
from digest.dialog import StatusDialog, InfoDialog, WarnDialog, ProgressDialog
from digest.thread import StatsThread, SimilarityThread
@@ -44,8 +44,9 @@
from digest.modelsummary import modelSummary
from digest.node_summary import NodeSummary
from digest.qt_utils import apply_dark_style_sheet
+from digest.model_class.digest_model import DigestModel
from digest.model_class.digest_onnx_model import DigestOnnxModel
-from digest.model_class.digest_model import save_node_type_counts_csv_report
+from digest.model_class.digest_report_model import DigestReportModel
from utils import onnx_utils
GUI_CONFIG = os.path.join(os.path.dirname(__file__), "gui_config.yaml")
@@ -163,11 +164,12 @@ def __init__(self, model_file: Optional[str] = None):
self.status_dialog = None
self.err_open_dialog = None
self.temp_dir = tempfile.TemporaryDirectory()
- self.digest_models: Dict[str, DigestOnnxModel] = {}
+ self.digest_models: Dict[str, Union[DigestOnnxModel, DigestReportModel]] = {}
# QThread containers
self.model_nodes_stats_thread: Dict[str, StatsThread] = {}
self.model_similarity_thread: Dict[str, SimilarityThread] = {}
+
self.model_similarity_report: Dict[str, SimilarityAnalysisReport] = {}
self.ui.singleModelWidget.hide()
@@ -223,11 +225,12 @@ def __init__(self, model_file: Optional[str] = None):
# Load model file if given as input to the executable
if model_file:
- if (
- os.path.exists(model_file)
- and os.path.splitext(model_file)[-1] == ".onnx"
- ):
+ exists = os.path.exists(model_file)
+ ext = os.path.splitext(model_file)[-1]
+ if exists and ext == ".onnx":
self.load_onnx(model_file)
+ elif exists and ext == ".yaml":
+ self.load_report(model_file)
else:
self.err_open_dialog = StatusDialog(
f"Could not open {model_file}", parent=self
@@ -249,6 +252,7 @@ def tab_focused(self, index):
if (
self.stats_save_button_flag[unique_id]
and self.similarity_save_button_flag[unique_id]
+ and not isinstance(widget.digest_model, DigestReportModel)
):
self.ui.saveBtn.setEnabled(True)
else:
@@ -259,11 +263,17 @@ def closeTab(self, index):
if isinstance(summary_widget, modelSummary):
unique_id = summary_widget.digest_model.unique_id
summary_widget.deleteLater()
- tab_thread = self.model_nodes_stats_thread[unique_id]
+
+ tab_thread = self.model_nodes_stats_thread.get(unique_id)
if tab_thread:
tab_thread.exit()
+ tab_thread.wait(5000)
+
if not tab_thread.isRunning():
del self.model_nodes_stats_thread[unique_id]
+ else:
+ print(f"Warning: Thread for {unique_id} did not finish in time")
+
# delete the digest model to free up used memory
if unique_id in self.digest_models:
del self.digest_models[unique_id]
@@ -294,9 +304,9 @@ def openFile(self):
)
bad_ext_dialog.show()
- def update_flops_label(
+ def update_cards(
self,
- digest_model: DigestOnnxModel,
+ digest_model: DigestModel,
unique_id: str,
):
self.digest_models[unique_id].model_flops = digest_model.model_flops
@@ -305,10 +315,11 @@ def update_flops_label(
self.digest_models[unique_id].node_type_parameters = (
digest_model.node_type_parameters
)
- self.digest_models[unique_id].per_node_info = digest_model.per_node_info
+ self.digest_models[unique_id].node_data = digest_model.node_data
# We must iterate over the tabWidget and match to the tab_name because the user
# may have switched the currentTab during the threads execution.
+ curr_index = -1
for index in range(self.ui.tabWidget.count()):
widget = self.ui.tabWidget.widget(index)
if (
@@ -341,11 +352,14 @@ def update_flops_label(
pie_chart_labels,
pie_chart_data,
)
+ curr_index = index
break
self.stats_save_button_flag[unique_id] = True
- if self.ui.tabWidget.currentIndex() == index:
- if self.similarity_save_button_flag[unique_id]:
+ if self.ui.tabWidget.currentIndex() == curr_index:
+ if self.similarity_save_button_flag[unique_id] and not isinstance(
+ digest_model, DigestReportModel
+ ):
self.ui.saveBtn.setEnabled(True)
def open_similarity_report(self, model_id: str, image_path, most_similar_models):
@@ -359,10 +373,11 @@ def update_similarity_widget(
completed_successfully: bool,
model_id: str,
most_similar: str,
- png_filepath: str,
+ png_filepath: Union[str, None],
):
-
widget = None
+ digest_model = None
+ curr_index = -1
for index in range(self.ui.tabWidget.count()):
tab_widget = self.ui.tabWidget.widget(index)
if (
@@ -370,10 +385,12 @@ def update_similarity_widget(
and tab_widget.digest_model.unique_id == model_id
):
widget = tab_widget
+ digest_model = tab_widget.digest_model
+ curr_index = index
break
- if completed_successfully and isinstance(widget, modelSummary):
- widget_width = widget.ui.similarityImg.width()
+ if completed_successfully and isinstance(widget, modelSummary) and png_filepath:
+ widget_width = widget.ui.similarityWidget.width()
widget.ui.similarityImg.setPixmap(
QPixmap(png_filepath).scaledToWidth(widget_width)
)
@@ -383,12 +400,20 @@ def update_similarity_widget(
# Show most correlated models
widget.ui.similarityCorrelation.show()
widget.ui.similarityCorrelationStatic.show()
- most_similar_models = most_similar.split(",")
- text = (
- "\n"
- f"{most_similar_models[0]}, {most_similar_models[1]}, and {most_similar_models[2]}."
- ""
- )
+ if most_similar:
+ most_similar_models = most_similar.split(",")
+ text = (
+ "\n"
+ f"{most_similar_models[0]}, {most_similar_models[1]}, "
+ f"and {most_similar_models[2]}. "
+ ""
+ )
+ else:
+                # Currently the similarity widget expects most_similar_models to
+                # always contain 3 models. For now we just send three empty
+                # strings, but at some point we should handle the arbitrary case.
+ most_similar_models = ["", "", ""]
+ text = ""
# Create option to click to enlarge image
widget.ui.similarityImg.mousePressEvent = (
@@ -404,15 +429,19 @@ def update_similarity_widget(
widget.ui.similarityCorrelation.setText(text)
elif isinstance(widget, modelSummary):
# Remove animation and set text to failing message
- widget.ui.similarityImg.setMovie(QMovie(None))
+ widget.ui.similarityImg.setMovie(QMovie())
widget.ui.similarityImg.setText("Failed to perform similarity analysis")
else:
- print("Tab widget is not of type modelSummary which is unexpected.")
+ print(
+ f"Tab widget is of type {type(widget)} and not of type modelSummary "
+ "which is unexpected."
+ )
- #
self.similarity_save_button_flag[model_id] = True
- if self.ui.tabWidget.currentIndex() == index:
- if self.stats_save_button_flag[model_id]:
+ if self.ui.tabWidget.currentIndex() == curr_index:
+ if self.stats_save_button_flag[model_id] and not isinstance(
+ digest_model, DigestReportModel
+ ):
self.ui.saveBtn.setEnabled(True)
def load_onnx(self, filepath: str):
@@ -456,10 +485,11 @@ def load_onnx(self, filepath: str):
self.digest_models[model_id] = digest_model
# We must set the proto for the model_summary freeze_inputs
- self.digest_models[model_id].model_proto = opt_model
+ digest_model.model_proto = opt_model
- model_summary = modelSummary(self.digest_models[model_id])
- model_summary.freeze_inputs.complete_signal.connect(self.load_onnx)
+ model_summary = modelSummary(digest_model)
+ if model_summary.freeze_inputs:
+ model_summary.freeze_inputs.complete_signal.connect(self.load_onnx)
dynamic_input_dims = onnx_utils.get_dynamic_input_dims(opt_model)
if dynamic_input_dims:
@@ -493,14 +523,13 @@ def load_onnx(self, filepath: str):
model_summary.ui.modelFilename.setText(filepath)
model_summary.ui.generatedDate.setText(datetime.now().strftime("%B %d, %Y"))
- self.digest_models[model_id].model_name = model_name
- self.digest_models[model_id].filepath = filepath
-
- self.digest_models[model_id].model_inputs = (
- onnx_utils.get_model_input_shapes_types(opt_model)
+ digest_model.model_name = model_name
+ digest_model.filepath = filepath
+ digest_model.model_inputs = onnx_utils.get_model_input_shapes_types(
+ opt_model
)
- self.digest_models[model_id].model_outputs = (
- onnx_utils.get_model_output_shapes_types(opt_model)
+ digest_model.model_outputs = onnx_utils.get_model_output_shapes_types(
+ opt_model
)
progress.step()
@@ -511,9 +540,7 @@ def load_onnx(self, filepath: str):
# Kick off model stats thread
self.model_nodes_stats_thread[model_id] = StatsThread()
- self.model_nodes_stats_thread[model_id].completed.connect(
- self.update_flops_label
- )
+ self.model_nodes_stats_thread[model_id].completed.connect(self.update_cards)
self.model_nodes_stats_thread[model_id].model = opt_model
self.model_nodes_stats_thread[model_id].tab_name = model_name
@@ -531,7 +558,7 @@ def load_onnx(self, filepath: str):
model_summary.ui.opHistogramChart.bar_spacing = bar_spacing
model_summary.ui.opHistogramChart.set_data(node_type_counts)
model_summary.ui.nodes.setText(str(sum(node_type_counts.values())))
- self.digest_models[model_id].node_type_counts = node_type_counts
+ digest_model.node_type_counts = node_type_counts
progress.step()
progress.setLabelText("Gathering Model Inputs and Outputs")
@@ -590,24 +617,24 @@ def load_onnx(self, filepath: str):
model_summary.ui.modelProtoTable.setItem(
0, 1, QTableWidgetItem(str(opt_model.model_version))
)
- self.digest_models[model_id].model_version = opt_model.model_version
+ digest_model.model_version = opt_model.model_version
model_summary.ui.modelProtoTable.setItem(
1, 1, QTableWidgetItem(str(opt_model.graph.name))
)
- self.digest_models[model_id].graph_name = opt_model.graph.name
+ digest_model.graph_name = opt_model.graph.name
producer_txt = f"{opt_model.producer_name} {opt_model.producer_version}"
model_summary.ui.modelProtoTable.setItem(
2, 1, QTableWidgetItem(producer_txt)
)
- self.digest_models[model_id].producer_name = opt_model.producer_name
- self.digest_models[model_id].producer_version = opt_model.producer_version
+ digest_model.producer_name = opt_model.producer_name
+ digest_model.producer_version = opt_model.producer_version
model_summary.ui.modelProtoTable.setItem(
3, 1, QTableWidgetItem(str(opt_model.ir_version))
)
- self.digest_models[model_id].ir_version = opt_model.ir_version
+ digest_model.ir_version = opt_model.ir_version
for imp in opt_model.opset_import:
row_idx = model_summary.ui.importsTable.rowCount()
@@ -615,7 +642,7 @@ def load_onnx(self, filepath: str):
if imp.domain == "" or imp.domain == "ai.onnx":
model_summary.ui.opsetVersion.setText(str(imp.version))
domain = "ai.onnx"
- self.digest_models[model_id].opset = imp.version
+ digest_model.opset = imp.version
else:
domain = imp.domain
model_summary.ui.importsTable.setItem(
@@ -626,7 +653,7 @@ def load_onnx(self, filepath: str):
)
row_idx += 1
- self.digest_models[model_id].imports[imp.domain] = imp.version
+ digest_model.imports[imp.domain] = imp.version
progress.step()
progress.setLabelText("Wrapping Up Model Analysis")
@@ -649,6 +676,7 @@ def load_onnx(self, filepath: str):
# Note: Should only be started after the model tab has been created
png_tmp_path = os.path.join(self.temp_dir.name, model_id)
os.makedirs(png_tmp_path, exist_ok=True)
+ assert os.path.exists(png_tmp_path), f"Error with creating {png_tmp_path}"
self.model_similarity_thread[model_id] = SimilarityThread()
self.model_similarity_thread[model_id].completed_successfully.connect(
self.update_similarity_widget
@@ -685,12 +713,10 @@ def load_report(self, filepath: str):
try:
- progress = ProgressDialog("Loading Digest Report File...", 8, self)
+ progress = ProgressDialog("Loading Digest Report File...", 2, self)
QApplication.processEvents() # Process pending events
- with open(filepath, "r", encoding="utf-8") as yaml_f:
- report_data = yaml.safe_load(yaml_f)
- model_name = report_data["model_name"]
+ digest_model = DigestReportModel(filepath)
model_id = digest_model.unique_id
@@ -700,30 +726,7 @@ def load_report(self, filepath: str):
self.digest_models[model_id] = digest_model
- # We must set the proto for the model_summary freeze_inputs
- self.digest_models[model_id].model_proto = opt_model
-
- model_summary = modelSummary(self.digest_models[model_id])
- model_summary.freeze_inputs.complete_signal.connect(self.load_onnx)
-
- dynamic_input_dims = onnx_utils.get_dynamic_input_dims(opt_model)
- if dynamic_input_dims:
- model_summary.ui.freezeButton.setVisible(True)
- model_summary.ui.warningLabel.setText(
- "⚠️ Some model details are unavailable due to dynamic input dimensions. "
- "See section Input Tensor(s) Information below for more details."
- )
- model_summary.ui.warningLabel.show()
-
- elif not opt_passed:
- model_summary.ui.warningLabel.setText(
- "⚠️ The model could not be optimized either due to an ONNX Runtime "
- "session error or it did not pass the ONNX checker."
- )
- model_summary.ui.warningLabel.show()
-
- progress.step()
- progress.setLabelText("Checking for dynamic Inputs")
+ model_summary = modelSummary(digest_model)
self.ui.tabWidget.addTab(model_summary, "")
model_summary.ui.flops.setText("Loading...")
@@ -733,50 +736,25 @@ def load_report(self, filepath: str):
model_summary.ui.similarityCorrelationStatic.hide()
model_summary.file = filepath
- model_summary.setObjectName(model_name)
- model_summary.ui.modelName.setText(model_name)
+ model_summary.setObjectName(digest_model.model_name)
+ model_summary.ui.modelName.setText(digest_model.model_name)
model_summary.ui.modelFilename.setText(filepath)
model_summary.ui.generatedDate.setText(datetime.now().strftime("%B %d, %Y"))
- self.digest_models[model_id].model_name = model_name
- self.digest_models[model_id].filepath = filepath
-
- self.digest_models[model_id].model_inputs = (
- onnx_utils.get_model_input_shapes_types(opt_model)
- )
- self.digest_models[model_id].model_outputs = (
- onnx_utils.get_model_output_shapes_types(opt_model)
+ model_summary.ui.parameters.setText(
+ format(digest_model.model_parameters, ",")
)
- progress.step()
- progress.setLabelText("Calculating Parameter Count")
-
- parameter_count = onnx_utils.get_parameter_count(opt_model)
- model_summary.ui.parameters.setText(format(parameter_count, ","))
-
- # Kick off model stats thread
- self.model_nodes_stats_thread[model_id] = StatsThread()
- self.model_nodes_stats_thread[model_id].completed.connect(
- self.update_flops_label
- )
-
- self.model_nodes_stats_thread[model_id].model = opt_model
- self.model_nodes_stats_thread[model_id].tab_name = model_name
- self.model_nodes_stats_thread[model_id].unique_id = model_id
- self.model_nodes_stats_thread[model_id].start()
-
- progress.step()
- progress.setLabelText("Calculating Node Type Counts")
-
- node_type_counts = onnx_utils.get_node_type_counts(opt_model)
+ node_type_counts = digest_model.node_type_counts
if len(node_type_counts) < 15:
bar_spacing = 40
else:
bar_spacing = 20
+
model_summary.ui.opHistogramChart.bar_spacing = bar_spacing
model_summary.ui.opHistogramChart.set_data(node_type_counts)
+
model_summary.ui.nodes.setText(str(sum(node_type_counts.values())))
- self.digest_models[model_id].node_type_counts = node_type_counts
progress.step()
progress.setLabelText("Gathering Model Inputs and Outputs")
@@ -833,77 +811,65 @@ def load_report(self, filepath: str):
# ModelProto Info
model_summary.ui.modelProtoTable.setItem(
- 0, 1, QTableWidgetItem(str(opt_model.model_version))
+ 0, 1, QTableWidgetItem(str(digest_model.model_data["model_version"]))
)
- self.digest_models[model_id].model_version = opt_model.model_version
model_summary.ui.modelProtoTable.setItem(
- 1, 1, QTableWidgetItem(str(opt_model.graph.name))
+ 1, 1, QTableWidgetItem(str(digest_model.model_data["graph_name"]))
)
- self.digest_models[model_id].graph_name = opt_model.graph.name
- producer_txt = f"{opt_model.producer_name} {opt_model.producer_version}"
+ producer_txt = (
+ f"{digest_model.model_data['producer_name']} "
+ f"{digest_model.model_data['producer_version']}"
+ )
model_summary.ui.modelProtoTable.setItem(
2, 1, QTableWidgetItem(producer_txt)
)
- self.digest_models[model_id].producer_name = opt_model.producer_name
- self.digest_models[model_id].producer_version = opt_model.producer_version
model_summary.ui.modelProtoTable.setItem(
- 3, 1, QTableWidgetItem(str(opt_model.ir_version))
+ 3, 1, QTableWidgetItem(str(digest_model.model_data["ir_version"]))
)
- self.digest_models[model_id].ir_version = opt_model.ir_version
- for imp in opt_model.opset_import:
+ for domain, version in digest_model.model_data["import_list"].items():
row_idx = model_summary.ui.importsTable.rowCount()
model_summary.ui.importsTable.insertRow(row_idx)
- if imp.domain == "" or imp.domain == "ai.onnx":
- model_summary.ui.opsetVersion.setText(str(imp.version))
+ if domain == "" or domain == "ai.onnx":
+ model_summary.ui.opsetVersion.setText(str(version))
domain = "ai.onnx"
- self.digest_models[model_id].opset = imp.version
- else:
- domain = imp.domain
+
model_summary.ui.importsTable.setItem(
row_idx, 0, QTableWidgetItem(str(domain))
)
model_summary.ui.importsTable.setItem(
- row_idx, 1, QTableWidgetItem(str(imp.version))
+ row_idx, 1, QTableWidgetItem(str(version))
)
row_idx += 1
- self.digest_models[model_id].imports[imp.domain] = imp.version
-
progress.step()
progress.setLabelText("Wrapping Up Model Analysis")
model_summary.ui.importsTable.resizeColumnsToContents()
model_summary.ui.modelProtoTable.resizeColumnsToContents()
- model_summary.setObjectName(model_name)
+ model_summary.setObjectName(digest_model.model_name)
new_tab_idx = self.ui.tabWidget.count() - 1
- self.ui.tabWidget.setTabText(new_tab_idx, "".join(model_name))
+ self.ui.tabWidget.setTabText(new_tab_idx, "".join(digest_model.model_name))
self.ui.tabWidget.setCurrentIndex(new_tab_idx)
self.ui.stackedWidget.setCurrentIndex(self.Page.SUMMARY)
self.ui.singleModelWidget.show()
progress.step()
+ self.update_cards(digest_model, digest_model.unique_id)
+
movie = QMovie(":/assets/gifs/load.gif")
model_summary.ui.similarityImg.setMovie(movie)
movie.start()
- # Start similarity Analysis
- # Note: Should only be started after the model tab has been created
- png_tmp_path = os.path.join(self.temp_dir.name, model_id)
- os.makedirs(png_tmp_path, exist_ok=True)
- self.model_similarity_thread[model_id] = SimilarityThread()
- self.model_similarity_thread[model_id].completed_successfully.connect(
- self.update_similarity_widget
+ self.update_similarity_widget(
+ bool(digest_model.similarity_heatmap_path),
+ digest_model.unique_id,
+ "",
+ digest_model.similarity_heatmap_path,
)
- self.model_similarity_thread[model_id].model_filepath = filepath
- self.model_similarity_thread[model_id].png_filepath = os.path.join(
- png_tmp_path, f"heatmap_{model_name}.png"
- )
- self.model_similarity_thread[model_id].model_id = model_id
- self.model_similarity_thread[model_id].start()
progress.close()
@@ -921,6 +887,9 @@ def dropEvent(self, event: QDropEvent):
if file_path.endswith(".onnx"):
self.load_onnx(file_path)
break
+ elif file_path.endswith(".yaml"):
+ self.load_report(file_path)
+ break
## functions for changing menu page
def logo_clicked(self):
@@ -971,11 +940,8 @@ def save_reports(self):
if not save_directory:
return
- # Create a QDir object
- directory = QDir(save_directory)
-
# Check if the directory exists and is writable
- if not directory.exists() and directory.isWritable(): # type: ignore
+ if not os.path.exists(save_directory) or not os.access(save_directory, os.W_OK):
self.show_warning_dialog(
f"The directory {save_directory} is not valid or writable."
)
@@ -996,19 +962,19 @@ def save_reports(self):
node_type_filepath = os.path.join(
save_directory, f"{model_name}_node_type_counts.csv"
)
- node_counter = digest_model.get_node_type_counts()
- if node_counter:
- save_node_type_counts_csv_report(node_counter, node_type_filepath)
+ digest_model.save_node_type_counts_csv_report(node_type_filepath)
# Save the similarity image
- similarity_png = self.model_similarity_report[digest_model.unique_id].grab()
+ similarity_png = self.model_similarity_report[
+ digest_model.unique_id
+ ].enlarged_image_label.grab()
similarity_png.save(
os.path.join(save_directory, f"{model_name}_heatmap.png"), "PNG"
)
# Save the text report
txt_report_filepath = os.path.join(save_directory, f"{model_name}_report.txt")
- digest_model.save_txt_report(txt_report_filepath)
+ digest_model.save_text_report(txt_report_filepath)
# Save the yaml report
yaml_report_filepath = os.path.join(save_directory, f"{model_name}_report.yaml")
@@ -1089,7 +1055,7 @@ def open_node_summary(self):
digest_models = self.digest_models[model_id]
node_summary = NodeSummary(
- model_name=model_name, node_data=digest_models.per_node_info
+ model_name=model_name, node_data=digest_models.node_data
)
self.nodes_window[model_id] = PopupWindow(
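
The save_reports hunk above drops the QDir check in favor of the standard library. A minimal standalone sketch of that validation, with a hypothetical `show_warning` callback standing in for the warning dialog:

import os

def save_directory_is_usable(save_directory: str, show_warning) -> bool:
    # Mirrors the check in save_reports(): the target must exist and be writable.
    if not os.path.exists(save_directory) or not os.access(save_directory, os.W_OK):
        show_warning(f"The directory {save_directory} is not valid or writable.")
        return False
    return True
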
diff --git a/src/digest/model_class/digest_model.py b/src/digest/model_class/digest_model.py
index 130503c..3c2fe12 100644
--- a/src/digest/model_class/digest_model.py
+++ b/src/digest/model_class/digest_model.py
@@ -2,10 +2,19 @@
import os
import csv
+from enum import Enum
+from dataclasses import dataclass, field
+from uuid import uuid4
+from abc import ABC, abstractmethod
from collections import Counter, OrderedDict, defaultdict
from typing import List, Dict, Optional, Any, Union
+class SupportedModelTypes(Enum):
+ ONNX = "onnx"
+ REPORT = "report"
+
+
class NodeParsingException(Exception):
pass
@@ -21,14 +30,13 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
+@dataclass
class TensorInfo:
"Used to store node input and output tensor information"
-
- def __init__(self) -> None:
- self.dtype: Optional[str] = None
- self.dtype_bytes: Optional[int] = None
- self.size_kbytes: Optional[float] = None
- self.shape: List[Union[int, str]] = []
+ dtype: Optional[str] = None
+ dtype_bytes: Optional[int] = None
+ size_kbytes: Optional[float] = None
+ shape: List[Union[int, str]] = field(default_factory=list)
class TensorData(OrderedDict[str, TensorInfo]):
@@ -39,7 +47,7 @@ def __init__(self, *args, **kwargs):
class NodeInfo:
def __init__(self) -> None:
self.flops: Optional[int] = None
- self.parameters: int = 0
+ self.parameters: int = 0 # TODO: should we make this Optional[int] = None?
self.node_type: Optional[str] = None
self.attributes: OrderedDict[str, Any] = OrderedDict()
# We use an ordered dictionary because the order in which
@@ -79,12 +87,59 @@ def __str__(self):
return "\n".join(output)
-# The classes are for type aliasing. Once python 3.10 is the minimum Popwe can switch to TypeAlias
+# The classes are for type aliasing. Once python 3.10 is the minimum we can switch to TypeAlias
class NodeData(OrderedDict[str, NodeInfo]):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
+class DigestModel(ABC):
+ def __init__(self, filepath: str, model_name: str):
+ # Public members exposed to the API
+ self.unique_id: str = str(uuid4())
+ self.filepath: Optional[str] = filepath
+ self.model_name: str = model_name
+ self.model_type: Optional[SupportedModelTypes] = None
+ self.node_type_counts: NodeTypeCounts = NodeTypeCounts()
+ self.model_flops: Optional[int] = None
+ self.model_parameters: int = 0
+ self.node_type_flops: Dict[str, int] = {}
+ self.node_type_parameters: Dict[str, int] = {}
+ self.node_data = NodeData()
+ self.model_inputs = TensorData()
+ self.model_outputs = TensorData()
+
+ def get_node_shape_counts(self) -> NodeShapeCounts:
+ tensor_shape_counter = NodeShapeCounts()
+ for _, info in self.node_data.items():
+ shape_hash = tuple([tuple(v.shape) for _, v in info.inputs.items()])
+ if info.node_type:
+ tensor_shape_counter[info.node_type][shape_hash] += 1
+ return tensor_shape_counter
+
+ @abstractmethod
+ def parse_model_nodes(self, *args) -> None:
+ pass
+
+ @abstractmethod
+ def save_yaml_report(self, filepath: str) -> None:
+ pass
+
+ @abstractmethod
+ def save_text_report(self, filepath: str) -> None:
+ pass
+
+ def save_nodes_csv_report(self, filepath: str) -> None:
+ save_nodes_csv_report(self.node_data, filepath)
+
+ def save_node_type_counts_csv_report(self, filepath: str) -> None:
+ if self.node_type_counts:
+ save_node_type_counts_csv_report(self.node_type_counts, filepath)
+
+ def save_node_shape_counts_csv_report(self, filepath: str) -> None:
+ save_node_shape_counts_csv_report(self.get_node_shape_counts(), filepath)
+
+
def save_nodes_csv_report(node_data: NodeData, filepath: str) -> None:
parent_dir = os.path.dirname(os.path.abspath(filepath))
@@ -136,7 +191,9 @@ def save_nodes_csv_report(node_data: NodeData, filepath: str) -> None:
writer.writerows(flattened_data)
-def save_node_type_counts_csv_report(node_data: NodeTypeCounts, filepath: str) -> None:
+def save_node_type_counts_csv_report(
+ node_type_counts: NodeTypeCounts, filepath: str
+) -> None:
parent_dir = os.path.dirname(os.path.abspath(filepath))
if not os.path.exists(parent_dir):
@@ -147,12 +204,12 @@ def save_node_type_counts_csv_report(node_data: NodeTypeCounts, filepath: str) -
with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
writer = csv.writer(csvfile, lineterminator="\n")
writer.writerow(header)
- for node_type, node_count in node_data.items():
+ for node_type, node_count in node_type_counts.items():
writer.writerow([node_type, node_count])
def save_node_shape_counts_csv_report(
- node_data: NodeShapeCounts, filepath: str
+ node_shape_counts: NodeShapeCounts, filepath: str
) -> None:
parent_dir = os.path.dirname(os.path.abspath(filepath))
@@ -164,7 +221,7 @@ def save_node_shape_counts_csv_report(
with open(filepath, "w", encoding="utf-8", newline="") as csvfile:
writer = csv.writer(csvfile, dialect="excel", lineterminator="\n")
writer.writerow(header)
- for node_type, node_info in node_data.items():
+ for node_type, node_info in node_shape_counts.items():
info_iter = iter(node_info.items())
for shape, count in info_iter:
writer.writerow([node_type, shape, count])
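
With the new DigestModel base class above, the CSV helpers and shape counting are inherited, so a concrete model type only has to populate node_data and implement the abstract report methods. A minimal, purely illustrative subclass (hypothetical, not part of this patch):

from digest.model_class.digest_model import DigestModel, NodeInfo

class MinimalDigestModel(DigestModel):
    """Hypothetical model type used only to illustrate the DigestModel contract."""

    def __init__(self, filepath: str, model_name: str) -> None:
        super().__init__(filepath, model_name)
        # Populate node_data so the inherited CSV and shape-count helpers have data.
        conv = NodeInfo()
        conv.node_type = "Conv"
        self.node_data["conv_0"] = conv

    def parse_model_nodes(self) -> None:
        # Nothing to parse for this illustrative model.
        pass

    def save_yaml_report(self, filepath: str) -> None:
        pass

    def save_text_report(self, filepath: str) -> None:
        pass
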
diff --git a/src/digest/model_class/digest_onnx_model.py b/src/digest/model_class/digest_onnx_model.py
index c96a228..2ee4583 100644
--- a/src/digest/model_class/digest_onnx_model.py
+++ b/src/digest/model_class/digest_onnx_model.py
@@ -1,38 +1,37 @@
# Copyright(C) 2024 Advanced Micro Devices, Inc. All rights reserved.
import os
-from uuid import uuid4
from typing import List, Dict, Optional, Tuple, Union, cast
from datetime import datetime
+import yaml
import numpy as np
import onnx
-import yaml
from prettytable import PrettyTable
from digest.model_class.digest_model import (
+ DigestModel,
+ SupportedModelTypes,
NodeTypeCounts,
- NodeData,
- NodeShapeCounts,
NodeInfo,
TensorData,
TensorInfo,
- save_nodes_csv_report,
)
import utils.onnx_utils as onnx_utils
-class DigestOnnxModel:
+class DigestOnnxModel(DigestModel):
def __init__(
self,
onnx_model: onnx.ModelProto,
- onnx_filepath: Optional[str] = None,
- model_name: Optional[str] = None,
+ onnx_filepath: str = "",
+ model_name: str = "",
save_proto: bool = True,
) -> None:
+ super().__init__(onnx_filepath, model_name)
+
+ self.model_type = SupportedModelTypes.ONNX
+
# Public members exposed to the API
- self.unique_id: str = str(uuid4())
- self.filepath: Optional[str] = onnx_filepath
self.model_proto: Optional[onnx.ModelProto] = onnx_model if save_proto else None
- self.model_name: Optional[str] = model_name
self.model_version: Optional[int] = None
self.graph_name: Optional[str] = None
self.producer_name: Optional[str] = None
@@ -40,14 +39,6 @@ def __init__(
self.ir_version: Optional[int] = None
self.opset: Optional[int] = None
self.imports: Dict[str, int] = {}
- self.node_type_counts: NodeTypeCounts = NodeTypeCounts()
- self.model_flops: Optional[int] = None
- self.model_parameters: int = 0
- self.node_type_flops: Dict[str, int] = {}
- self.node_type_parameters: Dict[str, int] = {}
- self.per_node_info = NodeData()
- self.model_inputs = TensorData()
- self.model_outputs = TensorData()
# Private members not intended to be exposed
self.input_tensors_: Dict[str, onnx.ValueInfoProto] = {}
@@ -242,7 +233,7 @@ def parse_model_nodes(self, onnx_model: onnx.ModelProto) -> None:
# TODO: I have encountered models containing nodes with no name. It would be a good idea
# to have this type of model info fed back to the user through a warnings section.
if not node.name:
- node.name = f"{node.op_type}_{len(self.per_node_info)}"
+ node.name = f"{node.op_type}_{len(self.node_data)}"
node_info.node_type = node.op_type
input_tensor_info, output_tensor_info = self.get_node_tensor_info_(node)
@@ -266,10 +257,10 @@ def parse_model_nodes(self, onnx_model: onnx.ModelProto) -> None:
for attribute in node.attribute:
node_info.attributes.update(onnx_utils.attribute_to_dict(attribute))
- # if node.name in self.per_node_info:
+ # if node.name in self.node_data:
# print(f"Node name {node.name} is a duplicate.")
- self.per_node_info[node.name] = node_info
+ self.node_data[node.name] = node_info
if node.op_type in unsupported_ops:
self.model_flops = None
@@ -515,7 +506,42 @@ def parse_model_nodes(self, onnx_model: onnx.ModelProto) -> None:
self.node_type_flops.get(node.op_type, 0) + node_info.flops
)
- def save_txt_report(self, filepath: str) -> None:
+ def save_yaml_report(self, filepath: str) -> None:
+
+ parent_dir = os.path.dirname(os.path.abspath(filepath))
+ if not os.path.exists(parent_dir):
+ raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
+
+ report_date = datetime.now().strftime("%B %d, %Y")
+
+ input_tensors = dict({k: vars(v) for k, v in self.model_inputs.items()})
+ output_tensors = dict({k: vars(v) for k, v in self.model_outputs.items()})
+
+ yaml_data = {
+ "report_date": report_date,
+ "onnx_file": self.filepath,
+ "model_name": self.model_name,
+ "model_version": self.model_version,
+ "graph_name": self.graph_name,
+ "producer_name": self.producer_name,
+ "producer_version": self.producer_version,
+ "ir_version": self.ir_version,
+ "opset": self.opset,
+ "import_list": self.imports,
+ "graph_nodes": sum(self.node_type_counts.values()),
+ "model_parameters": self.model_parameters,
+ "model_flops": self.model_flops,
+ "node_type_counts": dict(self.node_type_counts),
+ "node_type_flops": dict(self.node_type_flops),
+ "node_type_parameters": self.node_type_parameters,
+ "input_tensors": input_tensors,
+ "output_tensors": output_tensors,
+ }
+
+ with open(filepath, "w", encoding="utf-8") as f_p:
+ yaml.dump(yaml_data, f_p, sort_keys=False)
+
+ def save_text_report(self, filepath: str) -> None:
parent_dir = os.path.dirname(os.path.abspath(filepath))
if not os.path.exists(parent_dir):
@@ -618,51 +644,7 @@ def save_txt_report(self, filepath: str) -> None:
f_p.write(output_table.get_string())
f_p.write("\n\n")
- def save_yaml_report(self, filepath: str) -> None:
-
- parent_dir = os.path.dirname(os.path.abspath(filepath))
- if not os.path.exists(parent_dir):
- raise FileNotFoundError(f"Directory {parent_dir} does not exist.")
-
- report_date = datetime.now().strftime("%B %d, %Y")
-
- input_tensors = dict({k: vars(v) for k, v in self.model_inputs.items()})
- output_tensors = dict({k: vars(v) for k, v in self.model_outputs.items()})
-
- yaml_data = {
- "report_date": report_date,
- "onnx_file": self.filepath,
- "model_name": self.model_name,
- "model_version": self.model_version,
- "graph_name": self.graph_name,
- "producer_name": self.producer_name,
- "ir_version": self.ir_version,
- "opset": self.opset,
- "import_list": self.imports,
- "graph_nodes": sum(self.node_type_counts.values()),
- "model_parameters": self.model_parameters,
- "model_flops": self.model_flops,
- "operator_intensity": self.node_type_flops,
- "node_histogram": dict(self.node_type_counts),
- "input_tensors": input_tensors,
- "output_tensors": output_tensors,
- }
-
- with open(filepath, "w", encoding="utf-8") as f_p:
- yaml.dump(yaml_data, f_p, sort_keys=False)
-
- def save_nodes_csv_report(self, filepath: str) -> None:
- save_nodes_csv_report(self.per_node_info, filepath)
-
def get_node_type_counts(self) -> Union[NodeTypeCounts, None]:
if not self.node_type_counts and self.model_proto:
self.node_type_counts = onnx_utils.get_node_type_counts(self.model_proto)
return self.node_type_counts if self.node_type_counts else None
-
- def get_node_shape_counts(self) -> NodeShapeCounts:
- tensor_shape_counter = NodeShapeCounts()
- for _, info in self.per_node_info.items():
- shape_hash = tuple([tuple(v.shape) for _, v in info.inputs.items()])
- if info.node_type:
- tensor_shape_counter[info.node_type][shape_hash] += 1
- return tensor_shape_counter
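
After this change, save_yaml_report() emits the per-op-type breakdowns under the same keys (node_type_counts, node_type_flops, node_type_parameters) that DigestReportModel later reads back. A minimal sketch of inspecting a saved report, assuming a report file at the hypothetical path below:

import yaml

# Hypothetical path to a report produced by DigestOnnxModel.save_yaml_report()
with open("resnet18_summary.yaml", "r", encoding="utf-8") as f_p:
    report = yaml.safe_load(f_p)

# Keys written by save_yaml_report() and consumed by DigestReportModel
for key in ("model_name", "model_flops", "model_parameters",
            "node_type_counts", "node_type_flops", "node_type_parameters",
            "input_tensors", "output_tensors"):
    print(key, "->", report.get(key))
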
diff --git a/src/digest/model_class/digest_report_model.py b/src/digest/model_class/digest_report_model.py
new file mode 100644
index 0000000..5027ee4
--- /dev/null
+++ b/src/digest/model_class/digest_report_model.py
@@ -0,0 +1,135 @@
+import os
+from collections import OrderedDict
+import csv
+import ast
+import re
+from typing import Tuple, Optional
+import yaml
+from digest.model_class.digest_model import (
+ DigestModel,
+ SupportedModelTypes,
+ NodeData,
+ NodeInfo,
+ TensorData,
+ TensorInfo,
+)
+
+
+def parse_tensor_info(csv_tensor_cell_value) -> Tuple[str, list, str, float]:
+ """This is a helper function that expects the input to come from parsing
+ the nodes csv and extracting either an input or output tensor."""
+
+ # Use regex to split the string into name and details
+ match = re.match(r"(.*?)\s*\((.*)\)$", csv_tensor_cell_value)
+ if not match:
+ raise ValueError(f"Invalid format for tensor info: {csv_tensor_cell_value}")
+
+ name, details = match.groups()
+
+ # Split details, but keep the shape as a single item
+ match = re.match(r"(\[.*?\])\s*,\s*(.*?)\s*,\s*(.*)", details)
+ if not match:
+ raise ValueError(f"Invalid format for tensor details: {details}")
+
+ shape_str, dtype, size = match.groups()
+
+ # Ensure shape is stored as a list
+ shape = ast.literal_eval(shape_str)
+ if not isinstance(shape, list):
+ shape = list(shape)
+
+ return name.strip(), shape, dtype.strip(), float(size.split()[0])
+
+
+class DigestReportModel(DigestModel):
+ def __init__(
+ self,
+ report_filepath: str,
+ ) -> None:
+
+ self.model_type = SupportedModelTypes.REPORT
+
+ self.model_data = OrderedDict()
+ with open(report_filepath, "r", encoding="utf-8") as yaml_f:
+ self.model_data = yaml.safe_load(yaml_f)
+
+ model_name = self.model_data["model_name"]
+ super().__init__(report_filepath, model_name)
+
+ self.similarity_heatmap_path: Optional[str] = None
+ self.node_data = NodeData()
+
+        # Given the path to the digest report, check whether it is a complete cache
+        # so that we can grab the nodes csv data and the similarity heatmap
+ cache_dir = os.path.dirname(os.path.abspath(report_filepath))
+ expected_heatmap_file = os.path.join(cache_dir, f"{model_name}_heatmap.png")
+ if os.path.exists(expected_heatmap_file):
+ self.similarity_heatmap_path = expected_heatmap_file
+
+ expected_nodes_file = os.path.join(cache_dir, f"{model_name}_nodes.csv")
+ if os.path.exists(expected_nodes_file):
+ with open(expected_nodes_file, "r", encoding="utf-8") as csvfile:
+ reader = csv.DictReader(csvfile)
+ for row in reader:
+ node_name = row["Node Name"]
+ node_info = NodeInfo()
+ node_info.node_type = row["Node Type"]
+ if row["Parameters"]:
+ node_info.parameters = int(row["Parameters"])
+ if ast.literal_eval(row["FLOPs"]):
+ node_info.flops = int(row["FLOPs"])
+ node_info.attributes = (
+ OrderedDict(ast.literal_eval(row["Attributes"]))
+ if row["Attributes"]
+ else OrderedDict()
+ )
+
+ node_info.inputs = TensorData()
+ node_info.outputs = TensorData()
+
+ # Process inputs and outputs
+ for key, value in row.items():
+ if key.startswith("Input") and value:
+ input_name, shape, dtype, size = parse_tensor_info(value)
+ node_info.inputs[input_name] = TensorInfo()
+ node_info.inputs[input_name].shape = shape
+ node_info.inputs[input_name].dtype = dtype
+ node_info.inputs[input_name].size_kbytes = size
+
+ elif key.startswith("Output") and value:
+ output_name, shape, dtype, size = parse_tensor_info(value)
+ node_info.outputs[output_name] = TensorInfo()
+ node_info.outputs[output_name].shape = shape
+ node_info.outputs[output_name].dtype = dtype
+ node_info.outputs[output_name].size_kbytes = size
+
+ self.node_data[node_name] = node_info
+
+ # Unpack the model type agnostic values
+ self.model_flops = self.model_data["model_flops"]
+ self.model_parameters = self.model_data["model_parameters"]
+ self.node_type_flops = self.model_data["node_type_flops"]
+ self.node_type_parameters = self.model_data["node_type_parameters"]
+ self.node_type_counts = self.model_data["node_type_counts"]
+
+ self.model_inputs = TensorData(
+ {
+ key: TensorInfo(**val)
+ for key, val in self.model_data["input_tensors"].items()
+ }
+ )
+ self.model_outputs = TensorData(
+ {
+ key: TensorInfo(**val)
+ for key, val in self.model_data["output_tensors"].items()
+ }
+ )
+
+ def parse_model_nodes(self) -> None:
+ """There are no model nodes to parse"""
+
+ def save_yaml_report(self, filepath: str) -> None:
+ """Report models are not intended to be saved"""
+
+ def save_text_report(self, filepath: str) -> None:
+ """Report models are not intended to be saved"""
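
The nodes CSV cells that DigestReportModel reads back are expected to follow a "name ([shape], dtype, size)" layout, which is what the two regexes in parse_tensor_info() assume. A minimal sketch with a hypothetical cell value:

from digest.model_class.digest_report_model import parse_tensor_info

# Hypothetical CSV cell value in the "name ([shape], dtype, size)" layout
cell = "input.1 ([1, 3, 224, 224], float32, 588.0 KB)"
name, shape, dtype, size_kbytes = parse_tensor_info(cell)

print(name)         # input.1
print(shape)        # [1, 3, 224, 224]
print(dtype)        # float32
print(size_kbytes)  # 588.0
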
diff --git a/src/digest/modelsummary.py b/src/digest/modelsummary.py
index 5f732fe..5aa43c9 100644
--- a/src/digest/modelsummary.py
+++ b/src/digest/modelsummary.py
@@ -3,7 +3,7 @@
import os
# pylint: disable=invalid-name
-from typing import Optional
+from typing import Optional, Union
# pylint: disable=no-name-in-module
from PySide6.QtWidgets import QWidget
@@ -15,36 +15,47 @@
from digest.popup_window import PopupWindow
from digest.qt_utils import apply_dark_style_sheet
from digest.model_class.digest_onnx_model import DigestOnnxModel
+from digest.model_class.digest_report_model import DigestReportModel
+
ROOT_FOLDER = os.path.dirname(os.path.abspath(__file__))
class modelSummary(QWidget):
- def __init__(self, digest_model: DigestOnnxModel, parent=None):
+ def __init__(
+ self, digest_model: Union[DigestOnnxModel, DigestReportModel], parent=None
+ ):
super().__init__(parent)
self.ui = Ui_modelSummary()
self.ui.setupUi(self)
apply_dark_style_sheet(self)
self.file: Optional[str] = None
- self.ui.freezeButton.setVisible(False)
- self.ui.freezeButton.clicked.connect(self.open_freeze_inputs)
self.ui.warningLabel.hide()
self.digest_model = digest_model
- self.model_proto: ModelProto = (
- digest_model.model_proto if digest_model.model_proto else ModelProto()
- )
+ self.model_proto: Optional[ModelProto] = None
model_name: str = digest_model.model_name if digest_model.model_name else ""
- self.freeze_inputs = FreezeInputs(self.model_proto, model_name)
- self.freeze_inputs.complete_signal.connect(self.close_freeze_window)
+
+ # There is no freezing if the model is not ONNX
+ self.ui.freezeButton.setVisible(False)
+ self.freeze_inputs: Optional[FreezeInputs] = None
self.freeze_window: Optional[QWidget] = None
+ if isinstance(digest_model, DigestOnnxModel):
+ self.model_proto = (
+ digest_model.model_proto if digest_model.model_proto else ModelProto()
+ )
+ self.freeze_inputs = FreezeInputs(self.model_proto, model_name)
+ self.ui.freezeButton.clicked.connect(self.open_freeze_inputs)
+ self.freeze_inputs.complete_signal.connect(self.close_freeze_window)
+
def open_freeze_inputs(self):
- self.freeze_window = PopupWindow(
- self.freeze_inputs, "Freeze Model Inputs", self
- )
- self.freeze_window.open()
+ if self.freeze_inputs:
+ self.freeze_window = PopupWindow(
+ self.freeze_inputs, "Freeze Model Inputs", self
+ )
+ self.freeze_window.open()
def close_freeze_window(self):
if self.freeze_window:
diff --git a/src/digest/multi_model_analysis.py b/src/digest/multi_model_analysis.py
index 08e3ce6..e63de50 100644
--- a/src/digest/multi_model_analysis.py
+++ b/src/digest/multi_model_analysis.py
@@ -11,12 +11,15 @@
from digest.ui.multimodelanalysis_ui import Ui_multiModelAnalysis
from digest.histogramchartwidget import StackedHistogramWidget
from digest.qt_utils import apply_dark_style_sheet
-from digest.model_class.digest_onnx_model import DigestOnnxModel
from digest.model_class.digest_model import (
+ NodeTypeCounts,
+ NodeShapeCounts,
save_node_shape_counts_csv_report,
save_node_type_counts_csv_report,
)
-from utils import onnx_utils
+from digest.model_class.digest_onnx_model import DigestOnnxModel
+from digest.model_class.digest_report_model import DigestReportModel
+import utils.onnx_utils as onnx_utils
ROOT_FOLDER = os.path.dirname(__file__)
@@ -26,7 +29,7 @@ class MultiModelAnalysis(QWidget):
def __init__(
self,
- model_list: List[DigestOnnxModel],
+ model_list: List[Union[DigestOnnxModel, DigestReportModel]],
parent=None,
):
super().__init__(parent)
@@ -46,9 +49,7 @@ def __init__(
self.global_node_type_counter: Counter[str] = Counter()
# Holds the data for node shape counts across all models
- self.global_node_shape_counter: onnx_utils.NodeShapeCounts = defaultdict(
- Counter
- )
+ self.global_node_shape_counter: NodeShapeCounts = defaultdict(Counter)
# Holds the data for all models statistics
self.global_model_data: Dict[str, Dict[str, Union[int, None]]] = {}
@@ -62,13 +63,17 @@ def __init__(
self.ui.dataTable.setSortingEnabled(False)
for row, model in enumerate(model_list):
+
+ if not isinstance(model, DigestOnnxModel):
+ continue
+
item = QTableWidgetItem(str(model.model_name))
self.ui.dataTable.setItem(row, 0, item)
item = QTableWidgetItem(str(model.opset))
self.ui.dataTable.setItem(row, 1, item)
- item = QTableWidgetItem(str(len(model.per_node_info)))
+ item = QTableWidgetItem(str(len(model.node_data)))
self.ui.dataTable.setItem(row, 2, item)
item = QTableWidgetItem(str(model.model_parameters))
@@ -88,6 +93,9 @@ def __init__(
if digest_model.model_name is None:
digest_model.model_name = f"model_{i}"
+ if not isinstance(digest_model, DigestOnnxModel):
+ continue
+
if digest_model.model_proto:
dynamic_input_dims = onnx_utils.get_dynamic_input_dims(
digest_model.model_proto
@@ -197,29 +205,21 @@ def save_reports(self):
save_directory, f"{digest_model.model_name}_summary.txt"
)
- digest_model.save_txt_report(summary_filepath)
+ digest_model.save_text_report(summary_filepath)
# Save csv of node type counts
node_type_filepath = os.path.join(
save_directory, f"{digest_model.model_name}_node_type_counts.csv"
)
- # Save csv containing node type counter
- node_type_counter = digest_model.get_node_type_counts()
-
- if node_type_counter:
- save_node_type_counts_csv_report(
- node_type_counter, node_type_filepath
- )
+ if digest_model.node_type_counts:
+ digest_model.save_node_type_counts_csv_report(node_type_filepath)
# Save csv containing node shape counts per op_type
- node_shape_counts = digest_model.get_node_shape_counts()
node_shape_filepath = os.path.join(
save_directory, f"{digest_model.model_name}_node_shape_counts.csv"
)
- save_node_shape_counts_csv_report(
- node_shape_counts, node_shape_filepath
- )
+ digest_model.save_node_shape_counts_csv_report(node_shape_filepath)
# Save csv containing all node-level information
nodes_filepath = os.path.join(
@@ -236,7 +236,7 @@ def save_reports(self):
global_filepath = os.path.join(
save_directory, "global_node_type_counts.csv"
)
- global_node_type_counter = onnx_utils.NodeTypeCounts(
+ global_node_type_counter = NodeTypeCounts(
self.global_node_type_counter.most_common()
)
save_node_type_counts_csv_report(
diff --git a/src/digest/multi_model_selection_page.py b/src/digest/multi_model_selection_page.py
index d24996b..4c16bf4 100644
--- a/src/digest/multi_model_selection_page.py
+++ b/src/digest/multi_model_selection_page.py
@@ -2,7 +2,7 @@
import os
import glob
-from typing import List, Optional, Dict
+from typing import List, Optional, Dict, Union
from collections import defaultdict
from google.protobuf.message import DecodeError
import onnx
@@ -22,8 +22,9 @@
from digest.ui.multimodelselection_page_ui import Ui_MultiModelSelection
from digest.multi_model_analysis import MultiModelAnalysis
from digest.qt_utils import apply_dark_style_sheet, prompt_user_ram_limit
-from utils import onnx_utils
from digest.model_class.digest_onnx_model import DigestOnnxModel
+from digest.model_class.digest_report_model import DigestReportModel
+from utils import onnx_utils
class AnalysisThread(QThread):
@@ -34,7 +35,9 @@ class AnalysisThread(QThread):
def __init__(self):
super().__init__()
- self.model_dict: Dict[str, Optional[DigestOnnxModel]] = {}
+ self.model_dict: Dict[
+ str, Optional[Union[DigestOnnxModel, DigestReportModel]]
+ ] = {}
self.user_canceled = False
def run(self):
@@ -48,11 +51,17 @@ def run(self):
self.step_progress.emit()
if model:
continue
- model_name = os.path.splitext(os.path.basename(file))[0]
- model_proto = onnx_utils.load_onnx(file, False)
- self.model_dict[file] = DigestOnnxModel(
- model_proto, onnx_filepath=file, model_name=model_name, save_proto=False
- )
+ model_name, file_ext = os.path.splitext(os.path.basename(file))
+ if file_ext == ".onnx":
+ model_proto = onnx_utils.load_onnx(file, False)
+ self.model_dict[file] = DigestOnnxModel(
+ model_proto,
+ onnx_filepath=file,
+ model_name=model_name,
+ save_proto=False,
+ )
+ elif file_ext == ".yaml":
+ self.model_dict[file] = DigestReportModel(file)
self.close_progress.emit()
@@ -60,6 +69,7 @@ def run(self):
model
for model in self.model_dict.values()
if isinstance(model, DigestOnnxModel)
+ or isinstance(model, DigestReportModel)
]
self.completed.emit(model_list)
@@ -95,7 +105,9 @@ def __init__(
self.ui.openAnalysisBtn.clicked.connect(self.start_analysis)
- self.model_dict: Dict[str, Optional[DigestOnnxModel]] = {}
+ self.model_dict: Dict[
+ str, Optional[Union[DigestOnnxModel, DigestReportModel]]
+ ] = {}
self.analysis_thread: Optional[AnalysisThread] = None
self.progress: Optional[ProgressDialog] = None
@@ -204,9 +216,10 @@ def set_directory(self, directory: str):
try:
models_loaded += 1
model = onnx.load(filepath, load_external_data=False)
- dialog_msg = f"""Warning: System RAM has exceeded the threshold of {memory_limit_percentage}%.
- No further models will be loaded.
- """
+ dialog_msg = (
+ f"Warning: System RAM has exceeded the threshold of {memory_limit_percentage}%. "
+ "No further models will be loaded. "
+ )
if prompt_user_ram_limit(
sys_ram_percent_limit=memory_limit_percentage,
message=dialog_msg,
@@ -290,7 +303,9 @@ def start_analysis(self):
self.analysis_thread.model_dict = self.model_dict
self.analysis_thread.start()
- def open_analysis(self, model_list: List[DigestOnnxModel]):
+ def open_analysis(
+ self, model_list: List[Union[DigestOnnxModel, DigestReportModel]]
+ ):
multi_model_analysis = MultiModelAnalysis(model_list)
self.analysis_window.setCentralWidget(multi_model_analysis)
self.analysis_window.setWindowIcon(QIcon(":/assets/images/digest_logo_500.jpg"))
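
The selection-page thread now dispatches on the file extension so that .onnx files and .yaml report files both end up in model_dict. A minimal sketch of that dispatch pulled out into a standalone helper (the helper name is hypothetical):

import os
from typing import Optional, Union

from utils import onnx_utils
from digest.model_class.digest_onnx_model import DigestOnnxModel
from digest.model_class.digest_report_model import DigestReportModel

def build_digest_model(
    file: str,
) -> Optional[Union[DigestOnnxModel, DigestReportModel]]:
    # Mirrors the extension dispatch in AnalysisThread.run()
    model_name, file_ext = os.path.splitext(os.path.basename(file))
    if file_ext == ".onnx":
        model_proto = onnx_utils.load_onnx(file, False)
        return DigestOnnxModel(
            model_proto, onnx_filepath=file, model_name=model_name, save_proto=False
        )
    if file_ext == ".yaml":
        return DigestReportModel(file)
    return None
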
diff --git a/src/digest/thread.py b/src/digest/thread.py
index ef18617..cb70123 100644
--- a/src/digest/thread.py
+++ b/src/digest/thread.py
@@ -2,12 +2,37 @@
# pylint: disable=no-name-in-module
import os
-from typing import Optional
-from PySide6.QtCore import QThread, Signal
+from typing import List, Optional
+from PySide6.QtCore import QThread, Signal, QEventLoop, QTimer
from digest.model_class.digest_onnx_model import DigestOnnxModel
from digest.subgraph_analysis.find_match import find_match
+def wait_threads(threads: List[QThread], timeout=10000) -> bool:
+
+ loop = QEventLoop()
+ timer = QTimer()
+ timer.setSingleShot(True)
+ timer.timeout.connect(loop.quit)
+
+ def check_threads():
+ if all(thread.isFinished() for thread in threads):
+ loop.quit()
+
+ check_timer = QTimer()
+ check_timer.timeout.connect(check_threads)
+ check_timer.start(100) # Check every 100ms
+
+ timer.start(timeout)
+ loop.exec()
+
+ check_timer.stop()
+ timer.stop()
+
+ # Return True if all threads finished, False if timed out
+ return all(thread.isFinished() for thread in threads)
+
+
class StatsThread(QThread):
completed = Signal(DigestOnnxModel, str)
@@ -35,6 +60,9 @@ def run(self):
self.completed.emit(digest_model, self.unique_id)
+ def wait(self, timeout=1000):
+ wait_threads([self], timeout)
+
class SimilarityThread(QThread):
@@ -78,3 +106,6 @@ def run(self):
False, self.model_id, most_similar, self.png_filepath
)
print(f"Issue creating similarity analysis: {e}")
+
+ def wait(self, timeout=1000):
+ wait_threads([self], timeout)
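
Both thread classes now override wait() to defer to the event-loop based wait_threads() helper, so the GUI keeps processing events while workers wind down. A minimal sketch of calling the helper directly, assuming a Qt application instance exists (the SleepThread worker is hypothetical):

from PySide6.QtCore import QCoreApplication, QThread
from digest.thread import wait_threads

app = QCoreApplication([])  # the QTimer/QEventLoop inside wait_threads need an app instance

class SleepThread(QThread):
    """Hypothetical worker used only to exercise wait_threads()."""

    def run(self):
        self.msleep(500)  # stand-in for real work

threads = [SleepThread(), SleepThread()]
for thread in threads:
    thread.start()

# Spins a local QEventLoop and polls isFinished() every 100 ms instead of
# hard-blocking the caller the way QThread.wait() would.
finished = wait_threads(threads, timeout=5000)
print("all finished" if finished else "timed out")
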
diff --git a/test/resnet18_reports/resnet18_heatmap.png b/test/resnet18_reports/resnet18_heatmap.png
new file mode 100644
index 0000000000000000000000000000000000000000..e2ae67d2b40cb8dda4977947b3396ad3ed021969
GIT binary patch
literal 103019
zmSLWfiUV*XwV4+MJ309>8nyvWlfOWL5sNsl7WXNUEg~);;76_yDpj`TIlrCHyMePDO6qiD>jzE6I
z{Y2GTbb*w3tvw4g3=qqavY4}OK81paAkV^U;p-M~OI#a!9;C2?-|A(;+B-v;%Sv#^
zPP}>yo>kJhLwgoiR8;u70}U7f;wA_zxTu
zi95}W&-VdPY53oHV+19R{X>)>nj2z
zwvK!?kWe9l69%PT7-%kC7Y1uFt_@=Hkr`YD=|{tDVgN3HUv}@9DJ!Hb{&9jwti0{W
z3Pl;WFJC}c$WM&+DX9+_H(L#@mRd^{nUj9d5OI~s40P~sBqnYLD1h5g+*lAlK>(+z
zNkLvd=Xyzw!C5TDCav^`kf5VHlu(a!G3izvByHmT{FWS#|vaRM~9xRn};MRJ@RqC$#22is^72mbmeak5I3t;
zJgjRRgR4|DG&S+qCaNP^8IDA^0QkvL2)7XCDxXy*KzIE6r}Qe^sY`t`L=?AOpU$3p
zUT@^tpf=m=v0q)Qd)nj2Sz&$Paf5fgVWioizI4!SxwjHJz@rz^hSUC|p1t0vI&)xN
zK-iJfrwicQDX*;=KKvi$?57_m&(hf}a~5XSyk2AB`Y(L(w0+tzJSW$0tCoesKhhv{
zxs9BhfjN>37w>t^^)HH1NK2R)s=rfcR35$&Kw%n(glkqZqHnF!5c`jy*ZIlak$rvX
zV)Os~*IwxOad;NG|IgofArZn7HnLf2m3s5NcY{culuNNNr9d?4>FGaTcEWfJmHv#T
z)=>`X3gg;FEYQX8K6N+MqSv5ILI44^FFEraDi;I`31_-X1S5iu@tPd;V2t9;jcq@2
z+`kIDa9&)t3v>$ui+Y)1)|Qrf)hQN~A#sIOiBXdJuNIyvkW+FG02y1<@>IYZYY8{-
zvoyqKT;shwgt~jj>bT)AEp?}adm+(^Jh5;)i
zZfbJUZGtp`Rb+spf@alxSO;jrv^#Su1O)*-77!HVnm&y9L-&FQwi%6uW^^s_#YO3u
zbjRIDUZfSQ@bdH|nTQwn2cdsu0k*Ojj0T)#9}){ps}B!7TPp+^N&%pz_wU~)&^ho9
zkQPAIXG7PaEEE`6)iARanl)Mq@IWw;Wf)}$Bb-?U1vd~BB-n%E0JAWD;Ti+TsW8AE
z08)&K0x5$H^=Vib)5()3sm3BTGcA*KmLMA@cr8IYG=+P9UWkjw!r?681!mJF8<-I
zb1<9-Q$~cj;1Fqy&*y`TboBJ;8y+6B3C02{(8ZL{?<_$J6L3K$hhjZ;25ghx%*+hH
z`vY9n9?8k#sPurzcQN7ujq*io>{_~^g8#^KydJr`Af9<;CX9`af;Q6a0svJEqRmL}
zxI?c5qG%3OuSeg~z~KWRm|>ub8g^(IUS*Z_#AtbF#R#(}nRG!0y56=6qcRhmu1Yoo
zFd~`;e++)99&>HzV)&ye0m6hY(22oB*iltgMaV3uO%0gi5Qjb)nRzPO9&mw-xpY&?
zx|{FuRv*DHmcz0Q0xW`O2HN+5@N*kji>70sO-}u4YRW~#ek?A2h9)AeXdu(l4oQA6
z8Z0tRIOFaK#6KPo*R3`|#Xel@fcp#)VZE4*>%-hNE{jos27oau(pV7yiP$DoIY%y_8km`O4nV(?2~Z5xE^OUR`La9!mN^zSBM$s?@|l^JUpA6^OGshUsa?2?=udU(
z_Nnm}IY~?nf)hVQw491jqh2~H>yKBuESRmJ`!U+u4qzG?Is-jAE;O}-NoW$@_e99B
zOIaUcuK*L_=EQ2=K;?Vaf^J1u#*mh6cda+L
z>A#3?P>;ndN<=>wN5N$yac+ErUWo8M
z3`U;b-gL7f^q3WBxd9MB^ofkR2@eOFjOfUr>Y@W+jZO-Hm_OD|e{uH9Z$PLoBlC+4pF>1}``M-$X<-%24T(v7lo
zW}yn0T9ng}A^aS;K}O{VW;qbV;m@9dILv}R*jz>EGVb5di-R@{oGWQbZy|hz{|e
zBoitxeTR>ZaX%eg!$PgrofWm^<$ly=kj9N!P~{iHA*ia#NiJ&A1u?V!$GSy3W03B5
zg=9)u3I=xZ)Dc-k7W{cKNOk`Z%Pdy8@hsrYR)ij0FRxV6u&N26iBL?UqWT~b(jYIG
z{Bj@$bam-=1srF(um~9t1p?D*>{)zwf<(4Rh5`jcgFUmuJM!sZiOU08@RG&9APFRD
zo4L77tsOvBTI!|Pw~BH=>8g$M)I=A_uD)a>@)nO23{pc=HDF(KRT{>~$t8=P`LUBH
z4KrVea(m;FI&yV@xHyb7`$NH}tK7f|1IcN9_QL|Ko;{>}x+Vr|{sBP7U(4|Zu4l0N
zmBoZ_(q`NY|JSfxZqm}$-cI~-s-o2zB%cpWe}Im_jP)7F&C6gYL4LvO97Fy_2%+~i
zO9!3}JOt`leC4B18N)5hByc?;h>~piN4@wTduO-~*UzRtDS42Qsp)DGIH>jU#^i}c
zYbgQuc_2vaF_%H(RcN8ZXpXySMsURGz6tsbXw39D5b_ZDbXgAFT?Fz1bis$>IcE4c
z%GkH0(e1f;Fs?i@GBRT5zzao01_AR5Gv3|7k)S$;#xl
zxB}=GZW=|w&Q#6s#4=H%S0x`j-WdmhZq$Y!V~ivGU;s)!kC=&(=;n5a;iRPxmIi98
zzjKxZ*i4iw!JU4X_c?E~$csLh!kOdcUX{y~l~(>NT*JN#gDtpE)8|KMbku>17dvOR
zXFv`bd~$YJk^zcye!#-l*Uxm1$bY;5zNcRs9gV2KVQ@g};7pB@Q*=zsxzcb&@5P^Z
zxVaxKk=#9zwlWuITFCVb=HhQ3CI$+}3;@&An_%vxl&
zT8K$u?C!rm@;W+H+{4Oz#{2A`xuv%Mi%rqf!F+!>i&)~)k|xDUn2T`LacRF>?-I6746@o;F%0qQd75?YLA
zKzSEHB}19P%^C$CKkC30Li5CZeU=pFD0~3DG^c=6Jy!gcJ{h4+n3|h`NC;O!yb)`S
zLq4bLJM;5Bpdg`}4kC@~6|@{%*HA+P9e!x!fd7y8PoNl}U?J-eod@`%o)};b>gBqd
zd!URUCNa|9lgFhk8Wp#09^m}0=Vd^vx1yplv!7E^_1x$;5U!?suP(dFOJl@<{C6Y-
zQv({!E;1;>=tlGj!0H@oi8Mx)@R7g3NPy2n7usUrhNunAD}uJOY_VxOK_(!{5LL=R4B0%<5qXA4>{2$e7Z
z3#J$}H&j(skqaSPK8lfpP(%NJ22dgCf+H$vbd5NzCbk1b_+N=P@S7^fb#T03i+0
z`g62|-o`cpoENSyeeqZZ^dDomAf`dIWAAtb$wwYOcpwxw2MLIy{ZRXpzv8f&63LGt
zkSNWs=GxDwe#F{6pO<$&FmUIK7cagZ=}ZP7k`>Yy=F*_-ncSI&RcmsFI~vXmUmNV}
zCoeT9HDy}b-m#N@?pN^XfJ_x_7jKJ-K_FzU<46IQQg{gcD^MId<1njKHT;^WVqh-o
z@JAe0u1r8nM+FCVMFm8i5cuHRnIqBHuE{*nW%vql>*QkGHg
z`S})ZUx2Yi9K>wx*MjjUS)uT$dnana!sI;)ja#-%wAD5=HO+cAnh|Jb5#R+F23b(@
zf+)K!dc-{a%l;}Js#@*=zwUdj*j!s&QP5KXFBSq25ggDGve9X}x_LaHJ4>|$DsVaF
zL}v(e&b=X3XCiX}j)(o#pFe*Wfu8e0^SEk6Cxi*e(lFEUfVkM{^CmZUGo&p1bN~LK
z%1%wCs|C<{5$oI^$U}YSmRT3OtEn|aIPynZrlYT^X)vcv0}9_!qeeM6JBz+-PeEa5
zt#l9_psixe6V10u-^=%k3#a%nb{1XIGOcai3bFt
zm373yb)xFW%n?qqJ21M;aS&jnppgDZ+DYOGZn4YIQ6yJX0_<6``a&c?qQ22Oj>br|
zrhr0=!sbrZ)m_XzNq!zm7xK>l1z--_6fb%j#Kw8W#mR=VN8d#gqu<6IqbN|E@t_fu
zgUnor@vHDvL=hHB>X*+Ab)0%CY
zmG`v{uXyd&)AL|mSMP|nZ7lQlHrkbaKKSaI%bJ@yI%RLTFl*XAchXo#8gXh0fi_v+|E)!XTj`P*h91ZIkF$JZmM-=tOg{Mml2m}Sdo-?txE=gpif
zuWC-OzZFf=!#pTdIyZTl^@4HaB+#_`uzd>cQD0Fyh-6PRyl`I%I9JvhE|^MPFA6aP
zYH@VBiKxw08g$>wczzphI{
zK-pFT$j(<0i}=ARJ0`kIK~$<~-MMk*)R0$ZtLxlhdEkvu-Qlk#8Q_ul6-nAzk~b^-Z=o_i_|6!o3)p&knu$^
zV8Qc;c`325pjeS+5?@>y%(A_D@8c%;uD32=q|&q6DVDVg)3!
zXXYb8=djUVE5Htxu+e20&fM5(DofqfEmjZ0s6bk#5+2+o+Cz_piN>t
z9~8PPpfdw77#O#x^+_PJ?wO49$PC~ws#EEVa3r9%0f2j9s|{kb1B2p_kQVThc6N5V
zT&d;Oy10lB27+#x7=Rb1%36sumEcOcS%SxT;pn5VBl>!Jo6t6*P-!lkEN+l1`2LTa0LczYu)59&cZrxfto`&({{rmTqe_KEX
zlG>2`Ep_)r)72Z~+nbrE?YeJu`GL};*lQo_HpbB5>GDa^CJhU=cy$6(H=Hp_0uZ
zV3L;>#GT-9wWn?D57^FA=bMV4i!)GsYXZqwCNBQ&Ielm^zMM_Swe8Gk3zQ0e;r9J@
zXI^MzFRhUyNBX9p9(yGuAfV1!CL|341jLt&5m9-~3cihc^qJTNb-s9kOZX!K)9NDmK=3DvBu6wh9XF
z*}=89t#67^=!@tFzL#fawVH6!*DtGj$^uur`Mbs*`~94bjn49Bc<6RTr{k_eE-;)t
z*gkq$BZ0w(xUA^DairqGI~%8ct*E#FG*q#4=_VKhiqHOmac~w4AuV>Z51Y%)JsT7A
zV@_rH1{vE+bor8RoBuH4?wrS$tS^s<&V5wX%sI@*OTow7F7)$MpQoWSvkZ)fSuKCn
zc!ACCvZO_`JPz4CfBkaoz5U0ZX%Brq(A6gMa-X=FS=KgRj3ymSn|vl9Xz`+%4hG&4
zJ+}>;GTZ)D(@Evu54i?be3@AbA~pFjw$r>ws}||yPk*+)9^Vo%h|QVYF=@p>cUf-R*_AKNj{?k0^j}zn|MAPH~;mUpg@hU^=s4nH2tT;PG-M<
z|2W;J_HDkm&$pLYJsoGX@t5b$;%;j$jXHDgRO=@iT3$h;
zE#5Wy>NDXgu!nv&Ez_`qsyPhXu=
zRCK+9z4pZX(6(xNHq>VL)>qTBE{%4~pBF!D_xg$1)u$#`2EUzM8TqQRccB(tJ+
zi=Vt-wL3Z|p!xc5MVXr28chK6+E-sG&)LwntVgEO_N<0M12S!Oy?@_6ckqR!-Hu&`
zCdK2|IC^MXP4|7#{^PtfHM?KR4fpz6ebKDe)X*5MuKsPsp{o%i`gnY-n|;2Yf}7n|
ze%kdG8$M4oUYGq+?f48`?}kc=E!OWZ*^?1!KjmVE)|9?)uu_6Mw$UNh$aJ#E6zur}`jA
zi1By-sv44EcAKeEQSrUcj_R+HoYHyUf>7~-8U>>!rb&Y0_ULq^GwfY9e!`a?lA%XLA!u&qgp3|4)yRry;6Y;OVkv
HeATWY%iBe^+}8JP7I@6CeZWtOd=K(5pNnz|gs7yVLQvRSWGo7qgJ;g{
zGYPWz@XQaEel&ZpD33&;AfrI|2rkABMY0Xx3OtOg5u^YVD`4JU2_p3}v`BT&rhX7X
z1DuAa*D*aCQ}wR3?7CQ6(uHT)uR`pQ1n)z_UBd3aJs=8R%4Uy3__IP}f_c0F8Go5Mx%2O%xSB9!F_L3+{paUcDO$gM
z`=%T^8|^w;DqHQf3@x2yXh04?+qU^{(*Iy@6NLG))EP-3L*x9|7pV^dCrW&@*-Jqe
zMYyk{FAlKquCkkQ`O|I!ms9^?iH1sNJ6RaAqO_!>3nc(Z18!}efL2G3%18v$oR;Ba
z{o=&kJ9gym=ym#y?7`tf!gm>P_(f_8xaPTYXI@NEh|R!F;&zNG0c<70G}u+cE8RNo
zpArR*=46sDjSHA#UVeJntfM=91^_Y(lm{+!;`)~lAH@Bmq^yjohKROv%xc3@_wsHB
zeR?+X`Gpo#wa~uA#>R$aO0{N!C>rm8N&7x+d=p{go&}+WxYzqoQz`7RPiw1l&fnjY
z7cB-zEP>j~g+`akhlaKmu?Qzp%z(W5%qbJK8pX7)x+0nhuCTm9?iyYy=r|%@6pr88
z5d5hH>ejvBm<)vABgv054Zf;>`PsAKHT`w;McVh^K@$j9x@JZ*5-r0^@otc1k>U_X
zvG>~l7c*W9+qp&LVKb1$deWq0N~QWeNrtNM>dFaXT7B
z=qIXJEMVv%o2ib9p7x8G9ag3$FtQJfLWn4>c@g`i#6PgQ^zE194<4Og_N+_Si1v+p
zepBnzcG5U~r4PG2+x*(1sM*kJ@vXLf%un>OnK$l4Qvi-8T!%M|~Yo`P%96XlJ$CPGc)0urYEd&KlCOW5>CK05PZ$-!_apWD1M<
zyD-GS^gWM|XWgYaPssUQG1UHL^euS#FLUR>u_Hy?2Bj(XyzFV9r5BGpv0f7&F=_<&
zkR0YlWD<~GoYrRxy)raTZ-3?et;4GjUy2}
zyR`VzCo0P4y}s|=yZ1$ArmW&H4W1lqGCyUZkr5j`E*BUF7y-NU*B%Cknglf%=lhhE
zqLjDzjsCN2nFq~P@{N(^7ZLal*|T-4jG3ZOLh==ynsj1@8gqh-3Gq+(1S)$P`WaZ+
z9j{tQjh&wye5P#mqm;RUZ)QKX4axeDy|KxUCNBA}<}aI6eqvo%lMmrs4=qjDVIq`)8;AEGi%lUqv{-s{2h%_E-UxDpB#9i@RIq@
z!jjJ!j>ZPcH&5#CQ(FK2vH9*VUZ+g&dKak;bXZk0^7VY9q3#oI!LANy{y+K>`$B*BfTv0ss=KReG!PWVrtKwbH
z-19~ATjjXm
z0lvm#I@Y`0Mz`rsjTjTZywD#RrTg{udLB}1+*NghvPRhclQZ6$E|DK{qPm+)t3j)b
zSB^Yq@^_fwoL75R8$Z@^xpY>0dg^@M)a+|&1D^i0_BvcXr_d}&sekMxjo#BA-2Q9T
zuk_>T|6C3Axnk7HvvFyc@VOdcEw)9BOKbnX&l@w`GsdURq#?$Wt2O`k54GlW+caRl
zhPLGdBXU4(0WfLL6}{5cUC!6>;o;R*YF(`lH_*A;?%<*EX+!?2-t>K!cD?rV;hC&;
zm&j!V#NcODv>nN)@SiRp>^$S`n>RP%Utr+^zO7T$7J(o@P_V-d1Zt_c$O~anQH2P25;HTeJu3`ofpg7U
z(5aiRe;1tGF3m0`pt4w27P$qRG#v+7EOSgy&dYF5qM=M>pT-eQ^PNFKMWfHm4mKGC
zs4sgSk^6uCu}(4>GSiW&mCW|?7sT1?+(Zx4N@wa6JltJE4#`KIw9dY{Psoeu4fQh)
zt9(dE<7dhSwOJRJK;Kg+G@deL3Y*OyGD#vEq6EboIno=1(BZ?gp0bf((}Z(xR@
zxXCoQAO}I3$mh0gLn~ax=#(r8!^@&j>I^=jjOs?3ilrPC#k+uurzZrZ3wObSgSiDN
z*uE!6JHCaY_>>*BkBlgGjp!uA3s&*&Deo$&)(~=BTbZvY5(zZjvdS5s4M)&SB4{Sk
z4s4ct^}484WvtN_l@z;0_A#W;cGsv@wGUsXy2iv^(c(b%9u>px=e(g}$g%>7!k39k
zwqvJGqM4g{VsNfV2dIN&id7Vjya8F6q99gFxBig@50WDbPP-Vt`%epy>@8F$M*VkD
zez39p%9@4D^)airlSf*LT22-Vdd)v0yI-NTWsMS*hm?
zQ(S&z%Zs>k`=WvQv876@_XHeJ=*CuKke?D}YjVGQk)dmGr?q`?c{2&%AS;Z4WiXTL
z$pk;Yzpe}zF^nvGFF?fcB0em$BhRH-mZBcFdjU?WbS
zkc>7tze)JVF~yL%D>%rK%F2sqXqn77=(XDE?n}^VIeQ|Kd~$uA6E)R3q-Y0esPOZT<=fC#eAv!HDO2%**q-t
ze^I$;jVLJVQP|nG*u;)j;+Os|;TL
zkJEx+lZQx3t=MHjoqvm;Y7h{;p-QWrrWyMtc90I95*vB7&KdUO>3X-`(yQv9ug%Fo
z!;5Nwi6rMvEBG(4m#=J&U9a#P_)6S^X@>elKC{X0}$=-$BTz&iDXgwL!WMIe>xg?*H
zKA7r@KLsSXc-gYV{aq)@f=wI`FE`X=>H@Ko@J5IL#2mjc<&2QAZ{BQ*igLa-NKa?V
z6mbwCR|H_o%h{UXXCS>f$%;WhA6oih!-g5X55J+qKmWsplLC=oo7M{x&pRM}9IZwZ
z=lU4mkVm6xudFe~(==v4ddIdUug|R30$o7jC?Xc(i670bbgYPTkckuY{IE6S0If2CTG85sHb?E4YhX-sh
zi(4N#BaTm_ui?dhF<~S?LeW`R0=@95Wk`yMM;5yPmn=FHuBLD5hpFr-N0zf^@>a+Q
zP@K0E4C2X6Jk4X;lWrK10JPliil}(;W#uyN>27c!s?ys=@?r828JElfo(S3_5#;Tp)&$6DTc{aN6
z=%G_bF3hVSlulQ3s5;bp?R}HF8P<{hMjAy|F|9s-{@snL+u`BieNq+AWfXn+;1Q~)
ze(6=TJ^j!7@*f(Zg*DdMSNpwe)3Kt<6W8
z2OPYj&@j5kfV9yewO?{kKhyo*#zRKO#a5)`z6yOy-?C}AuWHwCPvT}5Z|t?ky6uV(
z?>jf23Sk$#<%IFc3FrQScDCW7MiX`tjKM
zMl3JQ@s5RYBMXWAt@-nhugkG_L0Z0KS!GVBJw8+nZ{|6FUB7;P8Pal)sY^dK&7yB6
zxHk>*3*pvnQ>x5+-gidZLeD<9S4H9g%Gv=n5cc3J}{q@m>wz&(6+}_@6
z&Vt`!t$uP@wfHI&rtjgzrCP>6F4UMf_meW)Gybq{-yR;4
z1zMW_TX_FA)kt@r_G3UgW&E8p_ZBzYrN6^ZaeY6PJ!|IftT|ER)TW)y)R_S%GPjlW
z9eOiXcju3Xefp*8t@2%I*u18H17(vtR|9#rpA_4^u$c4eQbtN-+?I<@rC)koPw745
z>;RLQ^HkiAUo>wWcwxZV9alg3oNO7muzU56KY}#Q=Lvn23d9r7#k>G&!X!!6~W8D9&0;yxACAA;Q>aQ
zGv~TLJZ6;ZHsP+0slmZfLFX@-{k)R;d5r(JDiy^+r+BwbmB&P_;`A#-^w#a@`{la-
zVx84C9c!M{SJGXQUVSw}w`u?Pd4cCIc_hD{zUM{f*Z#UmE5kCz4z6>@-^yx;x%22~2L;Y#C_o~QNP1zipp}I<^;W2>
zp{g{s-lXLJ1SUp*(+seAV%qH4NUNOkliH?PXFFPW{avqXKcyi$|NGr=U@~cfimVqL
zI|?_4-_`4~Cn@W=*Q?v1p1xC$P5yO|E=j3Rc2N+2I#}q-;o-6=oaD6oUd?0I7eq3E
zV{yr_)y_#BZluu`?b=1!Y2uR-%5w3xwaSsT&;!c=1YLgbbLlZnedhh~{}`P|ufSj&Q~)6V(-82cFcf-Gd6
z7qxZE7V%^y8NU+)6v}(;M5dxsVD-boV=LFDFwQY&!8&)uSA*#BM4_hZr!lmcBK8`T
zbEk(Sqs0uBJ~}n>D3ds*mD$`j3x6WFe3wStNy1;RUn^(zS8nx6z)6_K@c+yLfRMrc+Qv-y*^b+R8~}
zhA(q-&}w|8oOqj;HvyAX{$y8L8fM<=0RnOw(K(;xOOs%aKB%GY#u`--d1>Yi*OBqR
zD*mR4DHV|>_2R&xSM(=Z#_h2hF-42lD9C+Ve9e%+eKu^7mbF&4tmE=G8XsXI|
z7a1DFHoIdV_66kK*-`UKf5$+jXYK(UW&IWunEZHopJEXp8I&9+?NYl1YsH2TX0?j1
z>bPpvc@{DF836w_k)dr6Qc1#e2PavpB^nCZtzi=yH<`+}3f1iLCui#jx$;l+wM`HC
z^s?uZ#mg*UZvjq3%_yVIOvO1j@51a1qiN0qypsi*qlu6wYU6y7jsc~nj-{pLwW<}b
z-qizPLxwGAJMV8P@(aqd#n*0!rs0hz8af#_ve@E5?>3OOiupFG8MA