From a3931adca8b95e4eceb5328146ae0f048c5c1f76 Mon Sep 17 00:00:00 2001 From: Ria Date: Tue, 23 Sep 2025 22:16:08 +0530 Subject: [PATCH] Added Fully Quantum KAN code --- .vscode/PythonImportHelper-v2-Completion.json | 24375 ++++++++++++++++ .../example_notebooks/Fully_QuantumKAN.ipynb | 2832 ++ .../experiments/fully_qkan_digits.py | 263 + .../experiments/fully_qkan_equations.py | 285 + .../experiments/fully_qkan_higgs_boson.py | 285 + .../experiments/fully_qkan_iris.py | 457 + .../fully_qkan_social_networks_ad.py | 486 + .../experiments/fully_qkan_titanic.py | 334 + .../{digits.py => hybrid_qkan_digits.py} | 2 +- ...{equations.py => hybrid_qkan_equations.py} | 2 +- .../{iris.py => hybrid_qkan_iris.py} | 2 +- ...rk_gluon.py => hybrid_qkan_quark_gluon.py} | 0 ...d.py => hybrid_qkan_social_networks_ad.py} | 2 +- .../{titanic.py => hybrid_qkan_titanic.py} | 2 +- .../FullyQKAN_model_components/LabelMixer.py | 35 + .../models/FullyQKAN_model_components/QCBM.py | 35 + .../QuantumBlock.py | 52 + 17 files changed, 29444 insertions(+), 5 deletions(-) create mode 100644 .vscode/PythonImportHelper-v2-Completion.json create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/example_notebooks/Fully_QuantumKAN.ipynb create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_digits.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_equations.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_higgs_boson.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_iris.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_social_networks_ad.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_titanic.py rename Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{digits.py => hybrid_qkan_digits.py} (96%) rename Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{equations.py => hybrid_qkan_equations.py} (97%) rename 
Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{iris.py => hybrid_qkan_iris.py} (96%) rename Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{quark_gluon.py => hybrid_qkan_quark_gluon.py} (100%) rename Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{social_networks_ad.py => hybrid_qkan_social_networks_ad.py} (96%) rename Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/{titanic.py => hybrid_qkan_titanic.py} (96%) create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/LabelMixer.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/QCBM.py create mode 100644 Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/QuantumBlock.py diff --git a/.vscode/PythonImportHelper-v2-Completion.json b/.vscode/PythonImportHelper-v2-Completion.json new file mode 100644 index 0000000..84d08d9 --- /dev/null +++ b/.vscode/PythonImportHelper-v2-Completion.json @@ -0,0 +1,24375 @@ +[ + { + "label": "sys", + "kind": 6, + "isExtraImport": true, + "importPath": "sys", + "description": "sys", + "detail": "sys", + "documentation": {} + }, + { + "label": "os", + "kind": 6, + "isExtraImport": true, + "importPath": "os", + "description": "os", + "detail": "os", + "documentation": {} + }, + { + "label": "path", + "importPath": "os", + "description": "os", + "isExtraImport": true, + "detail": "os", + "documentation": {} + }, + { + "label": "makedirs", + "importPath": "os", + "description": "os", + "isExtraImport": true, + "detail": "os", + "documentation": {} + }, + { + "label": "Equivariant_QCNN.data.data", + "kind": 6, + "isExtraImport": true, + "importPath": "Equivariant_QCNN.data.data", + "description": "Equivariant_QCNN.data.data", + "detail": "Equivariant_QCNN.data.data", + "documentation": {} + }, + { + "label": "Equivariant_QCNN.training.Training", + "kind": 6, + "isExtraImport": true, + "importPath": "Equivariant_QCNN.training.Training", + "description": "Equivariant_QCNN.training.Training", + "detail": 
"Equivariant_QCNN.training.Training", + "documentation": {} + }, + { + "label": "Equivariant_QCNN.models.QCNN_circuit", + "kind": 6, + "isExtraImport": true, + "importPath": "Equivariant_QCNN.models.QCNN_circuit", + "description": "Equivariant_QCNN.models.QCNN_circuit", + "detail": "Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "numpy", + "kind": 6, + "isExtraImport": true, + "importPath": "numpy", + "description": "numpy", + "detail": "numpy", + "documentation": {} + }, + { + "label": "asarray", + "importPath": "numpy", + "description": "numpy", + "isExtraImport": true, + "detail": "numpy", + "documentation": {} + }, + { + "label": "cov", + "importPath": "numpy", + "description": "numpy", + "isExtraImport": true, + "detail": "numpy", + "documentation": {} + }, + { + "label": "trace", + "importPath": "numpy", + "description": "numpy", + "isExtraImport": true, + "detail": "numpy", + "documentation": {} + }, + { + "label": "iscomplexobj", + "importPath": "numpy", + "description": "numpy", + "isExtraImport": true, + "detail": "numpy", + "documentation": {} + }, + { + "label": "data.data", + "kind": 6, + "isExtraImport": true, + "importPath": "data.data", + "description": "data.data", + "detail": "data.data", + "documentation": {} + }, + { + "label": "data_load_and_process", + "importPath": "data.data", + "description": "data.data", + "isExtraImport": true, + "detail": "data.data", + "documentation": {} + }, + { + "label": "torch", + "kind": 6, + "isExtraImport": true, + "importPath": "torch", + "description": "torch", + "detail": "torch", + "documentation": {} + }, + { + "label": "nn", + "importPath": "torch", + "description": "torch", + "isExtraImport": true, + "detail": "torch", + "documentation": {} + }, + { + "label": "nn", + "importPath": "torch", + "description": "torch", + "isExtraImport": true, + "detail": "torch", + "documentation": {} + }, + { + "label": "optim", + "importPath": "torch", + "description": "torch", + 
"isExtraImport": true, + "detail": "torch", + "documentation": {} + }, + { + "label": "nn", + "importPath": "torch", + "description": "torch", + "isExtraImport": true, + "detail": "torch", + "documentation": {} + }, + { + "label": "nn", + "importPath": "torch", + "description": "torch", + "isExtraImport": true, + "detail": "torch", + "documentation": {} + }, + { + "label": "torch.nn", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.nn", + "description": "torch.nn", + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Module", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Parameter", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Sequential", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "LeakyReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Parameter", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Parameter", + 
"importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Parameter", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Module", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ModuleList", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "LeakyReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ModuleList", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Module", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ModuleList", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "LeakyReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + 
"detail": "torch.nn", + "documentation": {} + }, + { + "label": "Sequential", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Sigmoid", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "ModuleList", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "LeakyReLU", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "Linear", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "BatchNorm1d", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "BatchNorm", + "importPath": "torch.nn", + "description": "torch.nn", + "isExtraImport": true, + "detail": "torch.nn", + "documentation": {} + }, + { + "label": "benchmarking.Benchmarking", + "kind": 6, + "isExtraImport": true, + "importPath": "benchmarking.Benchmarking", + "description": "benchmarking.Benchmarking", + "detail": 
"benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "Encoding_to_Embedding", + "importPath": "benchmarking.Benchmarking", + "description": "benchmarking.Benchmarking", + "isExtraImport": true, + "detail": "benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "tensorflow", + "kind": 6, + "isExtraImport": true, + "importPath": "tensorflow", + "description": "tensorflow", + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "argmax", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "image", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "random_uniform_initializer", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "Variable", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "constant", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "repeat", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "tile", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "shape", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "gather", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "pad", + "importPath": 
"tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "multiply", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "add", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "random_uniform_initializer", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "Variable", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "constant", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "repeat", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "tile", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "shape", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "concat", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "gather", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "pad", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "constant", + "importPath": 
"tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "sin", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "cos", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "tan", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "asin", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "acos", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "atan", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "sinh", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "cosh", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "tanh", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "concat", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "map_fn", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "train", + "importPath": "tensorflow", + "description": "tensorflow", + 
"isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "keras", + "importPath": "tensorflow", + "description": "tensorflow", + "isExtraImport": true, + "detail": "tensorflow", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "TruncatedSVD", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + 
"label": "PCA", + "importPath": "sklearn.decomposition", + "description": "sklearn.decomposition", + "isExtraImport": true, + "detail": "sklearn.decomposition", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras.models", + "description": "tensorflow.keras.models", + "isExtraImport": true, + "detail": "tensorflow.keras.models", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras.models", + "description": "tensorflow.keras.models", + "isExtraImport": true, + "detail": "tensorflow.keras.models", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras.models", + "description": "tensorflow.keras.models", + "isExtraImport": true, + "detail": "tensorflow.keras.models", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras.models", + "description": "tensorflow.keras.models", + "isExtraImport": true, + "detail": "tensorflow.keras.models", + "documentation": {} + }, + { + "label": "Sequential", + "importPath": "tensorflow.keras.models", + "description": "tensorflow.keras.models", + "isExtraImport": true, + "detail": "tensorflow.keras.models", + "documentation": {} + }, + { + "label": "tensorflow.keras", + "kind": 6, + "isExtraImport": true, + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "losses", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "losses", + "importPath": 
"tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "losses", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": 
"tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "losses", + "importPath": 
"tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "optimizers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "Model", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "models", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "models", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "optimizers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "models", + 
"importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "layers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "models", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "optimizers", + "importPath": "tensorflow.keras", + "description": "tensorflow.keras", + "isExtraImport": true, + "detail": "tensorflow.keras", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "GridSearchCV", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "GridSearchCV", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": 
"sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + 
"isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": 
{} + }, + { + "label": "train_test_split", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "cross_val_score", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "GridSearchCV", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "KFold", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "StratifiedKFold", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "GridSearchCV", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "KFold", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "StratifiedKFold", + "importPath": "sklearn.model_selection", + "description": "sklearn.model_selection", + "isExtraImport": true, + "detail": "sklearn.model_selection", + "documentation": {} + }, + { + "label": "h5py", + "kind": 6, + "isExtraImport": true, + "importPath": "h5py", + "description": "h5py", + "detail": "h5py", + "documentation": {} + }, + { + "label": "File", + "importPath": "h5py", + "description": "h5py", + "isExtraImport": true, + "detail": "h5py", + "documentation": {} + }, + { + "label": 
"torchvision", + "kind": 6, + "isExtraImport": true, + "importPath": "torchvision", + "description": "torchvision", + "detail": "torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "models", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "models", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": 
"torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "datasets", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "transforms", + "importPath": "torchvision", + "description": "torchvision", + "isExtraImport": true, + "detail": "torchvision", + "documentation": {} + }, + { + "label": "torch.utils.data", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Subset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": 
{} + }, + { + "label": "TensorDataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "TensorDataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "random_split", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + 
"detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "random_split", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Subset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "TensorDataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "random_split", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Subset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "TensorDataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "random_split", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Subset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "TensorDataset", + "importPath": "torch.utils.data", + 
"description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "random_split", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch.utils.data", + "description": "torch.utils.data", + "isExtraImport": true, + "detail": "torch.utils.data", + "documentation": {} + }, + { + "label": "data_load_and_process", + "importPath": "data", + "description": "data", + "isExtraImport": true, + "detail": "data", + "documentation": {} + }, + { + "label": "matplotlib.pyplot", + "kind": 6, + "isExtraImport": true, + "importPath": "matplotlib.pyplot", + "description": "matplotlib.pyplot", + "detail": "matplotlib.pyplot", + "documentation": {} + }, + { + "label": "pennylane", + "kind": 6, + "isExtraImport": true, + "importPath": "pennylane", + "description": "pennylane", + "detail": "pennylane", + "documentation": {} + }, + { + "label": "numpy", + "importPath": 
"pennylane", + "description": "pennylane", + "isExtraImport": true, + "detail": "pennylane", + "documentation": {} + }, + { + "label": "numpy", + "importPath": "pennylane", + "description": "pennylane", + "isExtraImport": true, + "detail": "pennylane", + "documentation": {} + }, + { + "label": "numpy", + "importPath": "pennylane", + "description": "pennylane", + "isExtraImport": true, + "detail": "pennylane", + "documentation": {} + }, + { + "label": "numpy", + "importPath": "pennylane", + "description": "pennylane", + "isExtraImport": true, + "detail": "pennylane", + "documentation": {} + }, + { + "label": "AmplitudeEmbedding", + "importPath": "pennylane.templates.embeddings", + "description": "pennylane.templates.embeddings", + "isExtraImport": true, + "detail": "pennylane.templates.embeddings", + "documentation": {} + }, + { + "label": "AngleEmbedding", + "importPath": "pennylane.templates.embeddings", + "description": "pennylane.templates.embeddings", + "isExtraImport": true, + "detail": "pennylane.templates.embeddings", + "documentation": {} + }, + { + "label": "torch.nn.functional", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.nn.functional", + "description": "torch.nn.functional", + "detail": "torch.nn.functional", + "documentation": {} + }, + { + "label": "Equivariant_QCNN.models.utils.unitary", + "kind": 6, + "isExtraImport": true, + "importPath": "Equivariant_QCNN.models.utils.unitary", + "description": "Equivariant_QCNN.models.utils.unitary", + "detail": "Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "Equivariant_QCNN.models.utils.embedding", + "kind": 6, + "isExtraImport": true, + "importPath": "Equivariant_QCNN.models.utils.embedding", + "description": "Equivariant_QCNN.models.utils.embedding", + "detail": "Equivariant_QCNN.models.utils.embedding", + "documentation": {} + }, + { + "label": "circuit_training", + "importPath": "training.Training", + "description": "training.Training", + "isExtraImport": 
true, + "detail": "training.Training", + "documentation": {} + }, + { + "label": "autograd.numpy", + "kind": 6, + "isExtraImport": true, + "importPath": "autograd.numpy", + "description": "autograd.numpy", + "detail": "autograd.numpy", + "documentation": {} + }, + { + "label": "accuracy_test", + "importPath": "Equivariant_QCNN.benchmarking.Benchmarking", + "description": "Equivariant_QCNN.benchmarking.Benchmarking", + "isExtraImport": true, + "detail": "Equivariant_QCNN.benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "matplotlib", + "kind": 6, + "isExtraImport": true, + "importPath": "matplotlib", + "description": "matplotlib", + "detail": "matplotlib", + "documentation": {} + }, + { + "label": "cm", + "importPath": "matplotlib", + "description": "matplotlib", + "isExtraImport": true, + "detail": "matplotlib", + "documentation": {} + }, + { + "label": "scipy", + "kind": 6, + "isExtraImport": true, + "importPath": "scipy", + "description": "scipy", + "detail": "scipy", + "documentation": {} + }, + { + "label": "copy", + "kind": 6, + "isExtraImport": true, + "importPath": "copy", + "description": "copy", + "detail": "copy", + "documentation": {} + }, + { + "label": "deepcopy", + "importPath": "copy", + "description": "copy", + "isExtraImport": true, + "detail": "copy", + "documentation": {} + }, + { + "label": "deepcopy", + "importPath": "copy", + "description": "copy", + "isExtraImport": true, + "detail": "copy", + "documentation": {} + }, + { + "label": "time", + "kind": 6, + "isExtraImport": true, + "importPath": "time", + "description": "time", + "detail": "time", + "documentation": {} + }, + { + "label": "time", + "importPath": "time", + "description": "time", + "isExtraImport": true, + "detail": "time", + "documentation": {} + }, + { + "label": "time", + "importPath": "time", + "description": "time", + "isExtraImport": true, + "detail": "time", + "documentation": {} + }, + { + "label": "torchvision.transforms", + "kind": 6, + 
"isExtraImport": true, + "importPath": "torchvision.transforms", + "description": "torchvision.transforms", + "detail": "torchvision.transforms", + "documentation": {} + }, + { + "label": "ToTensor", + "importPath": "torchvision.transforms", + "description": "torchvision.transforms", + "isExtraImport": true, + "detail": "torchvision.transforms", + "documentation": {} + }, + { + "label": "os,", + "kind": 6, + "isExtraImport": true, + "importPath": "os.", + "description": "os.", + "detail": "os.", + "documentation": {} + }, + { + "label": "torch.distributed", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.distributed", + "description": "torch.distributed", + "detail": "torch.distributed", + "documentation": {} + }, + { + "label": "_LRScheduler", + "importPath": "torch.optim.lr_scheduler", + "description": "torch.optim.lr_scheduler", + "isExtraImport": true, + "detail": "torch.optim.lr_scheduler", + "documentation": {} + }, + { + "label": "ReduceLROnPlateau", + "importPath": "torch.optim.lr_scheduler", + "description": "torch.optim.lr_scheduler", + "isExtraImport": true, + "detail": "torch.optim.lr_scheduler", + "documentation": {} + }, + { + "label": "CosineAnnealingLR", + "importPath": "torch.optim.lr_scheduler", + "description": "torch.optim.lr_scheduler", + "isExtraImport": true, + "detail": "torch.optim.lr_scheduler", + "documentation": {} + }, + { + "label": "sklearn.metrics", + "kind": 6, + "isExtraImport": true, + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": 
"sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "accuracy_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": 
"roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "accuracy_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "mean_absolute_error", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "auc", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "auc", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + 
"documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "auc", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "confusion_matrix", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "ConfusionMatrixDisplay", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + 
"isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "auc", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_auc_score", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "roc_curve", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "auc", + "importPath": "sklearn.metrics", + "description": "sklearn.metrics", + "isExtraImport": true, + "detail": "sklearn.metrics", + "documentation": {} + }, + { + "label": "backend", + "importPath": "keras", + "description": "keras", + "isExtraImport": true, + "detail": "keras", + "documentation": {} + }, + { + "label": "Constant", + "importPath": "keras.initializers", + "description": "keras.initializers", + "isExtraImport": true, + "detail": "keras.initializers", + "documentation": {} + }, + { + "label": "PReLU", + "importPath": "keras.layers", + "description": "keras.layers", + "isExtraImport": true, + "detail": "keras.layers", + "documentation": {} + }, + { + "label": "cirq", + "kind": 6, + "isExtraImport": true, + "importPath": "cirq", + "description": "cirq", + "detail": "cirq", + 
"documentation": {} + }, + { + "label": "devices", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "GridQubit", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "ExpressionMap", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "devices", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "GridQubit", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "ExpressionMap", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "GridQubit", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "ops", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "ops", + "importPath": "cirq", + "description": "cirq", + "isExtraImport": true, + "detail": "cirq", + "documentation": {} + }, + { + "label": "TSNE", + "importPath": "sklearn.manifold", + "description": "sklearn.manifold", + "isExtraImport": true, + "detail": "sklearn.manifold", + "documentation": {} + }, + { + "label": "LocallyLinearEmbedding", + "importPath": "sklearn.manifold", + "description": "sklearn.manifold", + "isExtraImport": true, + "detail": "sklearn.manifold", + "documentation": {} + }, + { + "label": "TSNE", + "importPath": "sklearn.manifold", + "description": "sklearn.manifold", + "isExtraImport": true, + "detail": "sklearn.manifold", + "documentation": {} + }, + { + "label": "TSNE", + "importPath": "sklearn.manifold", + "description": 
"sklearn.manifold", + "isExtraImport": true, + "detail": "sklearn.manifold", + "documentation": {} + }, + { + "label": "TSNE", + "importPath": "sklearn.manifold", + "description": "sklearn.manifold", + "isExtraImport": true, + "detail": "sklearn.manifold", + "documentation": {} + }, + { + "label": "imageio", + "kind": 6, + "isExtraImport": true, + "importPath": "imageio", + "description": "imageio", + "detail": "imageio", + "documentation": {} + }, + { + "label": "AdamOptimizer", + "importPath": "pennylane.optimize", + "description": "pennylane.optimize", + "isExtraImport": true, + "detail": "pennylane.optimize", + "documentation": {} + }, + { + "label": "GradientDescentOptimizer", + "importPath": "pennylane.optimize", + "description": "pennylane.optimize", + "isExtraImport": true, + "detail": "pennylane.optimize", + "documentation": {} + }, + { + "label": "qutip", + "kind": 6, + "isExtraImport": true, + "importPath": "qutip", + "description": "qutip", + "detail": "qutip", + "documentation": {} + }, + { + "label": "Bloch", + "importPath": "qutip", + "description": "qutip", + "isExtraImport": true, + "detail": "qutip", + "documentation": {} + }, + { + "label": "Bloch", + "importPath": "qutip", + "description": "qutip", + "isExtraImport": true, + "detail": "qutip", + "documentation": {} + }, + { + "label": "itertools", + "kind": 6, + "isExtraImport": true, + "importPath": "itertools", + "description": "itertools", + "detail": "itertools", + "documentation": {} + }, + { + "label": "product", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "combinations", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "repeat", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": 
"zip_longest", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "product", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "repeat", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "product", + "importPath": "itertools", + "description": "itertools", + "isExtraImport": true, + "detail": "itertools", + "documentation": {} + }, + { + "label": "sympy", + "kind": 6, + "isExtraImport": true, + "importPath": "sympy", + "description": "sympy", + "detail": "sympy", + "documentation": {} + }, + { + "label": "default_sort_key", + "importPath": "sympy", + "description": "sympy", + "isExtraImport": true, + "detail": "sympy", + "documentation": {} + }, + { + "label": "tensorflow_quantum", + "kind": 6, + "isExtraImport": true, + "importPath": "tensorflow_quantum", + "description": "tensorflow_quantum", + "detail": "tensorflow_quantum", + "documentation": {} + }, + { + "label": "energyflow", + "kind": 6, + "isExtraImport": true, + "importPath": "energyflow", + "description": "energyflow", + "detail": "energyflow", + "documentation": {} + }, + { + "label": "LogNorm", + "importPath": "matplotlib.colors", + "description": "matplotlib.colors", + "isExtraImport": true, + "detail": "matplotlib.colors", + "documentation": {} + }, + { + "label": "LogNorm", + "importPath": "matplotlib.colors", + "description": "matplotlib.colors", + "isExtraImport": true, + "detail": "matplotlib.colors", + "documentation": {} + }, + { + "label": "LogNorm", + "importPath": "matplotlib.colors", + "description": "matplotlib.colors", + "isExtraImport": true, + "detail": "matplotlib.colors", + "documentation": {} + }, + { + "label": "IsolationForest", + "importPath": "sklearn.ensemble", + "description": 
"sklearn.ensemble", + "isExtraImport": true, + "detail": "sklearn.ensemble", + "documentation": {} + }, + { + "label": "RandomForestClassifier", + "importPath": "sklearn.ensemble", + "description": "sklearn.ensemble", + "isExtraImport": true, + "detail": "sklearn.ensemble", + "documentation": {} + }, + { + "label": "setuptools", + "kind": 6, + "isExtraImport": true, + "importPath": "setuptools", + "description": "setuptools", + "detail": "setuptools", + "documentation": {} + }, + { + "label": "setup", + "importPath": "setuptools", + "description": "setuptools", + "isExtraImport": true, + "detail": "setuptools", + "documentation": {} + }, + { + "label": "setup", + "importPath": "setuptools", + "description": "setuptools", + "isExtraImport": true, + "detail": "setuptools", + "documentation": {} + }, + { + "label": "find_packages", + "importPath": "setuptools", + "description": "setuptools", + "isExtraImport": true, + "detail": "setuptools", + "documentation": {} + }, + { + "label": "setup", + "importPath": "setuptools", + "description": "setuptools", + "isExtraImport": true, + "detail": "setuptools", + "documentation": {} + }, + { + "label": "generate_circuit", + "importPath": "qcnn_drc.circuit_constructor", + "description": "qcnn_drc.circuit_constructor", + "isExtraImport": true, + "detail": "qcnn_drc.circuit_constructor", + "documentation": {} + }, + { + "label": "math", + "kind": 6, + "isExtraImport": true, + "importPath": "math", + "description": "math", + "detail": "math", + "documentation": {} + }, + { + "label": "sqrt", + "importPath": "math", + "description": "math", + "isExtraImport": true, + "detail": "math", + "documentation": {} + }, + { + "label": "ceil", + "importPath": "math", + "description": "math", + "isExtraImport": true, + "detail": "math", + "documentation": {} + }, + { + "label": "ReUploadingPQC", + "importPath": "qcnn_drc.data_reuploading", + "description": "qcnn_drc.data_reuploading", + "isExtraImport": true, + "detail": 
"qcnn_drc.data_reuploading", + "documentation": {} + }, + { + "label": "setup", + "importPath": "distutils.core", + "description": "distutils.core", + "isExtraImport": true, + "detail": "distutils.core", + "documentation": {} + }, + { + "label": "setup", + "importPath": "distutils.core", + "description": "distutils.core", + "isExtraImport": true, + "detail": "distutils.core", + "documentation": {} + }, + { + "label": "warnings", + "kind": 6, + "isExtraImport": true, + "importPath": "warnings", + "description": "warnings", + "detail": "warnings", + "documentation": {} + }, + { + "label": "Path", + "importPath": "pathlib", + "description": "pathlib", + "isExtraImport": true, + "detail": "pathlib", + "documentation": {} + }, + { + "label": "Path", + "importPath": "pathlib", + "description": "pathlib", + "isExtraImport": true, + "detail": "pathlib", + "documentation": {} + }, + { + "label": "Path", + "importPath": "pathlib", + "description": "pathlib", + "isExtraImport": true, + "detail": "pathlib", + "documentation": {} + }, + { + "label": "Path", + "importPath": "pathlib", + "description": "pathlib", + "isExtraImport": true, + "detail": "pathlib", + "documentation": {} + }, + { + "label": "Path", + "importPath": "pathlib", + "description": "pathlib", + "isExtraImport": true, + "detail": "pathlib", + "documentation": {} + }, + { + "label": "tqdm", + "kind": 6, + "isExtraImport": true, + "importPath": "tqdm", + "description": "tqdm", + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": 
"tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "tqdm", + "importPath": "tqdm", + "description": "tqdm", + "isExtraImport": true, + "detail": "tqdm", + "documentation": {} + }, + { + "label": "urlretrieve", + "importPath": "urllib.request", + "description": "urllib.request", + "isExtraImport": true, + "detail": "urllib.request", + "documentation": {} + }, + { + "label": "urlretrieve", + "importPath": "urllib.request", + "description": "urllib.request", + "isExtraImport": true, + "detail": "urllib.request", + "documentation": {} + }, + { + "label": "tabulate", + "importPath": "tabulate", + "description": "tabulate", + "isExtraImport": true, + "detail": "tabulate", + "documentation": {} + }, + { + "label": "extract_samples", + "importPath": "qml_hep_lhc.data.utils", + "description": "qml_hep_lhc.data.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "create_tf_ds", + 
"importPath": "qml_hep_lhc.data.utils", + "description": "qml_hep_lhc.data.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "tf_ds_to_numpy", + "importPath": "qml_hep_lhc.data.utils", + "description": "qml_hep_lhc.data.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "shuffle", + "importPath": "sklearn.utils", + "description": "sklearn.utils", + "isExtraImport": true, + "detail": "sklearn.utils", + "documentation": {} + }, + { + "label": "DataPreprocessor", + "importPath": "qml_hep_lhc.data.preprocessor", + "description": "qml_hep_lhc.data.preprocessor", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.preprocessor", + "documentation": {} + }, + { + "label": "BaseDataModule", + "importPath": "qml_hep_lhc.data.base_data_module", + "description": "qml_hep_lhc.data.base_data_module", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "_download_raw_dataset", + "importPath": "qml_hep_lhc.data.base_data_module", + "description": "qml_hep_lhc.data.base_data_module", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "BaseDataModule", + "importPath": "qml_hep_lhc.data.base_data_module", + "description": "qml_hep_lhc.data.base_data_module", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "BaseDataModule", + "importPath": "qml_hep_lhc.data.base_data_module", + "description": "qml_hep_lhc.data.base_data_module", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "ELECTRON_PHOTON_SMALL_DATASET_URL", + "importPath": "qml_hep_lhc.data.constants", + "description": "qml_hep_lhc.data.constants", + "isExtraImport": true, + "detail": "qml_hep_lhc.data.constants", + "documentation": {} + }, + { 
+ "label": "mnist", + "importPath": "tensorflow.keras.datasets", + "description": "tensorflow.keras.datasets", + "isExtraImport": true, + "detail": "tensorflow.keras.datasets", + "documentation": {} + }, + { + "label": "StandardScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "Normalizer", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "PowerTransformer", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": 
"sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "MinMaxScaler", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "normalize", + "importPath": "sklearn.preprocessing", + "description": "sklearn.preprocessing", + "isExtraImport": true, + "detail": "sklearn.preprocessing", + "documentation": {} + }, + { + "label": "numba", + "kind": 6, + "isExtraImport": true, + "importPath": "numba", + "description": "numba", + "detail": "numba", + "documentation": {} + }, + { + "label": "njit", + "importPath": "numba", + "description": "numba", + "isExtraImport": true, + "detail": "numba", + "documentation": {} + }, + { + "label": "prange", + "importPath": "numba", + "description": "numba", + "isExtraImport": true, + "detail": "numba", + "documentation": {} + }, + { + "label": "to_categorical", + "importPath": "tensorflow.keras.utils", + "description": "tensorflow.keras.utils", + "isExtraImport": true, + "detail": "tensorflow.keras.utils", + "documentation": {} + }, + { + "label": "ParseAction", + "importPath": "qml_hep_lhc.utils", 
+ "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "ParseAction", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "ParseAction", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "ParseAction", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "_import_class", + "importPath": "qml_hep_lhc.utils", + "description": "qml_hep_lhc.utils", + "isExtraImport": true, + "detail": 
"qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "logging", + "kind": 6, + "isExtraImport": true, + "importPath": "logging", + "description": "logging", + "detail": "logging", + "documentation": {} + }, + { + "label": "warning", + "importPath": "logging", + "description": "logging", + "isExtraImport": true, + "detail": "logging", + "documentation": {} + }, + { + "label": "Layer", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Activation", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Layer", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Add", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Activation", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Concatenate", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Layer", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} 
+ }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Conv2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "BatchNormalization", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Activation", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Layer", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dropout", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": 
"tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Activation", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "AveragePooling2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "add", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Activation", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "AveragePooling2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras.layers", + "description": 
"tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "add", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "BatchNormalization", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Conv2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dropout", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": 
"tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dropout", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + 
"label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "BatchNormalization", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Conv2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Input", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dropout", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Flatten", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "Dense", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": "tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "MaxPool2D", + "importPath": "tensorflow.keras.layers", + "description": "tensorflow.keras.layers", + "isExtraImport": true, + "detail": 
"tensorflow.keras.layers", + "documentation": {} + }, + { + "label": "cluster_state_circuit", + "importPath": "qml_hep_lhc.ansatzes.utils", + "description": "qml_hep_lhc.ansatzes.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "cluster_state_circuit", + "importPath": "qml_hep_lhc.ansatzes.utils", + "description": "qml_hep_lhc.ansatzes.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "NQubit", + "importPath": "qml_hep_lhc.ansatzes", + "description": "qml_hep_lhc.ansatzes", + "isExtraImport": true, + "detail": "qml_hep_lhc.ansatzes", + "documentation": {} + }, + { + "label": "normalize_padding", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "normalize_tuple", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "convolution_iters", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_count_of_qubits", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_num_in_symbols", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_count_of_qubits", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": 
{} + }, + { + "label": "get_num_in_symbols", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_count_of_qubits", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_num_in_symbols", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_count_of_qubits", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_num_in_symbols", + "importPath": "qml_hep_lhc.layers.utils", + "description": "qml_hep_lhc.layers.utils", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "TwoLayerPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "NQubitPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "QConv2D", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "TwoLayerPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "NQubitPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + 
"detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "QConv2D", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "QConv2D", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "TwoLayerPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "TwoLayerPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "NQubitPQC", + "importPath": "qml_hep_lhc.layers", + "description": "qml_hep_lhc.layers", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers", + "documentation": {} + }, + { + "label": "AmplitudeMap", + "importPath": "qml_hep_lhc.encodings", + "description": "qml_hep_lhc.encodings", + "isExtraImport": true, + "detail": "qml_hep_lhc.encodings", + "documentation": {} + }, + { + "label": "numbers", + "kind": 6, + "isExtraImport": true, + "importPath": "numbers", + "description": "numbers", + "detail": "numbers", + "documentation": {} + }, + { + "label": "re", + "kind": 6, + "isExtraImport": true, + "importPath": "re", + "description": "re", + "detail": "re", + "documentation": {} + }, + { + "label": "numbers", + "importPath": "sympy.core", + "description": "sympy.core", + "isExtraImport": true, + "detail": "sympy.core", + "documentation": {} + }, + { + "label": "numbers", + "importPath": "sympy.core", + "description": "sympy.core", + "isExtraImport": true, + "detail": "sympy.core", + "documentation": {} + }, + { + "label": "numbers", + "importPath": "sympy.core", + "description": "sympy.core", + "isExtraImport": true, + "detail": 
"sympy.core", + "documentation": {} + }, + { + "label": "TrigonometricFunction", + "importPath": "sympy.functions.elementary.trigonometric", + "description": "sympy.functions.elementary.trigonometric", + "isExtraImport": true, + "detail": "sympy.functions.elementary.trigonometric", + "documentation": {} + }, + { + "label": "InverseTrigonometricFunction", + "importPath": "sympy.functions.elementary.trigonometric", + "description": "sympy.functions.elementary.trigonometric", + "isExtraImport": true, + "detail": "sympy.functions.elementary.trigonometric", + "documentation": {} + }, + { + "label": "HyperbolicFunction", + "importPath": "sympy.functions.elementary.trigonometric", + "description": "sympy.functions.elementary.trigonometric", + "isExtraImport": true, + "detail": "sympy.functions.elementary.trigonometric", + "documentation": {} + }, + { + "label": "TrigonometricFunction", + "importPath": "sympy.functions.elementary.trigonometric", + "description": "sympy.functions.elementary.trigonometric", + "isExtraImport": true, + "detail": "sympy.functions.elementary.trigonometric", + "documentation": {} + }, + { + "label": "TrigonometricFunction", + "importPath": "sympy.functions.elementary.trigonometric", + "description": "sympy.functions.elementary.trigonometric", + "isExtraImport": true, + "detail": "sympy.functions.elementary.trigonometric", + "documentation": {} + }, + { + "label": "l2", + "importPath": "tensorflow.keras.regularizers", + "description": "tensorflow.keras.regularizers", + "isExtraImport": true, + "detail": "tensorflow.keras.regularizers", + "documentation": {} + }, + { + "label": "ResNet50", + "importPath": "tensorflow.keras.applications", + "description": "tensorflow.keras.applications", + "isExtraImport": true, + "detail": "tensorflow.keras.applications", + "documentation": {} + }, + { + "label": "ResNet50", + "importPath": "tensorflow.keras.applications", + "description": "tensorflow.keras.applications", + "isExtraImport": true, + "detail": 
"tensorflow.keras.applications", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BaseModel", + "importPath": "qml_hep_lhc.models.base_model", + "description": "qml_hep_lhc.models.base_model", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "BottleneckResidual", + "importPath": "qml_hep_lhc.models.classical.resnet.bottleneck", + "description": 
"qml_hep_lhc.models.classical.resnet.bottleneck", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.classical.resnet.bottleneck", + "documentation": {} + }, + { + "label": "BottleneckResidual", + "importPath": "qml_hep_lhc.models.classical.resnet.bottleneck", + "description": "qml_hep_lhc.models.classical.resnet.bottleneck", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.classical.resnet.bottleneck", + "documentation": {} + }, + { + "label": "QCNN", + "importPath": "qml_hep_lhc.models", + "description": "qml_hep_lhc.models", + "isExtraImport": true, + "detail": "qml_hep_lhc.models", + "documentation": {} + }, + { + "label": "QCNN", + "importPath": "qml_hep_lhc.models", + "description": "qml_hep_lhc.models", + "isExtraImport": true, + "detail": "qml_hep_lhc.models", + "documentation": {} + }, + { + "label": "QCNN", + "importPath": "qml_hep_lhc.models", + "description": "qml_hep_lhc.models", + "isExtraImport": true, + "detail": "qml_hep_lhc.models", + "documentation": {} + }, + { + "label": "QCNN", + "importPath": "qml_hep_lhc.models", + "description": "qml_hep_lhc.models", + "isExtraImport": true, + "detail": "qml_hep_lhc.models", + "documentation": {} + }, + { + "label": "QCNN", + "importPath": "qml_hep_lhc.models", + "description": "qml_hep_lhc.models", + "isExtraImport": true, + "detail": "qml_hep_lhc.models", + "documentation": {} + }, + { + "label": "QConv2D", + "importPath": "qml_hep_lhc.layers.qconv2d", + "description": "qml_hep_lhc.layers.qconv2d", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.qconv2d", + "documentation": {} + }, + { + "label": "QConv2D", + "importPath": "qml_hep_lhc.layers.qconv2d", + "description": "qml_hep_lhc.layers.qconv2d", + "isExtraImport": true, + "detail": "qml_hep_lhc.layers.qconv2d", + "documentation": {} + }, + { + "label": "default", + "importPath": "email.policy", + "description": "email.policy", + "isExtraImport": true, + "detail": "email.policy", + "documentation": {} + }, + { + "label": 
"default", + "importPath": "email.policy", + "description": "email.policy", + "isExtraImport": true, + "detail": "email.policy", + "documentation": {} + }, + { + "label": "AUC", + "importPath": "tensorflow.keras.metrics", + "description": "tensorflow.keras.metrics", + "isExtraImport": true, + "detail": "tensorflow.keras.metrics", + "documentation": {} + }, + { + "label": "RectifiedAdam", + "importPath": "tensorflow_addons.optimizers", + "description": "tensorflow_addons.optimizers", + "isExtraImport": true, + "detail": "tensorflow_addons.optimizers", + "documentation": {} + }, + { + "label": "Lookahead", + "importPath": "tensorflow_addons.optimizers", + "description": "tensorflow_addons.optimizers", + "isExtraImport": true, + "detail": "tensorflow_addons.optimizers", + "documentation": {} + }, + { + "label": "custom_accuracy", + "importPath": "qml_hep_lhc.models.metrics", + "description": "qml_hep_lhc.models.metrics", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.metrics", + "documentation": {} + }, + { + "label": "qAUC", + "importPath": "qml_hep_lhc.models.metrics", + "description": "qml_hep_lhc.models.metrics", + "isExtraImport": true, + "detail": "qml_hep_lhc.models.metrics", + "documentation": {} + }, + { + "label": "importlib", + "kind": 6, + "isExtraImport": true, + "importPath": "importlib", + "description": "importlib", + "detail": "importlib", + "documentation": {} + }, + { + "label": "import_module", + "importPath": "importlib", + "description": "importlib", + "isExtraImport": true, + "detail": "importlib", + "documentation": {} + }, + { + "label": "argparse", + "kind": 6, + "isExtraImport": true, + "importPath": "argparse", + "description": "argparse", + "detail": "argparse", + "documentation": {} + }, + { + "label": "Action", + "importPath": "argparse", + "description": "argparse", + "isExtraImport": true, + "detail": "argparse", + "documentation": {} + }, + { + "label": "ArgumentParser", + "importPath": "argparse", + "description": 
"argparse", + "isExtraImport": true, + "detail": "argparse", + "documentation": {} + }, + { + "label": "wandb", + "kind": 6, + "isExtraImport": true, + "importPath": "wandb", + "description": "wandb", + "detail": "wandb", + "documentation": {} + }, + { + "label": "Callback", + "importPath": "tensorflow.keras.callbacks", + "description": "tensorflow.keras.callbacks", + "isExtraImport": true, + "detail": "tensorflow.keras.callbacks", + "documentation": {} + }, + { + "label": "EarlyStopping", + "importPath": "tensorflow.keras.callbacks", + "description": "tensorflow.keras.callbacks", + "isExtraImport": true, + "detail": "tensorflow.keras.callbacks", + "documentation": {} + }, + { + "label": "ModelCheckpoint", + "importPath": "tensorflow.keras.callbacks", + "description": "tensorflow.keras.callbacks", + "isExtraImport": true, + "detail": "tensorflow.keras.callbacks", + "documentation": {} + }, + { + "label": "ReduceLROnPlateau", + "importPath": "tensorflow.keras.callbacks", + "description": "tensorflow.keras.callbacks", + "isExtraImport": true, + "detail": "tensorflow.keras.callbacks", + "documentation": {} + }, + { + "label": "_setup_callbacks", + "importPath": "callbacks", + "description": "callbacks", + "isExtraImport": true, + "detail": "callbacks", + "documentation": {} + }, + { + "label": "unitary_group", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, + { + "label": "unitary_group", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, + { + "label": "rv_continuous", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, + { + "label": "wasserstein_distance", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, 
+ { + "label": "entropy", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, + { + "label": "ttest_ind", + "importPath": "scipy.stats", + "description": "scipy.stats", + "isExtraImport": true, + "detail": "scipy.stats", + "documentation": {} + }, + { + "label": "RandomLayers", + "importPath": "pennylane.templates", + "description": "pennylane.templates", + "isExtraImport": true, + "detail": "pennylane.templates", + "documentation": {} + }, + { + "label": "RandomLayers", + "importPath": "pennylane.templates", + "description": "pennylane.templates", + "isExtraImport": true, + "detail": "pennylane.templates", + "documentation": {} + }, + { + "label": "torch.optim", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.optim", + "description": "torch.optim", + "detail": "torch.optim", + "documentation": {} + }, + { + "label": "Adam", + "importPath": "torch.optim", + "description": "torch.optim", + "isExtraImport": true, + "detail": "torch.optim", + "documentation": {} + }, + { + "label": "Adam", + "importPath": "torch.optim", + "description": "torch.optim", + "isExtraImport": true, + "detail": "torch.optim", + "documentation": {} + }, + { + "label": "Optimizer", + "importPath": "torch.optim", + "description": "torch.optim", + "isExtraImport": true, + "detail": "torch.optim", + "documentation": {} + }, + { + "label": "Adam", + "importPath": "torch.optim", + "description": "torch.optim", + "isExtraImport": true, + "detail": "torch.optim", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + 
"importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "importPath": "data_preprocessing", + "description": "data_preprocessing", + "isExtraImport": true, + "detail": "data_preprocessing", + "documentation": {} + }, + { + "label": "train_vanilla_gan", + "importPath": "vanilla_gan", + "description": "vanilla_gan", + "isExtraImport": true, + "detail": "vanilla_gan", + "documentation": {} + }, + { + "label": "train_wgan", + "importPath": "wgan", + "description": "wgan", + "isExtraImport": true, + "detail": "wgan", + "documentation": {} + }, + { + "label": "train_total_variation_gan", + "importPath": "total_variation", + "description": "total_variation", + "isExtraImport": true, + "detail": 
"total_variation", + "documentation": {} + }, + { + "label": "train_perceptual_gan", + "importPath": "perceptual_loss", + "description": "perceptual_loss", + "isExtraImport": true, + "detail": "perceptual_loss", + "documentation": {} + }, + { + "label": "spectral_norm", + "importPath": "torch.nn.utils", + "description": "torch.nn.utils", + "isExtraImport": true, + "detail": "torch.nn.utils", + "documentation": {} + }, + { + "label": "QuantumGenerator", + "importPath": "models.quantum_generator", + "description": "models.quantum_generator", + "isExtraImport": true, + "detail": "models.quantum_generator", + "documentation": {} + }, + { + "label": "QuantumGenerator", + "importPath": "models.quantum_generator", + "description": "models.quantum_generator", + "isExtraImport": true, + "detail": "models.quantum_generator", + "documentation": {} + }, + { + "label": "Discriminator", + "importPath": "models.discriminator", + "description": "models.discriminator", + "isExtraImport": true, + "detail": "models.discriminator", + "documentation": {} + }, + { + "label": "Discriminator", + "importPath": "models.discriminator", + "description": "models.discriminator", + "isExtraImport": true, + "detail": "models.discriminator", + "documentation": {} + }, + { + "label": "load_jet_data", + "importPath": "utils.data_loader", + "description": "utils.data_loader", + "isExtraImport": true, + "detail": "utils.data_loader", + "documentation": {} + }, + { + "label": "load_mnist_data", + "importPath": "utils.data_loader", + "description": "utils.data_loader", + "isExtraImport": true, + "detail": "utils.data_loader", + "documentation": {} + }, + { + "label": "calculate_fid", + "importPath": "utils.fid_score", + "description": "utils.fid_score", + "isExtraImport": true, + "detail": "utils.fid_score", + "documentation": {} + }, + { + "label": "plot_losses", + "importPath": "utils.plot_utils", + "description": "utils.plot_utils", + "isExtraImport": true, + "detail": "utils.plot_utils", + 
"documentation": {} + }, + { + "label": "plot_generated_samples", + "importPath": "utils.plot_utils", + "description": "utils.plot_utils", + "isExtraImport": true, + "detail": "utils.plot_utils", + "documentation": {} + }, + { + "label": "plot_losses", + "importPath": "utils.plot_utils", + "description": "utils.plot_utils", + "isExtraImport": true, + "detail": "utils.plot_utils", + "documentation": {} + }, + { + "label": "plot_generated_samples", + "importPath": "utils.plot_utils", + "description": "utils.plot_utils", + "isExtraImport": true, + "detail": "utils.plot_utils", + "documentation": {} + }, + { + "label": "torchvision.datasets", + "kind": 6, + "isExtraImport": true, + "importPath": "torchvision.datasets", + "description": "torchvision.datasets", + "detail": "torchvision.datasets", + "documentation": {} + }, + { + "label": "random", + "kind": 6, + "isExtraImport": true, + "importPath": "random", + "description": "random", + "detail": "random", + "documentation": {} + }, + { + "label": "inception_v3", + "importPath": "torchvision.models", + "description": "torchvision.models", + "isExtraImport": true, + "detail": "torchvision.models", + "documentation": {} + }, + { + "label": "sqrtm", + "importPath": "scipy.linalg", + "description": "scipy.linalg", + "isExtraImport": true, + "detail": "scipy.linalg", + "documentation": {} + }, + { + "label": "sqrtm", + "importPath": "scipy.linalg", + "description": "scipy.linalg", + "isExtraImport": true, + "detail": "scipy.linalg", + "documentation": {} + }, + { + "label": "sqrtm", + "importPath": "scipy.linalg", + "description": "scipy.linalg", + "isExtraImport": true, + "detail": "scipy.linalg", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "normalize_and_resize", + "importPath": "src.data_preprocessing", + "description": 
"src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "sample_data", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "apply_pca", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "normalize_pca", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "normalize_and_resize", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "sample_data", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "apply_pca", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "JetDataset", + "importPath": "src.data_preprocessing", + "description": "src.data_preprocessing", + "isExtraImport": true, + "detail": "src.data_preprocessing", + "documentation": {} + }, + { + "label": "create_qnode", + "importPath": "src.model", + "description": "src.model", + "isExtraImport": true, + "detail": "src.model", + "documentation": {} + }, + { + "label": "QuantumGAN", + "importPath": "src.model", + 
"description": "src.model", + "isExtraImport": true, + "detail": "src.model", + "documentation": {} + }, + { + "label": "create_generator_qnode", + "importPath": "src.model", + "description": "src.model", + "isExtraImport": true, + "detail": "src.model", + "documentation": {} + }, + { + "label": "create_discriminator_qnode", + "importPath": "src.model", + "description": "src.model", + "isExtraImport": true, + "detail": "src.model", + "documentation": {} + }, + { + "label": "QuantumGAN", + "importPath": "src.model", + "description": "src.model", + "isExtraImport": true, + "detail": "src.model", + "documentation": {} + }, + { + "label": "Logloss", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "plot_and_save_graphs", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "save_sample_images", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "perceptual_loss", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "quantum_fidelity", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "plot_losses", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "save_models", + "importPath": "src.utils", + "description": "src.utils", + "isExtraImport": true, + "detail": "src.utils", + "documentation": {} + }, + { + "label": "InceptionV3", + "importPath": "keras.applications.inception_v3", + "description": "keras.applications.inception_v3", + "isExtraImport": true, + "detail": 
"keras.applications.inception_v3", + "documentation": {} + }, + { + "label": "preprocess_input", + "importPath": "keras.applications.inception_v3", + "description": "keras.applications.inception_v3", + "isExtraImport": true, + "detail": "keras.applications.inception_v3", + "documentation": {} + }, + { + "label": "resize", + "importPath": "skimage.transform", + "description": "skimage.transform", + "isExtraImport": true, + "detail": "skimage.transform", + "documentation": {} + }, + { + "label": "matplotlib.gridspec", + "kind": 6, + "isExtraImport": true, + "importPath": "matplotlib.gridspec", + "description": "matplotlib.gridspec", + "detail": "matplotlib.gridspec", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", 
+ "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + 
"isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, 
+ "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "cast", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "FrozenSet", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterator", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "overload", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Set", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Type", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": 
"typing", + "documentation": {} + }, + { + "label": "TYPE_CHECKING", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TypeVar", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "cast", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "FrozenSet", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterator", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "overload", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + 
"documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Set", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Type", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TYPE_CHECKING", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TypeVar", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "cast", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "FrozenSet", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} 
+ }, + { + "label": "Iterator", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "overload", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Set", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Type", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TYPE_CHECKING", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TypeVar", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + 
"label": "cast", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "FrozenSet", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterator", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "overload", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Set", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Type", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "TYPE_CHECKING", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": 
"TypeVar", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + 
"importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": 
"typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + 
"description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Union", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "List", + "importPath": "typing", + "description": "typing", + 
"isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "NamedTuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Sequence", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Any", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Dict", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "typing", + "description": "typing", + 
"isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Tuple", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "*", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Literal", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Callable", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "Optional", + "importPath": "typing", + "description": "typing", + "isExtraImport": true, + "detail": "typing", + "documentation": {} + }, + { + "label": "quple", + "kind": 6, + "isExtraImport": true, + "importPath": "quple", + "description": "quple", + "detail": "quple", + "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "importPath": "quple", + "description": 
"quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", 
+ "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "quple", + "description": "quple", + "isExtraImport": true, + "detail": "quple", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + 
"description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "ABC", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "abstractmethod", + "importPath": "abc", + "description": "abc", + "isExtraImport": true, + "detail": "abc", + "documentation": {} + }, + { + "label": "signature", + "importPath": "inspect", + "description": "inspect", + "isExtraImport": true, + "detail": "inspect", + "documentation": {} + }, + { + "label": "isclass", + "importPath": "inspect", + "description": "inspect", + "isExtraImport": true, + "detail": "inspect", + "documentation": {} + }, + { + "label": "SingleQubitGate", + "importPath": "cirq.ops.gate_features", + "description": "cirq.ops.gate_features", + "isExtraImport": true, + "detail": "cirq.ops.gate_features", + "documentation": {} + }, + { + "label": "TwoQubitGate", + "importPath": "cirq.ops.gate_features", + "description": "cirq.ops.gate_features", + "isExtraImport": true, + "detail": "cirq.ops.gate_features", + "documentation": {} + }, + { + "label": "interaction_graph", + "importPath": "quple.components.interaction_graphs", + "description": "quple.components.interaction_graphs", + "isExtraImport": true, + "detail": "quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "cyclic", + "importPath": "quple.components.interaction_graphs", + "description": "quple.components.interaction_graphs", + "isExtraImport": true, + "detail": "quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "interaction_graph", + "importPath": "quple.components.interaction_graphs", + "description": "quple.components.interaction_graphs", + "isExtraImport": true, + "detail": "quple.components.interaction_graphs", + "documentation": {} + 
}, + { + "label": "merge_pqc", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "get_unique_symbols", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "parallel_run", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "parallel_run", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "natural_key", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "parallel_run", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "parallel_run", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "batching", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "flatten_list", + "importPath": "quple.utils.utils", + "description": "quple.utils.utils", + "isExtraImport": true, + "detail": "quple.utils.utils", + "documentation": {} + }, + { + "label": "pdb", + "kind": 6, + "isExtraImport": true, + "importPath": "pdb", + "description": "pdb", + "detail": "pdb", + "documentation": {} + }, + { + "label": "set_trace", + "importPath": "pdb", + "description": "pdb", + "isExtraImport": true, + "detail": "pdb", + 
"documentation": {} + }, + { + "label": "set_trace", + "importPath": "pdb", + "description": "pdb", + "isExtraImport": true, + "detail": "pdb", + "documentation": {} + }, + { + "label": "InsertStrategy", + "importPath": "cirq.circuits", + "description": "cirq.circuits", + "isExtraImport": true, + "detail": "cirq.circuits", + "documentation": {} + }, + { + "label": "InsertStrategy", + "importPath": "cirq.circuits", + "description": "cirq.circuits", + "isExtraImport": true, + "detail": "cirq.circuits", + "documentation": {} + }, + { + "label": "qasm", + "importPath": "cirq.protocols", + "description": "cirq.protocols", + "isExtraImport": true, + "detail": "cirq.protocols", + "documentation": {} + }, + { + "label": "qasm", + "importPath": "cirq.protocols", + "description": "cirq.protocols", + "isExtraImport": true, + "detail": "cirq.protocols", + "documentation": {} + }, + { + "label": "RXX", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RYY", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RZZ", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "CompositeGate", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "PauliRotation", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RXX", + "importPath": "quple.components.gate_ops", + 
"description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RYY", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RZZ", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RXX", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RYY", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RZZ", + "importPath": "quple.components.gate_ops", + "description": "quple.components.gate_ops", + "isExtraImport": true, + "detail": "quple.components.gate_ops", + "documentation": {} + }, + { + "label": "QubitRegister", + "importPath": "quple.circuits.qubit_register", + "description": "quple.circuits.qubit_register", + "isExtraImport": true, + "detail": "quple.circuits.qubit_register", + "documentation": {} + }, + { + "label": "QubitRegister", + "importPath": "quple.circuits.qubit_register", + "description": "quple.circuits.qubit_register", + "isExtraImport": true, + "detail": "quple.circuits.qubit_register", + "documentation": {} + }, + { + "label": "svm", + "importPath": "sklearn", + "description": "sklearn", + "isExtraImport": true, + "detail": "sklearn", + "documentation": {} + }, + { + "label": "svm", + "importPath": "sklearn", + "description": "sklearn", + "isExtraImport": true, + "detail": "sklearn", + "documentation": {} + }, + { + "label": "metrics", + "importPath": "sklearn", + "description": 
"sklearn", + "isExtraImport": true, + "detail": "sklearn", + "documentation": {} + }, + { + "label": "preprocessing", + "importPath": "sklearn", + "description": "sklearn", + "isExtraImport": true, + "detail": "sklearn", + "documentation": {} + }, + { + "label": "abs2", + "importPath": "quple.utils.mathext", + "description": "quple.utils.mathext", + "isExtraImport": true, + "detail": "quple.utils.mathext", + "documentation": {} + }, + { + "label": "DataPrecision", + "importPath": "quple.utils.mathext", + "description": "quple.utils.mathext", + "isExtraImport": true, + "detail": "quple.utils.mathext", + "documentation": {} + }, + { + "label": "split_gramian_matrix", + "importPath": "quple.utils.mathext", + "description": "quple.utils.mathext", + "isExtraImport": true, + "detail": "quple.utils.mathext", + "documentation": {} + }, + { + "label": "datetime", + "kind": 6, + "isExtraImport": true, + "importPath": "datetime", + "description": "datetime", + "detail": "datetime", + "documentation": {} + }, + { + "label": "date", + "importPath": "datetime", + "description": "datetime", + "isExtraImport": true, + "detail": "datetime", + "documentation": {} + }, + { + "label": "six", + "kind": 6, + "isExtraImport": true, + "importPath": "six", + "description": "six", + "detail": "six", + "documentation": {} + }, + { + "label": "EncodingCircuit", + "importPath": "quple.data_encoding", + "description": "quple.data_encoding", + "isExtraImport": true, + "detail": "quple.data_encoding", + "documentation": {} + }, + { + "label": "EncodingCircuit", + "importPath": "quple.data_encoding", + "description": "quple.data_encoding", + "isExtraImport": true, + "detail": "quple.data_encoding", + "documentation": {} + }, + { + "label": "encoding_map_registry", + "importPath": "quple.data_encoding.encoding_maps", + "description": "quple.data_encoding.encoding_maps", + "isExtraImport": true, + "detail": "quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "functools", + 
"kind": 6, + "isExtraImport": true, + "importPath": "functools", + "description": "functools", + "detail": "functools", + "documentation": {} + }, + { + "label": "reduce", + "importPath": "functools", + "description": "functools", + "isExtraImport": true, + "detail": "functools", + "documentation": {} + }, + { + "label": "partial", + "importPath": "functools", + "description": "functools", + "isExtraImport": true, + "detail": "functools", + "documentation": {} + }, + { + "label": "partial", + "importPath": "functools", + "description": "functools", + "isExtraImport": true, + "detail": "functools", + "documentation": {} + }, + { + "label": "GeneralPauliZEncoding", + "importPath": "quple.data_encoding.general_pauli_z_encoding", + "description": "quple.data_encoding.general_pauli_z_encoding", + "isExtraImport": true, + "detail": "quple.data_encoding.general_pauli_z_encoding", + "documentation": {} + }, + { + "label": "GeneralPauliZEncoding", + "importPath": "quple.data_encoding.general_pauli_z_encoding", + "description": "quple.data_encoding.general_pauli_z_encoding", + "isExtraImport": true, + "detail": "quple.data_encoding.general_pauli_z_encoding", + "documentation": {} + }, + { + "label": "PauliBlock", + "importPath": "quple.circuits.templates.pauli_block", + "description": "quple.circuits.templates.pauli_block", + "isExtraImport": true, + "detail": "quple.circuits.templates.pauli_block", + "documentation": {} + }, + { + "label": "GeneralPauliEncoding", + "importPath": "quple.data_encoding.general_pauli_encoding", + "description": "quple.data_encoding.general_pauli_encoding", + "isExtraImport": true, + "detail": "quple.data_encoding.general_pauli_encoding", + "documentation": {} + }, + { + "label": "\\", + "importPath": "tensorflow_quantum.python.layers.circuit_executors", + "description": "tensorflow_quantum.python.layers.circuit_executors", + "isExtraImport": true, + "detail": "tensorflow_quantum.python.layers.circuit_executors", + "documentation": {} + }, + { + 
"label": "elementary", + "importPath": "tensorflow_quantum.python.layers.circuit_construction", + "description": "tensorflow_quantum.python.layers.circuit_construction", + "isExtraImport": true, + "detail": "tensorflow_quantum.python.layers.circuit_construction", + "documentation": {} + }, + { + "label": "util", + "importPath": "tensorflow_quantum.python", + "description": "tensorflow_quantum.python", + "isExtraImport": true, + "detail": "tensorflow_quantum.python", + "documentation": {} + }, + { + "label": "resolve_formulas", + "importPath": "quple.interface.tfq.tf_resolvers", + "description": "quple.interface.tfq.tf_resolvers", + "isExtraImport": true, + "detail": "quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "Layer", + "importPath": "tensorflow.python.keras.engine.base_layer", + "description": "tensorflow.python.keras.engine.base_layer", + "isExtraImport": true, + "detail": "tensorflow.python.keras.engine.base_layer", + "documentation": {} + }, + { + "label": "conv_utils", + "importPath": "tensorflow.python.keras.utils", + "description": "tensorflow.python.keras.utils", + "isExtraImport": true, + "detail": "tensorflow.python.keras.utils", + "documentation": {} + }, + { + "label": "tensor_shape", + "importPath": "tensorflow.python.framework", + "description": "tensorflow.python.framework", + "isExtraImport": true, + "detail": "tensorflow.python.framework", + "documentation": {} + }, + { + "label": "constraints", + "importPath": "tensorflow.python.keras", + "description": "tensorflow.python.keras", + "isExtraImport": true, + "detail": "tensorflow.python.keras", + "documentation": {} + }, + { + "label": "initializers", + "importPath": "tensorflow.python.keras", + "description": "tensorflow.python.keras", + "isExtraImport": true, + "detail": "tensorflow.python.keras", + "documentation": {} + }, + { + "label": "regularizers", + "importPath": "tensorflow.python.keras", + "description": "tensorflow.python.keras", + "isExtraImport": true, 
+ "detail": "tensorflow.python.keras", + "documentation": {} + }, + { + "label": "PQC", + "importPath": "quple.interface.tfq.layers", + "description": "quple.interface.tfq.layers", + "isExtraImport": true, + "detail": "quple.interface.tfq.layers", + "documentation": {} + }, + { + "label": "get_output_shape", + "importPath": "quple.interface.tfq.tf_utils", + "description": "quple.interface.tfq.tf_utils", + "isExtraImport": true, + "detail": "quple.interface.tfq.tf_utils", + "documentation": {} + }, + { + "label": "pandas", + "kind": 6, + "isExtraImport": true, + "importPath": "pandas", + "description": "pandas", + "detail": "pandas", + "documentation": {} + }, + { + "label": "Enum", + "importPath": "enum", + "description": "enum", + "isExtraImport": true, + "detail": "enum", + "documentation": {} + }, + { + "label": "Enum", + "importPath": "enum", + "description": "enum", + "isExtraImport": true, + "detail": "enum", + "documentation": {} + }, + { + "label": "Enum", + "importPath": "enum", + "description": "enum", + "isExtraImport": true, + "detail": "enum", + "documentation": {} + }, + { + "label": "Optimizer", + "importPath": "tensorflow.keras.optimizers", + "description": "tensorflow.keras.optimizers", + "isExtraImport": true, + "detail": "tensorflow.keras.optimizers", + "documentation": {} + }, + { + "label": "AbstractModel", + "importPath": "quple.models", + "description": "quple.models", + "isExtraImport": true, + "detail": "quple.models", + "documentation": {} + }, + { + "label": "QGAN", + "importPath": "quple.models.generative", + "description": "quple.models.generative", + "isExtraImport": true, + "detail": "quple.models.generative", + "documentation": {} + }, + { + "label": "Iterable", + "importPath": "collections.abc", + "description": "collections.abc", + "isExtraImport": true, + "detail": "collections.abc", + "documentation": {} + }, + { + "label": "Parameter", + "importPath": "qiskit.circuit", + "description": "qiskit.circuit", + "isExtraImport": true, 
+ "detail": "qiskit.circuit", + "documentation": {} + }, + { + "label": "ParameterVector", + "importPath": "qiskit.circuit", + "description": "qiskit.circuit", + "isExtraImport": true, + "detail": "qiskit.circuit", + "documentation": {} + }, + { + "label": "ParameterVector", + "importPath": "qiskit.circuit", + "description": "qiskit.circuit", + "isExtraImport": true, + "detail": "qiskit.circuit", + "documentation": {} + }, + { + "label": "PauliFeatureMap", + "importPath": "qiskit.circuit.library", + "description": "qiskit.circuit.library", + "isExtraImport": true, + "detail": "qiskit.circuit.library", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "qiskit", + "description": "qiskit", + "isExtraImport": true, + "detail": "qiskit", + "documentation": {} + }, + { + "label": "QuantumRegister", + "importPath": "qiskit", + "description": "qiskit", + "isExtraImport": true, + "detail": "qiskit", + "documentation": {} + }, + { + "label": "ClassicalRegister", + "importPath": "qiskit", + "description": "qiskit", + "isExtraImport": true, + "detail": "qiskit", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "qiskit", + "description": "qiskit", + "isExtraImport": true, + "detail": "qiskit", + "documentation": {} + }, + { + "label": "QuantumRegister", + "importPath": "qiskit", + "description": "qiskit", + "isExtraImport": true, + "detail": "qiskit", + "documentation": {} + }, + { + "label": "FeatureMap", + "importPath": "qiskit.aqua.components.feature_maps", + "description": "qiskit.aqua.components.feature_maps", + "isExtraImport": true, + "detail": "qiskit.aqua.components.feature_maps", + "documentation": {} + }, + { + "label": "FeatureMap", + "importPath": "qiskit.aqua.components.feature_maps", + "description": "qiskit.aqua.components.feature_maps", + "isExtraImport": true, + "detail": "qiskit.aqua.components.feature_maps", + "documentation": {} + }, + { + "label": "operator", + "kind": 6, + "isExtraImport": true, + 
"importPath": "operator", + "description": "operator", + "detail": "operator", + "documentation": {} + }, + { + "label": "QSVM", + "importPath": "qiskit.aqua.algorithms.classifiers", + "description": "qiskit.aqua.algorithms.classifiers", + "isExtraImport": true, + "detail": "qiskit.aqua.algorithms.classifiers", + "documentation": {} + }, + { + "label": "psutil", + "kind": 6, + "isExtraImport": true, + "importPath": "psutil", + "description": "psutil", + "detail": "psutil", + "documentation": {} + }, + { + "label": "multiprocessing", + "kind": 6, + "isExtraImport": true, + "importPath": "multiprocessing", + "description": "multiprocessing", + "detail": "multiprocessing", + "documentation": {} + }, + { + "label": "ProcessPoolExecutor", + "importPath": "concurrent.futures ", + "description": "concurrent.futures ", + "isExtraImport": true, + "detail": "concurrent.futures ", + "documentation": {} + }, + { + "label": "rel_entr", + "importPath": "scipy.special", + "description": "scipy.special", + "isExtraImport": true, + "detail": "scipy.special", + "documentation": {} + }, + { + "label": "annotations", + "importPath": "__future__", + "description": "__future__", + "isExtraImport": true, + "detail": "__future__", + "documentation": {} + }, + { + "label": "clear_output", + "importPath": "IPython.display", + "description": "IPython.display", + "isExtraImport": true, + "detail": "IPython.display", + "documentation": {} + }, + { + "label": "Markdown", + "importPath": "IPython.display", + "description": "IPython.display", + "isExtraImport": true, + "detail": "IPython.display", + "documentation": {} + }, + { + "label": "display", + "importPath": "IPython.display", + "description": "IPython.display", + "isExtraImport": true, + "detail": "IPython.display", + "documentation": {} + }, + { + "label": "json", + "kind": 6, + "isExtraImport": true, + "importPath": "json", + "description": "json", + "detail": "json", + "documentation": {} + }, + { + "label": "torch_geometric", + 
"kind": 6, + "isExtraImport": true, + "importPath": "torch_geometric", + "description": "torch_geometric", + "detail": "torch_geometric", + "documentation": {} + }, + { + "label": "to_networkx", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "k_hop_subgraph", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "remove_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "remove_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "degree", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} 
+ }, + { + "label": "degree", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "degree", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "softmax", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "softmax", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "subgraph", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "add_self_loops", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "degree", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "to_networkx", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "subgraph", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + 
}, + { + "label": "to_networkx", + "importPath": "torch_geometric.utils", + "description": "torch_geometric.utils", + "isExtraImport": true, + "detail": "torch_geometric.utils", + "documentation": {} + }, + { + "label": "get_dhce_data", + "importPath": "dhce.utils", + "description": "dhce.utils", + "isExtraImport": true, + "detail": "dhce.utils", + "documentation": {} + }, + { + "label": "tensorcircuit", + "kind": 6, + "isExtraImport": true, + "importPath": "tensorcircuit", + "description": "tensorcircuit", + "detail": "tensorcircuit", + "documentation": {} + }, + { + "label": "ml_collections", + "kind": 6, + "isExtraImport": true, + "importPath": "ml_collections", + "description": "ml_collections", + "detail": "ml_collections", + "documentation": {} + }, + { + "label": "config_flags", + "importPath": "ml_collections", + "description": "ml_collections", + "isExtraImport": true, + "detail": "ml_collections", + "documentation": {} + }, + { + "label": "torch_geometric.transforms", + "kind": 6, + "isExtraImport": true, + "importPath": "torch_geometric.transforms", + "description": "torch_geometric.transforms", + "detail": "torch_geometric.transforms", + "documentation": {} + }, + { + "label": "networkx", + "kind": 6, + "isExtraImport": true, + "importPath": "networkx", + "description": "networkx", + "detail": "networkx", + "documentation": {} + }, + { + "label": "jax", + "kind": 6, + "isExtraImport": true, + "importPath": "jax", + "description": "jax", + "detail": "jax", + "documentation": {} + }, + { + "label": "jit", + "importPath": "jax", + "description": "jax", + "isExtraImport": true, + "detail": "jax", + "documentation": {} + }, + { + "label": "vmap", + "importPath": "jax", + "description": "jax", + "isExtraImport": true, + "detail": "jax", + "documentation": {} + }, + { + "label": "get_config", + "importPath": "ego_net.config", + "description": "ego_net.config", + "isExtraImport": true, + "detail": "ego_net.config", + "documentation": {} + }, + { + "label": 
"MUTAGDataset", + "importPath": "ego_net.data", + "description": "ego_net.data", + "isExtraImport": true, + "detail": "ego_net.data", + "documentation": {} + }, + { + "label": "optax", + "kind": 6, + "isExtraImport": true, + "importPath": "optax", + "description": "optax", + "detail": "optax", + "documentation": {} + }, + { + "label": "jax.numpy", + "kind": 6, + "isExtraImport": true, + "importPath": "jax.numpy", + "description": "jax.numpy", + "detail": "jax.numpy", + "documentation": {} + }, + { + "label": "pickle", + "kind": 6, + "isExtraImport": true, + "importPath": "pickle", + "description": "pickle", + "detail": "pickle", + "documentation": {} + }, + { + "label": "MutagConfig", + "importPath": "mutag_dataset_config", + "description": "mutag_dataset_config", + "isExtraImport": true, + "detail": "mutag_dataset_config", + "documentation": {} + }, + { + "label": "tensorflow_datasets", + "kind": 6, + "isExtraImport": true, + "importPath": "tensorflow_datasets", + "description": "tensorflow_datasets", + "detail": "tensorflow_datasets", + "documentation": {} + }, + { + "label": "toml", + "kind": 6, + "isExtraImport": true, + "importPath": "toml", + "description": "toml", + "detail": "toml", + "documentation": {} + }, + { + "label": "util", + "importPath": "qgnn_hep", + "description": "qgnn_hep", + "isExtraImport": true, + "detail": "qgnn_hep", + "documentation": {} + }, + { + "label": "dataclasses", + "kind": 6, + "isExtraImport": true, + "importPath": "dataclasses", + "description": "dataclasses", + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "field", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": 
"dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "field", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "field", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "field", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "dataclass", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "field", + "importPath": "dataclasses", + "description": "dataclasses", + "isExtraImport": true, + "detail": "dataclasses", + "documentation": {} + }, + { + "label": "jraph", + "kind": 6, + "isExtraImport": true, + "importPath": "jraph", + "description": "jraph", + "detail": "jraph", + "documentation": {} + }, + { + "label": "flax", + "kind": 6, + "isExtraImport": true, + "importPath": "flax", + "description": "flax", + "detail": "flax", + "documentation": {} + }, + { + "label": "linen", + "importPath": "flax", + "description": "flax", + "isExtraImport": true, + "detail": "flax", + "documentation": {} + }, + { + "label": "linen", + "importPath": "flax", + "description": "flax", + "isExtraImport": true, 
+ "detail": "flax", + "documentation": {} + }, + { + "label": "linen", + "importPath": "flax", + "description": "flax", + "isExtraImport": true, + "detail": "flax", + "documentation": {} + }, + { + "label": "linen", + "importPath": "flax", + "description": "flax", + "isExtraImport": true, + "detail": "flax", + "documentation": {} + }, + { + "label": "add_graphs_tuples", + "importPath": "qgnn_hep.models.util", + "description": "qgnn_hep.models.util", + "isExtraImport": true, + "detail": "qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "add_graphs_tuples", + "importPath": "qgnn_hep.models.util", + "description": "qgnn_hep.models.util", + "isExtraImport": true, + "detail": "qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "MLP", + "importPath": "qgnn_hep.models.util", + "description": "qgnn_hep.models.util", + "isExtraImport": true, + "detail": "qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "add_graphs_tuples", + "importPath": "qgnn_hep.models.util", + "description": "qgnn_hep.models.util", + "isExtraImport": true, + "detail": "qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "QMLP", + "importPath": "qgnn_hep.models.util", + "description": "qgnn_hep.models.util", + "isExtraImport": true, + "detail": "qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "app", + "importPath": "absl", + "description": "absl", + "isExtraImport": true, + "detail": "absl", + "documentation": {} + }, + { + "label": "flags", + "importPath": "absl", + "description": "absl", + "isExtraImport": true, + "detail": "absl", + "documentation": {} + }, + { + "label": "logging", + "importPath": "absl", + "description": "absl", + "isExtraImport": true, + "detail": "absl", + "documentation": {} + }, + { + "label": "logging", + "importPath": "absl", + "description": "absl", + "isExtraImport": true, + "detail": "absl", + "documentation": {} + }, + { + "label": "platform", + "importPath": "clu", + "description": "clu", + 
"isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "checkpoint", + "importPath": "clu", + "description": "clu", + "isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "metric_writers", + "importPath": "clu", + "description": "clu", + "isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "metrics", + "importPath": "clu", + "description": "clu", + "isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "parameter_overview", + "importPath": "clu", + "description": "clu", + "isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "periodic_actions", + "importPath": "clu", + "description": "clu", + "isExtraImport": true, + "detail": "clu", + "documentation": {} + }, + { + "label": "train", + "kind": 6, + "isExtraImport": true, + "importPath": "train", + "description": "train", + "detail": "train", + "documentation": {} + }, + { + "label": "flax.core", + "kind": 6, + "isExtraImport": true, + "importPath": "flax.core", + "description": "flax.core", + "detail": "flax.core", + "documentation": {} + }, + { + "label": "flax.linen", + "kind": 6, + "isExtraImport": true, + "importPath": "flax.linen", + "description": "flax.linen", + "detail": "flax.linen", + "documentation": {} + }, + { + "label": "train_state", + "importPath": "flax.training", + "description": "flax.training", + "isExtraImport": true, + "detail": "flax.training", + "documentation": {} + }, + { + "label": "import_class", + "importPath": "training.util", + "description": "training.util", + "isExtraImport": true, + "detail": "training.util", + "documentation": {} + }, + { + "label": "input_pipeline", + "importPath": "qgnn_hep.data", + "description": "qgnn_hep.data", + "isExtraImport": true, + "detail": "qgnn_hep.data", + "documentation": {} + }, + { + "label": "Particle", + "importPath": "particle", + "description": "particle", + "isExtraImport": true, + "detail": 
"particle", + "documentation": {} + }, + { + "label": "Particle", + "importPath": "particle", + "description": "particle", + "isExtraImport": true, + "detail": "particle", + "documentation": {} + }, + { + "label": "Particle", + "importPath": "particle", + "description": "particle", + "isExtraImport": true, + "detail": "particle", + "documentation": {} + }, + { + "label": "Particle", + "importPath": "particle", + "description": "particle", + "isExtraImport": true, + "detail": "particle", + "documentation": {} + }, + { + "label": "torch_geometric.data", + "kind": 6, + "isExtraImport": true, + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "Data", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "InMemoryDataset", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "download_url", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "extract_zip", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "Data", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch_geometric.data", + 
"description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "Data", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "Dataset", + "importPath": "torch_geometric.data", + "description": "torch_geometric.data", + "isExtraImport": true, + "detail": "torch_geometric.data", + "documentation": {} + }, + { + "label": "torch_geometric.loader", + "kind": 6, + "isExtraImport": true, + "importPath": "torch_geometric.loader", + "description": "torch_geometric.loader", + "detail": "torch_geometric.loader", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch_geometric.loader", + "description": "torch_geometric.loader", + "isExtraImport": true, + "detail": "torch_geometric.loader", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "torch_geometric.loader", + "description": "torch_geometric.loader", + "isExtraImport": true, + "detail": "torch_geometric.loader", + "documentation": {} + }, + { + "label": "read_configurations", + "importPath": "training.utils", + "description": "training.utils", + "isExtraImport": true, + "detail": "training.utils", + "documentation": {} + }, + { + "label": "torch_geometric.nn", + "kind": 6, + "isExtraImport": true, + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "MessagePassing", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "MessagePassing", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "MessagePassing", + "importPath": 
"torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "MessagePassing", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_mean_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "GCNConv", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_mean_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_mean_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_mean_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "GINConv", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_add_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "GCNConv", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "GATConv", + "importPath": "torch_geometric.nn", + "description": 
"torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_mean_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "global_max_pool", + "importPath": "torch_geometric.nn", + "description": "torch_geometric.nn", + "isExtraImport": true, + "detail": "torch_geometric.nn", + "documentation": {} + }, + { + "label": "quantum_net", + "importPath": "QNN_Node_Embedding", + "description": "QNN_Node_Embedding", + "isExtraImport": true, + "detail": "QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "quantum_net", + "importPath": "QNN_Node_Embedding", + "description": "QNN_Node_Embedding", + "isExtraImport": true, + "detail": "QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "quantum_net", + "importPath": "QNN_Node_Embedding", + "description": "QNN_Node_Embedding", + "isExtraImport": true, + "detail": "QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "MPS", + "importPath": "Quantum_Classifiers", + "description": "Quantum_Classifiers", + "isExtraImport": true, + "detail": "Quantum_Classifiers", + "documentation": {} + }, + { + "label": "TTN", + "importPath": "Quantum_Classifiers", + "description": "Quantum_Classifiers", + "isExtraImport": true, + "detail": "Quantum_Classifiers", + "documentation": {} + }, + { + "label": "MPS", + "importPath": "Quantum_Classifiers", + "description": "Quantum_Classifiers", + "isExtraImport": true, + "detail": "Quantum_Classifiers", + "documentation": {} + }, + { + "label": "TTN", + "importPath": "Quantum_Classifiers", + "description": "Quantum_Classifiers", + "isExtraImport": true, + "detail": "Quantum_Classifiers", + "documentation": {} + }, + { + "label": "GCNConv", + "importPath": "GCNConv_Layers", + "description": "GCNConv_Layers", + "isExtraImport": true, + "detail": "GCNConv_Layers", + 
"documentation": {} + }, + { + "label": "plot_auc", + "importPath": "utils", + "description": "utils", + "isExtraImport": true, + "detail": "utils", + "documentation": {} + }, + { + "label": "read_configurations", + "importPath": "utils", + "description": "utils", + "isExtraImport": true, + "detail": "utils", + "documentation": {} + }, + { + "label": "math,", + "kind": 6, + "isExtraImport": true, + "importPath": "math.", + "description": "math.", + "detail": "math.", + "documentation": {} + }, + { + "label": "load_digits", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "load_iris", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "load_digits", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "load_iris", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "load_iris", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "make_swiss_roll", + "importPath": "sklearn.datasets", + "description": "sklearn.datasets", + "isExtraImport": true, + "detail": "sklearn.datasets", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + 
"isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": 
"models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + 
"label": "QuantumSumBlock", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "models.HybridQKAN_model_components", + "description": "models.HybridQKAN_model_components", + "isExtraImport": true, + "detail": "models.HybridQKAN_model_components", + "documentation": {} + }, + { + "label": "QCBMState", + "importPath": "QCBM", + "description": "QCBM", + "isExtraImport": true, + "detail": "QCBM", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "qsvt_sinepoly", + "description": "qsvt_sinepoly", + "isExtraImport": true, + "detail": "qsvt_sinepoly", + "documentation": {} + }, + { + "label": "QSVT", + "importPath": "qsvt_sinepoly", + "description": "qsvt_sinepoly", + "isExtraImport": true, + "detail": "qsvt_sinepoly", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "LCU", + "description": "LCU", + "isExtraImport": true, + "detail": "LCU", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "importPath": "LCU", + "description": "LCU", + "isExtraImport": true, + "detail": "LCU", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "quantum_summation", + "description": "quantum_summation", + "isExtraImport": true, + "detail": "quantum_summation", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "importPath": "quantum_summation", + "description": "quantum_summation", + "isExtraImport": true, + "detail": "quantum_summation", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "SplineKANlayer", + "description": "SplineKANlayer", + "isExtraImport": true, + "detail": "SplineKANlayer", + "documentation": {} + }, + { + "label": "KANLayer", + "importPath": "SplineKANlayer", + "description": "SplineKANlayer", + "isExtraImport": true, + "detail": "SplineKANlayer", + 
"documentation": {} + }, + { + "label": "BSpline", + "importPath": "scipy.interpolate", + "description": "scipy.interpolate", + "isExtraImport": true, + "detail": "scipy.interpolate", + "documentation": {} + }, + { + "label": "torchdata", + "kind": 6, + "isExtraImport": true, + "importPath": "torchdata", + "description": "torchdata", + "detail": "torchdata", + "documentation": {} + }, + { + "label": "torchquantum", + "kind": 6, + "isExtraImport": true, + "importPath": "torchquantum", + "description": "torchquantum", + "detail": "torchquantum", + "documentation": {} + }, + { + "label": "Op2QAllLayer", + "importPath": "torchquantum.layer.entanglement.op2_layer", + "description": "torchquantum.layer.entanglement.op2_layer", + "isExtraImport": true, + "detail": "torchquantum.layer.entanglement.op2_layer", + "documentation": {} + }, + { + "label": "Op1QAllLayer", + "importPath": "torchquantum.layer.layers.layers", + "description": "torchquantum.layer.layers.layers", + "isExtraImport": true, + "detail": "torchquantum.layer.layers.layers", + "documentation": {} + }, + { + "label": "Op2QAllLayer", + "importPath": "torchquantum.layer.layers.layers", + "description": "torchquantum.layer.layers.layers", + "isExtraImport": true, + "detail": "torchquantum.layer.layers.layers", + "documentation": {} + }, + { + "label": "measure", + "importPath": "torchquantum.measurement", + "description": "torchquantum.measurement", + "isExtraImport": true, + "detail": "torchquantum.measurement", + "documentation": {} + }, + { + "label": "cupy", + "kind": 6, + "isExtraImport": true, + "importPath": "cupy", + "description": "cupy", + "detail": "cupy", + "documentation": {} + }, + { + "label": "tempfile", + "kind": 6, + "isExtraImport": true, + "importPath": "tempfile", + "description": "tempfile", + "detail": "tempfile", + "documentation": {} + }, + { + "label": "dgl", + "kind": 6, + "isExtraImport": true, + "importPath": "dgl", + "description": "dgl", + "detail": "dgl", + "documentation": {} + 
}, + { + "label": "DGLDataset", + "importPath": "dgl.data", + "description": "dgl.data", + "isExtraImport": true, + "detail": "dgl.data", + "documentation": {} + }, + { + "label": "GraphDataLoader", + "importPath": "dgl.dataloading", + "description": "dgl.dataloading", + "isExtraImport": true, + "detail": "dgl.dataloading", + "documentation": {} + }, + { + "label": "SubsetRandomSampler", + "importPath": "torch.utils.data.sampler", + "description": "torch.utils.data.sampler", + "isExtraImport": true, + "detail": "torch.utils.data.sampler", + "documentation": {} + }, + { + "label": "scipy.sparse", + "kind": 6, + "isExtraImport": true, + "importPath": "scipy.sparse", + "description": "scipy.sparse", + "detail": "scipy.sparse", + "documentation": {} + }, + { + "label": "csv", + "kind": 6, + "isExtraImport": true, + "importPath": "csv", + "description": "csv", + "detail": "csv", + "documentation": {} + }, + { + "label": "OrderedDict", + "importPath": "collections", + "description": "collections", + "isExtraImport": true, + "detail": "collections", + "documentation": {} + }, + { + "label": "defaultdict", + "importPath": "collections", + "description": "collections", + "isExtraImport": true, + "detail": "collections", + "documentation": {} + }, + { + "label": "joblib", + "kind": 6, + "isExtraImport": true, + "importPath": "joblib", + "description": "joblib", + "detail": "joblib", + "documentation": {} + }, + { + "label": "gc", + "kind": 6, + "isExtraImport": true, + "importPath": "gc", + "description": "gc", + "detail": "gc", + "documentation": {} + }, + { + "label": "torch.multiprocessing", + "kind": 6, + "isExtraImport": true, + "importPath": "torch.multiprocessing", + "description": "torch.multiprocessing", + "detail": "torch.multiprocessing", + "documentation": {} + }, + { + "label": "glob,", + "kind": 6, + "isExtraImport": true, + "importPath": "glob.", + "description": "glob.", + "detail": "glob.", + "documentation": {} + }, + { + "label": "pytorch_lightning", + 
"kind": 6, + "isExtraImport": true, + "importPath": "pytorch_lightning", + "description": "pytorch_lightning", + "detail": "pytorch_lightning", + "documentation": {} + }, + { + "label": "Trainer", + "importPath": "pytorch_lightning", + "description": "pytorch_lightning", + "isExtraImport": true, + "detail": "pytorch_lightning", + "documentation": {} + }, + { + "label": "Trainer", + "importPath": "pytorch_lightning", + "description": "pytorch_lightning", + "isExtraImport": true, + "detail": "pytorch_lightning", + "documentation": {} + }, + { + "label": "Trainer", + "importPath": "pytorch_lightning", + "description": "pytorch_lightning", + "isExtraImport": true, + "detail": "pytorch_lightning", + "documentation": {} + }, + { + "label": "losses", + "importPath": "pytorch_metric_learning", + "description": "pytorch_metric_learning", + "isExtraImport": true, + "detail": "pytorch_metric_learning", + "documentation": {} + }, + { + "label": "losses", + "importPath": "pytorch_metric_learning", + "description": "pytorch_metric_learning", + "isExtraImport": true, + "detail": "pytorch_metric_learning", + "documentation": {} + }, + { + "label": "losses", + "importPath": "pytorch_metric_learning", + "description": "pytorch_metric_learning", + "isExtraImport": true, + "detail": "pytorch_metric_learning", + "documentation": {} + }, + { + "label": "torchmetrics", + "kind": 6, + "isExtraImport": true, + "importPath": "torchmetrics", + "description": "torchmetrics", + "detail": "torchmetrics", + "documentation": {} + }, + { + "label": "CSVLogger", + "importPath": "pytorch_lightning.loggers", + "description": "pytorch_lightning.loggers", + "isExtraImport": true, + "detail": "pytorch_lightning.loggers", + "documentation": {} + }, + { + "label": "CSVLogger", + "importPath": "pytorch_lightning.loggers", + "description": "pytorch_lightning.loggers", + "isExtraImport": true, + "detail": "pytorch_lightning.loggers", + "documentation": {} + }, + { + "label": "CSVLogger", + "importPath": 
"pytorch_lightning.loggers", + "description": "pytorch_lightning.loggers", + "isExtraImport": true, + "detail": "pytorch_lightning.loggers", + "documentation": {} + }, + { + "label": "CSVLogger", + "importPath": "pytorch_lightning.loggers", + "description": "pytorch_lightning.loggers", + "isExtraImport": true, + "detail": "pytorch_lightning.loggers", + "documentation": {} + }, + { + "label": "CometLogger", + "importPath": "pytorch_lightning.loggers", + "description": "pytorch_lightning.loggers", + "isExtraImport": true, + "detail": "pytorch_lightning.loggers", + "documentation": {} + }, + { + "label": "Conv_Siamese", + "importPath": "qml_ssl.models.classical.models", + "description": "qml_ssl.models.classical.models", + "isExtraImport": true, + "detail": "qml_ssl.models.classical.models", + "documentation": {} + }, + { + "label": "Conv_Classifier", + "importPath": "qml_ssl.models.classical.models", + "description": "qml_ssl.models.classical.models", + "isExtraImport": true, + "detail": "qml_ssl.models.classical.models", + "documentation": {} + }, + { + "label": "generate_embeddings", + "importPath": "utils.utils", + "description": "utils.utils", + "isExtraImport": true, + "detail": "utils.utils", + "documentation": {} + }, + { + "label": "vmf_kde_on_circle", + "importPath": "utils.utils", + "description": "utils.utils", + "isExtraImport": true, + "detail": "utils.utils", + "documentation": {} + }, + { + "label": "pca_proj", + "importPath": "utils.utils", + "description": "utils.utils", + "isExtraImport": true, + "detail": "utils.utils", + "documentation": {} + }, + { + "label": "tsne_proj", + "importPath": "utils.utils", + "description": "utils.utils", + "isExtraImport": true, + "detail": "utils.utils", + "documentation": {} + }, + { + "label": "load_mnist_img", + "importPath": "utils.data_mnist", + "description": "utils.data_mnist", + "isExtraImport": true, + "detail": "utils.data_mnist", + "documentation": {} + }, + { + "label": "Hybrid_Contrastive", + 
"importPath": "qml_ssl.models.hybrid_contrastive", + "description": "qml_ssl.models.hybrid_contrastive", + "isExtraImport": true, + "detail": "qml_ssl.models.hybrid_contrastive", + "documentation": {} + }, + { + "label": "Hybrid_Contrastive", + "importPath": "qml_ssl.models.hybrid_contrastive", + "description": "qml_ssl.models.hybrid_contrastive", + "isExtraImport": true, + "detail": "qml_ssl.models.hybrid_contrastive", + "documentation": {} + }, + { + "label": "generate_embeddings", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "vmf_kde_on_circle", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "pca_proj", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "tsne_proj", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "generate_embeddings", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "vmf_kde_on_circle", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "pca_proj", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "tsne_proj", + "importPath": "qml_ssl.utils", + "description": "qml_ssl.utils", + "isExtraImport": true, + "detail": "qml_ssl.utils", + "documentation": {} + }, + { + "label": "load_mnist_img", + "importPath": "qml_ssl.data", + "description": "qml_ssl.data", + 
"isExtraImport": true, + "detail": "qml_ssl.data", + "documentation": {} + }, + { + "label": "load_mnist_img", + "importPath": "qml_ssl.data", + "description": "qml_ssl.data", + "isExtraImport": true, + "detail": "qml_ssl.data", + "documentation": {} + }, + { + "label": "os.path", + "kind": 6, + "isExtraImport": true, + "importPath": "os.path", + "description": "os.path", + "detail": "os.path", + "documentation": {} + }, + { + "label": "shutil", + "kind": 6, + "isExtraImport": true, + "importPath": "shutil", + "description": "shutil", + "detail": "shutil", + "documentation": {} + }, + { + "label": "read_tu_data", + "importPath": "torch_geometric.io", + "description": "torch_geometric.io", + "isExtraImport": true, + "detail": "torch_geometric.io", + "documentation": {} + }, + { + "label": "loadmat", + "importPath": "scipy.io", + "description": "scipy.io", + "isExtraImport": true, + "detail": "scipy.io", + "documentation": {} + }, + { + "label": "SVC", + "importPath": "sklearn.svm", + "description": "sklearn.svm", + "isExtraImport": true, + "detail": "sklearn.svm", + "documentation": {} + }, + { + "label": "LinearSVC", + "importPath": "sklearn.svm", + "description": "sklearn.svm", + "isExtraImport": true, + "detail": "sklearn.svm", + "documentation": {} + }, + { + "label": "SVR", + "importPath": "sklearn.svm", + "description": "sklearn.svm", + "isExtraImport": true, + "detail": "sklearn.svm", + "documentation": {} + }, + { + "label": "LogisticRegression", + "importPath": "sklearn.linear_model", + "description": "sklearn.linear_model", + "isExtraImport": true, + "detail": "sklearn.linear_model", + "documentation": {} + }, + { + "label": "MLPClassifier", + "importPath": "sklearn.neural_network", + "description": "sklearn.neural_network", + "isExtraImport": true, + "detail": "sklearn.neural_network", + "documentation": {} + }, + { + "label": "seaborn", + "kind": 6, + "isExtraImport": true, + "importPath": "seaborn", + "description": "seaborn", + "detail": "seaborn", + 
"documentation": {} + }, + { + "label": "TUDataset", + "importPath": "torch_geometric.datasets", + "description": "torch_geometric.datasets", + "isExtraImport": true, + "detail": "torch_geometric.datasets", + "documentation": {} + }, + { + "label": "torch_scatter", + "kind": 6, + "isExtraImport": true, + "importPath": "torch_scatter", + "description": "torch_scatter", + "detail": "torch_scatter", + "documentation": {} + }, + { + "label": "Variable", + "importPath": "torch.autograd", + "description": "torch.autograd", + "isExtraImport": true, + "detail": "torch.autograd", + "documentation": {} + }, + { + "label": "ModelCheckpoint", + "importPath": "pytorch_lightning.callbacks", + "description": "pytorch_lightning.callbacks", + "isExtraImport": true, + "detail": "pytorch_lightning.callbacks", + "documentation": {} + }, + { + "label": "EarlyStopping", + "importPath": "pytorch_lightning.callbacks", + "description": "pytorch_lightning.callbacks", + "isExtraImport": true, + "detail": "pytorch_lightning.callbacks", + "documentation": {} + }, + { + "label": "ModelSummary", + "importPath": "pytorch_lightning.callbacks", + "description": "pytorch_lightning.callbacks", + "isExtraImport": true, + "detail": "pytorch_lightning.callbacks", + "documentation": {} + }, + { + "label": "cv2", + "kind": 6, + "isExtraImport": true, + "importPath": "cv2", + "description": "cv2", + "detail": "cv2", + "documentation": {} + }, + { + "label": "Config", + "importPath": "qssl.config", + "description": "qssl.config", + "isExtraImport": true, + "detail": "qssl.config", + "documentation": {} + }, + { + "label": "Config", + "importPath": "qssl.config", + "description": "qssl.config", + "isExtraImport": true, + "detail": "qssl.config", + "documentation": {} + }, + { + "label": "Config", + "importPath": "qssl.config", + "description": "qssl.config", + "isExtraImport": true, + "detail": "qssl.config", + "documentation": {} + }, + { + "label": "Config", + "importPath": "qssl.config", + "description": 
"qssl.config", + "isExtraImport": true, + "detail": "qssl.config", + "documentation": {} + }, + { + "label": "Losses", + "importPath": "qssl.loss.losses", + "description": "qssl.loss.losses", + "isExtraImport": true, + "detail": "qssl.loss.losses", + "documentation": {} + }, + { + "label": "DataLoader", + "importPath": "qssl.data.data_loader", + "description": "qssl.data.data_loader", + "isExtraImport": true, + "detail": "qssl.data.data_loader", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "importPath": "qssl.models.qcl", + "description": "qssl.models.qcl", + "isExtraImport": true, + "detail": "qssl.models.qcl", + "documentation": {} + }, + { + "label": "QuantumCNN", + "importPath": "qssl.models.qcl", + "description": "qssl.models.qcl", + "isExtraImport": true, + "detail": "qssl.models.qcl", + "documentation": {} + }, + { + "label": "SiameseNetwork", + "importPath": "qssl.models.qcl", + "description": "qssl.models.qcl", + "isExtraImport": true, + "detail": "qssl.models.qcl", + "documentation": {} + }, + { + "label": "Trainer", + "importPath": "qssl.training.train", + "description": "qssl.training.train", + "isExtraImport": true, + "detail": "qssl.training.train", + "documentation": {} + }, + { + "label": "ray", + "kind": 6, + "isExtraImport": true, + "importPath": "ray", + "description": "ray", + "detail": "ray", + "documentation": {} + }, + { + "label": "tune", + "importPath": "ray", + "description": "ray", + "isExtraImport": true, + "detail": "ray", + "documentation": {} + }, + { + "label": "air", + "importPath": "ray", + "description": "ray", + "isExtraImport": true, + "detail": "ray", + "documentation": {} + }, + { + "label": "make_swiss_roll_dataset", + "importPath": "quantum_transformers.qmlperfcomp.swiss_roll", + "description": "quantum_transformers.qmlperfcomp.swiss_roll", + "isExtraImport": true, + "detail": "quantum_transformers.qmlperfcomp.swiss_roll", + "documentation": {} + }, + { + "label": "make_swiss_roll_dataset", + "importPath": 
"quantum_transformers.qmlperfcomp.swiss_roll", + "description": "quantum_transformers.qmlperfcomp.swiss_roll", + "isExtraImport": true, + "detail": "quantum_transformers.qmlperfcomp.swiss_roll", + "documentation": {} + }, + { + "label": "flax.training.train_state", + "kind": 6, + "isExtraImport": true, + "importPath": "flax.training.train_state", + "description": "flax.training.train_state", + "detail": "flax.training.train_state", + "documentation": {} + }, + { + "label": "get_circuit", + "importPath": "quantum_transformers.qmlperfcomp.tc_common", + "description": "quantum_transformers.qmlperfcomp.tc_common", + "isExtraImport": true, + "detail": "quantum_transformers.qmlperfcomp.tc_common", + "documentation": {} + }, + { + "label": "tarfile", + "kind": 6, + "isExtraImport": true, + "importPath": "tarfile", + "description": "tarfile", + "detail": "tarfile", + "documentation": {} + }, + { + "label": "gdown", + "kind": 6, + "isExtraImport": true, + "importPath": "gdown", + "description": "gdown", + "detail": "gdown", + "documentation": {} + }, + { + "label": "numpy.typing", + "kind": 6, + "isExtraImport": true, + "importPath": "numpy.typing", + "description": "numpy.typing", + "detail": "numpy.typing", + "documentation": {} + }, + { + "label": "QuantumLayer", + "importPath": "quantum_transformers.quantum_layer", + "description": "quantum_transformers.quantum_layer", + "isExtraImport": true, + "detail": "quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "accuracy_test", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "peekOfCode": "def accuracy_test(predictions, labels, cost_fn, binary = True):\n if cost_fn == 'mse':\n if binary == True:\n acc = 0\n for l, p in zip(labels, predictions):\n if np.abs(l - p) < 1:\n acc = acc + 1\n return acc / len(labels)\n else:\n acc = 0", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "round_predictions_f", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "peekOfCode": "def round_predictions_f(predictions, cost_fn):\n round_predictions = []\n if cost_fn == \"mse\": #consider binary = True\n for p in predictions:\n if p<0:\n round_predictions.append(-1)\n elif p>0:\n round_predictions.append(1) \n elif cost_fn == \"cross_entropy\": #consider binary = False\n for p in predictions:", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "Encoding_to_Embedding", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "peekOfCode": "def Encoding_to_Embedding(Encoding):\n if Encoding == 'img16x16x1':\n Embedding = \"Equivariant-Amplitude\"\n elif Encoding == 'resize256':\n Embedding = 'Amplitude'\n elif Encoding == 'pca8':\n Embedding = 'Angle'\n return Embedding\ndef Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit, cost_fn, binary=True):\n I = len(Unitaries)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "Benchmarking", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "peekOfCode": "def Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit, cost_fn, binary=True):\n I = len(Unitaries)\n J = len(Encodings)\n for i in range(I):\n for j in range(J):\n f = 
open('QCNN/Result/result.txt', 'a')\n U = Unitaries[i]\n U_params = U_num_params[i]\n Encoding = Encodings[j]\n Embedding = Encoding_to_Embedding(Encoding)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.Benchmarking", + "documentation": {} + }, + { + "label": "get_n_params", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "def get_n_params(model):\n np=0\n for p in list(model.parameters()):\n np += p.nelement()\n return np\ndef accuracy_test(predictions, labels):\n acc = 0\n for (p,l) in zip(predictions, labels):\n if p[0] >= p[1]:\n pred = 0", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "accuracy_test", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "def accuracy_test(predictions, labels):\n acc = 0\n for (p,l) in zip(predictions, labels):\n if p[0] >= p[1]:\n pred = 0\n else:\n pred = 1\n if pred == l:\n acc = acc + 1\n acc = acc / len(labels)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "Benchmarking_CNN", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "def Benchmarking_CNN(dataset, classes, Encodings, Encodings_size, binary, optimizer):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n input_size = Encodings_size[i]\n final_layer_size = int(input_size / 4)\n X_train, X_test, Y_train, Y_test = data.data_load_and_process(dataset, classes=classes, feature_reduction=Encoding, binary=binary)\n CNN = 
nn.Sequential(\n nn.Conv1d(in_channels=1, out_channels=n_feature, kernel_size=2, padding=1),\n nn.ReLU(),\n nn.MaxPool1d(kernel_size=2),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "steps", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "steps = 200\nn_feature = 2\nbatch_size = 25\ndef Benchmarking_CNN(dataset, classes, Encodings, Encodings_size, binary, optimizer):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n input_size = Encodings_size[i]\n final_layer_size = int(input_size / 4)\n X_train, X_test, Y_train, Y_test = data.data_load_and_process(dataset, classes=classes, feature_reduction=Encoding, binary=binary)\n CNN = nn.Sequential(", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "n_feature", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "n_feature = 2\nbatch_size = 25\ndef Benchmarking_CNN(dataset, classes, Encodings, Encodings_size, binary, optimizer):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n input_size = Encodings_size[i]\n final_layer_size = int(input_size / 4)\n X_train, X_test, Y_train, Y_test = data.data_load_and_process(dataset, classes=classes, feature_reduction=Encoding, binary=binary)\n CNN = nn.Sequential(\n nn.Conv1d(in_channels=1, out_channels=n_feature, kernel_size=2, padding=1),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "batch_size", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "batch_size = 25\ndef Benchmarking_CNN(dataset, classes, Encodings, Encodings_size, binary, optimizer):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n input_size = Encodings_size[i]\n final_layer_size = int(input_size / 4)\n X_train, X_test, Y_train, Y_test = data.data_load_and_process(dataset, classes=classes, feature_reduction=Encoding, binary=binary)\n CNN = nn.Sequential(\n nn.Conv1d(in_channels=1, out_channels=n_feature, kernel_size=2, padding=1),\n nn.ReLU(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "steps", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "steps = 200\ndataset = 'fashion_mnist'\nclasses = [0,1]\nbinary = False\nEncodings = ['pca8']\nEncodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "dataset", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "dataset = 'fashion_mnist'\nclasses = [0,1]\nbinary = False\nEncodings = ['pca8']\nEncodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, 
Encodings_size=Encodings_size,\n # binary=binary, optimizer='nesterov')", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "classes", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "classes = [0,1]\nbinary = False\nEncodings = ['pca8']\nEncodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n # binary=binary, optimizer='nesterov')", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "binary", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "binary = False\nEncodings = ['pca8']\nEncodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n # binary=binary, optimizer='nesterov')", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "Encodings", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "Encodings = ['pca8']\nEncodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, 
Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n # binary=binary, optimizer='nesterov')", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "Encodings_size", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "peekOfCode": "Encodings_size = [8, 8]\nfor i in range(5):\n Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n binary=binary, optimizer='adam')\n #Benchmarking_CNN(dataset=dataset, classes=classes, Encodings=Encodings, Encodings_size=Encodings_size,\n # binary=binary, optimizer='nesterov')", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.CNN", + "documentation": {} + }, + { + "label": "Unitaries", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "Unitaries = [\"U2_equiv\", \"U_5\", \"U_6\"]#,\"U_9\", \"U_15\", \"U_5\"] \nU_num_params = [6,10,10]\nEncodings = [\"img16x16x1\"]\ndataset = 'quark_gluon'\nclasses = [0,1]\nbinary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "U_num_params", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "U_num_params = [6,10,10]\nEncodings = [\"img16x16x1\"]\ndataset = 
'quark_gluon'\nclasses = [0,1]\nbinary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "Encodings", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "Encodings = [\"img16x16x1\"]\ndataset = 'quark_gluon'\nclasses = [0,1]\nbinary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "dataset", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "dataset = 'quark_gluon'\nclasses = [0,1]\nbinary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "classes", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "classes = [0,1]\nbinary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "binary", + 
"kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "binary = True\ncost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "cost_fn", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "peekOfCode": "cost_fn = 'mse'\nBenchmarking.Benchmarking(dataset, classes, Unitaries, U_num_params, Encodings, circuit='QCNN', cost_fn=cost_fn, binary=binary)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.benchmarking.result", + "documentation": {} + }, + { + "label": "data_load_and_process", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.data", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.data", + "peekOfCode": "def data_load_and_process(dataset, classes=[0, 1], feature_reduction= \"img16x16x1\", binary=True):\n if dataset == 'fashion_mnist':\n (x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()\n x_train, x_test = x_train[..., np.newaxis] / 255.0, x_test[..., np.newaxis] / 255.0 # normalize the data\n elif dataset == 'mnist':\n (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()\n x_train, x_test = x_train[..., np.newaxis] / 255.0, x_test[..., np.newaxis] / 255.0 # normalize the data\n elif dataset == \"quark_gluon\":\n QG_path = \"/home/lazaror/quantum/pruebas/EQCNN_local_testing/EQNN_for_HEP/Equivariant_QCNN/data/Q-G_resize/QG-bilinear-ECAL-(16, 16, 1).h5py\" ## quark_gluon-16x16-MMS.h5 solo tiene una clase\n with h5py.File(QG_path, 
\"r\") as file:", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.data", + "documentation": {} + }, + { + "label": "load_hybrid_data", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.hybrid_data", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.hybrid_data", + "peekOfCode": "def load_hybrid_data(dataset, train_size, test_size, classes = [0,1]):\n transform = transforms.Compose([\n transforms.Resize((16, 16)), \n transforms.ToTensor() \n ])\n if dataset == \"mnist\":\n fashion_trainset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n fashion_testset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n train_indices = [i for i, (x, y) in enumerate(fashion_trainset) if y in classes]\n test_indices = [i for i, (x, y) in enumerate(fashion_testset) if y in classes]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.hybrid_data", + "documentation": {} + }, + { + "label": "dataset", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.prueba", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.prueba", + "peekOfCode": "dataset = \"electron_photon\"\nX_train, X_test, y_train, y_test = data_load_and_process(dataset, classes=[0,1], feature_reduction= \"img16x16x1\", binary=True)\nplt.imshow(X_train[0])\nplt.show()\nprint(y_train[0])\nprint(np.unique(y_train, return_counts = True))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.data.prueba", + "documentation": {} + }, + { + "label": "quark_gluon", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "peekOfCode": "def quark_gluon(): \n file_path = 
\"/home/lazaror/quantum/pruebas/Takhur_QCNN/Equivariant_QCNN/hep_data/QG_16x16x1_dataset_50k\"\n with h5py.File(file_path, \"r\") as file:\n X = np.array(file[\"X\"])\n y = np.array(file[\"y\"])\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n X_train = tf.convert_to_tensor(X_train, dtype=tf.float32)\n X_test = tf.convert_to_tensor(X_test, dtype=tf.float32)\n Y_train = tf.convert_to_tensor(y_train, dtype=tf.float32)\n Y_test = tf.convert_to_tensor(y_test, dtype=tf.float32)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "documentation": {} + }, + { + "label": "electron_photon", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "peekOfCode": "def electron_photon():\n file_path_electron = \"/home/lazaror/quantum/pruebas/Takhur_QCNN/Equivariant_QCNN/hep_data/electron.hdf5\"\n with h5py.File(file_path_electron, \"r\") as file:\n X_e = np.array(file[\"X\"])\n y_e = np.array(file[\"y\"])\n file_path_photon = \"/home/lazaror/quantum/pruebas/Takhur_QCNN/Equivariant_QCNN/hep_data/photon.hdf5\"\n with h5py.File(file_path_photon, \"r\") as file:\n X_p = np.array(file[\"X\"])\n y_p = np.array(file[\"y\"])\n X = np.concatenate((X_e, X_p), axis=0)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.hep_processing.hep_dataset_processing", + "documentation": {} + }, + { + "label": "equivariant_amplitude_encoding", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "peekOfCode": "def equivariant_amplitude_encoding(img: np.ndarray) -> None:\n # n = 8\n wires=range(8)\n n = len(wires) // 2\n # If the image is single-channel, reshape it to 2D\n if 
img.shape[2] == 1:\n img = img.reshape(img.shape[0], img.shape[1])\n # Initialize the feature vector with zeros\n features = np.zeros(2 ** (2*n)) # 2^(2*4) = 2^8 = 256 = 16x16\n # for each pixel in the image, we asign a value using the sine function with the", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "documentation": {} + }, + { + "label": "data_embedding", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "peekOfCode": "def data_embedding(X, embedding_type='Amplitude'):\n \"\"\"\n Embeds the input data X using various embedding types.\n Parameters:\n X (numpy array): An array of dimension (16,16,1).\n embedding_type (str): The type of embedding to use. Options are:\n - \"Amplitude\"\n - \"Equivariant-Amplitude\"\n - \"Angle\"\n - \"Angle-compact\"", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.embedding", + "documentation": {} + }, + { + "label": "U2_equiv", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U2_equiv(params, wires): # 6 params\n qml.RX(params[0], wires=wires[0]) \n qml.RX(params[1], wires=wires[1]) \n qml.IsingZZ(params[2], wires=wires) \n qml.RX(params[3], wires=wires[0]) \n qml.RX(params[4], wires=wires[1]) \n qml.IsingYY(params[5], wires=wires) \ndef U4_equiv(params, wires): # 3 params\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U4_equiv", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U4_equiv(params, wires): # 3 params\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], wires=wires[1])\n qml.RX(params[0], wires=wires[2])\n qml.RX(params[1], wires=wires[3])\n # Define the Z matrix\n Z = np.array([[1, 0], [0, -1]])\n # Create a Kronecker product of Z matrices for four qubits\n Z4 = np.kron(Z, np.kron(Z, np.kron(Z, Z)))\n # Create a diagonal matrix with phase shifts", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "Pooling_ansatz_equiv", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def Pooling_ansatz_equiv(phi, wires):\n qml.RX(phi[0], wires=wires[1])\n qml.RX(phi[1], wires=wires[0])\n qml.RY(phi[2], wires=wires[0])\n qml.RZ(phi[3], wires=wires[0])\n qml.CRX(phi[4], wires=[wires[0], wires[1]])\n#----- No-Equivariant gates \ndef U_TTN(params, wires): # 2 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_TTN", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_TTN(params, wires): # 2 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CNOT(wires=[wires[0], wires[1]])\ndef U_5(params, wires): # 10 params\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], wires=wires[1])\n qml.RZ(params[2], wires=wires[0])\n qml.RZ(params[3], wires=wires[1])\n qml.CRZ(params[4], wires=[wires[1], wires[0]])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_5", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_5(params, wires): # 10 params\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], wires=wires[1])\n qml.RZ(params[2], wires=wires[0])\n qml.RZ(params[3], wires=wires[1])\n qml.CRZ(params[4], wires=[wires[1], wires[0]])\n qml.CRZ(params[5], wires=[wires[0], wires[1]])\n qml.RX(params[6], wires=wires[0])\n qml.RX(params[7], wires=wires[1])\n qml.RZ(params[8], wires=wires[0])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_6", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_6(params, wires): # 10 params\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], wires=wires[1])\n qml.RZ(params[2], wires=wires[0])\n qml.RZ(params[3], wires=wires[1])\n qml.CRX(params[4], wires=[wires[1], wires[0]])\n qml.CRX(params[5], wires=[wires[0], wires[1]])\n qml.RX(params[6], wires=wires[0])\n qml.RX(params[7], wires=wires[1])\n qml.RZ(params[8], wires=wires[0])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_9", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_9(params, wires): # 2 params\n qml.Hadamard(wires=wires[0])\n qml.Hadamard(wires=wires[1])\n qml.CZ(wires=[wires[0], wires[1]])\n qml.RX(params[0], wires=wires[0])\n qml.RX(params[1], 
wires=wires[1])\ndef U_13(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CRZ(params[2], wires=[wires[1], wires[0]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_13", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_13(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CRZ(params[2], wires=[wires[1], wires[0]])\n qml.RY(params[3], wires=wires[0])\n qml.RY(params[4], wires=wires[1])\n qml.CRZ(params[5], wires=[wires[0], wires[1]])\ndef U_14(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_14", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_14(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CRX(params[2], wires=[wires[1], wires[0]])\n qml.RY(params[3], wires=wires[0])\n qml.RY(params[4], wires=wires[1])\n qml.CRX(params[5], wires=[wires[0], wires[1]])\ndef U_15(params, wires): # 4 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_15", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def 
U_15(params, wires): # 4 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CNOT(wires=[wires[1], wires[0]])\n qml.RY(params[2], wires=wires[0])\n qml.RY(params[3], wires=wires[1])\n qml.CNOT(wires=[wires[0], wires[1]])\ndef U_SO4(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_SO4", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_SO4(params, wires): # 6 params\n qml.RY(params[0], wires=wires[0])\n qml.RY(params[1], wires=wires[1])\n qml.CNOT(wires=[wires[0], wires[1]])\n qml.RY(params[2], wires=wires[0])\n qml.RY(params[3], wires=wires[1])\n qml.CNOT(wires=[wires[0], wires[1]])\n qml.RY(params[4], wires=wires[0])\n qml.RY(params[5], wires=wires[1])\ndef U_SU4(params, wires): # 15 params", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U_SU4", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def U_SU4(params, wires): # 15 params\n qml.U3(params[0], params[1], params[2], wires=wires[0])\n qml.U3(params[3], params[4], params[5], wires=wires[1])\n qml.CNOT(wires=[wires[0], wires[1]])\n qml.RY(params[6], wires=wires[0])\n qml.RZ(params[7], wires=wires[1])\n qml.CNOT(wires=[wires[1], wires[0]])\n qml.RY(params[8], wires=wires[0])\n qml.CNOT(wires=[wires[0], wires[1]])\n qml.U3(params[9], params[10], params[11], wires=wires[0])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + 
"label": "Pooling_ansatz1", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def Pooling_ansatz1(params, wires): #2 params\n qml.CRZ(params[0], wires=[wires[0], wires[1]])\n qml.PauliX(wires=wires[0])\n qml.CRX(params[1], wires=[wires[0], wires[1]])\ndef Pooling_ansatz2(wires): #0 params\n qml.CRZ(wires=[wires[0], wires[1]])\ndef Pooling_ansatz3(*params, wires): #3 params\n qml.CRot(*params, wires=[wires[0], wires[1]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "Pooling_ansatz2", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def Pooling_ansatz2(wires): #0 params\n qml.CRZ(wires=[wires[0], wires[1]])\ndef Pooling_ansatz3(*params, wires): #3 params\n qml.CRot(*params, wires=[wires[0], wires[1]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "Pooling_ansatz3", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "peekOfCode": "def Pooling_ansatz3(*params, wires): #3 params\n qml.CRot(*params, wires=[wires[0], wires[1]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.utils.unitary", + "documentation": {} + }, + { + "label": "U2", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "peekOfCode": "def U2(phi, wires):\n qml.RX(phi[0], wires=wires[0])\n qml.RX(phi[1], 
wires=wires[1])\n qml.IsingZZ(phi[2], wires=wires)\n qml.RX(phi[3], wires=wires[0])\n qml.RX(phi[4], wires=wires[1])\n qml.IsingYY(phi[5], wires=wires)\ndef Pooling_ansatz(phi, wires):\n qml.RX(phi[0], wires=wires[0])\n qml.RX(phi[1], wires=wires[1])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "documentation": {} + }, + { + "label": "Pooling_ansatz", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "peekOfCode": "def Pooling_ansatz(phi, wires):\n qml.RX(phi[0], wires=wires[0])\n qml.RX(phi[1], wires=wires[1])\n qml.RY(phi[2], wires=wires[1])\n qml.RZ(phi[3], wires=wires[1])\n qml.CRX(phi[4], wires=[wires[1], wires[0]])\ndef qcnn_full(params, wires):\n # 14 params\n for i in range(int(len(wires) / 2)):\n U2(params[:6], [wires[2 * i], wires[2 * i + 1]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "documentation": {} + }, + { + "label": "qcnn_full", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "peekOfCode": "def qcnn_full(params, wires):\n # 14 params\n for i in range(int(len(wires) / 2)):\n U2(params[:6], [wires[2 * i], wires[2 * i + 1]])\n U2(params[:6], [wires[1], wires[2]])\n U2(params[:6], [wires[5], wires[6]])\n U2(params[:6], [wires[0], wires[3]])\n U2(params[:6], [wires[4], wires[7]])\n qml.Barrier()\n for i in range(int(len(wires) / 2)):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "documentation": {} + }, + { + "label": "hybrid_models", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + 
"peekOfCode": "def hybrid_models( model = 1, num_layers=4):\n num_qubits = 8\n dev = qml.device(\"default.qubit\", wires=num_qubits)\n if model == 0: ## equiv\n num_layers = num_layers\n @qml.qnode(dev)\n def hybrid_circuit(inputs, params):\n qml.AmplitudeEmbedding(inputs, wires=range(num_qubits), normalize=True)\n qcnn_full(params, [0,1,2,3,4,5,6,7])\n #for layer in range(num_layers):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "documentation": {} + }, + { + "label": "training_hybrid", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "peekOfCode": "def training_hybrid(X_train, y_train, epochs = 20, batch_size= 64, samples= 10000, model = 0, num_layers = 4, lr = 0.1):\n # one-hot encoding\n loss_history = []\n y_train_one_hot = F.one_hot(y_train[:samples].to(torch.int64), num_classes=2) # Convert y_train to an integer tensor\n X = X_train[:samples].reshape(samples, 16*16).float().requires_grad_(True)\n y_hot = y_train_one_hot.long()\n batches = samples // batch_size\n data_loader = torch.utils.data.DataLoader(\n list(zip(X, y_hot)), batch_size=batch_size, shuffle=True, drop_last=True\n )", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.hybrid_models", + "documentation": {} + }, + { + "label": "conv_layer_equiv_U2", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer_equiv_U2(U, params): # apply a layer of U2 to all the system\n for i in range(int(8 / 2)):\n U(params, [2 * i, 2 * i + 1])\n U(params, [1, 2])\n U(params, [5, 6])\n U(params, [0, 3])\n U(params, [4, 7])\ndef conv_layer_equiv_U2_pair(U, params):\n U(params, [0, 2])\n U(params, [4,6])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer_equiv_U2_pair", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer_equiv_U2_pair(U, params):\n U(params, [0, 2])\n U(params, [4,6])\ndef conv_layer_equiv_U2_single(U, params):\n U(params, [0, 4])\n#---------\ndef conv_layer_equiv_U4(U, params):\n U(params, [0,1,2,3])\n U(params, [4,5,6,7])\n U(params, [2,3,4,5])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer_equiv_U2_single", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer_equiv_U2_single(U, params):\n U(params, [0, 4])\n#---------\ndef conv_layer_equiv_U4(U, params):\n U(params, [0,1,2,3])\n U(params, [4,5,6,7])\n U(params, [2,3,4,5])\n U(params, [0, 1, 6, 7])\ndef conv_layer_equiv_U4_single(U,params):\n U(params, [0,2,4,6])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer_equiv_U4", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer_equiv_U4(U, params):\n U(params, [0,1,2,3])\n U(params, [4,5,6,7])\n U(params, [2,3,4,5])\n U(params, [0, 1, 6, 7])\ndef conv_layer_equiv_U4_single(U,params):\n U(params, [0,2,4,6])\n# Quantum Circuits for Convolutional layers\ndef conv_layer1(U, params):\n U(params, wires=[0, 7])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", 
+ "documentation": {} + }, + { + "label": "conv_layer_equiv_U4_single", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer_equiv_U4_single(U,params):\n U(params, [0,2,4,6])\n# Quantum Circuits for Convolutional layers\ndef conv_layer1(U, params):\n U(params, wires=[0, 7])\n for i in range(0, 8, 2):\n U(params, wires=[i, i + 1])\n for i in range(1, 7, 2):\n U(params, wires=[i, i + 1])\ndef conv_layer2(U, params):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer1", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer1(U, params):\n U(params, wires=[0, 7])\n for i in range(0, 8, 2):\n U(params, wires=[i, i + 1])\n for i in range(1, 7, 2):\n U(params, wires=[i, i + 1])\ndef conv_layer2(U, params):\n U(params, wires=[0, 6])\n U(params, wires=[0, 2])\n U(params, wires=[4, 6])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer2", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer2(U, params):\n U(params, wires=[0, 6])\n U(params, wires=[0, 2])\n U(params, wires=[4, 6])\n U(params, wires=[2, 4])\ndef conv_layer3(U, params):\n U(params, wires=[0,4])\n# Quantum Circuits for Pooling layers\ndef pooling_layer1(V, params):\n for i in range(0, 8, 2):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "conv_layer3", + 
"kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def conv_layer3(U, params):\n U(params, wires=[0,4])\n# Quantum Circuits for Pooling layers\ndef pooling_layer1(V, params):\n for i in range(0, 8, 2):\n V(params, wires=[i+1, i])\ndef pooling_layer2(V, params):\n V(params, wires=[2,0])\n V(params, wires=[6,4])\ndef pooling_layer3(V, params):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "pooling_layer1", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def pooling_layer1(V, params):\n for i in range(0, 8, 2):\n V(params, wires=[i+1, i])\ndef pooling_layer2(V, params):\n V(params, wires=[2,0])\n V(params, wires=[6,4])\ndef pooling_layer3(V, params):\n V(params, wires=[0,4])\n## ---- equiv\ndef p4m_QCNN_structure(U, params, U_params = 6, layers=3):", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "pooling_layer2", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def pooling_layer2(V, params):\n V(params, wires=[2,0])\n V(params, wires=[6,4])\ndef pooling_layer3(V, params):\n V(params, wires=[0,4])\n## ---- equiv\ndef p4m_QCNN_structure(U, params, U_params = 6, layers=3):\n param1 = params[0:U_params] #conv1 U2 6\n param2 = params[U_params: U_params+5] # pooling1 5 \n param3 = params[U_params+5: U_params+5+6] # conv2 pair U2 6 ", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + 
{ + "label": "pooling_layer3", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def pooling_layer3(V, params):\n V(params, wires=[0,4])\n## ---- equiv\ndef p4m_QCNN_structure(U, params, U_params = 6, layers=3):\n param1 = params[0:U_params] #conv1 U2 6\n param2 = params[U_params: U_params+5] # pooling1 5 \n param3 = params[U_params+5: U_params+5+6] # conv2 pair U2 6 \n param4 = params[5+6+ U_params: 5+6+ U_params + 5] #pooling2 5\n param5 = params[5+6+5+ U_params: 5+6+5+U_params + 6] # conv3 single U2 6\n param6 = params[5+6+5+6+ U_params: 5+6+5+6+ U_params + 5] #pooling3 5", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "p4m_QCNN_structure", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def p4m_QCNN_structure(U, params, U_params = 6, layers=3):\n param1 = params[0:U_params] #conv1 U2 6\n param2 = params[U_params: U_params+5] # pooling1 5 \n param3 = params[U_params+5: U_params+5+6] # conv2 pair U2 6 \n param4 = params[5+6+ U_params: 5+6+ U_params + 5] #pooling2 5\n param5 = params[5+6+5+ U_params: 5+6+5+U_params + 6] # conv3 single U2 6\n param6 = params[5+6+5+6+ U_params: 5+6+5+6+ U_params + 5] #pooling3 5\n # Pooling Ansatz1 is used by default\n #conv1_U2\n conv_layer_equiv_U2(unitary.U2_equiv, param1) # 6 params", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "reflection_QCNN_structure_without_pooling", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def reflection_QCNN_structure_without_pooling(U, params, U_params, layers):\n param_layers = [params[i * U_params:(i + 1) * U_params] for i in range(layers)]\n for i in range(layers):\n conv_layer_equiv_U2(U, param_layers[i])\n## ------ normal \ndef QCNN_structure(U, params, U_params, n_qubits, layers):\n param_layers = [params[i * U_params:(i + 1) * U_params] for i in range(layers)]\n pooling_params = params[U_params * layers: U_params * layers + layers * 2] \n for i in range(layers):\n conv_layer1(U, param_layers[i], n_qubits)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "QCNN_structure", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def QCNN_structure(U, params, U_params, n_qubits, layers):\n param_layers = [params[i * U_params:(i + 1) * U_params] for i in range(layers)]\n pooling_params = params[U_params * layers: U_params * layers + layers * 2] \n for i in range(layers):\n conv_layer1(U, param_layers[i], n_qubits)\n if i < len(pooling_params) // 2: \n pooling_layer1(unitary.Pooling_ansatz1, pooling_params[i * 2:(i + 1) * 2], n_qubits)\ndef QCNN_structure_without_pooling(U, params, U_params):\n param1 = params[0:U_params]\n param2 = params[U_params: 2 * U_params]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "QCNN_structure_without_pooling", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def QCNN_structure_without_pooling(U, params, U_params):\n param1 = params[0:U_params]\n param2 = 
params[U_params: 2 * U_params]\n param3 = params[2 * U_params: 3 * U_params]\n conv_layer1(U, param1)\n conv_layer2(U, param2)\n conv_layer3(U, param3)\ndef QCNN_1D_circuit(U, params, U_params):\n param1 = params[0: U_params]\n param2 = params[U_params: 2*U_params]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "QCNN_1D_circuit", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def QCNN_1D_circuit(U, params, U_params):\n param1 = params[0: U_params]\n param2 = params[U_params: 2*U_params]\n param3 = params[2*U_params: 3*U_params]\n for i in range(0, 8, 2):\n U(param1, wires=[i, i + 1])\n for i in range(1, 7, 2):\n U(param1, wires=[i, i + 1])\n U(param2, wires=[2,3])\n U(param2, wires=[4,5])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "QCNN", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "def QCNN(X, params, U, U_params, embedding_type='Equivariant-Amplitude', cost_fn='cross_entropy', layers = 3):\n # Data Embedding\n embedding.data_embedding(X, embedding_type=embedding_type)\n # Quantum Convolutional Neural Network\n if U == 'U_TTN':\n QCNN_structure(unitary.U_TTN, params, U_params, layers)\n elif U == \"U2_equiv\":\n reflection_QCNN_structure_without_pooling(unitary.U2_equiv, params, U_params, layers)\n elif U == \"U4_equiv\":\n p4m_QCNN_structure(unitary.U4_equiv, params, U_params, layers)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "peekOfCode": "dev = qml.device('default.qubit', wires = 8)\n@qml.qnode(dev)\ndef QCNN(X, params, U, U_params, embedding_type='Equivariant-Amplitude', cost_fn='cross_entropy', layers = 3):\n # Data Embedding\n embedding.data_embedding(X, embedding_type=embedding_type)\n # Quantum Convolutional Neural Network\n if U == 'U_TTN':\n QCNN_structure(unitary.U_TTN, params, U_params, layers)\n elif U == \"U2_equiv\":\n reflection_QCNN_structure_without_pooling(unitary.U2_equiv, params, U_params, layers)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.models.QCNN_circuit", + "documentation": {} + }, + { + "label": "pca8_MNIST_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca8_MNIST_L3 = np.array([0.5366430260047281, 0.9867612293144208, 0.9744680851063829, 0.9768321513002364, 0.9820330969267139, 0.9773049645390071, 0.9749408983451536, 0.9787234042553191, 0.984869976359338, 0.957919621749409 ])\nae8_MNIST_L3 = np.array([0.8297872340425532, 0.9352245862884161, 0.9635933806146572, 0.857210401891253, 0.5366430260047281, 0.9702127659574468, 0.9919621749408983, 0.9730496453900709, 0.9877068557919622, 0.9957446808510638])\npca16_MNIST_L3 = np.array([0.984869976359338, 0.9843971631205674, 0.5366430260047281, 0.9891252955082742, 0.9801418439716312, 0.9825059101654846, 0.8898345153664302, 0.9806146572104019, 0.9886524822695035, 0.9881796690307328])\nae16_MNIST_L3 = np.array([0.9252955082742317, 0.9796690307328605, 0.9371158392434988, 0.9593380614657211, 0.9877068557919622, 0.9314420803782506, 0.9744680851063829, 0.9602836879432625, 0.9730496453900709, 0.9191489361702128])\npca8_FASHION_L3 = np.array([0.86, 0.9225, 0.808, 0.932, 
0.8995, 0.915, 0.929, 0.899, 0.9265, 0.851])\nae8_FASHION_L3 = np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae8_MNIST_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae8_MNIST_L3 = np.array([0.8297872340425532, 0.9352245862884161, 0.9635933806146572, 0.857210401891253, 0.5366430260047281, 0.9702127659574468, 0.9919621749408983, 0.9730496453900709, 0.9877068557919622, 0.9957446808510638])\npca16_MNIST_L3 = np.array([0.984869976359338, 0.9843971631205674, 0.5366430260047281, 0.9891252955082742, 0.9801418439716312, 0.9825059101654846, 0.8898345153664302, 0.9806146572104019, 0.9886524822695035, 0.9881796690307328])\nae16_MNIST_L3 = np.array([0.9252955082742317, 0.9796690307328605, 0.9371158392434988, 0.9593380614657211, 0.9877068557919622, 0.9314420803782506, 0.9744680851063829, 0.9602836879432625, 0.9730496453900709, 0.9191489361702128])\npca8_FASHION_L3 = np.array([0.86, 0.9225, 0.808, 0.932, 0.8995, 0.915, 0.929, 0.899, 0.9265, 0.851])\nae8_FASHION_L3 = 
np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca16_MNIST_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca16_MNIST_L3 = np.array([0.984869976359338, 0.9843971631205674, 0.5366430260047281, 0.9891252955082742, 0.9801418439716312, 0.9825059101654846, 0.8898345153664302, 0.9806146572104019, 0.9886524822695035, 0.9881796690307328])\nae16_MNIST_L3 = np.array([0.9252955082742317, 0.9796690307328605, 0.9371158392434988, 0.9593380614657211, 0.9877068557919622, 0.9314420803782506, 0.9744680851063829, 0.9602836879432625, 0.9730496453900709, 0.9191489361702128])\npca8_FASHION_L3 = np.array([0.86, 0.9225, 0.808, 0.932, 0.8995, 0.915, 0.929, 0.899, 0.9265, 0.851])\nae8_FASHION_L3 = np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 
0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae16_MNIST_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae16_MNIST_L3 = np.array([0.9252955082742317, 0.9796690307328605, 0.9371158392434988, 0.9593380614657211, 0.9877068557919622, 0.9314420803782506, 0.9744680851063829, 0.9602836879432625, 0.9730496453900709, 0.9191489361702128])\npca8_FASHION_L3 = np.array([0.86, 0.9225, 0.808, 0.932, 0.8995, 0.915, 0.929, 0.899, 0.9265, 0.851])\nae8_FASHION_L3 = np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 
0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca8_FASHION_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca8_FASHION_L3 = np.array([0.86, 0.9225, 0.808, 0.932, 0.8995, 0.915, 0.929, 0.899, 0.9265, 0.851])\nae8_FASHION_L3 = np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = 
np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae8_FASHION_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae8_FASHION_L3 = np.array([0.941, 0.8605, 0.9395, 0.903, 0.8975, 0.9545, 0.933, 0.9515, 0.9355, 0.9225])\npca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 
0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca16_FASHION_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca16_FASHION_L3 = np.array([0.9235, 0.9395, 0.918, 0.913, 0.898, 0.884, 0.946, 0.901, 0.947, 0.9155])\nae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 
0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae16_FASHION_L3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae16_FASHION_L3 = np.array([0.948, 0.8935, 0.9435, 0.914, 0.968, 0.931, 0.929, 0.9635, 0.9205, 0.9455])\npca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 
0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca8_MNIST_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca8_MNIST_L2 = np.array([0.9215130023640662, 0.8940898345153664, 0.9810874704491725, 0.9588652482269504, 0.9423167848699764, 0.9801418439716312, 0.9541371158392435, 0.5366430260047281, 0.9768321513002364, 0.9588652482269504])\nae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 
0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae8_MNIST_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae8_MNIST_L2 = np.array([0.885579196217494, 0.5366430260047281, 0.9560283687943263, 0.8141843971631205, 0.9026004728132387, 0.5366430260047281, 0.8964539007092198, 0.8539007092198582, 0.9777777777777777, 0.9130023640661938])\npca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 
input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca16_MNIST_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca16_MNIST_L2 = np.array([0.8676122931442081, 0.9815602836879432, 0.9853427895981087, 0.9877068557919622, 0.9687943262411347, 0.9843971631205674, 0.9829787234042553, 0.9886524822695035, 0.9730496453900709, 0.9815602836879432])\nae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + str(ae8_MNIST_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae16_MNIST_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": 
"ae16_MNIST_L2 = np.array([0.9583924349881797, 0.9479905437352246, 0.8912529550827423, 0.5366430260047281, 0.8964539007092198, 0.9030732860520094, 0.9030732860520094, 0.9673758865248226, 0.806146572104019, 0.5366430260047281])\npca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + str(ae8_MNIST_L3.std()))\nprint(\"PCA16(MNIST): \" + str(pca16_MNIST_L3.mean()) + \" +/- \" + str(pca16_MNIST_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca8_FASHION_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca8_FASHION_L2 = np.array([0.5, 0.803, 0.5, 0.856, 0.946, 0.9245, 0.906, 0.9395, 0.921, 0.925])\nae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + 
str(ae8_MNIST_L3.std()))\nprint(\"PCA16(MNIST): \" + str(pca16_MNIST_L3.mean()) + \" +/- \" + str(pca16_MNIST_L3.std()))\nprint(\"AE16(MNIST): \" + str(ae16_MNIST_L3.mean()) + \" +/- \" + str(ae16_MNIST_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae8_FASHION_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae8_FASHION_L2 = np.array([0.5, 0.9195, 0.936, 0.921, 0.815, 0.926, 0.9125, 0.944, 0.907, 0.8985])\npca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + str(ae8_MNIST_L3.std()))\nprint(\"PCA16(MNIST): \" + str(pca16_MNIST_L3.mean()) + \" +/- \" + str(pca16_MNIST_L3.std()))\nprint(\"AE16(MNIST): \" + str(ae16_MNIST_L3.mean()) + \" +/- \" + str(ae16_MNIST_L3.std()))\nprint(\"\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "pca16_FASHION_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "pca16_FASHION_L2 = np.array([0.9, 0.5, 0.8365, 0.9305, 0.5, 0.915, 0.9355, 0.927, 0.9395, 0.5])\nae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params 
(16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + str(ae8_MNIST_L3.std()))\nprint(\"PCA16(MNIST): \" + str(pca16_MNIST_L3.mean()) + \" +/- \" + str(pca16_MNIST_L3.std()))\nprint(\"AE16(MNIST): \" + str(ae16_MNIST_L3.mean()) + \" +/- \" + str(ae16_MNIST_L3.std()))\nprint(\"\\n\")\nprint(\"PCA8(FASHION): \" + str(pca8_FASHION_L3.mean()) + \" +/- \" + str(pca8_FASHION_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "ae16_FASHION_L2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "peekOfCode": "ae16_FASHION_L2 = np.array([0.5, 0.5, 0.905, 0.93, 0.9, 0.9375, 0.89, 0.5, 0.9195, 0.9145])\nprint(\"CNN Result\")\nprint(\"3 Layers - 44 params(8 input) / 56 params (16 input) \")\nprint(\"PCA8(MNIST): \" + str(pca8_MNIST_L3.mean()) + \" +/- \" + str(pca8_MNIST_L3.std()))\nprint(\"AE8(MNIST): \" + str(ae8_MNIST_L3.mean()) + \" +/- \" + str(ae8_MNIST_L3.std()))\nprint(\"PCA16(MNIST): \" + str(pca16_MNIST_L3.mean()) + \" +/- \" + str(pca16_MNIST_L3.std()))\nprint(\"AE16(MNIST): \" + str(ae16_MNIST_L3.mean()) + \" +/- \" + str(ae16_MNIST_L3.std()))\nprint(\"\\n\")\nprint(\"PCA8(FASHION): \" + str(pca8_FASHION_L3.mean()) + \" +/- \" + str(pca8_FASHION_L3.std()))\nprint(\"AE8(FASHION): \" + str(ae8_FASHION_L3.mean()) + \" +/- \" + str(ae8_FASHION_L3.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.CNN_result", + "documentation": {} + }, + { + "label": "plot_loss_history", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "def plot_loss_history(Encodings, datasets, params):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n if params == 'large':\n loss_history_CNN = loss_histories_CNN_MNIST_3L[i][::n]\n loss_history_QCNN = loss_histories_QCNN_MNIST_SU4[i][::n]\n elif params == 'small':\n loss_history_CNN = loss_histories_CNN_MNIST_2L[i][::n]\n loss_history_QCNN = loss_histories_QCNN_MNIST_SO4[i][::n]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_MNIST_3L", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_CNN_MNIST_3L = np.array([[0.6950972676277161, 0.624514102935791, 0.583101212978363, 0.6993337869644165, 0.6476080417633057, 0.6364650130271912, 0.6012523174285889, 0.6161369681358337, 0.6181449890136719, 0.6342973113059998, 0.5804245471954346, 0.5431748032569885, 0.5727656483650208, 0.549979031085968, 0.5044635534286499, 0.512463390827179, 0.5102589726448059, 0.5334477424621582, 0.45759502053260803, 0.45609891414642334, 0.4384606182575226, 0.5343804955482483, 0.41797107458114624, 0.4342503845691681, 0.3890399634838104, 0.41044917702674866, 0.35132548213005066, 0.3810461759567261, 0.3425566554069519, 0.3171461820602417, 0.2762487828731537, 0.29926612973213196, 0.27376917004585266, 0.28830668330192566, 0.24913270771503448, 0.19938430190086365, 0.2352171540260315, 0.2669123411178589, 0.20059126615524292, 0.22783060371875763, 0.26497378945350647, 0.23277893662452698, 0.23508895933628082, 0.17620548605918884, 0.140126571059227, 0.17897853255271912, 0.17362214624881744, 0.10912153124809265, 0.20787329971790314, 0.09750537574291229, 0.24669703841209412, 
0.09363225847482681, 0.08267596364021301, 0.10493816435337067, 0.19090412557125092, 0.09467125684022903, 0.09239176660776138, 0.05489746481180191, 0.08461172133684158, 0.19685931503772736, 0.18854761123657227, 0.05868925154209137, 0.05014041066169739, 0.15637528896331787, 0.06508491933345795, 0.06407226622104645, 0.035801321268081665, 0.16898909211158752, 0.24060499668121338, 0.2960735559463501, 0.11876606196165085, 0.11272123456001282, 0.2833302617073059, 0.17450375854969025, 0.11714029312133789, 0.066585473716259, 0.357699990272522, 0.053224291652441025, 0.12500178813934326, 0.07176703214645386, 0.06332272291183472, 0.049026332795619965, 0.06790094077587128, 0.07368071377277374, 0.08223270624876022, 0.017215171828866005, 0.12109950929880142, 0.014817332848906517, 0.014553045853972435, 0.13761082291603088, 0.020390180870890617, 0.031047077849507332, 0.2506447732448578, 0.1129269227385521, 0.06206779554486275, 0.2958768308162689, 0.024755340069532394, 0.026094689965248108, 0.10279291123151779, 0.013493400067090988, 0.018021859228610992, 0.25203627347946167, 0.056582916527986526, 0.03349141776561737, 0.1758762001991272, 0.12884046137332916, 0.23406201601028442, 0.04947153478860855, 0.08455410599708557, 0.2010369449853897, 0.07859295606613159, 0.05398073047399521, 0.018245480954647064, 0.0462871678173542, 0.04117013141512871, 0.07244301587343216, 0.012532993219792843, 0.022867579013109207, 0.21418723464012146, 0.04215965420007706, 0.017704876139760017, 0.033693164587020874, 0.025307342410087585, 0.2049824297428131, 0.1619843691587448, 0.07375242561101913, 0.05191435664892197, 0.03824467211961746, 0.03036719746887684, 0.03482874482870102, 0.11714939028024673, 0.059163156896829605, 0.007508269976824522, 0.5880686044692993, 0.00375778297893703, 0.3006771504878998, 0.14734655618667603, 0.17704173922538757, 0.011697666719555855, 0.0760924369096756, 0.044274553656578064, 0.03366928920149803, 0.01301832590252161, 0.06542199850082397, 0.016044197604060173, 
0.2634228467941284, 0.0016423964407294989, 0.07154283672571182, 0.0497405044734478, 0.014087897725403309, 0.2702277898788452, 0.0058559393510222435, 0.001999075058847666, 0.20934492349624634, 0.027578163892030716, 0.0185542069375515, 0.009298637509346008, 0.2260131686925888, 0.01682950370013714, 0.049877066165208817, 0.1795051246881485, 0.06075964495539665, 0.1485597938299179, 0.005703788250684738, 0.014600261114537716, 0.27203476428985596, 0.003919422160834074, 0.004767566919326782, 0.09969690442085266, 0.010243923403322697, 0.1011170893907547, 0.07468743622303009, 0.03205803036689758, 0.008248867467045784, 0.00845771562308073, 0.2290336936712265, 0.004072263836860657, 0.09669051319360733, 0.08218424022197723, 0.13435952365398407, 0.02292836830019951, 0.030399208888411522, 0.012336748652160168, 0.010568873025476933, 0.08546885848045349, 0.012400435283780098, 0.16309873759746552, 0.017225008457899094, 0.07175344973802567, 0.02251896634697914, 0.020442284643650055, 0.0015783198177814484, 0.01836889237165451, 0.002289178315550089, 0.10199715942144394, 0.08479230850934982, 0.25068947672843933, 0.0004926148685626686, 0.17583408951759338, 0.0029860532376915216],\n [0.7300472855567932, 0.6867007613182068, 0.6846880316734314, 0.6840613484382629, 0.7379503846168518, 0.7141649723052979, 0.6768766641616821, 0.7168253064155579, 0.6865659356117249, 0.7158349752426147, 0.6955016255378723, 0.7160823345184326, 0.6909663677215576, 0.6839632987976074, 0.7168858051300049, 0.7112577557563782, 0.6876682043075562, 0.6938645243644714, 0.6912950277328491, 0.7029373645782471, 0.6912244558334351, 0.6886566877365112, 0.6692138910293579, 0.6587687730789185, 0.6587306261062622, 0.6906790137290955, 0.6571764945983887, 0.7349249124526978, 0.6769081354141235, 0.6851152181625366, 0.6883368492126465, 0.6931599378585815, 0.660807728767395, 0.6832686066627502, 0.663866400718689, 0.7190250158309937, 0.6782967448234558, 0.6647338271141052, 0.7190255522727966, 0.6869436502456665, 0.7135319709777832, 
0.6910357475280762, 0.6790058016777039, 0.6729632019996643, 0.6573574542999268, 0.6717900037765503, 0.6499390602111816, 0.6566978693008423, 0.6533600091934204, 0.6694302558898926, 0.7413253784179688, 0.6782134175300598, 0.651166558265686, 0.6957483887672424, 0.6816896200180054, 0.6836461424827576, 0.65700364112854, 0.6469296813011169, 0.6859912872314453, 0.6694807410240173, 0.6653313636779785, 0.6578412652015686, 0.6503434181213379, 0.6439347863197327, 0.6405288577079773, 0.6545844078063965, 0.6400285363197327, 0.6392137408256531, 0.662876307964325, 0.6274128556251526, 0.611276388168335, 0.6296300888061523, 0.6971974968910217, 0.6152403354644775, 0.6399407386779785, 0.6099597811698914, 0.614037275314331, 0.6306928396224976, 0.6233912110328674, 0.6071301102638245, 0.5422117710113525, 0.6034693121910095, 0.5755348801612854, 0.6107156276702881, 0.5766172409057617, 0.6092644333839417, 0.5880871415138245, 0.6234793663024902, 0.5463911890983582, 0.5608245134353638, 0.5888428092002869, 0.5628297924995422, 0.6291128396987915, 0.5931711792945862, 0.5677962303161621, 0.6117801070213318, 0.5791870355606079, 0.5750445127487183, 0.5487401485443115, 0.5867695212364197, 0.6092665791511536, 0.5819268226623535, 0.5454160571098328, 0.5633161067962646, 0.5455551743507385, 0.5346310138702393, 0.5471535325050354, 0.5041360259056091, 0.5242226123809814, 0.5793042182922363, 0.5374759435653687, 0.538278341293335, 0.5183054208755493, 0.5400028824806213, 0.5124049186706543, 0.549633264541626, 0.4841218590736389, 0.46203166246414185, 0.5005708932876587, 0.48604172468185425, 0.49221688508987427, 0.4566079378128052, 0.46951553225517273, 0.43924057483673096, 0.5289251208305359, 0.4628008008003235, 0.41659680008888245, 0.43333157896995544, 0.43089747428894043, 0.4631856679916382, 0.4540521204471588, 0.5134605169296265, 0.4546918570995331, 0.3948281407356262, 0.4207550287246704, 0.41652747988700867, 0.41493555903434753, 0.37897631525993347, 0.35793769359588623, 0.38966673612594604, 
0.4038204550743103, 0.35448944568634033, 0.38130858540534973, 0.4311788082122803, 0.39160025119781494, 0.3783036470413208, 0.3963787853717804, 0.3914090692996979, 0.3896610140800476, 0.30679458379745483, 0.32349467277526855, 0.31617602705955505, 0.28154852986335754, 0.28761547803878784, 0.2818848788738251, 0.2809687852859497, 0.3206581175327301, 0.27352452278137207, 0.25297045707702637, 0.30204370617866516, 0.21382515132427216, 0.3405051827430725, 0.3692021071910858, 0.25636792182922363, 0.37404823303222656, 0.30002561211586, 0.31114882230758667, 0.33810797333717346, 0.40628308057785034, 0.26683157682418823, 0.3439508080482483, 0.27956536412239075, 0.29898345470428467, 0.1934766173362732, 0.44087517261505127, 0.31433001160621643, 0.2813761830329895, 0.19954250752925873, 0.27304938435554504, 0.2746534049510956, 0.2640751600265503, 0.3004697561264038, 0.2624701261520386, 0.31487491726875305, 0.2908383011817932, 0.17720551788806915, 0.26453515887260437, 0.2599099278450012, 0.1590421348810196, 0.21811221539974213, 0.21677884459495544, 0.28607749938964844, 0.19262443482875824, 0.35018521547317505, 0.20188915729522705, 0.19327974319458008, 0.19713938236236572, 0.24497658014297485, 0.219795823097229, 0.24567201733589172],\n [0.745194673538208, 0.6906776428222656, 0.687400758266449, 0.6717780828475952, 0.7604597210884094, 0.752467930316925, 0.6976206302642822, 0.704662561416626, 0.6916715502738953, 0.6934672594070435, 0.6470069289207458, 0.7148513793945312, 0.6763886213302612, 0.6802818775177002, 0.6672106385231018, 0.6484056711196899, 0.6637589931488037, 0.6583610773086548, 0.6557639837265015, 0.6428236365318298, 0.642822265625, 0.6243969202041626, 0.6294710040092468, 0.6075966358184814, 0.6001461744308472, 0.6056424379348755, 0.6350275278091431, 0.5835784673690796, 0.5997673273086548, 0.5480678081512451, 0.5856139659881592, 0.5785164833068848, 0.5884747505187988, 0.5700724124908447, 0.4928150177001953, 0.5337228775024414, 0.5427252054214478, 0.5247185230255127, 
0.5774044394493103, 0.5185093879699707, 0.5197265148162842, 0.4411318302154541, 0.5511063933372498, 0.5263891220092773, 0.45463821291923523, 0.5314087271690369, 0.47028473019599915, 0.4092979431152344, 0.5777102112770081, 0.5306834578514099, 0.42406684160232544, 0.4687281847000122, 0.5070981383323669, 0.47439032793045044, 0.37494319677352905, 0.4947170913219452, 0.4206700921058655, 0.4383554756641388, 0.409442663192749, 0.44664183259010315, 0.4921582043170929, 0.3810812830924988, 0.4118553102016449, 0.39941662549972534, 0.43318724632263184, 0.31750908493995667, 0.34022581577301025, 0.3589905798435211, 0.3815973997116089, 0.3389066755771637, 0.32979461550712585, 0.32433709502220154, 0.28116682171821594, 0.31867024302482605, 0.29788222908973694, 0.31168806552886963, 0.3142642080783844, 0.25582820177078247, 0.32176223397254944, 0.3123543858528137, 0.2679462432861328, 0.2546009123325348, 0.24331651628017426, 0.402679443359375, 0.21918657422065735, 0.23834611475467682, 0.36965858936309814, 0.24380004405975342, 0.22266092896461487, 0.28505799174308777, 0.33410510420799255, 0.2647447884082794, 0.23237332701683044, 0.22678805887699127, 0.2724705934524536, 0.22559556365013123, 0.200675368309021, 0.2558078169822693, 0.36495375633239746, 0.1662682741880417, 0.26781201362609863, 0.22160503268241882, 0.24419258534908295, 0.2391909956932068, 0.1725086271762848, 0.19450058043003082, 0.13128599524497986, 0.15883126854896545, 0.08883234858512878, 0.22445882856845856, 0.19227375090122223, 0.12227177619934082, 0.14730407297611237, 0.10880385339260101, 0.15898071229457855, 0.14434635639190674, 0.10622251778841019, 0.13339510560035706, 0.12552213668823242, 0.0874275416135788, 0.21024318039417267, 0.05612509697675705, 0.2596701383590698, 0.06474778801202774, 0.21115590631961823, 0.08705268055200577, 0.2965729236602783, 0.11845093965530396, 0.3113614022731781, 0.0852881520986557, 0.057083774358034134, 0.05873868614435196, 0.18759706616401672, 0.1588224172592163, 0.2532350420951843, 
0.06962744891643524, 0.07326000928878784, 0.07850103080272675, 0.07247595489025116, 0.05099700763821602, 0.09197735041379929, 0.0621706023812294, 0.13775122165679932, 0.03626300394535065, 0.09360489994287491, 0.08558189123868942, 0.0900314450263977, 0.06604598462581635, 0.05721459537744522, 0.12379155308008194, 0.07557165622711182, 0.18699273467063904, 0.04573093727231026, 0.04888857528567314, 0.07472683489322662, 0.1503097116947174, 0.13833612203598022, 0.036567263305187225, 0.09546559303998947, 0.17797045409679413, 0.20176424086093903, 0.06245369091629982, 0.20733888447284698, 0.07638655602931976, 0.22693926095962524, 0.09672175347805023, 0.18307197093963623, 0.2140817642211914, 0.024585099890828133, 0.047724127769470215, 0.03778181970119476, 0.035520292818546295, 0.06756170094013214, 0.025606723502278328, 0.0960211381316185, 0.055418919771909714, 0.08863727748394012, 0.11362741142511368, 0.09254298359155655, 0.3518190383911133, 0.0551731139421463, 0.12171444296836853, 0.12081704288721085, 0.08439555764198303, 0.027323825284838676, 0.10690995305776596, 0.1244480237364769, 0.0726374164223671, 0.06508301943540573, 0.17972348630428314, 0.10436990112066269, 0.09515547752380371, 0.09833931922912598, 0.1103367730975151, 0.09164026379585266, 0.03350993990898132, 0.05515069514513016, 0.1332990527153015, 0.027255253866314888, 0.05536457523703575],\n [0.7065368890762329, 0.7003437876701355, 0.7149812579154968, 0.702370822429657, 0.6912967562675476, 0.6774630546569824, 0.7349916100502014, 0.6901237368583679, 0.6894038915634155, 0.6863296031951904, 0.69279944896698, 0.6839656233787537, 0.6706349849700928, 0.6464716196060181, 0.7043266296386719, 0.6820791363716125, 0.6842585206031799, 0.6803882122039795, 0.6784244775772095, 0.665507972240448, 0.6820626854896545, 0.6624901294708252, 0.6764769554138184, 0.66874098777771, 0.6772385239601135, 0.6694998741149902, 0.6600099802017212, 0.6915762424468994, 0.6615751385688782, 0.6696990728378296, 0.6297839283943176, 0.6528367400169373, 
0.6536012887954712, 0.6452928781509399, 0.6481162309646606, 0.6480607390403748, 0.6370586156845093, 0.6382020711898804, 0.6140261888504028, 0.6135526299476624, 0.6092220544815063, 0.6079503297805786, 0.5887094736099243, 0.5322624444961548, 0.6122974753379822, 0.5561879277229309, 0.6315677762031555, 0.6449221968650818, 0.5963243246078491, 0.5868348479270935, 0.6084422469139099, 0.5791459083557129, 0.6085484027862549, 0.5580623745918274, 0.5514646768569946, 0.5585185885429382, 0.551949679851532, 0.5090385675430298, 0.48625242710113525, 0.5502084493637085, 0.4771082401275635, 0.4774615168571472, 0.48718807101249695, 0.4917224943637848, 0.4732765257358551, 0.48384198546409607, 0.43808743357658386, 0.4454682171344757, 0.4767683446407318, 0.482342004776001, 0.45924878120422363, 0.45319855213165283, 0.48693686723709106, 0.3850550949573517, 0.410541832447052, 0.4543793499469757, 0.366267591714859, 0.4305279552936554, 0.34036755561828613, 0.36081624031066895, 0.3392334282398224, 0.37473422288894653, 0.28155049681663513, 0.3425044119358063, 0.2705245912075043, 0.31728631258010864, 0.2448488026857376, 0.31782156229019165, 0.3272281587123871, 0.37958672642707825, 0.29934489727020264, 0.21500541269779205, 0.29144471883773804, 0.30118197202682495, 0.16127417981624603, 0.371753454208374, 0.21204784512519836, 0.2533915042877197, 0.18653224408626556, 0.20099136233329773, 0.15585556626319885, 0.15119512379169464, 0.2260332703590393, 0.2896896302700043, 0.1453980952501297, 0.20472170412540436, 0.16929101943969727, 0.20646162331104279, 0.14845998585224152, 0.10794796794652939, 0.09016146510839462, 0.12105012685060501, 0.23024918138980865, 0.14298059046268463, 0.1215953603386879, 0.29008549451828003, 0.13764692842960358, 0.1657395362854004, 0.15904945135116577, 0.09157120436429977, 0.13966745138168335, 0.16450193524360657, 0.09266634285449982, 0.16199813783168793, 0.22420541942119598, 0.1499921977519989, 0.08530532568693161, 0.20761361718177795, 0.07668470591306686, 0.2324078232049942, 
0.04722387343645096, 0.2246658354997635, 0.10235711932182312, 0.16499486565589905, 0.04430126026272774, 0.22800308465957642, 0.07830563187599182, 0.11839699000120163, 0.07917902618646622, 0.09214942902326584, 0.08970518410205841, 0.14069652557373047, 0.0409579798579216, 0.0552971251308918, 0.10124063491821289, 0.02533102221786976, 0.16206085681915283, 0.03471945971250534, 0.10241880267858505, 0.1285448670387268, 0.12653371691703796, 0.12206773459911346, 0.06682068109512329, 0.050219982862472534, 0.10264866054058075, 0.041177548468112946, 0.047057829797267914, 0.5175573825836182, 0.03898908570408821, 0.16199985146522522, 0.04879048466682434, 0.029836969450116158, 0.553887128829956, 0.06854133307933807, 0.021930286660790443, 0.02233067713677883, 0.02433081343770027, 0.02149748057126999, 0.017385564744472504, 0.006968638394027948, 0.3127046227455139, 0.007114108186215162, 0.0656125620007515, 0.35484519600868225, 0.013066131621599197, 0.056551698595285416, 0.07355687767267227, 0.02248852699995041, 0.07129108905792236, 0.029671985656023026, 0.06515096873044968, 0.02897089160978794, 0.15118078887462616, 0.22743676602840424, 0.14168672263622284, 0.02568655088543892, 0.031071646139025688, 0.01346743106842041, 0.004205648321658373, 0.004720211960375309, 0.15807323157787323, 0.3110294044017792, 0.2914554178714752, 0.014225986786186695, 0.047441449016332626, 0.051407698541879654, 0.0412781722843647, 0.01342132780700922, 0.012588735669851303, 0.06328532099723816]])\nloss_histories_CNN_MNIST_2L = np.array([[0.6388911008834839, 0.755074143409729, 0.6632554531097412, 0.6934544444084167, 0.6570810675621033, 0.6539508700370789, 0.6652110815048218, 0.6544034481048584, 0.6927835941314697, 0.6516890525817871, 0.6747910976409912, 0.6588358879089355, 0.6461827158927917, 0.6406604647636414, 0.6593841314315796, 0.6290771961212158, 0.635811448097229, 0.616572916507721, 0.6336753964424133, 0.6234488487243652, 0.5950428247451782, 0.5689221024513245, 0.5299611687660217, 0.5597831606864929, 
0.5785280466079712, 0.594134509563446, 0.5603616833686829, 0.5874844193458557, 0.5339792370796204, 0.5163654088973999, 0.5020135641098022, 0.5339027643203735, 0.4965783953666687, 0.504167377948761, 0.5495306253433228, 0.5618932247161865, 0.4844675362110138, 0.5362534523010254, 0.4832651913166046, 0.4696060121059418, 0.5025283694267273, 0.4544117748737335, 0.45164787769317627, 0.393768310546875, 0.38244518637657166, 0.3918498158454895, 0.3800329267978668, 0.3839554190635681, 0.4202963709831238, 0.3279581367969513, 0.42289531230926514, 0.39857998490333557, 0.35821080207824707, 0.3929472863674164, 0.44718289375305176, 0.30050015449523926, 0.29927077889442444, 0.3997846841812134, 0.28816962242126465, 0.27845433354377747, 0.34952616691589355, 0.21190403401851654, 0.25238102674484253, 0.25262564420700073, 0.235600084066391, 0.2105487436056137, 0.2774962782859802, 0.16170747578144073, 0.15625521540641785, 0.24559007585048676, 0.21703849732875824, 0.2002720832824707, 0.143121600151062, 0.21113519370555878, 0.13413314521312714, 0.094456747174263, 0.1285012662410736, 0.13243742287158966, 0.11418274790048599, 0.11925558745861053, 0.21439248323440552, 0.1880965232849121, 0.22684723138809204, 0.14851173758506775, 0.17117464542388916, 0.29501330852508545, 0.1405528038740158, 0.15265348553657532, 0.14476747810840607, 0.2107343226671219, 0.14534687995910645, 0.10932359844446182, 0.1359551101922989, 0.08792147040367126, 0.44539299607276917, 0.12740293145179749, 0.19267109036445618, 0.18098540604114532, 0.09354829043149948, 0.12443973869085312, 0.06745033711194992, 0.356609582901001, 0.21240657567977905, 0.1610700637102127, 0.417524516582489, 0.27998778223991394, 0.07074294239282608, 0.3500562906265259, 0.08549501746892929, 0.0939045324921608, 0.08015522360801697, 0.100171759724617, 0.11099651455879211, 0.13093963265419006, 0.1679827719926834, 0.24390658736228943, 0.04739665985107422, 0.0744795873761177, 0.07295193523168564, 0.2371142953634262, 0.059540607035160065, 
0.05386676639318466, 0.12376302480697632, 0.0402214378118515, 0.16778266429901123, 0.049860335886478424, 0.23358245193958282, 0.03220764175057411, 0.03889647498726845, 0.10671715438365936, 0.16780143976211548, 0.09492737799882889, 0.10106085985898972, 0.04440590739250183, 0.020353030413389206, 0.04254315048456192, 0.18652085959911346, 0.05661969631910324, 0.050825752317905426, 0.040961287915706635, 0.1736149936914444, 0.10325713455677032, 0.4208120107650757, 0.23678357899188995, 0.05452421307563782, 0.06970299035310745, 0.07004570215940475, 0.010129554197192192, 0.04974444955587387, 0.022693078964948654, 0.0604703389108181, 0.04160081222653389, 0.14491456747055054, 0.015511690638959408, 0.0703435093164444, 0.21803060173988342, 0.11409696936607361, 0.24460017681121826, 0.11236660927534103, 0.024127209559082985, 0.03911502659320831, 0.023577801883220673, 0.015934821218252182, 0.03514527902007103, 0.029105860739946365, 0.042216356843709946, 0.4885580539703369, 0.2975511848926544, 0.030093304812908173, 0.0473562590777874, 0.04369819164276123, 0.013611183501780033, 0.11871989071369171, 0.18853046000003815, 0.018563533201813698, 0.15615783631801605, 0.07639063894748688, 0.02535773068666458, 0.01811952143907547, 0.048622678965330124, 0.40922871232032776, 0.05182328820228577, 0.0855875015258789, 0.007202706299722195, 0.005893732886761427, 0.07512831687927246, 0.04234791919589043, 0.034827884286642075, 0.28690654039382935, 0.2144981175661087, 0.019982123747467995, 0.14100973308086395, 0.025057468563318253, 0.04754437133669853, 0.01411377266049385, 0.01860228180885315, 0.02221028506755829, 0.0223314817994833, 0.12828055024147034, 0.007281612139195204],\n [0.7150665521621704, 0.6849260926246643, 0.6839231848716736, 0.6914247274398804, 0.7217769026756287, 0.686398983001709, 0.6719824075698853, 0.668079674243927, 0.6932412981987, 0.6920555830001831, 0.6890246868133545, 0.6718630194664001, 0.6747984886169434, 0.6807641386985779, 0.6621140837669373, 0.670916736125946, 
0.659507691860199, 0.643657386302948, 0.666704535484314, 0.6662449836730957, 0.6511495113372803, 0.6499025225639343, 0.6435579657554626, 0.6392068266868591, 0.621609628200531, 0.6199886798858643, 0.6364652514457703, 0.6426952481269836, 0.607877254486084, 0.6169797778129578, 0.6301181316375732, 0.609940767288208, 0.6170454621315002, 0.6107946634292603, 0.6118714809417725, 0.567970871925354, 0.601747989654541, 0.5884506106376648, 0.5826886892318726, 0.5579785704612732, 0.5351777076721191, 0.5977340936660767, 0.5800117254257202, 0.578112006187439, 0.5632650256156921, 0.5356637835502625, 0.566638708114624, 0.5227785706520081, 0.5338039994239807, 0.5314185619354248, 0.500601589679718, 0.47139179706573486, 0.532673716545105, 0.46814391016960144, 0.46852052211761475, 0.47733187675476074, 0.5256465673446655, 0.46834608912467957, 0.4960532486438751, 0.47720324993133545, 0.4778778851032257, 0.47898757457733154, 0.4454004764556885, 0.44470423460006714, 0.3844307065010071, 0.4479762017726898, 0.42458829283714294, 0.48051315546035767, 0.4186549484729767, 0.4721565544605255, 0.44315823912620544, 0.459507554769516, 0.4486005902290344, 0.3585163354873657, 0.4180649518966675, 0.36260655522346497, 0.428268164396286, 0.4573773145675659, 0.39308762550354004, 0.32516413927078247, 0.47218453884124756, 0.4175204932689667, 0.3852556645870209, 0.38668152689933777, 0.3957267701625824, 0.3171007037162781, 0.3545765280723572, 0.33774837851524353, 0.4291412830352783, 0.2971607446670532, 0.3743261694908142, 0.3171898424625397, 0.3795458674430847, 0.35343658924102783, 0.37818291783332825, 0.3594251275062561, 0.44336703419685364, 0.3099798560142517, 0.33978527784347534, 0.3230708837509155, 0.24347634613513947, 0.34002116322517395, 0.38097622990608215, 0.3140774667263031, 0.35912543535232544, 0.3096110224723816, 0.2934989333152771, 0.32362639904022217, 0.2853163778781891, 0.40927788615226746, 0.344549298286438, 0.35486701130867004, 0.2919713258743286, 0.29366111755371094, 0.2345850169658661, 
0.3269291818141937, 0.27009838819503784, 0.24729961156845093, 0.23643560707569122, 0.2713281810283661, 0.3508720397949219, 0.31076687574386597, 0.23196296393871307, 0.30137068033218384, 0.30662110447883606, 0.30144092440605164, 0.26325902342796326, 0.2414439171552658, 0.24168288707733154, 0.3665657937526703, 0.26635539531707764, 0.27412736415863037, 0.26859673857688904, 0.2679976224899292, 0.2033390998840332, 0.21444451808929443, 0.24715252220630646, 0.20027630031108856, 0.2651069164276123, 0.22406940162181854, 0.23558159172534943, 0.29279056191444397, 0.21005813777446747, 0.2264915257692337, 0.2705976963043213, 0.29812881350517273, 0.24411886930465698, 0.19121330976486206, 0.24011512100696564, 0.2024538815021515, 0.23937229812145233, 0.1517479121685028, 0.21809834241867065, 0.15252619981765747, 0.1679471731185913, 0.1792537122964859, 0.37559834122657776, 0.2153422236442566, 0.1996726095676422, 0.2669014036655426, 0.23820249736309052, 0.14729902148246765, 0.14499309659004211, 0.23550225794315338, 0.15557198226451874, 0.171242356300354, 0.21155670285224915, 0.22475789487361908, 0.2937394380569458, 0.14584924280643463, 0.2291427105665207, 0.15905174612998962, 0.20977066457271576, 0.14832350611686707, 0.2069104015827179, 0.22295908629894257, 0.23826734721660614, 0.3387182950973511, 0.18861545622348785, 0.17743946611881256, 0.29566702246665955, 0.20286554098129272, 0.330140620470047, 0.15765614807605743, 0.19045421481132507, 0.22484281659126282, 0.16441059112548828, 0.2321143001317978, 0.2628859579563141, 0.15579646825790405, 0.11080299317836761, 0.1395931988954544, 0.25522783398628235, 0.233719140291214, 0.23066744208335876, 0.19644682109355927, 0.2597416937351227, 0.18068340420722961, 0.23088663816452026, 0.16657133400440216],\n [0.6949936151504517, 0.6952354311943054, 0.6785717010498047, 0.6861963868141174, 0.6734799742698669, 0.6700699329376221, 0.6696422696113586, 0.651672899723053, 0.661271870136261, 0.6783350110054016, 0.7057138085365295, 0.659278929233551, 
0.6615378856658936, 0.6497985124588013, 0.6584003567695618, 0.6476974487304688, 0.6553170680999756, 0.6473636031150818, 0.6407015919685364, 0.6469347476959229, 0.6329619288444519, 0.6414917707443237, 0.6199281215667725, 0.6084939241409302, 0.6061837673187256, 0.6165797114372253, 0.5994716286659241, 0.5790928602218628, 0.5922828316688538, 0.5553305149078369, 0.5734820365905762, 0.5577170848846436, 0.5705720782279968, 0.5534763932228088, 0.5055938959121704, 0.5430341362953186, 0.5333026647567749, 0.5111618041992188, 0.5246110558509827, 0.4945722222328186, 0.4700045883655548, 0.5159235596656799, 0.47609397768974304, 0.46036380529403687, 0.45059922337532043, 0.539964497089386, 0.45361819863319397, 0.4363147020339966, 0.4218635857105255, 0.4894428551197052, 0.43742886185646057, 0.4587785303592682, 0.39136767387390137, 0.4317249357700348, 0.3714491128921509, 0.3519900143146515, 0.35803020000457764, 0.3514930009841919, 0.41125229001045227, 0.33479222655296326, 0.3904966711997986, 0.3367680311203003, 0.35715779662132263, 0.35796919465065, 0.3029354512691498, 0.33077535033226013, 0.2994166314601898, 0.3397771418094635, 0.25192075967788696, 0.2745169699192047, 0.31597650051116943, 0.28831997513771057, 0.2565656900405884, 0.2846139669418335, 0.3085164725780487, 0.27865245938301086, 0.2597048282623291, 0.28016573190689087, 0.33969929814338684, 0.2359662652015686, 0.22362755239009857, 0.32718124985694885, 0.256195068359375, 0.2263725847005844, 0.27277684211730957, 0.22780318558216095, 0.2946825325489044, 0.22673363983631134, 0.1874716579914093, 0.18231447041034698, 0.22426769137382507, 0.1225583478808403, 0.191275954246521, 0.22184419631958008, 0.20843099057674408, 0.2179173231124878, 0.13894739747047424, 0.1945052146911621, 0.24411915242671967, 0.1262909322977066, 0.1336781084537506, 0.19644251465797424, 0.29393139481544495, 0.1329544186592102, 0.21651336550712585, 0.10534613579511642, 0.10970297455787659, 0.11439694464206696, 0.10766737908124924, 0.09469611197710037, 
0.11409152299165726, 0.14505302906036377, 0.08264971524477005, 0.08373641222715378, 0.08635269105434418, 0.09610045701265335, 0.15934310853481293, 0.08797606825828552, 0.057386692613363266, 0.10886342823505402, 0.11574409157037735, 0.15501470863819122, 0.11580544710159302, 0.10888257622718811, 0.08929429203271866, 0.12872563302516937, 0.055571578443050385, 0.05712718889117241, 0.10910465568304062, 0.04774434119462967, 0.10090269148349762, 0.1407812088727951, 0.10765101760625839, 0.07177164405584335, 0.07429184764623642, 0.07011748850345612, 0.11558900028467178, 0.06008166819810867, 0.10659179836511612, 0.09970154613256454, 0.06036578491330147, 0.16555644571781158, 0.050087690353393555, 0.261164128780365, 0.03510201349854469, 0.14045573770999908, 0.04542343318462372, 0.05252540111541748, 0.17340490221977234, 0.04753060266375542, 0.10562638193368912, 0.10070968419313431, 0.05016722530126572, 0.07565747946500778, 0.07548436522483826, 0.06991387158632278, 0.058277618139982224, 0.02746376022696495, 0.10515159368515015, 0.04497210681438446, 0.1646173596382141, 0.029241960495710373, 0.06743945181369781, 0.06716044992208481, 0.07172014564275742, 0.164986252784729, 0.033722780644893646, 0.25184309482574463, 0.02660728245973587, 0.2376488894224167, 0.0433085672557354, 0.09181798994541168, 0.0604783333837986, 0.053979579359292984, 0.19779837131500244, 0.03650300204753876, 0.35486748814582825, 0.018221942707896233, 0.041731446981430054, 0.06237120181322098, 0.1872447431087494, 0.30035507678985596, 0.06373988837003708, 0.07760319858789444, 0.0451621450483799, 0.04935208708047867, 0.01006357092410326, 0.05355437844991684, 0.01504779327660799, 0.2620829939842224, 0.030764922499656677, 0.07496432960033417, 0.3234938085079193, 0.012935340404510498, 0.16990314424037933, 0.01977086253464222, 0.015342634171247482, 0.028019368648529053, 0.12320356070995331, 0.06336066871881485],\n [0.6734607815742493, 0.6650577783584595, 0.7102711200714111, 0.693929135799408, 0.6988271474838257, 
0.6860700845718384, 0.6785882711410522, 0.6931283473968506, 0.6676186919212341, 0.676205575466156, 0.6686538457870483, 0.6715155839920044, 0.6554709672927856, 0.6537784337997437, 0.6374448537826538, 0.6496015787124634, 0.638644814491272, 0.6325873136520386, 0.5982065200805664, 0.6398217082023621, 0.6295318603515625, 0.6195254325866699, 0.599952757358551, 0.6177839636802673, 0.5872480869293213, 0.6155892610549927, 0.5772850513458252, 0.6361809968948364, 0.5771239399909973, 0.5560054183006287, 0.5512492656707764, 0.5083346366882324, 0.588905394077301, 0.5306777954101562, 0.5099344849586487, 0.5179614424705505, 0.5360977053642273, 0.5523738861083984, 0.5092798471450806, 0.5112224221229553, 0.45956704020500183, 0.45448124408721924, 0.4331269860267639, 0.4714414179325104, 0.44046077132225037, 0.4792649745941162, 0.40831059217453003, 0.4616020917892456, 0.4115033745765686, 0.428427129983902, 0.3536805808544159, 0.3971237540245056, 0.41458365321159363, 0.40857547521591187, 0.3642134964466095, 0.3724595010280609, 0.39579346776008606, 0.3279842138290405, 0.3637480139732361, 0.37341809272766113, 0.32788166403770447, 0.3339717388153076, 0.388381689786911, 0.4231674075126648, 0.3650681972503662, 0.30893054604530334, 0.3420785963535309, 0.38755279779434204, 0.3734549582004547, 0.2918618321418762, 0.36253196001052856, 0.3379853367805481, 0.31508368253707886, 0.32781195640563965, 0.381572961807251, 0.30499470233917236, 0.3382324278354645, 0.2420194000005722, 0.3507157564163208, 0.31584838032722473, 0.28133654594421387, 0.23439913988113403, 0.25078830122947693, 0.309343159198761, 0.25425347685813904, 0.3265491724014282, 0.22766274213790894, 0.3392275273799896, 0.2816329300403595, 0.3172505795955658, 0.2874555289745331, 0.23281890153884888, 0.2938626706600189, 0.20971855521202087, 0.20428352057933807, 0.18985994160175323, 0.21345168352127075, 0.22230438888072968, 0.1424950659275055, 0.20548848807811737, 0.24698014557361603, 0.2982845604419708, 0.3037109673023224, 
0.2556310296058655, 0.2084486037492752, 0.2006864994764328, 0.23979412019252777, 0.298304945230484, 0.21333563327789307, 0.2628375291824341, 0.18165212869644165, 0.207316592335701, 0.2174670398235321, 0.2522740364074707, 0.14822618663311005, 0.19933146238327026, 0.1626323163509369, 0.23581382632255554, 0.29229843616485596, 0.22589454054832458, 0.17559100687503815, 0.10761989653110504, 0.16979072988033295, 0.15813110768795013, 0.08478847146034241, 0.1003241166472435, 0.11911974847316742, 0.13875901699066162, 0.11395498365163803, 0.19272516667842865, 0.14996206760406494, 0.10391950607299805, 0.1736968606710434, 0.11564969271421432, 0.11454245448112488, 0.20188166201114655, 0.11639059334993362, 0.13142740726470947, 0.12504342198371887, 0.1324760466814041, 0.11340098083019257, 0.09728231281042099, 0.12973423302173615, 0.07123860716819763, 0.15773168206214905, 0.08957032859325409, 0.2768268287181854, 0.09786324203014374, 0.2007419615983963, 0.06373140215873718, 0.0817687138915062, 0.1599176526069641, 0.10015073418617249, 0.2237526774406433, 0.08339745551347733, 0.10071887820959091, 0.07920438051223755, 0.12705478072166443, 0.0833975300192833, 0.10126693546772003, 0.10480248183012009, 0.1877751499414444, 0.09175236523151398, 0.17485201358795166, 0.06307676434516907, 0.09258589148521423, 0.10997342318296432, 0.14010410010814667, 0.08516515046358109, 0.12857507169246674, 0.20907457172870636, 0.1344369649887085, 0.12898562848567963, 0.2652635872364044, 0.21312803030014038, 0.20858009159564972, 0.1498584896326065, 0.1613665074110031, 0.13983330130577087, 0.13477762043476105, 0.16344882547855377, 0.20185469090938568, 0.06201845780014992, 0.12131336331367493, 0.07238198816776276, 0.287982702255249, 0.07747112959623337, 0.19001345336437225, 0.10505222529172897, 0.2580859959125519, 0.18462476134300232, 0.20932045578956604, 0.11803331971168518, 0.08391863852739334, 0.09098409861326218, 0.05974394828081131, 0.23643453419208527, 0.03688646852970123, 0.10661374777555466, 
0.09279479831457138]])\nloss_histories_CNN_FASHION_3L = np.array([[0.681438684463501, 0.6889891028404236, 0.6886380910873413, 0.6888549327850342, 0.7001448273658752, 0.6896989345550537, 0.6827958822250366, 0.6809604167938232, 0.6802992820739746, 0.6798832416534424, 0.6873130202293396, 0.6706038117408752, 0.6825727820396423, 0.6711708307266235, 0.6692336797714233, 0.6626574993133545, 0.6598449945449829, 0.6661273241043091, 0.6690754890441895, 0.6628720164299011, 0.6578156352043152, 0.6536375284194946, 0.6395933032035828, 0.6494665741920471, 0.6431573629379272, 0.6422117352485657, 0.6304263472557068, 0.6317248940467834, 0.6545873284339905, 0.6310784816741943, 0.6355032920837402, 0.6170492768287659, 0.6209865808486938, 0.6031823754310608, 0.5973383188247681, 0.5987150073051453, 0.5997783541679382, 0.5827844142913818, 0.5649464130401611, 0.5976262092590332, 0.5651332139968872, 0.5687211155891418, 0.5581858158111572, 0.5616911053657532, 0.5290145874023438, 0.5284591317176819, 0.5671270489692688, 0.47875893115997314, 0.5753456950187683, 0.5510994791984558, 0.5518543720245361, 0.4903143048286438, 0.48829033970832825, 0.5358155965805054, 0.530174732208252, 0.5644151568412781, 0.4735930562019348, 0.4666731357574463, 0.4378604590892792, 0.470310240983963, 0.4612236022949219, 0.5205991268157959, 0.4360387921333313, 0.44255444407463074, 0.5049978494644165, 0.40847957134246826, 0.4495522379875183, 0.4902504086494446, 0.4367920160293579, 0.4487551152706146, 0.4421207010746002, 0.37032660841941833, 0.3559698164463043, 0.42548468708992004, 0.4063631296157837, 0.45850908756256104, 0.38015541434288025, 0.3413124084472656, 0.4414171576499939, 0.3528973460197449, 0.3753133714199066, 0.4682203233242035, 0.38733014464378357, 0.3464372158050537, 0.3330712914466858, 0.38146668672561646, 0.37155666947364807, 0.3937838077545166, 0.3397389352321625, 0.2911732792854309, 0.3185632824897766, 0.34439948201179504, 0.37463000416755676, 0.2941756844520569, 0.3446425497531891, 0.2543509304523468, 
0.44017115235328674, 0.2634207010269165, 0.32429203391075134, 0.3531845808029175, 0.4209728538990021, 0.2728058993816376, 0.29612597823143005, 0.36481037735939026, 0.23760972917079926, 0.31274184584617615, 0.29005640745162964, 0.3057081699371338, 0.3319462239742279, 0.4440755546092987, 0.3070727586746216, 0.3044629395008087, 0.32753700017929077, 0.2656729221343994, 0.422760009765625, 0.2262178659439087, 0.34336617588996887, 0.28752654790878296, 0.22466769814491272, 0.19892671704292297, 0.32547518610954285, 0.3219960033893585, 0.2053043395280838, 0.33121222257614136, 0.24561984837055206, 0.18318641185760498, 0.3192058503627777, 0.30691757798194885, 0.18206992745399475, 0.22018828988075256, 0.38999852538108826, 0.17489850521087646, 0.30276167392730713, 0.21708929538726807, 0.35204121470451355, 0.20095224678516388, 0.26634764671325684, 0.1619335412979126, 0.17421387135982513, 0.17063964903354645, 0.23774376511573792, 0.21871469914913177, 0.28005892038345337, 0.14502578973770142, 0.2649957239627838, 0.31908106803894043, 0.22305874526500702, 0.19752325117588043, 0.22612883150577545, 0.2095600664615631, 0.3497889041900635, 0.24804994463920593, 0.18516618013381958, 0.19408148527145386, 0.39791882038116455, 0.2503810226917267, 0.13216803967952728, 0.1892985850572586, 0.2676987946033478, 0.1970151662826538, 0.2100503146648407, 0.14209169149398804, 0.1863841563463211, 0.14308282732963562, 0.3121347725391388, 0.2626650333404541, 0.12991006672382355, 0.16739721596240997, 0.11467546224594116, 0.32184040546417236, 0.11009383946657181, 0.19579708576202393, 0.13897623121738434, 0.1434190273284912, 0.31635451316833496, 0.2833975851535797, 0.16934165358543396, 0.14960110187530518, 0.31888577342033386, 0.21167083084583282, 0.30760568380355835, 0.41453269124031067, 0.229119211435318, 0.1743178516626358, 0.2994597852230072, 0.24154801666736603, 0.09829962998628616, 0.1747366487979889, 0.29539310932159424, 0.2035258263349533, 0.30196595191955566, 0.30807745456695557, 0.3411264419555664, 
0.15731027722358704, 0.12981931865215302, 0.2966948449611664, 0.21025419235229492, 0.15241996943950653, 0.07024487853050232, 0.23431310057640076],\n [0.7195901274681091, 0.6704943180084229, 0.7085641622543335, 0.6748741269111633, 0.7423946857452393, 0.7234722375869751, 0.6986007690429688, 0.6960390210151672, 0.7012070417404175, 0.6947640180587769, 0.6792826056480408, 0.7239813804626465, 0.6761874556541443, 0.6902806758880615, 0.6771406531333923, 0.6850265264511108, 0.692349910736084, 0.6808741092681885, 0.6753465533256531, 0.6633909344673157, 0.6686793565750122, 0.6637576222419739, 0.680237889289856, 0.654888391494751, 0.6780327558517456, 0.6408708095550537, 0.6315723061561584, 0.6431190371513367, 0.6341533064842224, 0.6376373767852783, 0.6409605145454407, 0.6432655453681946, 0.6013216972351074, 0.6401882171630859, 0.6355386972427368, 0.5561983585357666, 0.5904024839401245, 0.6024128198623657, 0.5541431903839111, 0.575777530670166, 0.5260757803916931, 0.5354135036468506, 0.44182831048965454, 0.4903651475906372, 0.4765366017818451, 0.4635029733181, 0.5017414093017578, 0.4560497999191284, 0.4651477336883545, 0.4632211923599243, 0.5207407474517822, 0.47012707591056824, 0.35573041439056396, 0.40080904960632324, 0.5226706266403198, 0.3614789843559265, 0.3670787811279297, 0.4320698082447052, 0.4483892321586609, 0.3988180160522461, 0.44299742579460144, 0.4347727596759796, 0.2980192303657532, 0.3726247549057007, 0.42988693714141846, 0.39973509311676025, 0.34497734904289246, 0.39063209295272827, 0.3743012249469757, 0.3100280165672302, 0.3662980794906616, 0.3414981961250305, 0.32001861929893494, 0.3834022283554077, 0.23791366815567017, 0.30651259422302246, 0.2968301773071289, 0.3293622136116028, 0.1965251863002777, 0.3719707429409027, 0.3272671401500702, 0.24777932465076447, 0.3863266110420227, 0.24999620020389557, 0.21851596236228943, 0.21008063852787018, 0.3156169652938843, 0.3615129590034485, 0.2176506221294403, 0.19833393394947052, 0.3163139820098877, 0.3545341491699219, 
0.19693829119205475, 0.28649449348449707, 0.31108948588371277, 0.2429426908493042, 0.20192016661167145, 0.21015271544456482, 0.2626788020133972, 0.3365514278411865, 0.2673414647579193, 0.09546613693237305, 0.12053285539150238, 0.17347104847431183, 0.23780611157417297, 0.24000667035579681, 0.24722233414649963, 0.23631177842617035, 0.24386261403560638, 0.16348625719547272, 0.18626368045806885, 0.23263885080814362, 0.12030680477619171, 0.18763351440429688, 0.06662426143884659, 0.2510358393192291, 0.35354670882225037, 0.22257286310195923, 0.24602772295475006, 0.10453713685274124, 0.185865119099617, 0.120824433863163, 0.16575820744037628, 0.16989396512508392, 0.06567985564470291, 0.4476766586303711, 0.13566845655441284, 0.2541764974594116, 0.16894878447055817, 0.31662842631340027, 0.20940165221691132, 0.20982550084590912, 0.2824331521987915, 0.2067907750606537, 0.1581425666809082, 0.2816847264766693, 0.20116807520389557, 0.1924077868461609, 0.17162162065505981, 0.1839713156223297, 0.13327762484550476, 0.12191877514123917, 0.08460020273923874, 0.142286017537117, 0.22156421840190887, 0.1368444859981537, 0.08621849864721298, 0.427219033241272, 0.0529838465154171, 0.09679907560348511, 0.1012415811419487, 0.10245517641305923, 0.31773877143859863, 0.21166706085205078, 0.06600738316774368, 0.12189425528049469, 0.1269007921218872, 0.19244657456874847, 0.27885836362838745, 0.05483828857541084, 0.21648481488227844, 0.20490846037864685, 0.07530874758958817, 0.2320171296596527, 0.02713399939239025, 0.13403251767158508, 0.2914361357688904, 0.07058202475309372, 0.19688743352890015, 0.14317165315151215, 0.3330090343952179, 0.0591675229370594, 0.19346661865711212, 0.13119332492351532, 0.07487566769123077, 0.16602414846420288, 0.016024962067604065, 0.1277586966753006, 0.6630570292472839, 0.2762067914009094, 0.11343107372522354, 0.2670533359050751, 0.11601337790489197, 0.32291847467422485, 0.1735117882490158, 0.11186012625694275, 0.0449039451777935, 0.12063135951757431, 
0.09390923380851746, 0.08387088775634766, 0.07510893046855927, 0.11414384096860886, 0.15171335637569427, 0.08825315535068512, 0.050439488142728806, 0.34374308586120605, 0.21407325565814972, 0.18398113548755646, 0.10786985605955124, 0.1447535753250122],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_MNIST_2L", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_CNN_MNIST_2L = np.array([[0.6388911008834839, 0.755074143409729, 0.6632554531097412, 0.6934544444084167, 0.6570810675621033, 0.6539508700370789, 0.6652110815048218, 0.6544034481048584, 0.6927835941314697, 0.6516890525817871, 0.6747910976409912, 0.6588358879089355, 0.6461827158927917, 0.6406604647636414, 0.6593841314315796, 0.6290771961212158, 0.635811448097229, 0.616572916507721, 0.6336753964424133, 0.6234488487243652, 0.5950428247451782, 0.5689221024513245, 0.5299611687660217, 0.5597831606864929, 0.5785280466079712, 0.594134509563446, 0.5603616833686829, 0.5874844193458557, 0.5339792370796204, 0.5163654088973999, 0.5020135641098022, 0.5339027643203735, 0.4965783953666687, 0.504167377948761, 0.5495306253433228, 0.5618932247161865, 0.4844675362110138, 0.5362534523010254, 0.4832651913166046, 0.4696060121059418, 0.5025283694267273, 0.4544117748737335, 0.45164787769317627, 0.393768310546875, 0.38244518637657166, 0.3918498158454895, 0.3800329267978668, 0.3839554190635681, 0.4202963709831238, 0.3279581367969513, 0.42289531230926514, 0.39857998490333557, 0.35821080207824707, 0.3929472863674164, 0.44718289375305176, 0.30050015449523926, 0.29927077889442444, 0.3997846841812134, 0.28816962242126465, 0.27845433354377747, 0.34952616691589355, 0.21190403401851654, 0.25238102674484253, 0.25262564420700073, 0.235600084066391, 
0.2105487436056137, 0.2774962782859802, 0.16170747578144073, 0.15625521540641785, 0.24559007585048676, 0.21703849732875824, 0.2002720832824707, 0.143121600151062, 0.21113519370555878, 0.13413314521312714, 0.094456747174263, 0.1285012662410736, 0.13243742287158966, 0.11418274790048599, 0.11925558745861053, 0.21439248323440552, 0.1880965232849121, 0.22684723138809204, 0.14851173758506775, 0.17117464542388916, 0.29501330852508545, 0.1405528038740158, 0.15265348553657532, 0.14476747810840607, 0.2107343226671219, 0.14534687995910645, 0.10932359844446182, 0.1359551101922989, 0.08792147040367126, 0.44539299607276917, 0.12740293145179749, 0.19267109036445618, 0.18098540604114532, 0.09354829043149948, 0.12443973869085312, 0.06745033711194992, 0.356609582901001, 0.21240657567977905, 0.1610700637102127, 0.417524516582489, 0.27998778223991394, 0.07074294239282608, 0.3500562906265259, 0.08549501746892929, 0.0939045324921608, 0.08015522360801697, 0.100171759724617, 0.11099651455879211, 0.13093963265419006, 0.1679827719926834, 0.24390658736228943, 0.04739665985107422, 0.0744795873761177, 0.07295193523168564, 0.2371142953634262, 0.059540607035160065, 0.05386676639318466, 0.12376302480697632, 0.0402214378118515, 0.16778266429901123, 0.049860335886478424, 0.23358245193958282, 0.03220764175057411, 0.03889647498726845, 0.10671715438365936, 0.16780143976211548, 0.09492737799882889, 0.10106085985898972, 0.04440590739250183, 0.020353030413389206, 0.04254315048456192, 0.18652085959911346, 0.05661969631910324, 0.050825752317905426, 0.040961287915706635, 0.1736149936914444, 0.10325713455677032, 0.4208120107650757, 0.23678357899188995, 0.05452421307563782, 0.06970299035310745, 0.07004570215940475, 0.010129554197192192, 0.04974444955587387, 0.022693078964948654, 0.0604703389108181, 0.04160081222653389, 0.14491456747055054, 0.015511690638959408, 0.0703435093164444, 0.21803060173988342, 0.11409696936607361, 0.24460017681121826, 0.11236660927534103, 0.024127209559082985, 0.03911502659320831, 
0.023577801883220673, 0.015934821218252182, 0.03514527902007103, 0.029105860739946365, 0.042216356843709946, 0.4885580539703369, 0.2975511848926544, 0.030093304812908173, 0.0473562590777874, 0.04369819164276123, 0.013611183501780033, 0.11871989071369171, 0.18853046000003815, 0.018563533201813698, 0.15615783631801605, 0.07639063894748688, 0.02535773068666458, 0.01811952143907547, 0.048622678965330124, 0.40922871232032776, 0.05182328820228577, 0.0855875015258789, 0.007202706299722195, 0.005893732886761427, 0.07512831687927246, 0.04234791919589043, 0.034827884286642075, 0.28690654039382935, 0.2144981175661087, 0.019982123747467995, 0.14100973308086395, 0.025057468563318253, 0.04754437133669853, 0.01411377266049385, 0.01860228180885315, 0.02221028506755829, 0.0223314817994833, 0.12828055024147034, 0.007281612139195204],\n [0.7150665521621704, 0.6849260926246643, 0.6839231848716736, 0.6914247274398804, 0.7217769026756287, 0.686398983001709, 0.6719824075698853, 0.668079674243927, 0.6932412981987, 0.6920555830001831, 0.6890246868133545, 0.6718630194664001, 0.6747984886169434, 0.6807641386985779, 0.6621140837669373, 0.670916736125946, 0.659507691860199, 0.643657386302948, 0.666704535484314, 0.6662449836730957, 0.6511495113372803, 0.6499025225639343, 0.6435579657554626, 0.6392068266868591, 0.621609628200531, 0.6199886798858643, 0.6364652514457703, 0.6426952481269836, 0.607877254486084, 0.6169797778129578, 0.6301181316375732, 0.609940767288208, 0.6170454621315002, 0.6107946634292603, 0.6118714809417725, 0.567970871925354, 0.601747989654541, 0.5884506106376648, 0.5826886892318726, 0.5579785704612732, 0.5351777076721191, 0.5977340936660767, 0.5800117254257202, 0.578112006187439, 0.5632650256156921, 0.5356637835502625, 0.566638708114624, 0.5227785706520081, 0.5338039994239807, 0.5314185619354248, 0.500601589679718, 0.47139179706573486, 0.532673716545105, 0.46814391016960144, 0.46852052211761475, 0.47733187675476074, 0.5256465673446655, 0.46834608912467957, 0.4960532486438751, 
0.47720324993133545, 0.4778778851032257, 0.47898757457733154, 0.4454004764556885, 0.44470423460006714, 0.3844307065010071, 0.4479762017726898, 0.42458829283714294, 0.48051315546035767, 0.4186549484729767, 0.4721565544605255, 0.44315823912620544, 0.459507554769516, 0.4486005902290344, 0.3585163354873657, 0.4180649518966675, 0.36260655522346497, 0.428268164396286, 0.4573773145675659, 0.39308762550354004, 0.32516413927078247, 0.47218453884124756, 0.4175204932689667, 0.3852556645870209, 0.38668152689933777, 0.3957267701625824, 0.3171007037162781, 0.3545765280723572, 0.33774837851524353, 0.4291412830352783, 0.2971607446670532, 0.3743261694908142, 0.3171898424625397, 0.3795458674430847, 0.35343658924102783, 0.37818291783332825, 0.3594251275062561, 0.44336703419685364, 0.3099798560142517, 0.33978527784347534, 0.3230708837509155, 0.24347634613513947, 0.34002116322517395, 0.38097622990608215, 0.3140774667263031, 0.35912543535232544, 0.3096110224723816, 0.2934989333152771, 0.32362639904022217, 0.2853163778781891, 0.40927788615226746, 0.344549298286438, 0.35486701130867004, 0.2919713258743286, 0.29366111755371094, 0.2345850169658661, 0.3269291818141937, 0.27009838819503784, 0.24729961156845093, 0.23643560707569122, 0.2713281810283661, 0.3508720397949219, 0.31076687574386597, 0.23196296393871307, 0.30137068033218384, 0.30662110447883606, 0.30144092440605164, 0.26325902342796326, 0.2414439171552658, 0.24168288707733154, 0.3665657937526703, 0.26635539531707764, 0.27412736415863037, 0.26859673857688904, 0.2679976224899292, 0.2033390998840332, 0.21444451808929443, 0.24715252220630646, 0.20027630031108856, 0.2651069164276123, 0.22406940162181854, 0.23558159172534943, 0.29279056191444397, 0.21005813777446747, 0.2264915257692337, 0.2705976963043213, 0.29812881350517273, 0.24411886930465698, 0.19121330976486206, 0.24011512100696564, 0.2024538815021515, 0.23937229812145233, 0.1517479121685028, 0.21809834241867065, 0.15252619981765747, 0.1679471731185913, 0.1792537122964859, 
0.37559834122657776, 0.2153422236442566, 0.1996726095676422, 0.2669014036655426, 0.23820249736309052, 0.14729902148246765, 0.14499309659004211, 0.23550225794315338, 0.15557198226451874, 0.171242356300354, 0.21155670285224915, 0.22475789487361908, 0.2937394380569458, 0.14584924280643463, 0.2291427105665207, 0.15905174612998962, 0.20977066457271576, 0.14832350611686707, 0.2069104015827179, 0.22295908629894257, 0.23826734721660614, 0.3387182950973511, 0.18861545622348785, 0.17743946611881256, 0.29566702246665955, 0.20286554098129272, 0.330140620470047, 0.15765614807605743, 0.19045421481132507, 0.22484281659126282, 0.16441059112548828, 0.2321143001317978, 0.2628859579563141, 0.15579646825790405, 0.11080299317836761, 0.1395931988954544, 0.25522783398628235, 0.233719140291214, 0.23066744208335876, 0.19644682109355927, 0.2597416937351227, 0.18068340420722961, 0.23088663816452026, 0.16657133400440216],\n [0.6949936151504517, 0.6952354311943054, 0.6785717010498047, 0.6861963868141174, 0.6734799742698669, 0.6700699329376221, 0.6696422696113586, 0.651672899723053, 0.661271870136261, 0.6783350110054016, 0.7057138085365295, 0.659278929233551, 0.6615378856658936, 0.6497985124588013, 0.6584003567695618, 0.6476974487304688, 0.6553170680999756, 0.6473636031150818, 0.6407015919685364, 0.6469347476959229, 0.6329619288444519, 0.6414917707443237, 0.6199281215667725, 0.6084939241409302, 0.6061837673187256, 0.6165797114372253, 0.5994716286659241, 0.5790928602218628, 0.5922828316688538, 0.5553305149078369, 0.5734820365905762, 0.5577170848846436, 0.5705720782279968, 0.5534763932228088, 0.5055938959121704, 0.5430341362953186, 0.5333026647567749, 0.5111618041992188, 0.5246110558509827, 0.4945722222328186, 0.4700045883655548, 0.5159235596656799, 0.47609397768974304, 0.46036380529403687, 0.45059922337532043, 0.539964497089386, 0.45361819863319397, 0.4363147020339966, 0.4218635857105255, 0.4894428551197052, 0.43742886185646057, 0.4587785303592682, 0.39136767387390137, 0.4317249357700348, 
0.3714491128921509, 0.3519900143146515, 0.35803020000457764, 0.3514930009841919, 0.41125229001045227, 0.33479222655296326, 0.3904966711997986, 0.3367680311203003, 0.35715779662132263, 0.35796919465065, 0.3029354512691498, 0.33077535033226013, 0.2994166314601898, 0.3397771418094635, 0.25192075967788696, 0.2745169699192047, 0.31597650051116943, 0.28831997513771057, 0.2565656900405884, 0.2846139669418335, 0.3085164725780487, 0.27865245938301086, 0.2597048282623291, 0.28016573190689087, 0.33969929814338684, 0.2359662652015686, 0.22362755239009857, 0.32718124985694885, 0.256195068359375, 0.2263725847005844, 0.27277684211730957, 0.22780318558216095, 0.2946825325489044, 0.22673363983631134, 0.1874716579914093, 0.18231447041034698, 0.22426769137382507, 0.1225583478808403, 0.191275954246521, 0.22184419631958008, 0.20843099057674408, 0.2179173231124878, 0.13894739747047424, 0.1945052146911621, 0.24411915242671967, 0.1262909322977066, 0.1336781084537506, 0.19644251465797424, 0.29393139481544495, 0.1329544186592102, 0.21651336550712585, 0.10534613579511642, 0.10970297455787659, 0.11439694464206696, 0.10766737908124924, 0.09469611197710037, 0.11409152299165726, 0.14505302906036377, 0.08264971524477005, 0.08373641222715378, 0.08635269105434418, 0.09610045701265335, 0.15934310853481293, 0.08797606825828552, 0.057386692613363266, 0.10886342823505402, 0.11574409157037735, 0.15501470863819122, 0.11580544710159302, 0.10888257622718811, 0.08929429203271866, 0.12872563302516937, 0.055571578443050385, 0.05712718889117241, 0.10910465568304062, 0.04774434119462967, 0.10090269148349762, 0.1407812088727951, 0.10765101760625839, 0.07177164405584335, 0.07429184764623642, 0.07011748850345612, 0.11558900028467178, 0.06008166819810867, 0.10659179836511612, 0.09970154613256454, 0.06036578491330147, 0.16555644571781158, 0.050087690353393555, 0.261164128780365, 0.03510201349854469, 0.14045573770999908, 0.04542343318462372, 0.05252540111541748, 0.17340490221977234, 0.04753060266375542, 
0.10562638193368912, 0.10070968419313431, 0.05016722530126572, 0.07565747946500778, 0.07548436522483826, 0.06991387158632278, 0.058277618139982224, 0.02746376022696495, 0.10515159368515015, 0.04497210681438446, 0.1646173596382141, 0.029241960495710373, 0.06743945181369781, 0.06716044992208481, 0.07172014564275742, 0.164986252784729, 0.033722780644893646, 0.25184309482574463, 0.02660728245973587, 0.2376488894224167, 0.0433085672557354, 0.09181798994541168, 0.0604783333837986, 0.053979579359292984, 0.19779837131500244, 0.03650300204753876, 0.35486748814582825, 0.018221942707896233, 0.041731446981430054, 0.06237120181322098, 0.1872447431087494, 0.30035507678985596, 0.06373988837003708, 0.07760319858789444, 0.0451621450483799, 0.04935208708047867, 0.01006357092410326, 0.05355437844991684, 0.01504779327660799, 0.2620829939842224, 0.030764922499656677, 0.07496432960033417, 0.3234938085079193, 0.012935340404510498, 0.16990314424037933, 0.01977086253464222, 0.015342634171247482, 0.028019368648529053, 0.12320356070995331, 0.06336066871881485],\n [0.6734607815742493, 0.6650577783584595, 0.7102711200714111, 0.693929135799408, 0.6988271474838257, 0.6860700845718384, 0.6785882711410522, 0.6931283473968506, 0.6676186919212341, 0.676205575466156, 0.6686538457870483, 0.6715155839920044, 0.6554709672927856, 0.6537784337997437, 0.6374448537826538, 0.6496015787124634, 0.638644814491272, 0.6325873136520386, 0.5982065200805664, 0.6398217082023621, 0.6295318603515625, 0.6195254325866699, 0.599952757358551, 0.6177839636802673, 0.5872480869293213, 0.6155892610549927, 0.5772850513458252, 0.6361809968948364, 0.5771239399909973, 0.5560054183006287, 0.5512492656707764, 0.5083346366882324, 0.588905394077301, 0.5306777954101562, 0.5099344849586487, 0.5179614424705505, 0.5360977053642273, 0.5523738861083984, 0.5092798471450806, 0.5112224221229553, 0.45956704020500183, 0.45448124408721924, 0.4331269860267639, 0.4714414179325104, 0.44046077132225037, 0.4792649745941162, 0.40831059217453003, 
0.4616020917892456, 0.4115033745765686, 0.428427129983902, 0.3536805808544159, 0.3971237540245056, 0.41458365321159363, 0.40857547521591187, 0.3642134964466095, 0.3724595010280609, 0.39579346776008606, 0.3279842138290405, 0.3637480139732361, 0.37341809272766113, 0.32788166403770447, 0.3339717388153076, 0.388381689786911, 0.4231674075126648, 0.3650681972503662, 0.30893054604530334, 0.3420785963535309, 0.38755279779434204, 0.3734549582004547, 0.2918618321418762, 0.36253196001052856, 0.3379853367805481, 0.31508368253707886, 0.32781195640563965, 0.381572961807251, 0.30499470233917236, 0.3382324278354645, 0.2420194000005722, 0.3507157564163208, 0.31584838032722473, 0.28133654594421387, 0.23439913988113403, 0.25078830122947693, 0.309343159198761, 0.25425347685813904, 0.3265491724014282, 0.22766274213790894, 0.3392275273799896, 0.2816329300403595, 0.3172505795955658, 0.2874555289745331, 0.23281890153884888, 0.2938626706600189, 0.20971855521202087, 0.20428352057933807, 0.18985994160175323, 0.21345168352127075, 0.22230438888072968, 0.1424950659275055, 0.20548848807811737, 0.24698014557361603, 0.2982845604419708, 0.3037109673023224, 0.2556310296058655, 0.2084486037492752, 0.2006864994764328, 0.23979412019252777, 0.298304945230484, 0.21333563327789307, 0.2628375291824341, 0.18165212869644165, 0.207316592335701, 0.2174670398235321, 0.2522740364074707, 0.14822618663311005, 0.19933146238327026, 0.1626323163509369, 0.23581382632255554, 0.29229843616485596, 0.22589454054832458, 0.17559100687503815, 0.10761989653110504, 0.16979072988033295, 0.15813110768795013, 0.08478847146034241, 0.1003241166472435, 0.11911974847316742, 0.13875901699066162, 0.11395498365163803, 0.19272516667842865, 0.14996206760406494, 0.10391950607299805, 0.1736968606710434, 0.11564969271421432, 0.11454245448112488, 0.20188166201114655, 0.11639059334993362, 0.13142740726470947, 0.12504342198371887, 0.1324760466814041, 0.11340098083019257, 0.09728231281042099, 0.12973423302173615, 0.07123860716819763, 
0.15773168206214905, 0.08957032859325409, 0.2768268287181854, 0.09786324203014374, 0.2007419615983963, 0.06373140215873718, 0.0817687138915062, 0.1599176526069641, 0.10015073418617249, 0.2237526774406433, 0.08339745551347733, 0.10071887820959091, 0.07920438051223755, 0.12705478072166443, 0.0833975300192833, 0.10126693546772003, 0.10480248183012009, 0.1877751499414444, 0.09175236523151398, 0.17485201358795166, 0.06307676434516907, 0.09258589148521423, 0.10997342318296432, 0.14010410010814667, 0.08516515046358109, 0.12857507169246674, 0.20907457172870636, 0.1344369649887085, 0.12898562848567963, 0.2652635872364044, 0.21312803030014038, 0.20858009159564972, 0.1498584896326065, 0.1613665074110031, 0.13983330130577087, 0.13477762043476105, 0.16344882547855377, 0.20185469090938568, 0.06201845780014992, 0.12131336331367493, 0.07238198816776276, 0.287982702255249, 0.07747112959623337, 0.19001345336437225, 0.10505222529172897, 0.2580859959125519, 0.18462476134300232, 0.20932045578956604, 0.11803331971168518, 0.08391863852739334, 0.09098409861326218, 0.05974394828081131, 0.23643453419208527, 0.03688646852970123, 0.10661374777555466, 0.09279479831457138]])\nloss_histories_CNN_FASHION_3L = np.array([[0.681438684463501, 0.6889891028404236, 0.6886380910873413, 0.6888549327850342, 0.7001448273658752, 0.6896989345550537, 0.6827958822250366, 0.6809604167938232, 0.6802992820739746, 0.6798832416534424, 0.6873130202293396, 0.6706038117408752, 0.6825727820396423, 0.6711708307266235, 0.6692336797714233, 0.6626574993133545, 0.6598449945449829, 0.6661273241043091, 0.6690754890441895, 0.6628720164299011, 0.6578156352043152, 0.6536375284194946, 0.6395933032035828, 0.6494665741920471, 0.6431573629379272, 0.6422117352485657, 0.6304263472557068, 0.6317248940467834, 0.6545873284339905, 0.6310784816741943, 0.6355032920837402, 0.6170492768287659, 0.6209865808486938, 0.6031823754310608, 0.5973383188247681, 0.5987150073051453, 0.5997783541679382, 0.5827844142913818, 0.5649464130401611, 
0.5976262092590332, 0.5651332139968872, 0.5687211155891418, 0.5581858158111572, 0.5616911053657532, 0.5290145874023438, 0.5284591317176819, 0.5671270489692688, 0.47875893115997314, 0.5753456950187683, 0.5510994791984558, 0.5518543720245361, 0.4903143048286438, 0.48829033970832825, 0.5358155965805054, 0.530174732208252, 0.5644151568412781, 0.4735930562019348, 0.4666731357574463, 0.4378604590892792, 0.470310240983963, 0.4612236022949219, 0.5205991268157959, 0.4360387921333313, 0.44255444407463074, 0.5049978494644165, 0.40847957134246826, 0.4495522379875183, 0.4902504086494446, 0.4367920160293579, 0.4487551152706146, 0.4421207010746002, 0.37032660841941833, 0.3559698164463043, 0.42548468708992004, 0.4063631296157837, 0.45850908756256104, 0.38015541434288025, 0.3413124084472656, 0.4414171576499939, 0.3528973460197449, 0.3753133714199066, 0.4682203233242035, 0.38733014464378357, 0.3464372158050537, 0.3330712914466858, 0.38146668672561646, 0.37155666947364807, 0.3937838077545166, 0.3397389352321625, 0.2911732792854309, 0.3185632824897766, 0.34439948201179504, 0.37463000416755676, 0.2941756844520569, 0.3446425497531891, 0.2543509304523468, 0.44017115235328674, 0.2634207010269165, 0.32429203391075134, 0.3531845808029175, 0.4209728538990021, 0.2728058993816376, 0.29612597823143005, 0.36481037735939026, 0.23760972917079926, 0.31274184584617615, 0.29005640745162964, 0.3057081699371338, 0.3319462239742279, 0.4440755546092987, 0.3070727586746216, 0.3044629395008087, 0.32753700017929077, 0.2656729221343994, 0.422760009765625, 0.2262178659439087, 0.34336617588996887, 0.28752654790878296, 0.22466769814491272, 0.19892671704292297, 0.32547518610954285, 0.3219960033893585, 0.2053043395280838, 0.33121222257614136, 0.24561984837055206, 0.18318641185760498, 0.3192058503627777, 0.30691757798194885, 0.18206992745399475, 0.22018828988075256, 0.38999852538108826, 0.17489850521087646, 0.30276167392730713, 0.21708929538726807, 0.35204121470451355, 0.20095224678516388, 0.26634764671325684, 
0.1619335412979126, 0.17421387135982513, 0.17063964903354645, 0.23774376511573792, 0.21871469914913177, 0.28005892038345337, 0.14502578973770142, 0.2649957239627838, 0.31908106803894043, 0.22305874526500702, 0.19752325117588043, 0.22612883150577545, 0.2095600664615631, 0.3497889041900635, 0.24804994463920593, 0.18516618013381958, 0.19408148527145386, 0.39791882038116455, 0.2503810226917267, 0.13216803967952728, 0.1892985850572586, 0.2676987946033478, 0.1970151662826538, 0.2100503146648407, 0.14209169149398804, 0.1863841563463211, 0.14308282732963562, 0.3121347725391388, 0.2626650333404541, 0.12991006672382355, 0.16739721596240997, 0.11467546224594116, 0.32184040546417236, 0.11009383946657181, 0.19579708576202393, 0.13897623121738434, 0.1434190273284912, 0.31635451316833496, 0.2833975851535797, 0.16934165358543396, 0.14960110187530518, 0.31888577342033386, 0.21167083084583282, 0.30760568380355835, 0.41453269124031067, 0.229119211435318, 0.1743178516626358, 0.2994597852230072, 0.24154801666736603, 0.09829962998628616, 0.1747366487979889, 0.29539310932159424, 0.2035258263349533, 0.30196595191955566, 0.30807745456695557, 0.3411264419555664, 0.15731027722358704, 0.12981931865215302, 0.2966948449611664, 0.21025419235229492, 0.15241996943950653, 0.07024487853050232, 0.23431310057640076],\n [0.7195901274681091, 0.6704943180084229, 0.7085641622543335, 0.6748741269111633, 0.7423946857452393, 0.7234722375869751, 0.6986007690429688, 0.6960390210151672, 0.7012070417404175, 0.6947640180587769, 0.6792826056480408, 0.7239813804626465, 0.6761874556541443, 0.6902806758880615, 0.6771406531333923, 0.6850265264511108, 0.692349910736084, 0.6808741092681885, 0.6753465533256531, 0.6633909344673157, 0.6686793565750122, 0.6637576222419739, 0.680237889289856, 0.654888391494751, 0.6780327558517456, 0.6408708095550537, 0.6315723061561584, 0.6431190371513367, 0.6341533064842224, 0.6376373767852783, 0.6409605145454407, 0.6432655453681946, 0.6013216972351074, 0.6401882171630859, 
0.6355386972427368, 0.5561983585357666, 0.5904024839401245, 0.6024128198623657, 0.5541431903839111, 0.575777530670166, 0.5260757803916931, 0.5354135036468506, 0.44182831048965454, 0.4903651475906372, 0.4765366017818451, 0.4635029733181, 0.5017414093017578, 0.4560497999191284, 0.4651477336883545, 0.4632211923599243, 0.5207407474517822, 0.47012707591056824, 0.35573041439056396, 0.40080904960632324, 0.5226706266403198, 0.3614789843559265, 0.3670787811279297, 0.4320698082447052, 0.4483892321586609, 0.3988180160522461, 0.44299742579460144, 0.4347727596759796, 0.2980192303657532, 0.3726247549057007, 0.42988693714141846, 0.39973509311676025, 0.34497734904289246, 0.39063209295272827, 0.3743012249469757, 0.3100280165672302, 0.3662980794906616, 0.3414981961250305, 0.32001861929893494, 0.3834022283554077, 0.23791366815567017, 0.30651259422302246, 0.2968301773071289, 0.3293622136116028, 0.1965251863002777, 0.3719707429409027, 0.3272671401500702, 0.24777932465076447, 0.3863266110420227, 0.24999620020389557, 0.21851596236228943, 0.21008063852787018, 0.3156169652938843, 0.3615129590034485, 0.2176506221294403, 0.19833393394947052, 0.3163139820098877, 0.3545341491699219, 0.19693829119205475, 0.28649449348449707, 0.31108948588371277, 0.2429426908493042, 0.20192016661167145, 0.21015271544456482, 0.2626788020133972, 0.3365514278411865, 0.2673414647579193, 0.09546613693237305, 0.12053285539150238, 0.17347104847431183, 0.23780611157417297, 0.24000667035579681, 0.24722233414649963, 0.23631177842617035, 0.24386261403560638, 0.16348625719547272, 0.18626368045806885, 0.23263885080814362, 0.12030680477619171, 0.18763351440429688, 0.06662426143884659, 0.2510358393192291, 0.35354670882225037, 0.22257286310195923, 0.24602772295475006, 0.10453713685274124, 0.185865119099617, 0.120824433863163, 0.16575820744037628, 0.16989396512508392, 0.06567985564470291, 0.4476766586303711, 0.13566845655441284, 0.2541764974594116, 0.16894878447055817, 0.31662842631340027, 0.20940165221691132, 
0.20982550084590912, 0.2824331521987915, 0.2067907750606537, 0.1581425666809082, 0.2816847264766693, 0.20116807520389557, 0.1924077868461609, 0.17162162065505981, 0.1839713156223297, 0.13327762484550476, 0.12191877514123917, 0.08460020273923874, 0.142286017537117, 0.22156421840190887, 0.1368444859981537, 0.08621849864721298, 0.427219033241272, 0.0529838465154171, 0.09679907560348511, 0.1012415811419487, 0.10245517641305923, 0.31773877143859863, 0.21166706085205078, 0.06600738316774368, 0.12189425528049469, 0.1269007921218872, 0.19244657456874847, 0.27885836362838745, 0.05483828857541084, 0.21648481488227844, 0.20490846037864685, 0.07530874758958817, 0.2320171296596527, 0.02713399939239025, 0.13403251767158508, 0.2914361357688904, 0.07058202475309372, 0.19688743352890015, 0.14317165315151215, 0.3330090343952179, 0.0591675229370594, 0.19346661865711212, 0.13119332492351532, 0.07487566769123077, 0.16602414846420288, 0.016024962067604065, 0.1277586966753006, 0.6630570292472839, 0.2762067914009094, 0.11343107372522354, 0.2670533359050751, 0.11601337790489197, 0.32291847467422485, 0.1735117882490158, 0.11186012625694275, 0.0449039451777935, 0.12063135951757431, 0.09390923380851746, 0.08387088775634766, 0.07510893046855927, 0.11414384096860886, 0.15171335637569427, 0.08825315535068512, 0.050439488142728806, 0.34374308586120605, 0.21407325565814972, 0.18398113548755646, 0.10786985605955124, 0.1447535753250122],\n [0.6444740295410156, 0.6480551958084106, 0.7680001258850098, 0.6938083171844482, 0.6945194005966187, 0.6434619426727295, 0.6478080153465271, 0.6552700996398926, 0.6474529504776001, 0.6316699385643005, 0.6500007510185242, 0.5896865725517273, 0.7054746150970459, 0.5965741872787476, 0.5899113416671753, 0.5865727066993713, 0.5499936938285828, 0.5815310478210449, 0.5552288293838501, 0.5780386924743652, 0.600023090839386, 0.4392690360546112, 0.5045123100280762, 0.5101278424263, 0.48246777057647705, 0.4923814833164215, 0.443683385848999, 0.3944355249404907, 
0.4880301356315613, 0.35789451003074646, 0.36372992396354675, 0.47596442699432373, 0.4648134112358093, 0.31122469902038574, 0.30873405933380127, 0.36700308322906494, 0.2557302713394165, 0.43774741888046265, 0.36664271354675293, 0.38658323884010315, 0.39342930912971497, 0.25078845024108887, 0.332029789686203, 0.3891676962375641, 0.350697785615921, 0.5877716541290283, 0.3053388297557831, 0.35364529490470886, 0.35642775893211365, 0.5797093510627747, 0.308402419090271, 0.33300063014030457, 0.20527392625808716, 0.37823060154914856, 0.23164953291416168, 0.3390084207057953, 0.48220548033714294, 0.19653253257274628, 0.30830979347229004, 0.27321138978004456, 0.25642216205596924, 0.4782291650772095, 0.4833580255508423, 0.24064145982265472, 0.2778085470199585, 0.14250364899635315, 0.21408909559249878, 0.3077957332134247, 0.24375246465206146, 0.2403443455696106, 0.318545937538147, 0.27636250853538513, 0.20684844255447388, 0.39635372161865234, 0.2426823079586029, 0.3302248418331146, 0.21984857320785522, 0.3096882700920105, 0.16199436783790588, 0.2731266915798187, 0.16509762406349182, 0.19987358152866364, 0.21301832795143127, 0.16873307526111603, 0.4657672643661499, 0.19174867868423462, 0.23006097972393036, 0.3353741765022278, 0.2514898180961609, 0.4382299482822418, 0.17479459941387177, 0.1549443155527115, 0.36182865500450134, 0.1583036184310913, 0.19739067554473877, 0.3335547149181366, 0.17704877257347107, 0.22076626121997833, 0.26111721992492676, 0.1475856900215149, 0.39272114634513855, 0.06898980587720871, 0.20126555860042572, 0.1861010193824768, 0.2317998707294464, 0.21326132118701935, 0.2857711911201477, 0.09368398040533066, 0.1964348554611206, 0.18394115567207336, 0.13550300896167755, 0.14457449316978455, 0.15295009315013885, 0.16381102800369263, 0.2795810401439667, 0.17563505470752716, 0.23439742624759674, 0.27755534648895264, 0.19725731015205383, 0.24526743590831757, 0.3030700087547302, 0.22834354639053345, 0.13675029575824738, 0.08429285883903503, 0.1311313807964325, 
0.18073631823062897, 0.07007424533367157, 0.3054443299770355, 0.10602294653654099, 0.16532698273658752, 0.15915970504283905, 0.09758727997541428, 0.35041344165802, 0.09038594365119934, 0.16694681346416473, 0.1270684152841568, 0.08292841911315918, 0.26534590125083923, 0.19535087049007416, 0.31837645173072815, 0.19049564003944397, 0.27681252360343933, 0.18552172183990479, 0.03605049103498459, 0.39923956990242004, 0.23028255999088287, 0.12411822378635406, 0.08776320517063141, 0.3358216881752014, 0.17280055582523346, 0.07582242041826248, 0.1299680471420288, 0.07957464456558228, 0.2518991231918335, 0.22218579053878784, 0.2953566014766693, 0.23980411887168884, 0.2516424059867859, 0.1552591174840927, 0.1568763107061386, 0.11077530682086945, 0.06224577873945236, 0.14881819486618042, 0.3115622401237488, 0.26296448707580566, 0.1568690836429596, 0.13243553042411804, 0.16159479320049286, 0.11857082694768906, 0.4102708399295807, 0.12056007981300354, 0.08759808540344238, 0.1313447803258896, 0.2028246819972992, 0.45793774724006653, 0.06600210815668106, 0.08839228749275208, 0.2017313838005066, 0.04523666948080063, 0.4295879304409027, 0.3630306124687195, 0.49682503938674927, 0.4873455762863159, 0.1069202795624733, 0.06406697630882263, 0.18771663308143616, 0.14819641411304474, 0.2584706246852875, 0.1014157086610794, 0.2010267823934555, 0.10214871168136597, 0.1414615660905838, 0.06353311240673065, 0.0687236413359642, 0.16954435408115387, 0.10046547651290894, 0.0897723138332367, 0.17460182309150696, 0.23726686835289001, 0.3276420831680298],\n [0.651547908782959, 0.6305937767028809, 0.714191198348999, 0.7366882562637329, 0.7081306576728821, 0.7052466869354248, 0.6959658265113831, 0.6876019239425659, 0.6796861290931702, 0.6682548522949219, 0.6783831119537354, 0.6809177398681641, 0.6594600081443787, 0.6597816944122314, 0.6607940196990967, 0.6554914712905884, 0.6487285494804382, 0.6310643553733826, 0.646901547908783, 0.6084935069084167, 0.646790087223053, 0.620003342628479, 
0.6197651028633118, 0.6110859513282776, 0.6023704409599304, 0.5829523205757141, 0.5887001156806946, 0.5428747534751892, 0.5947333574295044, 0.520041823387146, 0.530125081539154, 0.5711768269538879, 0.517208993434906, 0.5556505918502808, 0.5052176117897034, 0.5029464960098267, 0.503192663192749, 0.4674018919467926, 0.4327305555343628, 0.4827580749988556, 0.44052502512931824, 0.45015448331832886, 0.374491810798645, 0.41120240092277527, 0.33931177854537964, 0.28159549832344055, 0.29745686054229736, 0.33262911438941956, 0.2546529471874237, 0.24919618666172028, 0.30038347840309143, 0.28520357608795166, 0.28131112456321716, 0.24188008904457092, 0.19906455278396606, 0.2039707601070404, 0.283230721950531, 0.2393074482679367, 0.16120368242263794, 0.209192156791687, 0.19151107966899872, 0.24003323912620544, 0.2383124679327011, 0.13323870301246643, 0.20210768282413483, 0.21252617239952087, 0.16489844024181366, 0.15838722884655, 0.15414556860923767, 0.08920346945524216, 0.18110282719135284, 0.16188500821590424, 0.14822573959827423, 0.14462649822235107, 0.07673535495996475, 0.18902957439422607, 0.08951728790998459, 0.08679455518722534, 0.1354941427707672, 0.15025372803211212, 0.2071131318807602, 0.23014762997627258, 0.19371269643306732, 0.1132298931479454, 0.05065694823861122, 0.09418382495641708, 0.07476998120546341, 0.11219014972448349, 0.03167591616511345, 0.03173637390136719, 0.1261214315891266, 0.06699670106172562, 0.051265086978673935, 0.1227421760559082, 0.06480707228183746, 0.09140351414680481, 0.019068550318479538, 0.06808232516050339, 0.07977265864610672, 0.1261254847049713, 0.10272825509309769, 0.23310545086860657, 0.08881580084562302, 0.19029644131660461, 0.07674399018287659, 0.03163032978773117, 0.058682236820459366, 0.0618613138794899, 0.0789889469742775, 0.033192627131938934, 0.10236170887947083, 0.05678369104862213, 0.26384907960891724, 0.11628346145153046, 0.07242055237293243, 0.044395193457603455, 0.08786195516586304, 0.032504141330718994, 
0.031131453812122345, 0.06867041438817978, 0.07551262527704239, 0.0563284270465374, 0.12378427386283875, 0.10616520047187805, 0.05368490517139435, 0.08634105324745178, 0.017265010625123978, 0.13792777061462402, 0.036241959780454636, 0.019719859585165977, 0.06076066941022873, 0.13962452113628387, 0.12119129300117493, 0.030299704521894455, 0.11001968383789062, 0.050810620188713074, 0.26568764448165894, 0.009919365867972374, 0.05394136533141136, 0.06656327843666077, 0.021763263270258904, 0.015879830345511436, 0.08032137900590897, 0.08495171368122101, 0.02712983824312687, 0.1609228253364563, 0.03401617705821991, 0.011437006294727325, 0.07945907860994339, 0.018461011350154877, 0.07861661165952682, 0.01987522467970848, 0.021168570965528488, 0.0673016831278801, 0.12502692639827728, 0.11859587579965591, 0.15622280538082123, 0.016832754015922546, 0.07315339893102646, 0.04298650100827217, 0.27725979685783386, 0.10535978525876999, 0.07640652358531952, 0.016620127484202385, 0.02031087689101696, 0.050513237714767456, 0.20297472178936005, 0.01747487112879753, 0.03910563141107559, 0.13426096737384796, 0.08585914969444275, 0.028291454538702965, 0.11463844031095505, 0.29609841108322144, 0.015064324252307415, 0.05555401369929314, 0.19431497156620026, 0.020027797669172287, 0.004898903891444206, 0.10011210292577744, 0.07396364212036133, 0.09759460389614105, 0.003400891786441207, 0.06437862664461136, 0.11968780308961868, 0.16718178987503052, 0.0749969407916069, 0.05589193478226662, 0.1745350956916809, 0.018143052235245705, 0.033501315861940384, 0.08575007319450378, 0.017686162143945694, 0.03692617267370224, 0.08320647478103638, 0.023435505107045174, 0.06533226370811462, 0.06523420661687851, 0.11267257481813431, 0.16568031907081604]])\nloss_histories_CNN_FASHION_2L = np.array([[0.6982033252716064, 0.685494065284729, 0.7043045163154602, 0.6664453744888306, 0.692385196685791, 0.6883255839347839, 0.6670038104057312, 0.6721140146255493, 0.7477293610572815, 0.7019577622413635, 
0.7049409747123718, 0.6816695332527161, 0.6746503710746765, 0.7107524871826172, 0.6755272746086121, 0.6871398091316223, 0.6663607954978943, 0.6776321530342102, 0.6692557334899902, 0.6771387457847595, 0.6749445199966431, 0.6708089709281921, 0.6586058139801025, 0.6667677164077759, 0.6446713209152222, 0.6719373464584351, 0.6606504917144775, 0.657498300075531, 0.6470506191253662, 0.6401866674423218, 0.6414895057678223, 0.6237302422523499, 0.6307801604270935, 0.6398060917854309, 0.612299382686615, 0.6208985447883606, 0.6133882999420166, 0.6116693019866943, 0.5888859033584595, 0.5796552896499634, 0.6230239272117615, 0.5889614224433899, 0.5819820165634155, 0.5470789074897766, 0.5382369756698608, 0.5513080358505249, 0.555568516254425, 0.512162446975708, 0.542407751083374, 0.625302791595459, 0.5083873867988586, 0.5874236822128296, 0.5095157027244568, 0.5841799974441528, 0.49809378385543823, 0.5399268865585327, 0.5122842192649841, 0.5358125567436218, 0.519302248954773, 0.5164752006530762, 0.46569016575813293, 0.5407835245132446, 0.48874348402023315, 0.47747379541397095, 0.4097748100757599, 0.4176773130893707, 0.45327454805374146, 0.42068949341773987, 0.4547436237335205, 0.4771629571914673, 0.5482209920883179, 0.408530056476593, 0.4851240813732147, 0.4207240343093872, 0.46289223432540894, 0.5424150228500366, 0.39088600873947144, 0.43761688470840454, 0.3984193503856659, 0.31847116351127625, 0.5500505566596985, 0.4576525390148163, 0.5193397402763367, 0.3719850182533264, 0.34440648555755615, 0.5614837408065796, 0.28313952684402466, 0.44536086916923523, 0.34781497716903687, 0.46229103207588196, 0.3734305500984192, 0.43122661113739014, 0.3639208674430847, 0.3386707305908203, 0.41308125853538513, 0.3886600136756897, 0.40352439880371094, 0.41081854701042175, 0.3028257489204407, 0.43178990483283997, 0.38263648748397827, 0.3496360778808594, 0.358610063791275, 0.2800368368625641, 0.3421652615070343, 0.44051799178123474, 0.43672409653663635, 0.3709166347980499, 0.24854938685894012, 
0.3083757758140564, 0.3452308773994446, 0.30687573552131653, 0.3816249370574951, 0.3573996424674988, 0.26188158988952637, 0.24953699111938477, 0.17291030287742615, 0.41357630491256714, 0.3565349578857422, 0.26830142736434937, 0.25871559977531433, 0.30517154932022095, 0.27130597829818726, 0.27063170075416565, 0.21682584285736084, 0.18601535260677338, 0.2388288378715515, 0.38377171754837036, 0.21231044828891754, 0.19891099631786346, 0.40024814009666443, 0.1763014793395996, 0.2191973477602005, 0.3140704333782196, 0.2190384864807129, 0.2297554612159729, 0.28490787744522095, 0.18833035230636597, 0.17096717655658722, 0.38814058899879456, 0.35706454515457153, 0.3980378806591034, 0.3082973062992096, 0.282254695892334, 0.28172925114631653, 0.33135756850242615, 0.18617062270641327, 0.3084704279899597, 0.1653742641210556, 0.2611149847507477, 0.23364774882793427, 0.32361915707588196, 0.2880187928676605, 0.22159168124198914, 0.32011330127716064, 0.37932154536247253, 0.2227104753255844, 0.27614346146583557, 0.2528597116470337, 0.13920791447162628, 0.14623363316059113, 0.29781511425971985, 0.30779993534088135, 0.23402608931064606, 0.2049279808998108, 0.195842444896698, 0.284122109413147, 0.26556384563446045, 0.16032564640045166, 0.19326667487621307, 0.16147050261497498, 0.2859514355659485, 0.27686405181884766, 0.14044542610645294, 0.1292620599269867, 0.22018609941005707, 0.16846336424350739, 0.1789669245481491, 0.22029483318328857, 0.20728746056556702, 0.44362884759902954, 0.31444764137268066, 0.23610062897205353, 0.3099147081375122, 0.2594585418701172, 0.13596713542938232, 0.14830733835697174, 0.11959406733512878, 0.11047989130020142, 0.19856494665145874, 0.12652510404586792, 0.3232700824737549, 0.13162431120872498, 0.16679613292217255, 0.1600688397884369, 0.1490894854068756, 0.17836572229862213, 0.1767909973859787, 0.1359943300485611, 0.3607396185398102],\n [0.722959041595459, 0.8378788828849792, 0.6359409093856812, 0.7786480188369751, 0.6722474098205566, 0.7209513783454895, 
0.7147747874259949, 0.7389553785324097, 0.7043027281761169, 0.6997193098068237, 0.6960577964782715, 0.6514461040496826, 0.7242903113365173, 0.7110193371772766, 0.6887984275817871, 0.6825430393218994, 0.6723452210426331, 0.668528139591217, 0.656069278717041, 0.6439428925514221, 0.67144376039505, 0.681311845779419, 0.6493660807609558, 0.6525905728340149, 0.6488508582115173, 0.6426312327384949, 0.655098021030426, 0.6406035423278809, 0.6233764886856079, 0.6242199540138245, 0.6306735873222351, 0.6166422963142395, 0.5946296453475952, 0.5960651636123657, 0.6093128323554993, 0.6189892888069153, 0.5935332179069519, 0.586617648601532, 0.5726534128189087, 0.5882557034492493, 0.5732150673866272, 0.6082722544670105, 0.5631254315376282, 0.5543763637542725, 0.6138622760772705, 0.5315272808074951, 0.5507160425186157, 0.5241436958312988, 0.5181093811988831, 0.474336177110672, 0.5000462532043457, 0.5013487339019775, 0.4983973801136017, 0.4515523612499237, 0.439666211605072, 0.4340149164199829, 0.46038758754730225, 0.44209349155426025, 0.4850611984729767, 0.42503097653388977, 0.4745623767375946, 0.45707252621650696, 0.37832871079444885, 0.46673983335494995, 0.4491257071495056, 0.37395069003105164, 0.4663311839103699, 0.3531072735786438, 0.3056586682796478, 0.4227212071418762, 0.3507891595363617, 0.325242817401886, 0.342677503824234, 0.28175806999206543, 0.4168330132961273, 0.2992843985557556, 0.38543903827667236, 0.310502290725708, 0.32055363059043884, 0.3279689848423004, 0.22771680355072021, 0.2996452748775482, 0.27647387981414795, 0.3224877119064331, 0.24797911942005157, 0.31701239943504333, 0.27963507175445557, 0.3111099600791931, 0.2598114013671875, 0.2720106840133667, 0.28728973865509033, 0.31687983870506287, 0.16315454244613647, 0.28481221199035645, 0.32374823093414307, 0.2818513810634613, 0.27142226696014404, 0.26268696784973145, 0.18802690505981445, 0.2330765277147293, 0.28008073568344116, 0.13481254875659943, 0.23573260009288788, 0.1210196316242218, 0.19468261301517487, 
0.17421527206897736, 0.17932678759098053, 0.22074772417545319, 0.15295524895191193, 0.12509538233280182, 0.18809691071510315, 0.18245065212249756, 0.20922879874706268, 0.2670697271823883, 0.22580507397651672, 0.16062802076339722, 0.23412327468395233, 0.43120795488357544, 0.2926293611526489, 0.2947879135608673, 0.17843599617481232, 0.3449716866016388, 0.15644687414169312, 0.19783726334571838, 0.0992257222533226, 0.11392301321029663, 0.32231271266937256, 0.19188730418682098, 0.16153278946876526, 0.2035246342420578, 0.18345876038074493, 0.051402803510427475, 0.07299385964870453, 0.24596910178661346, 0.317927747964859, 0.16063934564590454, 0.3693201541900635, 0.3464672565460205, 0.24623923003673553, 0.1416863203048706, 0.28232452273368835, 0.23875318467617035, 0.21324512362480164, 0.2701603174209595, 0.18765276670455933, 0.2246081680059433, 0.15949931740760803, 0.17738769948482513, 0.18758100271224976, 0.21686187386512756, 0.11897433549165726, 0.1632753312587738, 0.1252729594707489, 0.18664729595184326, 0.22377149760723114, 0.0922139436006546, 0.3411566913127899, 0.1822480410337448, 0.2687120735645294, 0.2085094302892685, 0.36180827021598816, 0.16845770180225372, 0.22141145169734955, 0.16436506807804108, 0.14664797484874725, 0.1761969029903412, 0.3861934244632721, 0.2474733293056488, 0.152466282248497, 0.27540338039398193, 0.32959243655204773, 0.0701957643032074, 0.23025813698768616, 0.30845510959625244, 0.19448892772197723, 0.15782098472118378, 0.0669102892279625, 0.17205177247524261, 0.14290541410446167, 0.04908277839422226, 0.16558429598808289, 0.3065306544303894, 0.07012531161308289, 0.38416728377342224, 0.11788350343704224, 0.12578633427619934, 0.057048648595809937, 0.17244094610214233, 0.16887983679771423, 0.21592962741851807, 0.1084679663181305, 0.06436526030302048, 0.13887561857700348, 0.05770181491971016, 0.16522125899791718, 0.34186455607414246, 0.1313944011926651, 0.09368456155061722, 0.15273809432983398, 0.07838013023138046],", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_FASHION_3L", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_CNN_FASHION_3L = np.array([[0.681438684463501, 0.6889891028404236, 0.6886380910873413, 0.6888549327850342, 0.7001448273658752, 0.6896989345550537, 0.6827958822250366, 0.6809604167938232, 0.6802992820739746, 0.6798832416534424, 0.6873130202293396, 0.6706038117408752, 0.6825727820396423, 0.6711708307266235, 0.6692336797714233, 0.6626574993133545, 0.6598449945449829, 0.6661273241043091, 0.6690754890441895, 0.6628720164299011, 0.6578156352043152, 0.6536375284194946, 0.6395933032035828, 0.6494665741920471, 0.6431573629379272, 0.6422117352485657, 0.6304263472557068, 0.6317248940467834, 0.6545873284339905, 0.6310784816741943, 0.6355032920837402, 0.6170492768287659, 0.6209865808486938, 0.6031823754310608, 0.5973383188247681, 0.5987150073051453, 0.5997783541679382, 0.5827844142913818, 0.5649464130401611, 0.5976262092590332, 0.5651332139968872, 0.5687211155891418, 0.5581858158111572, 0.5616911053657532, 0.5290145874023438, 0.5284591317176819, 0.5671270489692688, 0.47875893115997314, 0.5753456950187683, 0.5510994791984558, 0.5518543720245361, 0.4903143048286438, 0.48829033970832825, 0.5358155965805054, 0.530174732208252, 0.5644151568412781, 0.4735930562019348, 0.4666731357574463, 0.4378604590892792, 0.470310240983963, 0.4612236022949219, 0.5205991268157959, 0.4360387921333313, 0.44255444407463074, 0.5049978494644165, 0.40847957134246826, 0.4495522379875183, 0.4902504086494446, 0.4367920160293579, 0.4487551152706146, 0.4421207010746002, 0.37032660841941833, 0.3559698164463043, 0.42548468708992004, 0.4063631296157837, 0.45850908756256104, 0.38015541434288025, 0.3413124084472656, 
0.4414171576499939, 0.3528973460197449, 0.3753133714199066, 0.4682203233242035, 0.38733014464378357, 0.3464372158050537, 0.3330712914466858, 0.38146668672561646, 0.37155666947364807, 0.3937838077545166, 0.3397389352321625, 0.2911732792854309, 0.3185632824897766, 0.34439948201179504, 0.37463000416755676, 0.2941756844520569, 0.3446425497531891, 0.2543509304523468, 0.44017115235328674, 0.2634207010269165, 0.32429203391075134, 0.3531845808029175, 0.4209728538990021, 0.2728058993816376, 0.29612597823143005, 0.36481037735939026, 0.23760972917079926, 0.31274184584617615, 0.29005640745162964, 0.3057081699371338, 0.3319462239742279, 0.4440755546092987, 0.3070727586746216, 0.3044629395008087, 0.32753700017929077, 0.2656729221343994, 0.422760009765625, 0.2262178659439087, 0.34336617588996887, 0.28752654790878296, 0.22466769814491272, 0.19892671704292297, 0.32547518610954285, 0.3219960033893585, 0.2053043395280838, 0.33121222257614136, 0.24561984837055206, 0.18318641185760498, 0.3192058503627777, 0.30691757798194885, 0.18206992745399475, 0.22018828988075256, 0.38999852538108826, 0.17489850521087646, 0.30276167392730713, 0.21708929538726807, 0.35204121470451355, 0.20095224678516388, 0.26634764671325684, 0.1619335412979126, 0.17421387135982513, 0.17063964903354645, 0.23774376511573792, 0.21871469914913177, 0.28005892038345337, 0.14502578973770142, 0.2649957239627838, 0.31908106803894043, 0.22305874526500702, 0.19752325117588043, 0.22612883150577545, 0.2095600664615631, 0.3497889041900635, 0.24804994463920593, 0.18516618013381958, 0.19408148527145386, 0.39791882038116455, 0.2503810226917267, 0.13216803967952728, 0.1892985850572586, 0.2676987946033478, 0.1970151662826538, 0.2100503146648407, 0.14209169149398804, 0.1863841563463211, 0.14308282732963562, 0.3121347725391388, 0.2626650333404541, 0.12991006672382355, 0.16739721596240997, 0.11467546224594116, 0.32184040546417236, 0.11009383946657181, 0.19579708576202393, 0.13897623121738434, 0.1434190273284912, 0.31635451316833496, 
0.2833975851535797, 0.16934165358543396, 0.14960110187530518, 0.31888577342033386, 0.21167083084583282, 0.30760568380355835, 0.41453269124031067, 0.229119211435318, 0.1743178516626358, 0.2994597852230072, 0.24154801666736603, 0.09829962998628616, 0.1747366487979889, 0.29539310932159424, 0.2035258263349533, 0.30196595191955566, 0.30807745456695557, 0.3411264419555664, 0.15731027722358704, 0.12981931865215302, 0.2966948449611664, 0.21025419235229492, 0.15241996943950653, 0.07024487853050232, 0.23431310057640076],\n [0.7195901274681091, 0.6704943180084229, 0.7085641622543335, 0.6748741269111633, 0.7423946857452393, 0.7234722375869751, 0.6986007690429688, 0.6960390210151672, 0.7012070417404175, 0.6947640180587769, 0.6792826056480408, 0.7239813804626465, 0.6761874556541443, 0.6902806758880615, 0.6771406531333923, 0.6850265264511108, 0.692349910736084, 0.6808741092681885, 0.6753465533256531, 0.6633909344673157, 0.6686793565750122, 0.6637576222419739, 0.680237889289856, 0.654888391494751, 0.6780327558517456, 0.6408708095550537, 0.6315723061561584, 0.6431190371513367, 0.6341533064842224, 0.6376373767852783, 0.6409605145454407, 0.6432655453681946, 0.6013216972351074, 0.6401882171630859, 0.6355386972427368, 0.5561983585357666, 0.5904024839401245, 0.6024128198623657, 0.5541431903839111, 0.575777530670166, 0.5260757803916931, 0.5354135036468506, 0.44182831048965454, 0.4903651475906372, 0.4765366017818451, 0.4635029733181, 0.5017414093017578, 0.4560497999191284, 0.4651477336883545, 0.4632211923599243, 0.5207407474517822, 0.47012707591056824, 0.35573041439056396, 0.40080904960632324, 0.5226706266403198, 0.3614789843559265, 0.3670787811279297, 0.4320698082447052, 0.4483892321586609, 0.3988180160522461, 0.44299742579460144, 0.4347727596759796, 0.2980192303657532, 0.3726247549057007, 0.42988693714141846, 0.39973509311676025, 0.34497734904289246, 0.39063209295272827, 0.3743012249469757, 0.3100280165672302, 0.3662980794906616, 0.3414981961250305, 0.32001861929893494, 
0.3834022283554077, 0.23791366815567017, 0.30651259422302246, 0.2968301773071289, 0.3293622136116028, 0.1965251863002777, 0.3719707429409027, 0.3272671401500702, 0.24777932465076447, 0.3863266110420227, 0.24999620020389557, 0.21851596236228943, 0.21008063852787018, 0.3156169652938843, 0.3615129590034485, 0.2176506221294403, 0.19833393394947052, 0.3163139820098877, 0.3545341491699219, 0.19693829119205475, 0.28649449348449707, 0.31108948588371277, 0.2429426908493042, 0.20192016661167145, 0.21015271544456482, 0.2626788020133972, 0.3365514278411865, 0.2673414647579193, 0.09546613693237305, 0.12053285539150238, 0.17347104847431183, 0.23780611157417297, 0.24000667035579681, 0.24722233414649963, 0.23631177842617035, 0.24386261403560638, 0.16348625719547272, 0.18626368045806885, 0.23263885080814362, 0.12030680477619171, 0.18763351440429688, 0.06662426143884659, 0.2510358393192291, 0.35354670882225037, 0.22257286310195923, 0.24602772295475006, 0.10453713685274124, 0.185865119099617, 0.120824433863163, 0.16575820744037628, 0.16989396512508392, 0.06567985564470291, 0.4476766586303711, 0.13566845655441284, 0.2541764974594116, 0.16894878447055817, 0.31662842631340027, 0.20940165221691132, 0.20982550084590912, 0.2824331521987915, 0.2067907750606537, 0.1581425666809082, 0.2816847264766693, 0.20116807520389557, 0.1924077868461609, 0.17162162065505981, 0.1839713156223297, 0.13327762484550476, 0.12191877514123917, 0.08460020273923874, 0.142286017537117, 0.22156421840190887, 0.1368444859981537, 0.08621849864721298, 0.427219033241272, 0.0529838465154171, 0.09679907560348511, 0.1012415811419487, 0.10245517641305923, 0.31773877143859863, 0.21166706085205078, 0.06600738316774368, 0.12189425528049469, 0.1269007921218872, 0.19244657456874847, 0.27885836362838745, 0.05483828857541084, 0.21648481488227844, 0.20490846037864685, 0.07530874758958817, 0.2320171296596527, 0.02713399939239025, 0.13403251767158508, 0.2914361357688904, 0.07058202475309372, 0.19688743352890015, 0.14317165315151215, 
0.3330090343952179, 0.0591675229370594, 0.19346661865711212, 0.13119332492351532, 0.07487566769123077, 0.16602414846420288, 0.016024962067604065, 0.1277586966753006, 0.6630570292472839, 0.2762067914009094, 0.11343107372522354, 0.2670533359050751, 0.11601337790489197, 0.32291847467422485, 0.1735117882490158, 0.11186012625694275, 0.0449039451777935, 0.12063135951757431, 0.09390923380851746, 0.08387088775634766, 0.07510893046855927, 0.11414384096860886, 0.15171335637569427, 0.08825315535068512, 0.050439488142728806, 0.34374308586120605, 0.21407325565814972, 0.18398113548755646, 0.10786985605955124, 0.1447535753250122],\n [0.6444740295410156, 0.6480551958084106, 0.7680001258850098, 0.6938083171844482, 0.6945194005966187, 0.6434619426727295, 0.6478080153465271, 0.6552700996398926, 0.6474529504776001, 0.6316699385643005, 0.6500007510185242, 0.5896865725517273, 0.7054746150970459, 0.5965741872787476, 0.5899113416671753, 0.5865727066993713, 0.5499936938285828, 0.5815310478210449, 0.5552288293838501, 0.5780386924743652, 0.600023090839386, 0.4392690360546112, 0.5045123100280762, 0.5101278424263, 0.48246777057647705, 0.4923814833164215, 0.443683385848999, 0.3944355249404907, 0.4880301356315613, 0.35789451003074646, 0.36372992396354675, 0.47596442699432373, 0.4648134112358093, 0.31122469902038574, 0.30873405933380127, 0.36700308322906494, 0.2557302713394165, 0.43774741888046265, 0.36664271354675293, 0.38658323884010315, 0.39342930912971497, 0.25078845024108887, 0.332029789686203, 0.3891676962375641, 0.350697785615921, 0.5877716541290283, 0.3053388297557831, 0.35364529490470886, 0.35642775893211365, 0.5797093510627747, 0.308402419090271, 0.33300063014030457, 0.20527392625808716, 0.37823060154914856, 0.23164953291416168, 0.3390084207057953, 0.48220548033714294, 0.19653253257274628, 0.30830979347229004, 0.27321138978004456, 0.25642216205596924, 0.4782291650772095, 0.4833580255508423, 0.24064145982265472, 0.2778085470199585, 0.14250364899635315, 0.21408909559249878, 
0.3077957332134247, 0.24375246465206146, 0.2403443455696106, 0.318545937538147, 0.27636250853538513, 0.20684844255447388, 0.39635372161865234, 0.2426823079586029, 0.3302248418331146, 0.21984857320785522, 0.3096882700920105, 0.16199436783790588, 0.2731266915798187, 0.16509762406349182, 0.19987358152866364, 0.21301832795143127, 0.16873307526111603, 0.4657672643661499, 0.19174867868423462, 0.23006097972393036, 0.3353741765022278, 0.2514898180961609, 0.4382299482822418, 0.17479459941387177, 0.1549443155527115, 0.36182865500450134, 0.1583036184310913, 0.19739067554473877, 0.3335547149181366, 0.17704877257347107, 0.22076626121997833, 0.26111721992492676, 0.1475856900215149, 0.39272114634513855, 0.06898980587720871, 0.20126555860042572, 0.1861010193824768, 0.2317998707294464, 0.21326132118701935, 0.2857711911201477, 0.09368398040533066, 0.1964348554611206, 0.18394115567207336, 0.13550300896167755, 0.14457449316978455, 0.15295009315013885, 0.16381102800369263, 0.2795810401439667, 0.17563505470752716, 0.23439742624759674, 0.27755534648895264, 0.19725731015205383, 0.24526743590831757, 0.3030700087547302, 0.22834354639053345, 0.13675029575824738, 0.08429285883903503, 0.1311313807964325, 0.18073631823062897, 0.07007424533367157, 0.3054443299770355, 0.10602294653654099, 0.16532698273658752, 0.15915970504283905, 0.09758727997541428, 0.35041344165802, 0.09038594365119934, 0.16694681346416473, 0.1270684152841568, 0.08292841911315918, 0.26534590125083923, 0.19535087049007416, 0.31837645173072815, 0.19049564003944397, 0.27681252360343933, 0.18552172183990479, 0.03605049103498459, 0.39923956990242004, 0.23028255999088287, 0.12411822378635406, 0.08776320517063141, 0.3358216881752014, 0.17280055582523346, 0.07582242041826248, 0.1299680471420288, 0.07957464456558228, 0.2518991231918335, 0.22218579053878784, 0.2953566014766693, 0.23980411887168884, 0.2516424059867859, 0.1552591174840927, 0.1568763107061386, 0.11077530682086945, 0.06224577873945236, 0.14881819486618042, 
0.3115622401237488, 0.26296448707580566, 0.1568690836429596, 0.13243553042411804, 0.16159479320049286, 0.11857082694768906, 0.4102708399295807, 0.12056007981300354, 0.08759808540344238, 0.1313447803258896, 0.2028246819972992, 0.45793774724006653, 0.06600210815668106, 0.08839228749275208, 0.2017313838005066, 0.04523666948080063, 0.4295879304409027, 0.3630306124687195, 0.49682503938674927, 0.4873455762863159, 0.1069202795624733, 0.06406697630882263, 0.18771663308143616, 0.14819641411304474, 0.2584706246852875, 0.1014157086610794, 0.2010267823934555, 0.10214871168136597, 0.1414615660905838, 0.06353311240673065, 0.0687236413359642, 0.16954435408115387, 0.10046547651290894, 0.0897723138332367, 0.17460182309150696, 0.23726686835289001, 0.3276420831680298],\n [0.651547908782959, 0.6305937767028809, 0.714191198348999, 0.7366882562637329, 0.7081306576728821, 0.7052466869354248, 0.6959658265113831, 0.6876019239425659, 0.6796861290931702, 0.6682548522949219, 0.6783831119537354, 0.6809177398681641, 0.6594600081443787, 0.6597816944122314, 0.6607940196990967, 0.6554914712905884, 0.6487285494804382, 0.6310643553733826, 0.646901547908783, 0.6084935069084167, 0.646790087223053, 0.620003342628479, 0.6197651028633118, 0.6110859513282776, 0.6023704409599304, 0.5829523205757141, 0.5887001156806946, 0.5428747534751892, 0.5947333574295044, 0.520041823387146, 0.530125081539154, 0.5711768269538879, 0.517208993434906, 0.5556505918502808, 0.5052176117897034, 0.5029464960098267, 0.503192663192749, 0.4674018919467926, 0.4327305555343628, 0.4827580749988556, 0.44052502512931824, 0.45015448331832886, 0.374491810798645, 0.41120240092277527, 0.33931177854537964, 0.28159549832344055, 0.29745686054229736, 0.33262911438941956, 0.2546529471874237, 0.24919618666172028, 0.30038347840309143, 0.28520357608795166, 0.28131112456321716, 0.24188008904457092, 0.19906455278396606, 0.2039707601070404, 0.283230721950531, 0.2393074482679367, 0.16120368242263794, 0.209192156791687, 0.19151107966899872, 
0.24003323912620544, 0.2383124679327011, 0.13323870301246643, 0.20210768282413483, 0.21252617239952087, 0.16489844024181366, 0.15838722884655, 0.15414556860923767, 0.08920346945524216, 0.18110282719135284, 0.16188500821590424, 0.14822573959827423, 0.14462649822235107, 0.07673535495996475, 0.18902957439422607, 0.08951728790998459, 0.08679455518722534, 0.1354941427707672, 0.15025372803211212, 0.2071131318807602, 0.23014762997627258, 0.19371269643306732, 0.1132298931479454, 0.05065694823861122, 0.09418382495641708, 0.07476998120546341, 0.11219014972448349, 0.03167591616511345, 0.03173637390136719, 0.1261214315891266, 0.06699670106172562, 0.051265086978673935, 0.1227421760559082, 0.06480707228183746, 0.09140351414680481, 0.019068550318479538, 0.06808232516050339, 0.07977265864610672, 0.1261254847049713, 0.10272825509309769, 0.23310545086860657, 0.08881580084562302, 0.19029644131660461, 0.07674399018287659, 0.03163032978773117, 0.058682236820459366, 0.0618613138794899, 0.0789889469742775, 0.033192627131938934, 0.10236170887947083, 0.05678369104862213, 0.26384907960891724, 0.11628346145153046, 0.07242055237293243, 0.044395193457603455, 0.08786195516586304, 0.032504141330718994, 0.031131453812122345, 0.06867041438817978, 0.07551262527704239, 0.0563284270465374, 0.12378427386283875, 0.10616520047187805, 0.05368490517139435, 0.08634105324745178, 0.017265010625123978, 0.13792777061462402, 0.036241959780454636, 0.019719859585165977, 0.06076066941022873, 0.13962452113628387, 0.12119129300117493, 0.030299704521894455, 0.11001968383789062, 0.050810620188713074, 0.26568764448165894, 0.009919365867972374, 0.05394136533141136, 0.06656327843666077, 0.021763263270258904, 0.015879830345511436, 0.08032137900590897, 0.08495171368122101, 0.02712983824312687, 0.1609228253364563, 0.03401617705821991, 0.011437006294727325, 0.07945907860994339, 0.018461011350154877, 0.07861661165952682, 0.01987522467970848, 0.021168570965528488, 0.0673016831278801, 0.12502692639827728, 0.11859587579965591, 
0.15622280538082123, 0.016832754015922546, 0.07315339893102646, 0.04298650100827217, 0.27725979685783386, 0.10535978525876999, 0.07640652358531952, 0.016620127484202385, 0.02031087689101696, 0.050513237714767456, 0.20297472178936005, 0.01747487112879753, 0.03910563141107559, 0.13426096737384796, 0.08585914969444275, 0.028291454538702965, 0.11463844031095505, 0.29609841108322144, 0.015064324252307415, 0.05555401369929314, 0.19431497156620026, 0.020027797669172287, 0.004898903891444206, 0.10011210292577744, 0.07396364212036133, 0.09759460389614105, 0.003400891786441207, 0.06437862664461136, 0.11968780308961868, 0.16718178987503052, 0.0749969407916069, 0.05589193478226662, 0.1745350956916809, 0.018143052235245705, 0.033501315861940384, 0.08575007319450378, 0.017686162143945694, 0.03692617267370224, 0.08320647478103638, 0.023435505107045174, 0.06533226370811462, 0.06523420661687851, 0.11267257481813431, 0.16568031907081604]])\nloss_histories_CNN_FASHION_2L = np.array([[0.6982033252716064, 0.685494065284729, 0.7043045163154602, 0.6664453744888306, 0.692385196685791, 0.6883255839347839, 0.6670038104057312, 0.6721140146255493, 0.7477293610572815, 0.7019577622413635, 0.7049409747123718, 0.6816695332527161, 0.6746503710746765, 0.7107524871826172, 0.6755272746086121, 0.6871398091316223, 0.6663607954978943, 0.6776321530342102, 0.6692557334899902, 0.6771387457847595, 0.6749445199966431, 0.6708089709281921, 0.6586058139801025, 0.6667677164077759, 0.6446713209152222, 0.6719373464584351, 0.6606504917144775, 0.657498300075531, 0.6470506191253662, 0.6401866674423218, 0.6414895057678223, 0.6237302422523499, 0.6307801604270935, 0.6398060917854309, 0.612299382686615, 0.6208985447883606, 0.6133882999420166, 0.6116693019866943, 0.5888859033584595, 0.5796552896499634, 0.6230239272117615, 0.5889614224433899, 0.5819820165634155, 0.5470789074897766, 0.5382369756698608, 0.5513080358505249, 0.555568516254425, 0.512162446975708, 0.542407751083374, 0.625302791595459, 0.5083873867988586, 
0.5874236822128296, 0.5095157027244568, 0.5841799974441528, 0.49809378385543823, 0.5399268865585327, 0.5122842192649841, 0.5358125567436218, 0.519302248954773, 0.5164752006530762, 0.46569016575813293, 0.5407835245132446, 0.48874348402023315, 0.47747379541397095, 0.4097748100757599, 0.4176773130893707, 0.45327454805374146, 0.42068949341773987, 0.4547436237335205, 0.4771629571914673, 0.5482209920883179, 0.408530056476593, 0.4851240813732147, 0.4207240343093872, 0.46289223432540894, 0.5424150228500366, 0.39088600873947144, 0.43761688470840454, 0.3984193503856659, 0.31847116351127625, 0.5500505566596985, 0.4576525390148163, 0.5193397402763367, 0.3719850182533264, 0.34440648555755615, 0.5614837408065796, 0.28313952684402466, 0.44536086916923523, 0.34781497716903687, 0.46229103207588196, 0.3734305500984192, 0.43122661113739014, 0.3639208674430847, 0.3386707305908203, 0.41308125853538513, 0.3886600136756897, 0.40352439880371094, 0.41081854701042175, 0.3028257489204407, 0.43178990483283997, 0.38263648748397827, 0.3496360778808594, 0.358610063791275, 0.2800368368625641, 0.3421652615070343, 0.44051799178123474, 0.43672409653663635, 0.3709166347980499, 0.24854938685894012, 0.3083757758140564, 0.3452308773994446, 0.30687573552131653, 0.3816249370574951, 0.3573996424674988, 0.26188158988952637, 0.24953699111938477, 0.17291030287742615, 0.41357630491256714, 0.3565349578857422, 0.26830142736434937, 0.25871559977531433, 0.30517154932022095, 0.27130597829818726, 0.27063170075416565, 0.21682584285736084, 0.18601535260677338, 0.2388288378715515, 0.38377171754837036, 0.21231044828891754, 0.19891099631786346, 0.40024814009666443, 0.1763014793395996, 0.2191973477602005, 0.3140704333782196, 0.2190384864807129, 0.2297554612159729, 0.28490787744522095, 0.18833035230636597, 0.17096717655658722, 0.38814058899879456, 0.35706454515457153, 0.3980378806591034, 0.3082973062992096, 0.282254695892334, 0.28172925114631653, 0.33135756850242615, 0.18617062270641327, 0.3084704279899597, 
0.1653742641210556, 0.2611149847507477, 0.23364774882793427, 0.32361915707588196, 0.2880187928676605, 0.22159168124198914, 0.32011330127716064, 0.37932154536247253, 0.2227104753255844, 0.27614346146583557, 0.2528597116470337, 0.13920791447162628, 0.14623363316059113, 0.29781511425971985, 0.30779993534088135, 0.23402608931064606, 0.2049279808998108, 0.195842444896698, 0.284122109413147, 0.26556384563446045, 0.16032564640045166, 0.19326667487621307, 0.16147050261497498, 0.2859514355659485, 0.27686405181884766, 0.14044542610645294, 0.1292620599269867, 0.22018609941005707, 0.16846336424350739, 0.1789669245481491, 0.22029483318328857, 0.20728746056556702, 0.44362884759902954, 0.31444764137268066, 0.23610062897205353, 0.3099147081375122, 0.2594585418701172, 0.13596713542938232, 0.14830733835697174, 0.11959406733512878, 0.11047989130020142, 0.19856494665145874, 0.12652510404586792, 0.3232700824737549, 0.13162431120872498, 0.16679613292217255, 0.1600688397884369, 0.1490894854068756, 0.17836572229862213, 0.1767909973859787, 0.1359943300485611, 0.3607396185398102],\n [0.722959041595459, 0.8378788828849792, 0.6359409093856812, 0.7786480188369751, 0.6722474098205566, 0.7209513783454895, 0.7147747874259949, 0.7389553785324097, 0.7043027281761169, 0.6997193098068237, 0.6960577964782715, 0.6514461040496826, 0.7242903113365173, 0.7110193371772766, 0.6887984275817871, 0.6825430393218994, 0.6723452210426331, 0.668528139591217, 0.656069278717041, 0.6439428925514221, 0.67144376039505, 0.681311845779419, 0.6493660807609558, 0.6525905728340149, 0.6488508582115173, 0.6426312327384949, 0.655098021030426, 0.6406035423278809, 0.6233764886856079, 0.6242199540138245, 0.6306735873222351, 0.6166422963142395, 0.5946296453475952, 0.5960651636123657, 0.6093128323554993, 0.6189892888069153, 0.5935332179069519, 0.586617648601532, 0.5726534128189087, 0.5882557034492493, 0.5732150673866272, 0.6082722544670105, 0.5631254315376282, 0.5543763637542725, 0.6138622760772705, 0.5315272808074951, 
0.5507160425186157, 0.5241436958312988, 0.5181093811988831, 0.474336177110672, 0.5000462532043457, 0.5013487339019775, 0.4983973801136017, 0.4515523612499237, 0.439666211605072, 0.4340149164199829, 0.46038758754730225, 0.44209349155426025, 0.4850611984729767, 0.42503097653388977, 0.4745623767375946, 0.45707252621650696, 0.37832871079444885, 0.46673983335494995, 0.4491257071495056, 0.37395069003105164, 0.4663311839103699, 0.3531072735786438, 0.3056586682796478, 0.4227212071418762, 0.3507891595363617, 0.325242817401886, 0.342677503824234, 0.28175806999206543, 0.4168330132961273, 0.2992843985557556, 0.38543903827667236, 0.310502290725708, 0.32055363059043884, 0.3279689848423004, 0.22771680355072021, 0.2996452748775482, 0.27647387981414795, 0.3224877119064331, 0.24797911942005157, 0.31701239943504333, 0.27963507175445557, 0.3111099600791931, 0.2598114013671875, 0.2720106840133667, 0.28728973865509033, 0.31687983870506287, 0.16315454244613647, 0.28481221199035645, 0.32374823093414307, 0.2818513810634613, 0.27142226696014404, 0.26268696784973145, 0.18802690505981445, 0.2330765277147293, 0.28008073568344116, 0.13481254875659943, 0.23573260009288788, 0.1210196316242218, 0.19468261301517487, 0.17421527206897736, 0.17932678759098053, 0.22074772417545319, 0.15295524895191193, 0.12509538233280182, 0.18809691071510315, 0.18245065212249756, 0.20922879874706268, 0.2670697271823883, 0.22580507397651672, 0.16062802076339722, 0.23412327468395233, 0.43120795488357544, 0.2926293611526489, 0.2947879135608673, 0.17843599617481232, 0.3449716866016388, 0.15644687414169312, 0.19783726334571838, 0.0992257222533226, 0.11392301321029663, 0.32231271266937256, 0.19188730418682098, 0.16153278946876526, 0.2035246342420578, 0.18345876038074493, 0.051402803510427475, 0.07299385964870453, 0.24596910178661346, 0.317927747964859, 0.16063934564590454, 0.3693201541900635, 0.3464672565460205, 0.24623923003673553, 0.1416863203048706, 0.28232452273368835, 0.23875318467617035, 0.21324512362480164, 
0.2701603174209595, 0.18765276670455933, 0.2246081680059433, 0.15949931740760803, 0.17738769948482513, 0.18758100271224976, 0.21686187386512756, 0.11897433549165726, 0.1632753312587738, 0.1252729594707489, 0.18664729595184326, 0.22377149760723114, 0.0922139436006546, 0.3411566913127899, 0.1822480410337448, 0.2687120735645294, 0.2085094302892685, 0.36180827021598816, 0.16845770180225372, 0.22141145169734955, 0.16436506807804108, 0.14664797484874725, 0.1761969029903412, 0.3861934244632721, 0.2474733293056488, 0.152466282248497, 0.27540338039398193, 0.32959243655204773, 0.0701957643032074, 0.23025813698768616, 0.30845510959625244, 0.19448892772197723, 0.15782098472118378, 0.0669102892279625, 0.17205177247524261, 0.14290541410446167, 0.04908277839422226, 0.16558429598808289, 0.3065306544303894, 0.07012531161308289, 0.38416728377342224, 0.11788350343704224, 0.12578633427619934, 0.057048648595809937, 0.17244094610214233, 0.16887983679771423, 0.21592962741851807, 0.1084679663181305, 0.06436526030302048, 0.13887561857700348, 0.05770181491971016, 0.16522125899791718, 0.34186455607414246, 0.1313944011926651, 0.09368456155061722, 0.15273809432983398, 0.07838013023138046],\n [0.7304385900497437, 0.6144606471061707, 0.7663922309875488, 0.7456250190734863, 0.6472451686859131, 0.6408094763755798, 0.6577707529067993, 0.6627257466316223, 0.6259477734565735, 0.6269749402999878, 0.6147814393043518, 0.5954440236091614, 0.5954847931861877, 0.622576117515564, 0.5681723952293396, 0.6685020327568054, 0.6117186546325684, 0.6318418383598328, 0.6105124950408936, 0.6059049367904663, 0.5891886353492737, 0.570899248123169, 0.5469162464141846, 0.48898395895957947, 0.5481796264648438, 0.5801829695701599, 0.5309649705886841, 0.5229575037956238, 0.5118927359580994, 0.5621098875999451, 0.5783540606498718, 0.6303244829177856, 0.5337151288986206, 0.5568647980690002, 0.506201982498169, 0.4340185821056366, 0.41403329372406006, 0.4785533547401428, 0.4407808184623718, 0.40219631791114807, 
0.4949192702770233, 0.44360852241516113, 0.4859486520290375, 0.6203747391700745, 0.4916355013847351, 0.4286290407180786, 0.4152555763721466, 0.42479854822158813, 0.41854721307754517, 0.3812496066093445, 0.5289347171783447, 0.4742628037929535, 0.5494931936264038, 0.5970712304115295, 0.4466589093208313, 0.3422013819217682, 0.3693443238735199, 0.5024840235710144, 0.49555644392967224, 0.5533590912818909, 0.39836445450782776, 0.47018149495124817, 0.4293385446071625, 0.45852386951446533, 0.3161731958389282, 0.3988656997680664, 0.4170445501804352, 0.5251786708831787, 0.4676470458507538, 0.3298081159591675, 0.4780762195587158, 0.461908757686615, 0.39387425780296326, 0.32310259342193604, 0.267741858959198, 0.30676278471946716, 0.36632004380226135, 0.27854710817337036, 0.42963460087776184, 0.25819167494773865, 0.3424122929573059, 0.29098108410835266, 0.3162521421909332, 0.39036551117897034, 0.27974581718444824, 0.1921042948961258, 0.35803231596946716, 0.22351698577404022, 0.23193897306919098, 0.270576536655426, 0.26012200117111206, 0.4471112787723541, 0.25369831919670105, 0.2558428645133972, 0.32542353868484497, 0.36307579278945923, 0.3768329918384552, 0.4200160503387451, 0.22499065101146698, 0.5793442726135254, 0.3363257348537445, 0.4287791848182678, 0.37058791518211365, 0.48414185643196106, 0.6411834955215454, 0.42239871621131897, 0.30433931946754456, 0.5210206508636475, 0.36561423540115356, 0.3857577443122864, 0.43900078535079956, 0.39965251088142395, 0.34631234407424927, 0.33402514457702637, 0.1892768293619156, 0.5548384189605713, 0.294956237077713, 0.2289484739303589, 0.3705907464027405, 0.2672608494758606, 0.20700865983963013, 0.3425357937812805, 0.3496844470500946, 0.23137161135673523, 0.2634831368923187, 0.22181850671768188, 0.18999303877353668, 0.15023399889469147, 0.21937914192676544, 0.14932149648666382, 0.34122416377067566, 0.4100107252597809, 0.3010890781879425, 0.24609117209911346, 0.27445802092552185, 0.3145654499530792, 0.24431446194648743, 
0.19399075210094452, 0.36408981680870056, 0.2884557843208313, 0.16600178182125092, 0.28500232100486755, 0.23745673894882202, 0.21136383712291718, 0.0959608182311058, 0.15006370842456818, 0.18879224359989166, 0.15188197791576385, 0.1134112998843193, 0.28580376505851746, 0.1936863660812378, 0.18274204432964325, 0.14029626548290253, 0.23430503904819489, 0.2909218668937683, 0.4258160889148712, 0.17616671323776245, 0.2367817759513855, 0.2458539605140686, 0.29726433753967285, 0.46034517884254456, 0.32381248474121094, 0.34687304496765137, 0.17108510434627533, 0.21359069645404816, 0.1707487851381302, 0.20969286561012268, 0.3743198812007904, 0.1890423744916916, 0.09562225639820099, 0.3847002387046814, 0.18096588551998138, 0.43845775723457336, 0.17242531478405, 0.09854660928249359, 0.37685948610305786, 0.2172141671180725, 0.19972801208496094, 0.18116970360279083, 0.1707378327846527, 0.1351509988307953, 0.27734071016311646, 0.20136167109012604, 0.17077362537384033, 0.1518319845199585, 0.10934215784072876, 0.23918990790843964, 0.21355099976062775, 0.44173896312713623, 0.17486245930194855, 0.23481792211532593, 0.25250425934791565, 0.2996631860733032, 0.20723186433315277, 0.21162660419940948, 0.10248483717441559, 0.1318439245223999, 0.13994517922401428, 0.27477771043777466, 0.23451608419418335],\n [0.6871811747550964, 0.6947159767150879, 0.7224424481391907, 0.6957899332046509, 0.6950237154960632, 0.692287266254425, 0.6832610368728638, 0.679440975189209, 0.6760961413383484, 0.6624426245689392, 0.7226876020431519, 0.6843721270561218, 0.7191374897956848, 0.6711501479148865, 0.7006677389144897, 0.7007753849029541, 0.7156102657318115, 0.6859699487686157, 0.7013571262359619, 0.6866233944892883, 0.6852321028709412, 0.6905125379562378, 0.6834650635719299, 0.6936928629875183, 0.6823381185531616, 0.6749610304832458, 0.685768723487854, 0.678738534450531, 0.6896703243255615, 0.6865702867507935, 0.6811116933822632, 0.668097734451294, 0.6821491718292236, 0.6791404485702515, 
0.6781688928604126, 0.690072238445282, 0.6856334209442139, 0.6750831007957458, 0.6529764533042908, 0.6610110998153687, 0.6336938142776489, 0.6646669507026672, 0.710946798324585, 0.6515848636627197, 0.7003756761550903, 0.6449767351150513, 0.6952865123748779, 0.6705141663551331, 0.6893569827079773, 0.6516514420509338, 0.6514383554458618, 0.6547107696533203, 0.6561172008514404, 0.6791470050811768, 0.6708744764328003, 0.6613931059837341, 0.6580820679664612, 0.6673327684402466, 0.6676281094551086, 0.6564735174179077, 0.6494910717010498, 0.6877132654190063, 0.6585811376571655, 0.6668791174888611, 0.647765576839447, 0.6290729641914368, 0.6641139388084412, 0.6315721273422241, 0.6486416459083557, 0.6528235673904419, 0.6443641185760498, 0.6421051621437073, 0.6615763306617737, 0.6287109851837158, 0.6512008905410767, 0.6036527156829834, 0.6198510527610779, 0.6783833503723145, 0.6088035106658936, 0.6225212216377258, 0.6592507362365723, 0.6567370891571045, 0.6126384735107422, 0.6371833086013794, 0.5944145321846008, 0.5978620052337646, 0.6077452898025513, 0.5822727084159851, 0.5963525772094727, 0.5794957876205444, 0.6378943920135498, 0.6341503858566284, 0.5994493961334229, 0.5735271573066711, 0.6093106269836426, 0.5819461941719055, 0.5689053535461426, 0.5474928021430969, 0.5603645443916321, 0.5779107213020325, 0.4984801113605499, 0.6043984889984131, 0.5709444880485535, 0.66923987865448, 0.5486158132553101, 0.584330141544342, 0.5256052613258362, 0.530910313129425, 0.5120806694030762, 0.5294786691665649, 0.5647129416465759, 0.5062854290008545, 0.48372966051101685, 0.51963210105896, 0.4960635006427765, 0.5811001658439636, 0.5099151134490967, 0.5170969367027283, 0.5345323085784912, 0.47125083208084106, 0.5331724286079407, 0.5247058272361755, 0.4804627597332001, 0.48637351393699646, 0.5729228854179382, 0.4769532084465027, 0.47298645973205566, 0.4819401502609253, 0.476338267326355, 0.46539318561553955, 0.4952813386917114, 0.4353810250759125, 0.4610579311847687, 0.583404004573822, 
0.5057952404022217, 0.45675453543663025, 0.48780953884124756, 0.4423540532588959, 0.4120423197746277, 0.45300909876823425, 0.43147432804107666, 0.42600253224372864, 0.3763691186904907, 0.41571441292762756, 0.4785982370376587, 0.43587276339530945, 0.3980005383491516, 0.5164824724197388, 0.4339365065097809, 0.440243124961853, 0.35807377099990845, 0.40282681584358215, 0.3178444802761078, 0.3887963593006134, 0.3478772044181824, 0.3316830098628998, 0.4186180531978607, 0.5320008397102356, 0.4033140242099762, 0.27233877778053284, 0.32683777809143066, 0.33621105551719666, 0.24845899641513824, 0.3208399713039398, 0.3043327033519745, 0.24448028206825256, 0.2664804458618164, 0.3350409269332886, 0.37464845180511475, 0.3291729986667633, 0.3024751543998718, 0.3177521824836731, 0.23499956727027893, 0.30369794368743896, 0.4720979332923889, 0.2811543941497803, 0.2326480895280838, 0.38028159737586975, 0.28157252073287964, 0.23514851927757263, 0.18976829946041107, 0.2996746301651001, 0.20262986421585083, 0.23356129229068756, 0.2618411183357239, 0.3928142488002777, 0.19240917265415192, 0.23905567824840546, 0.4425520598888397, 0.281526118516922, 0.17487844824790955, 0.21724052727222443, 0.1648930311203003, 0.22342437505722046, 0.1669752299785614, 0.24347242712974548, 0.24049922823905945, 0.22209186851978302, 0.11796236038208008, 0.18972276151180267]])\nloss_histories_QCNN_MNIST_SU4 = np.array([[15.701968392104067, 14.00975194601019, 13.575795786392687, 12.61039043284571, 11.420276002266895, 11.99525947567977, 12.080220566339552, 10.924803410541063, 11.131198050239918, 10.050426570399022, 11.053791542219374, 9.788098151349532, 11.143553225059602, 9.197232470430466, 9.229157116588144, 9.162957631022463, 10.71179400256479, 10.196747292508277, 10.79829244400887, 9.42574101202477, 8.295175259785562, 9.076819970830673, 10.227437780059452, 7.8419230403439535, 9.851325083008735, 7.682262563812961, 6.124709565416007, 9.158968991756522, 6.672908530214931, 5.209655971223005, 4.993276753394282, 
9.664936574185854, 6.88459262068916, 4.970247473779515, 4.603592627994019, 4.59443369268576, 9.794815707657726, 10.239708125356454, 6.449176123973521, 4.3614383898577245, 3.178962661406524, 5.323808065197847, 3.742336146764105, 3.408487182474864, 4.0378028800153745, 3.446378084287989, 3.7187355535234006, 3.087831614419782, 3.7096100667430014, 4.565046696353874, 3.770663248286091, 4.4217543223496145, 3.77968191595814, 5.056168926358595, 2.7803131922419175, 2.719334554107501, 3.8792788429673997, 3.1498685402458206, 4.025981341150764, 4.3933448358886835, 3.5465754527777196, 3.5006393293490854, 5.749003912443001, 5.285935336974153, 5.215205181476441, 3.33499755695145, 3.7997378845686343, 3.017773877789444, 4.067487463964749, 4.5587834989118035, 3.740154036304224, 2.7708132679167123, 4.512851209860175, 2.924350433846229, 2.795042698165882, 4.149110497309133, 3.369464977487066, 4.23293533133619, 3.8553201319663155, 3.9359680762072085, 4.161241707714771, 5.064086532487843, 3.702060601779912, 2.7105860877244092, 4.108486332151197, 3.6338065699190376, 2.881211037354094, 3.723927043461242, 5.510865245386038, 5.124190965728359, 2.821525546046732, 3.098837600425342, 2.3469765230853548, 2.4742370473245545, 4.067343226565326, 3.6394183895744443, 2.733427685289968, 3.2653415891638153, 4.247409817892573, 3.6035555769047796, 4.3211502162877675, 4.045216873050909, 2.9323636325128075, 3.4306359296475173, 2.8276962373988956, 3.564636207572396, 2.9737011426056164, 4.391465105518088, 3.069343501908466, 2.9634796303315323, 3.1466302177539296, 3.3780949337259174, 4.562911170376287, 3.948278038667186, 4.874029252223579, 3.6684211237346824, 2.998924179663272, 3.590306877884917, 3.981573843773927, 4.613897998161246, 3.981322551072088, 2.680360942734963, 2.422755364259979, 3.2320172271380865, 3.8089662923296976, 2.939901362941198, 4.833732255949402, 2.1849227406448146, 2.6372671950900055, 3.104418483742709, 3.8629675948558835, 4.687536630526817, 6.9927241243613265, 4.922012283032045, 
7.452795267168994, 4.485254992748128, 2.8914771697111363, 4.265530398933267, 3.1971441908330624, 3.068105106360343, 4.275963212623704, 3.155163439406081, 2.8000281304346384, 4.1954482933166934, 3.253825150780586, 3.272701085036064, 2.8422443341131016, 2.603392372802548, 3.532346488795025, 4.011695154843002, 3.472111135034264, 2.9318768612813235, 4.250864330840455, 4.439079409959164, 6.85829042210198, 3.373765377796869, 3.3066825370799746, 3.6209249292139156, 2.663136341349555, 3.176614433068561, 5.179692142004379, 2.386125444576648, 3.6714569666304624, 5.968140140543236, 4.6862114302954225, 3.627958786157068, 3.7152938038804124, 3.4850735691747947, 5.75016980980869, 4.714566479061207, 2.8790642558219135, 4.789001128730438, 2.723388960931453, 3.3189986144639394, 4.477900327018953, 3.2164083086369955, 4.715276756538122, 3.566305058074321, 3.4740378623097388, 5.9606633045649655, 3.3624451866929257, 7.466756701248525, 3.099570878630708, 3.396522335144449, 3.5183642097565473, 6.242929419688088, 5.3682240107336066, 3.1235126677502376, 2.575365849609988, 2.9803470547016326, 3.379381007307192, 5.401406710868802, 2.9037086563127614, 5.75416297233594, 3.1099352188520903, 4.07746222351273, 3.172574914460961, 3.2580509567486198, 2.6716076043732255, 4.383820778498077],\n [16.67610968670757, 14.718491446461517, 12.090224017449659, 13.472120600797032, 13.25131879124339, 13.581067562519276, 11.840332662256907, 11.130310125485495, 9.93192347576329, 11.029197156268669, 9.465616135342069, 8.846563678505364, 8.832779249446189, 7.304634735385226, 6.712227892140214, 7.312650169150179, 5.20134121314218, 6.881439914653229, 6.838544538526815, 6.765550850415497, 6.5342587606872105, 5.5488092796654875, 5.811336175272483, 5.794328461356434, 7.5140454270663, 6.833730935841877, 6.5306652794487245, 6.182513857890757, 5.931352459921179, 6.077425703330703, 5.122698673541781, 5.661275028840929, 6.288518717055746, 8.056005588611056, 5.29513931085165, 5.080406097314944, 4.541240119374063, 
6.7937862430249325, 5.551516303937469, 8.561069772637719, 5.778696027265669, 7.581037487036702, 4.87089471665885, 5.461873570139039, 6.274697551835343, 6.178863189755656, 5.525816468577715, 5.752541148100186, 6.78491692819599, 6.429252986978849, 6.103956718052821, 5.997072731286677, 8.039526919812602, 5.44374742986944, 6.312137074095871, 5.3384457339559885, 7.340812313020643, 5.5364661975369485, 5.72976620514688, 5.841613803200098, 5.429284862378174, 5.522583756402512, 5.509153976952111, 4.8467675121924945, 6.49822992308749, 5.368611880740126, 6.65582979493719, 6.852889978124359, 6.94843047028678, 5.596496249741784, 6.339885612054319, 5.267701813339295, 6.152560736297184, 5.775192895855112, 5.5163207175483, 6.44778772471083, 5.430156597046173, 5.932478159862267, 5.6314257765548135, 5.481369375552064, 5.338024105579926, 6.670512346943634, 6.300996333041053, 5.730069681685715, 5.02963972081574, 6.936543925341157, 5.062802489117022, 5.55732970400315, 4.7762005374477985, 6.645698540681672, 5.869444734157781, 6.253047036221847, 5.19025078164954, 7.484779118830626, 5.222194367529911, 4.900463084980787, 5.935513417865195, 6.310398185116211, 8.132592884151036, 5.664289009103563, 6.183934383088775, 5.088139300408709, 5.508491361131622, 5.548466329859139, 4.719837683756191, 6.167420125947168, 6.594100741695282, 8.158242684269862, 5.487510771423499, 5.21810002541766, 6.04446323264393, 5.563562650115921, 5.232073383963237, 5.401459469361614, 8.85540524437023, 5.4966514988068536, 7.138435313756308, 3.738691460410528, 5.974228304092972, 5.889754947819547, 5.205233223920336, 5.660552283292949, 7.533655402675502, 6.237175062990105, 4.612106288110162, 4.83261522589498, 6.121743265892744, 4.616937597386011, 5.607222524525371, 6.607172071129023, 4.616853299540282, 4.8959008742344095, 5.388591509835658, 5.110263418707962, 6.2392279693893, 5.698228760691443, 6.090751357758069, 5.387412545935041, 4.59741341670396, 4.256336578754677, 5.242254014621976, 5.857879934488944, 
5.50051672654861, 6.083693906452281, 6.28930037142256, 5.435763076249767, 5.600938823415071, 4.911512560507955, 4.708365035514755, 5.1425019670095296, 4.05608917987239, 5.8505255380084735, 6.0356476179414935, 5.843945163828317, 6.003963889711409, 6.004885939386664, 6.093422936170946, 7.43302607226079, 5.598670955133419, 6.318483050933266, 5.29497634661668, 4.659365000673817, 4.657314955631369, 6.911662937984151, 5.956445279906723, 6.205526277177433, 5.8233172244351215, 7.349116195432372, 6.132131053390929, 5.485940276394268, 6.217186135246689, 5.418644398444133, 5.036585543343947, 4.968299459526778, 4.075560315776475, 5.568414559989412, 5.4080102959787855, 5.464803055108561, 4.993330216988004, 4.506529139518999, 5.988501668255765, 6.0157665530526225, 5.864092195873374, 5.551867433037801, 6.259680423346111, 5.377826263121123, 5.832501013799243, 5.100914993522977, 5.177372502514758, 5.0693544932836945, 4.587039983702243, 5.405716516521254, 6.365331685040749, 6.633691593318545, 6.630421586190276, 5.442422435211777, 4.359705683191795, 5.151171627435816, 5.864607067681872, 7.071624705566063],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_FASHION_2L", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_CNN_FASHION_2L = np.array([[0.6982033252716064, 0.685494065284729, 0.7043045163154602, 0.6664453744888306, 0.692385196685791, 0.6883255839347839, 0.6670038104057312, 0.6721140146255493, 0.7477293610572815, 0.7019577622413635, 0.7049409747123718, 0.6816695332527161, 0.6746503710746765, 0.7107524871826172, 0.6755272746086121, 0.6871398091316223, 0.6663607954978943, 0.6776321530342102, 0.6692557334899902, 0.6771387457847595, 0.6749445199966431, 0.6708089709281921, 
0.6586058139801025, 0.6667677164077759, 0.6446713209152222, 0.6719373464584351, 0.6606504917144775, 0.657498300075531, 0.6470506191253662, 0.6401866674423218, 0.6414895057678223, 0.6237302422523499, 0.6307801604270935, 0.6398060917854309, 0.612299382686615, 0.6208985447883606, 0.6133882999420166, 0.6116693019866943, 0.5888859033584595, 0.5796552896499634, 0.6230239272117615, 0.5889614224433899, 0.5819820165634155, 0.5470789074897766, 0.5382369756698608, 0.5513080358505249, 0.555568516254425, 0.512162446975708, 0.542407751083374, 0.625302791595459, 0.5083873867988586, 0.5874236822128296, 0.5095157027244568, 0.5841799974441528, 0.49809378385543823, 0.5399268865585327, 0.5122842192649841, 0.5358125567436218, 0.519302248954773, 0.5164752006530762, 0.46569016575813293, 0.5407835245132446, 0.48874348402023315, 0.47747379541397095, 0.4097748100757599, 0.4176773130893707, 0.45327454805374146, 0.42068949341773987, 0.4547436237335205, 0.4771629571914673, 0.5482209920883179, 0.408530056476593, 0.4851240813732147, 0.4207240343093872, 0.46289223432540894, 0.5424150228500366, 0.39088600873947144, 0.43761688470840454, 0.3984193503856659, 0.31847116351127625, 0.5500505566596985, 0.4576525390148163, 0.5193397402763367, 0.3719850182533264, 0.34440648555755615, 0.5614837408065796, 0.28313952684402466, 0.44536086916923523, 0.34781497716903687, 0.46229103207588196, 0.3734305500984192, 0.43122661113739014, 0.3639208674430847, 0.3386707305908203, 0.41308125853538513, 0.3886600136756897, 0.40352439880371094, 0.41081854701042175, 0.3028257489204407, 0.43178990483283997, 0.38263648748397827, 0.3496360778808594, 0.358610063791275, 0.2800368368625641, 0.3421652615070343, 0.44051799178123474, 0.43672409653663635, 0.3709166347980499, 0.24854938685894012, 0.3083757758140564, 0.3452308773994446, 0.30687573552131653, 0.3816249370574951, 0.3573996424674988, 0.26188158988952637, 0.24953699111938477, 0.17291030287742615, 0.41357630491256714, 0.3565349578857422, 0.26830142736434937, 
0.25871559977531433, 0.30517154932022095, 0.27130597829818726, 0.27063170075416565, 0.21682584285736084, 0.18601535260677338, 0.2388288378715515, 0.38377171754837036, 0.21231044828891754, 0.19891099631786346, 0.40024814009666443, 0.1763014793395996, 0.2191973477602005, 0.3140704333782196, 0.2190384864807129, 0.2297554612159729, 0.28490787744522095, 0.18833035230636597, 0.17096717655658722, 0.38814058899879456, 0.35706454515457153, 0.3980378806591034, 0.3082973062992096, 0.282254695892334, 0.28172925114631653, 0.33135756850242615, 0.18617062270641327, 0.3084704279899597, 0.1653742641210556, 0.2611149847507477, 0.23364774882793427, 0.32361915707588196, 0.2880187928676605, 0.22159168124198914, 0.32011330127716064, 0.37932154536247253, 0.2227104753255844, 0.27614346146583557, 0.2528597116470337, 0.13920791447162628, 0.14623363316059113, 0.29781511425971985, 0.30779993534088135, 0.23402608931064606, 0.2049279808998108, 0.195842444896698, 0.284122109413147, 0.26556384563446045, 0.16032564640045166, 0.19326667487621307, 0.16147050261497498, 0.2859514355659485, 0.27686405181884766, 0.14044542610645294, 0.1292620599269867, 0.22018609941005707, 0.16846336424350739, 0.1789669245481491, 0.22029483318328857, 0.20728746056556702, 0.44362884759902954, 0.31444764137268066, 0.23610062897205353, 0.3099147081375122, 0.2594585418701172, 0.13596713542938232, 0.14830733835697174, 0.11959406733512878, 0.11047989130020142, 0.19856494665145874, 0.12652510404586792, 0.3232700824737549, 0.13162431120872498, 0.16679613292217255, 0.1600688397884369, 0.1490894854068756, 0.17836572229862213, 0.1767909973859787, 0.1359943300485611, 0.3607396185398102],\n [0.722959041595459, 0.8378788828849792, 0.6359409093856812, 0.7786480188369751, 0.6722474098205566, 0.7209513783454895, 0.7147747874259949, 0.7389553785324097, 0.7043027281761169, 0.6997193098068237, 0.6960577964782715, 0.6514461040496826, 0.7242903113365173, 0.7110193371772766, 0.6887984275817871, 0.6825430393218994, 0.6723452210426331, 
0.668528139591217, 0.656069278717041, 0.6439428925514221, 0.67144376039505, 0.681311845779419, 0.6493660807609558, 0.6525905728340149, 0.6488508582115173, 0.6426312327384949, 0.655098021030426, 0.6406035423278809, 0.6233764886856079, 0.6242199540138245, 0.6306735873222351, 0.6166422963142395, 0.5946296453475952, 0.5960651636123657, 0.6093128323554993, 0.6189892888069153, 0.5935332179069519, 0.586617648601532, 0.5726534128189087, 0.5882557034492493, 0.5732150673866272, 0.6082722544670105, 0.5631254315376282, 0.5543763637542725, 0.6138622760772705, 0.5315272808074951, 0.5507160425186157, 0.5241436958312988, 0.5181093811988831, 0.474336177110672, 0.5000462532043457, 0.5013487339019775, 0.4983973801136017, 0.4515523612499237, 0.439666211605072, 0.4340149164199829, 0.46038758754730225, 0.44209349155426025, 0.4850611984729767, 0.42503097653388977, 0.4745623767375946, 0.45707252621650696, 0.37832871079444885, 0.46673983335494995, 0.4491257071495056, 0.37395069003105164, 0.4663311839103699, 0.3531072735786438, 0.3056586682796478, 0.4227212071418762, 0.3507891595363617, 0.325242817401886, 0.342677503824234, 0.28175806999206543, 0.4168330132961273, 0.2992843985557556, 0.38543903827667236, 0.310502290725708, 0.32055363059043884, 0.3279689848423004, 0.22771680355072021, 0.2996452748775482, 0.27647387981414795, 0.3224877119064331, 0.24797911942005157, 0.31701239943504333, 0.27963507175445557, 0.3111099600791931, 0.2598114013671875, 0.2720106840133667, 0.28728973865509033, 0.31687983870506287, 0.16315454244613647, 0.28481221199035645, 0.32374823093414307, 0.2818513810634613, 0.27142226696014404, 0.26268696784973145, 0.18802690505981445, 0.2330765277147293, 0.28008073568344116, 0.13481254875659943, 0.23573260009288788, 0.1210196316242218, 0.19468261301517487, 0.17421527206897736, 0.17932678759098053, 0.22074772417545319, 0.15295524895191193, 0.12509538233280182, 0.18809691071510315, 0.18245065212249756, 0.20922879874706268, 0.2670697271823883, 0.22580507397651672, 
0.16062802076339722, 0.23412327468395233, 0.43120795488357544, 0.2926293611526489, 0.2947879135608673, 0.17843599617481232, 0.3449716866016388, 0.15644687414169312, 0.19783726334571838, 0.0992257222533226, 0.11392301321029663, 0.32231271266937256, 0.19188730418682098, 0.16153278946876526, 0.2035246342420578, 0.18345876038074493, 0.051402803510427475, 0.07299385964870453, 0.24596910178661346, 0.317927747964859, 0.16063934564590454, 0.3693201541900635, 0.3464672565460205, 0.24623923003673553, 0.1416863203048706, 0.28232452273368835, 0.23875318467617035, 0.21324512362480164, 0.2701603174209595, 0.18765276670455933, 0.2246081680059433, 0.15949931740760803, 0.17738769948482513, 0.18758100271224976, 0.21686187386512756, 0.11897433549165726, 0.1632753312587738, 0.1252729594707489, 0.18664729595184326, 0.22377149760723114, 0.0922139436006546, 0.3411566913127899, 0.1822480410337448, 0.2687120735645294, 0.2085094302892685, 0.36180827021598816, 0.16845770180225372, 0.22141145169734955, 0.16436506807804108, 0.14664797484874725, 0.1761969029903412, 0.3861934244632721, 0.2474733293056488, 0.152466282248497, 0.27540338039398193, 0.32959243655204773, 0.0701957643032074, 0.23025813698768616, 0.30845510959625244, 0.19448892772197723, 0.15782098472118378, 0.0669102892279625, 0.17205177247524261, 0.14290541410446167, 0.04908277839422226, 0.16558429598808289, 0.3065306544303894, 0.07012531161308289, 0.38416728377342224, 0.11788350343704224, 0.12578633427619934, 0.057048648595809937, 0.17244094610214233, 0.16887983679771423, 0.21592962741851807, 0.1084679663181305, 0.06436526030302048, 0.13887561857700348, 0.05770181491971016, 0.16522125899791718, 0.34186455607414246, 0.1313944011926651, 0.09368456155061722, 0.15273809432983398, 0.07838013023138046],\n [0.7304385900497437, 0.6144606471061707, 0.7663922309875488, 0.7456250190734863, 0.6472451686859131, 0.6408094763755798, 0.6577707529067993, 0.6627257466316223, 0.6259477734565735, 0.6269749402999878, 0.6147814393043518, 
0.5954440236091614, 0.5954847931861877, 0.622576117515564, 0.5681723952293396, 0.6685020327568054, 0.6117186546325684, 0.6318418383598328, 0.6105124950408936, 0.6059049367904663, 0.5891886353492737, 0.570899248123169, 0.5469162464141846, 0.48898395895957947, 0.5481796264648438, 0.5801829695701599, 0.5309649705886841, 0.5229575037956238, 0.5118927359580994, 0.5621098875999451, 0.5783540606498718, 0.6303244829177856, 0.5337151288986206, 0.5568647980690002, 0.506201982498169, 0.4340185821056366, 0.41403329372406006, 0.4785533547401428, 0.4407808184623718, 0.40219631791114807, 0.4949192702770233, 0.44360852241516113, 0.4859486520290375, 0.6203747391700745, 0.4916355013847351, 0.4286290407180786, 0.4152555763721466, 0.42479854822158813, 0.41854721307754517, 0.3812496066093445, 0.5289347171783447, 0.4742628037929535, 0.5494931936264038, 0.5970712304115295, 0.4466589093208313, 0.3422013819217682, 0.3693443238735199, 0.5024840235710144, 0.49555644392967224, 0.5533590912818909, 0.39836445450782776, 0.47018149495124817, 0.4293385446071625, 0.45852386951446533, 0.3161731958389282, 0.3988656997680664, 0.4170445501804352, 0.5251786708831787, 0.4676470458507538, 0.3298081159591675, 0.4780762195587158, 0.461908757686615, 0.39387425780296326, 0.32310259342193604, 0.267741858959198, 0.30676278471946716, 0.36632004380226135, 0.27854710817337036, 0.42963460087776184, 0.25819167494773865, 0.3424122929573059, 0.29098108410835266, 0.3162521421909332, 0.39036551117897034, 0.27974581718444824, 0.1921042948961258, 0.35803231596946716, 0.22351698577404022, 0.23193897306919098, 0.270576536655426, 0.26012200117111206, 0.4471112787723541, 0.25369831919670105, 0.2558428645133972, 0.32542353868484497, 0.36307579278945923, 0.3768329918384552, 0.4200160503387451, 0.22499065101146698, 0.5793442726135254, 0.3363257348537445, 0.4287791848182678, 0.37058791518211365, 0.48414185643196106, 0.6411834955215454, 0.42239871621131897, 0.30433931946754456, 0.5210206508636475, 0.36561423540115356, 
0.3857577443122864, 0.43900078535079956, 0.39965251088142395, 0.34631234407424927, 0.33402514457702637, 0.1892768293619156, 0.5548384189605713, 0.294956237077713, 0.2289484739303589, 0.3705907464027405, 0.2672608494758606, 0.20700865983963013, 0.3425357937812805, 0.3496844470500946, 0.23137161135673523, 0.2634831368923187, 0.22181850671768188, 0.18999303877353668, 0.15023399889469147, 0.21937914192676544, 0.14932149648666382, 0.34122416377067566, 0.4100107252597809, 0.3010890781879425, 0.24609117209911346, 0.27445802092552185, 0.3145654499530792, 0.24431446194648743, 0.19399075210094452, 0.36408981680870056, 0.2884557843208313, 0.16600178182125092, 0.28500232100486755, 0.23745673894882202, 0.21136383712291718, 0.0959608182311058, 0.15006370842456818, 0.18879224359989166, 0.15188197791576385, 0.1134112998843193, 0.28580376505851746, 0.1936863660812378, 0.18274204432964325, 0.14029626548290253, 0.23430503904819489, 0.2909218668937683, 0.4258160889148712, 0.17616671323776245, 0.2367817759513855, 0.2458539605140686, 0.29726433753967285, 0.46034517884254456, 0.32381248474121094, 0.34687304496765137, 0.17108510434627533, 0.21359069645404816, 0.1707487851381302, 0.20969286561012268, 0.3743198812007904, 0.1890423744916916, 0.09562225639820099, 0.3847002387046814, 0.18096588551998138, 0.43845775723457336, 0.17242531478405, 0.09854660928249359, 0.37685948610305786, 0.2172141671180725, 0.19972801208496094, 0.18116970360279083, 0.1707378327846527, 0.1351509988307953, 0.27734071016311646, 0.20136167109012604, 0.17077362537384033, 0.1518319845199585, 0.10934215784072876, 0.23918990790843964, 0.21355099976062775, 0.44173896312713623, 0.17486245930194855, 0.23481792211532593, 0.25250425934791565, 0.2996631860733032, 0.20723186433315277, 0.21162660419940948, 0.10248483717441559, 0.1318439245223999, 0.13994517922401428, 0.27477771043777466, 0.23451608419418335],\n [0.6871811747550964, 0.6947159767150879, 0.7224424481391907, 0.6957899332046509, 0.6950237154960632, 0.692287266254425, 
0.6832610368728638, 0.679440975189209, 0.6760961413383484, 0.6624426245689392, 0.7226876020431519, 0.6843721270561218, 0.7191374897956848, 0.6711501479148865, 0.7006677389144897, 0.7007753849029541, 0.7156102657318115, 0.6859699487686157, 0.7013571262359619, 0.6866233944892883, 0.6852321028709412, 0.6905125379562378, 0.6834650635719299, 0.6936928629875183, 0.6823381185531616, 0.6749610304832458, 0.685768723487854, 0.678738534450531, 0.6896703243255615, 0.6865702867507935, 0.6811116933822632, 0.668097734451294, 0.6821491718292236, 0.6791404485702515, 0.6781688928604126, 0.690072238445282, 0.6856334209442139, 0.6750831007957458, 0.6529764533042908, 0.6610110998153687, 0.6336938142776489, 0.6646669507026672, 0.710946798324585, 0.6515848636627197, 0.7003756761550903, 0.6449767351150513, 0.6952865123748779, 0.6705141663551331, 0.6893569827079773, 0.6516514420509338, 0.6514383554458618, 0.6547107696533203, 0.6561172008514404, 0.6791470050811768, 0.6708744764328003, 0.6613931059837341, 0.6580820679664612, 0.6673327684402466, 0.6676281094551086, 0.6564735174179077, 0.6494910717010498, 0.6877132654190063, 0.6585811376571655, 0.6668791174888611, 0.647765576839447, 0.6290729641914368, 0.6641139388084412, 0.6315721273422241, 0.6486416459083557, 0.6528235673904419, 0.6443641185760498, 0.6421051621437073, 0.6615763306617737, 0.6287109851837158, 0.6512008905410767, 0.6036527156829834, 0.6198510527610779, 0.6783833503723145, 0.6088035106658936, 0.6225212216377258, 0.6592507362365723, 0.6567370891571045, 0.6126384735107422, 0.6371833086013794, 0.5944145321846008, 0.5978620052337646, 0.6077452898025513, 0.5822727084159851, 0.5963525772094727, 0.5794957876205444, 0.6378943920135498, 0.6341503858566284, 0.5994493961334229, 0.5735271573066711, 0.6093106269836426, 0.5819461941719055, 0.5689053535461426, 0.5474928021430969, 0.5603645443916321, 0.5779107213020325, 0.4984801113605499, 0.6043984889984131, 0.5709444880485535, 0.66923987865448, 0.5486158132553101, 0.584330141544342, 
0.5256052613258362, 0.530910313129425, 0.5120806694030762, 0.5294786691665649, 0.5647129416465759, 0.5062854290008545, 0.48372966051101685, 0.51963210105896, 0.4960635006427765, 0.5811001658439636, 0.5099151134490967, 0.5170969367027283, 0.5345323085784912, 0.47125083208084106, 0.5331724286079407, 0.5247058272361755, 0.4804627597332001, 0.48637351393699646, 0.5729228854179382, 0.4769532084465027, 0.47298645973205566, 0.4819401502609253, 0.476338267326355, 0.46539318561553955, 0.4952813386917114, 0.4353810250759125, 0.4610579311847687, 0.583404004573822, 0.5057952404022217, 0.45675453543663025, 0.48780953884124756, 0.4423540532588959, 0.4120423197746277, 0.45300909876823425, 0.43147432804107666, 0.42600253224372864, 0.3763691186904907, 0.41571441292762756, 0.4785982370376587, 0.43587276339530945, 0.3980005383491516, 0.5164824724197388, 0.4339365065097809, 0.440243124961853, 0.35807377099990845, 0.40282681584358215, 0.3178444802761078, 0.3887963593006134, 0.3478772044181824, 0.3316830098628998, 0.4186180531978607, 0.5320008397102356, 0.4033140242099762, 0.27233877778053284, 0.32683777809143066, 0.33621105551719666, 0.24845899641513824, 0.3208399713039398, 0.3043327033519745, 0.24448028206825256, 0.2664804458618164, 0.3350409269332886, 0.37464845180511475, 0.3291729986667633, 0.3024751543998718, 0.3177521824836731, 0.23499956727027893, 0.30369794368743896, 0.4720979332923889, 0.2811543941497803, 0.2326480895280838, 0.38028159737586975, 0.28157252073287964, 0.23514851927757263, 0.18976829946041107, 0.2996746301651001, 0.20262986421585083, 0.23356129229068756, 0.2618411183357239, 0.3928142488002777, 0.19240917265415192, 0.23905567824840546, 0.4425520598888397, 0.281526118516922, 0.17487844824790955, 0.21724052727222443, 0.1648930311203003, 0.22342437505722046, 0.1669752299785614, 0.24347242712974548, 0.24049922823905945, 0.22209186851978302, 0.11796236038208008, 0.18972276151180267]])\nloss_histories_QCNN_MNIST_SU4 = np.array([[15.701968392104067, 14.00975194601019, 
13.575795786392687, 12.61039043284571, 11.420276002266895, 11.99525947567977, 12.080220566339552, 10.924803410541063, 11.131198050239918, 10.050426570399022, 11.053791542219374, 9.788098151349532, 11.143553225059602, 9.197232470430466, 9.229157116588144, 9.162957631022463, 10.71179400256479, 10.196747292508277, 10.79829244400887, 9.42574101202477, 8.295175259785562, 9.076819970830673, 10.227437780059452, 7.8419230403439535, 9.851325083008735, 7.682262563812961, 6.124709565416007, 9.158968991756522, 6.672908530214931, 5.209655971223005, 4.993276753394282, 9.664936574185854, 6.88459262068916, 4.970247473779515, 4.603592627994019, 4.59443369268576, 9.794815707657726, 10.239708125356454, 6.449176123973521, 4.3614383898577245, 3.178962661406524, 5.323808065197847, 3.742336146764105, 3.408487182474864, 4.0378028800153745, 3.446378084287989, 3.7187355535234006, 3.087831614419782, 3.7096100667430014, 4.565046696353874, 3.770663248286091, 4.4217543223496145, 3.77968191595814, 5.056168926358595, 2.7803131922419175, 2.719334554107501, 3.8792788429673997, 3.1498685402458206, 4.025981341150764, 4.3933448358886835, 3.5465754527777196, 3.5006393293490854, 5.749003912443001, 5.285935336974153, 5.215205181476441, 3.33499755695145, 3.7997378845686343, 3.017773877789444, 4.067487463964749, 4.5587834989118035, 3.740154036304224, 2.7708132679167123, 4.512851209860175, 2.924350433846229, 2.795042698165882, 4.149110497309133, 3.369464977487066, 4.23293533133619, 3.8553201319663155, 3.9359680762072085, 4.161241707714771, 5.064086532487843, 3.702060601779912, 2.7105860877244092, 4.108486332151197, 3.6338065699190376, 2.881211037354094, 3.723927043461242, 5.510865245386038, 5.124190965728359, 2.821525546046732, 3.098837600425342, 2.3469765230853548, 2.4742370473245545, 4.067343226565326, 3.6394183895744443, 2.733427685289968, 3.2653415891638153, 4.247409817892573, 3.6035555769047796, 4.3211502162877675, 4.045216873050909, 2.9323636325128075, 3.4306359296475173, 2.8276962373988956, 
3.564636207572396, 2.9737011426056164, 4.391465105518088, 3.069343501908466, 2.9634796303315323, 3.1466302177539296, 3.3780949337259174, 4.562911170376287, 3.948278038667186, 4.874029252223579, 3.6684211237346824, 2.998924179663272, 3.590306877884917, 3.981573843773927, 4.613897998161246, 3.981322551072088, 2.680360942734963, 2.422755364259979, 3.2320172271380865, 3.8089662923296976, 2.939901362941198, 4.833732255949402, 2.1849227406448146, 2.6372671950900055, 3.104418483742709, 3.8629675948558835, 4.687536630526817, 6.9927241243613265, 4.922012283032045, 7.452795267168994, 4.485254992748128, 2.8914771697111363, 4.265530398933267, 3.1971441908330624, 3.068105106360343, 4.275963212623704, 3.155163439406081, 2.8000281304346384, 4.1954482933166934, 3.253825150780586, 3.272701085036064, 2.8422443341131016, 2.603392372802548, 3.532346488795025, 4.011695154843002, 3.472111135034264, 2.9318768612813235, 4.250864330840455, 4.439079409959164, 6.85829042210198, 3.373765377796869, 3.3066825370799746, 3.6209249292139156, 2.663136341349555, 3.176614433068561, 5.179692142004379, 2.386125444576648, 3.6714569666304624, 5.968140140543236, 4.6862114302954225, 3.627958786157068, 3.7152938038804124, 3.4850735691747947, 5.75016980980869, 4.714566479061207, 2.8790642558219135, 4.789001128730438, 2.723388960931453, 3.3189986144639394, 4.477900327018953, 3.2164083086369955, 4.715276756538122, 3.566305058074321, 3.4740378623097388, 5.9606633045649655, 3.3624451866929257, 7.466756701248525, 3.099570878630708, 3.396522335144449, 3.5183642097565473, 6.242929419688088, 5.3682240107336066, 3.1235126677502376, 2.575365849609988, 2.9803470547016326, 3.379381007307192, 5.401406710868802, 2.9037086563127614, 5.75416297233594, 3.1099352188520903, 4.07746222351273, 3.172574914460961, 3.2580509567486198, 2.6716076043732255, 4.383820778498077],\n [16.67610968670757, 14.718491446461517, 12.090224017449659, 13.472120600797032, 13.25131879124339, 13.581067562519276, 11.840332662256907, 11.130310125485495, 
9.93192347576329, 11.029197156268669, 9.465616135342069, 8.846563678505364, 8.832779249446189, 7.304634735385226, 6.712227892140214, 7.312650169150179, 5.20134121314218, 6.881439914653229, 6.838544538526815, 6.765550850415497, 6.5342587606872105, 5.5488092796654875, 5.811336175272483, 5.794328461356434, 7.5140454270663, 6.833730935841877, 6.5306652794487245, 6.182513857890757, 5.931352459921179, 6.077425703330703, 5.122698673541781, 5.661275028840929, 6.288518717055746, 8.056005588611056, 5.29513931085165, 5.080406097314944, 4.541240119374063, 6.7937862430249325, 5.551516303937469, 8.561069772637719, 5.778696027265669, 7.581037487036702, 4.87089471665885, 5.461873570139039, 6.274697551835343, 6.178863189755656, 5.525816468577715, 5.752541148100186, 6.78491692819599, 6.429252986978849, 6.103956718052821, 5.997072731286677, 8.039526919812602, 5.44374742986944, 6.312137074095871, 5.3384457339559885, 7.340812313020643, 5.5364661975369485, 5.72976620514688, 5.841613803200098, 5.429284862378174, 5.522583756402512, 5.509153976952111, 4.8467675121924945, 6.49822992308749, 5.368611880740126, 6.65582979493719, 6.852889978124359, 6.94843047028678, 5.596496249741784, 6.339885612054319, 5.267701813339295, 6.152560736297184, 5.775192895855112, 5.5163207175483, 6.44778772471083, 5.430156597046173, 5.932478159862267, 5.6314257765548135, 5.481369375552064, 5.338024105579926, 6.670512346943634, 6.300996333041053, 5.730069681685715, 5.02963972081574, 6.936543925341157, 5.062802489117022, 5.55732970400315, 4.7762005374477985, 6.645698540681672, 5.869444734157781, 6.253047036221847, 5.19025078164954, 7.484779118830626, 5.222194367529911, 4.900463084980787, 5.935513417865195, 6.310398185116211, 8.132592884151036, 5.664289009103563, 6.183934383088775, 5.088139300408709, 5.508491361131622, 5.548466329859139, 4.719837683756191, 6.167420125947168, 6.594100741695282, 8.158242684269862, 5.487510771423499, 5.21810002541766, 6.04446323264393, 5.563562650115921, 5.232073383963237, 
5.401459469361614, 8.85540524437023, 5.4966514988068536, 7.138435313756308, 3.738691460410528, 5.974228304092972, 5.889754947819547, 5.205233223920336, 5.660552283292949, 7.533655402675502, 6.237175062990105, 4.612106288110162, 4.83261522589498, 6.121743265892744, 4.616937597386011, 5.607222524525371, 6.607172071129023, 4.616853299540282, 4.8959008742344095, 5.388591509835658, 5.110263418707962, 6.2392279693893, 5.698228760691443, 6.090751357758069, 5.387412545935041, 4.59741341670396, 4.256336578754677, 5.242254014621976, 5.857879934488944, 5.50051672654861, 6.083693906452281, 6.28930037142256, 5.435763076249767, 5.600938823415071, 4.911512560507955, 4.708365035514755, 5.1425019670095296, 4.05608917987239, 5.8505255380084735, 6.0356476179414935, 5.843945163828317, 6.003963889711409, 6.004885939386664, 6.093422936170946, 7.43302607226079, 5.598670955133419, 6.318483050933266, 5.29497634661668, 4.659365000673817, 4.657314955631369, 6.911662937984151, 5.956445279906723, 6.205526277177433, 5.8233172244351215, 7.349116195432372, 6.132131053390929, 5.485940276394268, 6.217186135246689, 5.418644398444133, 5.036585543343947, 4.968299459526778, 4.075560315776475, 5.568414559989412, 5.4080102959787855, 5.464803055108561, 4.993330216988004, 4.506529139518999, 5.988501668255765, 6.0157665530526225, 5.864092195873374, 5.551867433037801, 6.259680423346111, 5.377826263121123, 5.832501013799243, 5.100914993522977, 5.177372502514758, 5.0693544932836945, 4.587039983702243, 5.405716516521254, 6.365331685040749, 6.633691593318545, 6.630421586190276, 5.442422435211777, 4.359705683191795, 5.151171627435816, 5.864607067681872, 7.071624705566063],\n [16.157302948705482, 15.324869229417093, 12.793915961353354, 11.194524103323525, 12.507868898219838, 12.616777225433516, 9.234552183500577, 13.336267076141908, 13.461740153378038, 15.409796063035124, 14.399175925702068, 12.32478585460413, 11.568925602138641, 11.705259189481604, 9.73148182172859, 10.518139699222413, 10.340323759782864, 
10.184819821480033, 10.267859866173783, 9.163877978474442, 8.228781475027233, 10.085746901037597, 9.56261037065576, 9.945013093887507, 10.573418684599648, 8.355653192143533, 7.489047008196033, 8.831587856043486, 9.243498791854762, 7.899791859405868, 7.948776869634101, 8.167661085368842, 8.591449746262468, 7.964795560429807, 7.997626507713476, 8.73053525367385, 7.882803305531689, 9.108972728938417, 7.486328417560084, 8.358746544165566, 7.3501368133987555, 7.312364691281757, 7.145939172039308, 7.566180102372289, 8.736332347556841, 8.807592977750446, 7.18998285729017, 7.866211944807089, 7.766185842391377, 8.749967011867623, 7.34235823310588, 7.43610428815889, 11.590298332110903, 10.682766742665306, 8.308300044564142, 7.845831118765122, 9.813484056770166, 7.876955617382441, 7.312174779836708, 8.759938285312519, 7.171839228177709, 8.499615134414299, 7.509722002675852, 8.557543892841624, 10.934254213376416, 7.398766901107699, 9.48803778380712, 8.620835490337631, 7.451737187278983, 7.506113667562998, 9.622532082724248, 7.981036342366205, 7.20993447571356, 7.390474283132135, 6.912475677827477, 9.935513837829825, 8.1391905581568, 7.653484659161942, 8.668598919073082, 7.3093307391150555, 7.042841867576427, 7.3547098066608685, 8.436869537863673, 9.242697469694416, 7.9004890747461864, 7.5769601403855145, 7.739169857643963, 7.767203539409437, 8.131669134778706, 7.523153815633725, 7.243530316860227, 7.017534997319622, 7.893922940479337, 7.851475941368426, 11.101369748051242, 7.214014499430652, 7.6427246289029735, 8.060879862630905, 7.16097689167272, 8.51976921955402, 7.426907407706117, 7.117950793658989, 9.61181003072403, 7.805022209950351, 7.192555676979239, 7.290056228132629, 7.74080333198409, 8.487442826710854, 8.076479022745504, 8.787609642883186, 8.046578612355681, 7.105401588418192, 6.844574411490987, 7.6829392612869905, 7.312980895734044, 9.491955518171958, 9.688628921645819, 7.4551245842341824, 7.927041620533395, 6.994347169790121, 10.582764094442553, 9.212100380430446, 
8.901361471420842, 7.733593560064719, 7.292646673929886, 7.523124993883956, 8.413483649790573, 6.745971429687285, 6.642352670042393, 6.792652616208365, 7.058424327375997, 6.8197281306589685, 8.10655307764079, 6.685050927429077, 6.659166080268061, 6.878696271049514, 4.719019994186838, 5.603938789504077, 7.621652905756453, 6.145401957453078, 6.677120178796311, 6.703668716347524, 5.997955740160936, 5.980717971896108, 5.567386511698885, 7.5956198437787075, 5.130131650562907, 4.990221204908122, 5.476391991515819, 6.365308395517907, 6.8850515038891515, 4.655652668201105, 6.334821345904701, 6.436170644437902, 6.242356223431878, 6.170889252614643, 6.133992909267375, 4.418106928723493, 6.411815646774802, 5.7580827946683035, 5.587910081784416, 5.920203466111238, 5.075548487129552, 5.594348434856907, 4.880413518719992, 7.11898695805031, 4.918739668721355, 6.030580773214612, 4.867536812712909, 6.548113260997608, 5.925273555442318, 6.441058132083475, 5.958600235576363, 6.188452131829611, 4.763154852906501, 5.084234230561494, 6.431881492994884, 6.463236472806914, 5.177834809985206, 5.9085440880547795, 4.466481954235352, 5.093821854921718, 5.913804587244972, 6.534469535291809, 6.9831619354062795, 5.694415254676171, 6.043951539053322, 4.3193148608133285, 4.955811360383821, 6.259607203662118, 5.137108402954629, 3.8870179844761457, 5.345411227868419, 6.21392862186227, 5.825652183856423, 6.542955899153829, 4.8654957072963185, 5.464461129441604, 4.81624685568573, 6.6756728243884025],\n [19.032143621634493, 16.194512861190685, 14.070380479727929, 13.198287009534448, 11.658842295966911, 12.845838181580614, 9.92842975069186, 8.340922863931732, 9.438250198119544, 8.826118705346307, 8.977224376220182, 7.82055561653431, 7.435343739299787, 7.339672601209882, 7.4653233362133475, 6.743519454229658, 8.266202954128136, 7.48683369299498, 8.576888544427238, 8.295673048958871, 7.115855331346225, 7.057634470134533, 7.788478167736406, 7.607587660873876, 7.2221224461159785, 10.477549941721739, 
9.991173576151768, 7.690475066534521, 8.23076073932386, 9.657234478409052, 7.840326794242612, 8.838089830170091, 7.04361205995808, 8.264255760767675, 6.505283763792735, 8.60415728022085, 8.358983359528077, 7.664200099932535, 7.913448256241944, 7.096651644568762, 9.226432084636743, 7.619133027510663, 8.647014114441758, 6.656057609800202, 7.842104656674251, 6.9600498931736805, 8.10606199375973, 8.466432510958839, 7.40153259109756, 9.07148808072991, 6.868617353266047, 5.919080506185863, 6.607333429338317, 5.881337036671048, 8.104557840315634, 9.099680294876244, 10.545378372025553, 7.287784875240738, 8.903782943237218, 6.377039884724531, 6.561077651984436, 5.996439560500791, 5.651760682995162, 7.635823099783645, 7.4254597725695515, 9.973080014049327, 7.989821956036739, 8.721159400686329, 7.282684346639685, 7.805577983759187, 5.284734292939751, 10.273475613245477, 7.444084579226282, 7.196012547270868, 7.3266398223111855, 10.169797171369403, 6.910223517266793, 6.360082652565239, 7.0495510384401445, 7.0010005717049735, 8.937516965119448, 11.768753146194, 10.057163535515652, 7.327156519644253, 6.722085215015063, 7.090113411628408, 8.162956894714235, 8.110979207950077, 7.608851841650445, 8.122056232404226, 9.562302671422325, 11.017566376617967, 8.118450543425004, 5.9884809308000175, 6.233408292972936, 9.984399992482755, 5.70660495993625, 7.2688122355625655, 5.923913173807007, 6.653465700788417, 7.205855711433588, 6.175794064046743, 7.239757808907411, 6.265333926257176, 6.5104783001160635, 6.917826888902511, 5.862495884136119, 5.691576891756985, 6.736533864001166, 9.238688646166626, 8.156307595197847, 6.629414785713525, 6.329526538346367, 6.9447206392072705, 6.9656604512037985, 7.050107597873999, 5.478758804560645, 6.619764152386958, 7.464510003667385, 9.862350340134846, 6.8740825491839335, 9.485750025050397, 8.060864772016885, 6.995910875168525, 7.287096821377743, 6.582874516827176, 6.9549358536386565, 6.245012703718529, 6.464432907244605, 6.942876869923922, 
6.4294820999540665, 6.416639035776809, 5.862306094262835, 6.861035027547251, 8.658030598046818, 8.2283454407951, 6.375704097382855, 5.2618733745296185, 6.713233812181773, 4.940355800497349, 5.518245519699501, 5.668330331297082, 6.732072646158387, 6.367909293670774, 6.878788996969291, 5.824729671076968, 6.008431125827654, 6.035710274031377, 6.336766900878407, 7.086607673658162, 7.253597248114665, 6.882551592867163, 6.139184438513964, 7.838798452685489, 6.069580217003175, 5.769755078490886, 5.626697098976681, 6.160514814104392, 5.755309011244466, 4.969237523454341, 5.369400365976718, 8.310323652947192, 7.405701305374025, 5.909910701799533, 6.895873320501355, 6.840050294965555, 5.802374613799513, 10.341108739001449, 6.585551950849652, 6.539564956769238, 6.752751779344392, 8.17107021872274, 7.666285285991902, 7.3680928866493565, 6.478880746202967, 8.126125853956061, 6.119129939334914, 5.231010188035771, 8.305862299761461, 6.1917252076692515, 7.466396126126784, 5.87307105867748, 4.885696561446003, 7.363839219678114, 6.638050798663683, 6.780625938777236, 8.232446378366747, 7.330033076118185, 6.408003895523217, 6.17068764015699, 5.8491337169032525, 5.86570616958171, 5.039792257538728, 5.84107289576245, 7.121454547722266, 6.12431950738183, 6.9254586066742405, 5.966802122983741, 5.6785771903617634, 5.416991705035096]])\nloss_histories_QCNN_FASHION_SU4 = np.array([[17.443278550474297, 17.16577921968382, 14.973819729555512, 12.769693588206378, 12.170809006417198, 12.213970720341004, 12.578425729600427, 12.43466688489016, 12.28081413013992, 12.084410582501091, 16.417011227422222, 11.382395417442334, 11.23266987153646, 10.808642275524221, 11.44149106023741, 11.250913438040923, 11.354851755365916, 10.493035309996236, 12.026813099693047, 13.327785058834287, 12.224603882808099, 10.448678691640882, 8.514232798052115, 10.087586151506727, 14.055361965916951, 10.704870661998351, 8.819600512858635, 8.65161586998424, 9.01391248018459, 11.957452737257798, 8.95047859991031, 
12.533314976734397, 10.681533577947004, 9.24561706449855, 10.123411197766806, 9.473144455249843, 9.723407803882262, 9.097953540937064, 9.85921377153185, 11.544707585980971, 8.772316400070213, 11.252331126447098, 9.246875642108929, 9.649229202966579, 9.2401865768133, 9.394988572883396, 8.891406690282578, 12.078512046284784, 9.194837727975251, 14.690684009496746, 9.613170964862485, 9.507603398954949, 6.946137009361685, 8.707550993622393, 8.386562458978949, 10.005071289225063, 6.407887689258142, 10.813987974435356, 8.040000699194591, 10.138010598983616, 8.123415087845224, 9.458437814532507, 9.852917882877552, 6.176291943848856, 7.392322094340768, 9.050650081644772, 7.476964859920051, 10.33433750995395, 8.833975412654022, 8.817330877418808, 10.670130146768898, 10.897980497878907, 7.617122516565696, 7.326656966103456, 10.606958474983765, 8.18559371957156, 13.015685368714312, 8.8719608656002, 9.505846581987583, 10.512405484416355, 8.999363041440793, 9.184772791531385, 5.798573250785035, 7.981392582673394, 9.31756935301795, 8.915063630539477, 8.591007726315706, 9.45444400380748, 9.3609292696344, 10.9471017500293, 11.13936094829676, 10.223475315704693, 9.006551987103135, 9.090270961958039, 8.922421726300206, 12.088563884983701, 11.681827733253721, 7.365820996829409, 8.928863473368622, 11.079596566356175, 12.262344011727935, 10.338154467810183, 8.965153208564839, 9.88653947019998, 6.286630648590946, 9.615363704358947, 9.053531788709535, 6.652380853687866, 8.603430756756556, 7.309620971388127, 5.990069632188528, 8.255773133810427, 6.223234782856303, 10.068098065457464, 7.1255135729894645, 8.144562826332951, 8.209875086686242, 7.403124228124328, 11.34965631435034, 9.292427533335111, 11.315492801790676, 8.26865178065061, 6.52268513177663, 8.88908134305642, 9.102670483542195, 7.995237335857793, 8.76866477357488, 6.295409266731967, 9.50278897974037, 7.0324839992390675, 8.171210488622197, 8.657996323038251, 7.956625123747362, 7.141582129224361, 7.4795700450991145, 
10.726480851033932, 8.35005437427705, 9.055218833986887, 8.768069597004352, 9.791120006604377, 8.559962423409974, 9.59088933031979, 9.315865406590403, 6.986772902361076, 7.614420580548834, 7.082080116254331, 10.189204994959402, 9.2543645459938, 10.496683789427879, 9.598394839058413, 8.227786353895036, 7.3370607818432445, 6.695914271627405, 7.505957575532087, 9.113245991475054, 9.999051230168154, 6.851139161032189, 8.415735053184918, 8.69327473235283, 5.593946725483097, 8.01710275475752, 10.393806144359647, 7.040634148312258, 6.508163909075995, 7.396239185415195, 8.544287489685281, 9.68378742309977, 7.373278355532548, 9.08886535185232, 10.243824750108477, 8.797869213111484, 8.206948381956693, 7.748551820260261, 6.448055868560193, 8.168043002409197, 7.312013015818958, 7.682641600486561, 10.919740163829418, 9.145221017483832, 10.88597886983067, 7.842471711530599, 9.86407240077582, 10.640131144801318, 8.299362851639405, 7.638175617014868, 8.11947213747128, 9.625292806144286, 8.020953393336297, 10.714511652044507, 7.571986629999169, 10.234590940333108, 10.145618572281032, 9.23022560343612, 7.662598519223538, 8.217837827520823, 9.136499985731215, 8.887040422851442, 8.43982629891468, 8.275747889469732, 8.43634190178734],\n [13.781420507451605, 14.311127197604215, 12.774744763573139, 11.753722856124261, 10.889005467629874, 10.691661174408802, 9.83085760892305, 7.989851610785623, 11.274199906728377, 13.06847515079633, 11.838697511673214, 9.92153553311109, 8.011282475090788, 11.390653061175241, 10.288798018555665, 9.941574581262115, 13.058531715676198, 11.962490073295825, 10.463279380707643, 8.798106609335939, 8.067138687070784, 11.670129028521373, 18.073529986719524, 13.746328390492538, 11.63520215779551, 10.628683240917256, 8.613759148499042, 7.170247436163334, 10.663671868087405, 8.487377466927516, 14.53491903076141, 10.841888369114738, 9.550244749969027, 10.228351446722499, 7.358463467880387, 6.655560138547353, 7.994193017482326, 6.349488414090103, 8.94655572837793, 
7.620889890131844, 8.37353601034038, 8.418089646167626, 6.851595145268178, 8.33742248363684, 9.113185469923597, 6.887655249448182, 5.940241202167263, 7.97777056397606, 7.324728088854264, 9.228544273625527, 8.264636819212916, 8.168386436711819, 6.42083493489828, 7.402011828017656, 6.8144456182053315, 6.615156514897394, 6.451463214773481, 7.893624888643488, 5.516363129095715, 6.7565312726783056, 8.944052925906442, 8.307235841051643, 6.230644393449745, 6.610607583613377, 7.580506771503883, 8.13936677822777, 6.305975214686918, 5.56600615893906, 6.248594588764513, 8.840392483492927, 5.708048995181534, 9.176612770336554, 9.089096945362018, 7.272426902888895, 7.844774359899686, 8.423613512764872, 7.012194888635476, 9.202258183872091, 6.111497084561831, 8.919889060158082, 7.024251017125005, 7.295306226948175, 8.122770313942244, 8.312934415517494, 6.120615740948725, 8.963251905124288, 7.072094947253225, 7.545763475870825, 5.854270622437426, 8.065630725035863, 8.277996438120136, 9.051296582728574, 4.870385952919668, 4.703486777119415, 8.551189697597664, 7.841416919647015, 6.063558372716247, 7.0446746302120244, 5.691395320810918, 5.5146049948052775, 7.1298017503634465, 7.096293201875608, 5.8919100218265426, 7.702804211482065, 6.6897732992185945, 6.288115688895544, 6.545304522565702, 5.2853042400807695, 9.176030355429871, 6.556744416635388, 6.774015310921933, 7.815469363290282, 6.252766798523736, 6.227952751705526, 7.484721458640154, 7.747466397495973, 5.629324993798868, 8.179587408151514, 5.386451634820617, 7.401539408834831, 6.554172618568669, 7.997850920582821, 5.3604276474802, 6.314956752016867, 6.262283527567297, 7.648019794006348, 6.935319494741965, 6.481339470199687, 7.614021551219931, 6.120775224102908, 8.302372946236032, 7.0225306796182965, 6.845661567684474, 6.955930184751324, 7.1651544212323985, 6.310545860083952, 6.134268262578467, 6.401030614706069, 9.270271512042381, 9.326862864973439, 8.642090597093521, 9.582095271753895, 6.169194216514685, 6.213865892738523, 
6.705902625681505, 5.955158198297917, 7.137559697501645, 11.329087156824865, 6.629481250275585, 7.697225280482319, 6.845129371998716, 7.474123050281505, 7.406390403358574, 5.9652313785437485, 8.54154545801901, 7.47565563189108, 8.275655756837889, 5.088358917871128, 7.496607622318046, 6.762019810658592, 6.5277946228584485, 9.022069200139695, 5.6406020560743615, 6.331594236621054, 7.7756452914304575, 7.865463412212698, 6.548349849121678, 5.985852765962835, 7.615193749978773, 6.599934650715535, 7.198543021991292, 7.398748611799334, 9.918361794473224, 10.00734540058611, 5.464991364168524, 7.562537305293145, 7.040523141605568, 6.5341878195358305, 5.7819985050571026, 5.398436138268658, 7.048133953075174, 6.77365635054944, 7.7937715997337795, 9.09185239480586, 8.752180473887293, 7.68825722151781, 5.767056557461325, 6.783106558069271, 5.348694408445115, 8.23587092840715, 5.201460557442625, 5.423242523914949, 6.409563019447731, 8.022071427130506, 6.089218900632388, 7.035957847198441, 6.869056802256081, 7.157803361892751, 5.9119412281151185, 8.11130518816837],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_MNIST_SU4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_QCNN_MNIST_SU4 = np.array([[15.701968392104067, 14.00975194601019, 13.575795786392687, 12.61039043284571, 11.420276002266895, 11.99525947567977, 12.080220566339552, 10.924803410541063, 11.131198050239918, 10.050426570399022, 11.053791542219374, 9.788098151349532, 11.143553225059602, 9.197232470430466, 9.229157116588144, 9.162957631022463, 10.71179400256479, 10.196747292508277, 10.79829244400887, 9.42574101202477, 8.295175259785562, 9.076819970830673, 10.227437780059452, 7.8419230403439535, 9.851325083008735, 
7.682262563812961, 6.124709565416007, 9.158968991756522, 6.672908530214931, 5.209655971223005, 4.993276753394282, 9.664936574185854, 6.88459262068916, 4.970247473779515, 4.603592627994019, 4.59443369268576, 9.794815707657726, 10.239708125356454, 6.449176123973521, 4.3614383898577245, 3.178962661406524, 5.323808065197847, 3.742336146764105, 3.408487182474864, 4.0378028800153745, 3.446378084287989, 3.7187355535234006, 3.087831614419782, 3.7096100667430014, 4.565046696353874, 3.770663248286091, 4.4217543223496145, 3.77968191595814, 5.056168926358595, 2.7803131922419175, 2.719334554107501, 3.8792788429673997, 3.1498685402458206, 4.025981341150764, 4.3933448358886835, 3.5465754527777196, 3.5006393293490854, 5.749003912443001, 5.285935336974153, 5.215205181476441, 3.33499755695145, 3.7997378845686343, 3.017773877789444, 4.067487463964749, 4.5587834989118035, 3.740154036304224, 2.7708132679167123, 4.512851209860175, 2.924350433846229, 2.795042698165882, 4.149110497309133, 3.369464977487066, 4.23293533133619, 3.8553201319663155, 3.9359680762072085, 4.161241707714771, 5.064086532487843, 3.702060601779912, 2.7105860877244092, 4.108486332151197, 3.6338065699190376, 2.881211037354094, 3.723927043461242, 5.510865245386038, 5.124190965728359, 2.821525546046732, 3.098837600425342, 2.3469765230853548, 2.4742370473245545, 4.067343226565326, 3.6394183895744443, 2.733427685289968, 3.2653415891638153, 4.247409817892573, 3.6035555769047796, 4.3211502162877675, 4.045216873050909, 2.9323636325128075, 3.4306359296475173, 2.8276962373988956, 3.564636207572396, 2.9737011426056164, 4.391465105518088, 3.069343501908466, 2.9634796303315323, 3.1466302177539296, 3.3780949337259174, 4.562911170376287, 3.948278038667186, 4.874029252223579, 3.6684211237346824, 2.998924179663272, 3.590306877884917, 3.981573843773927, 4.613897998161246, 3.981322551072088, 2.680360942734963, 2.422755364259979, 3.2320172271380865, 3.8089662923296976, 2.939901362941198, 4.833732255949402, 2.1849227406448146, 
2.6372671950900055, 3.104418483742709, 3.8629675948558835, 4.687536630526817, 6.9927241243613265, 4.922012283032045, 7.452795267168994, 4.485254992748128, 2.8914771697111363, 4.265530398933267, 3.1971441908330624, 3.068105106360343, 4.275963212623704, 3.155163439406081, 2.8000281304346384, 4.1954482933166934, 3.253825150780586, 3.272701085036064, 2.8422443341131016, 2.603392372802548, 3.532346488795025, 4.011695154843002, 3.472111135034264, 2.9318768612813235, 4.250864330840455, 4.439079409959164, 6.85829042210198, 3.373765377796869, 3.3066825370799746, 3.6209249292139156, 2.663136341349555, 3.176614433068561, 5.179692142004379, 2.386125444576648, 3.6714569666304624, 5.968140140543236, 4.6862114302954225, 3.627958786157068, 3.7152938038804124, 3.4850735691747947, 5.75016980980869, 4.714566479061207, 2.8790642558219135, 4.789001128730438, 2.723388960931453, 3.3189986144639394, 4.477900327018953, 3.2164083086369955, 4.715276756538122, 3.566305058074321, 3.4740378623097388, 5.9606633045649655, 3.3624451866929257, 7.466756701248525, 3.099570878630708, 3.396522335144449, 3.5183642097565473, 6.242929419688088, 5.3682240107336066, 3.1235126677502376, 2.575365849609988, 2.9803470547016326, 3.379381007307192, 5.401406710868802, 2.9037086563127614, 5.75416297233594, 3.1099352188520903, 4.07746222351273, 3.172574914460961, 3.2580509567486198, 2.6716076043732255, 4.383820778498077],\n [16.67610968670757, 14.718491446461517, 12.090224017449659, 13.472120600797032, 13.25131879124339, 13.581067562519276, 11.840332662256907, 11.130310125485495, 9.93192347576329, 11.029197156268669, 9.465616135342069, 8.846563678505364, 8.832779249446189, 7.304634735385226, 6.712227892140214, 7.312650169150179, 5.20134121314218, 6.881439914653229, 6.838544538526815, 6.765550850415497, 6.5342587606872105, 5.5488092796654875, 5.811336175272483, 5.794328461356434, 7.5140454270663, 6.833730935841877, 6.5306652794487245, 6.182513857890757, 5.931352459921179, 6.077425703330703, 5.122698673541781, 
5.661275028840929, 6.288518717055746, 8.056005588611056, 5.29513931085165, 5.080406097314944, 4.541240119374063, 6.7937862430249325, 5.551516303937469, 8.561069772637719, 5.778696027265669, 7.581037487036702, 4.87089471665885, 5.461873570139039, 6.274697551835343, 6.178863189755656, 5.525816468577715, 5.752541148100186, 6.78491692819599, 6.429252986978849, 6.103956718052821, 5.997072731286677, 8.039526919812602, 5.44374742986944, 6.312137074095871, 5.3384457339559885, 7.340812313020643, 5.5364661975369485, 5.72976620514688, 5.841613803200098, 5.429284862378174, 5.522583756402512, 5.509153976952111, 4.8467675121924945, 6.49822992308749, 5.368611880740126, 6.65582979493719, 6.852889978124359, 6.94843047028678, 5.596496249741784, 6.339885612054319, 5.267701813339295, 6.152560736297184, 5.775192895855112, 5.5163207175483, 6.44778772471083, 5.430156597046173, 5.932478159862267, 5.6314257765548135, 5.481369375552064, 5.338024105579926, 6.670512346943634, 6.300996333041053, 5.730069681685715, 5.02963972081574, 6.936543925341157, 5.062802489117022, 5.55732970400315, 4.7762005374477985, 6.645698540681672, 5.869444734157781, 6.253047036221847, 5.19025078164954, 7.484779118830626, 5.222194367529911, 4.900463084980787, 5.935513417865195, 6.310398185116211, 8.132592884151036, 5.664289009103563, 6.183934383088775, 5.088139300408709, 5.508491361131622, 5.548466329859139, 4.719837683756191, 6.167420125947168, 6.594100741695282, 8.158242684269862, 5.487510771423499, 5.21810002541766, 6.04446323264393, 5.563562650115921, 5.232073383963237, 5.401459469361614, 8.85540524437023, 5.4966514988068536, 7.138435313756308, 3.738691460410528, 5.974228304092972, 5.889754947819547, 5.205233223920336, 5.660552283292949, 7.533655402675502, 6.237175062990105, 4.612106288110162, 4.83261522589498, 6.121743265892744, 4.616937597386011, 5.607222524525371, 6.607172071129023, 4.616853299540282, 4.8959008742344095, 5.388591509835658, 5.110263418707962, 6.2392279693893, 5.698228760691443, 
6.090751357758069, 5.387412545935041, 4.59741341670396, 4.256336578754677, 5.242254014621976, 5.857879934488944, 5.50051672654861, 6.083693906452281, 6.28930037142256, 5.435763076249767, 5.600938823415071, 4.911512560507955, 4.708365035514755, 5.1425019670095296, 4.05608917987239, 5.8505255380084735, 6.0356476179414935, 5.843945163828317, 6.003963889711409, 6.004885939386664, 6.093422936170946, 7.43302607226079, 5.598670955133419, 6.318483050933266, 5.29497634661668, 4.659365000673817, 4.657314955631369, 6.911662937984151, 5.956445279906723, 6.205526277177433, 5.8233172244351215, 7.349116195432372, 6.132131053390929, 5.485940276394268, 6.217186135246689, 5.418644398444133, 5.036585543343947, 4.968299459526778, 4.075560315776475, 5.568414559989412, 5.4080102959787855, 5.464803055108561, 4.993330216988004, 4.506529139518999, 5.988501668255765, 6.0157665530526225, 5.864092195873374, 5.551867433037801, 6.259680423346111, 5.377826263121123, 5.832501013799243, 5.100914993522977, 5.177372502514758, 5.0693544932836945, 4.587039983702243, 5.405716516521254, 6.365331685040749, 6.633691593318545, 6.630421586190276, 5.442422435211777, 4.359705683191795, 5.151171627435816, 5.864607067681872, 7.071624705566063],\n [16.157302948705482, 15.324869229417093, 12.793915961353354, 11.194524103323525, 12.507868898219838, 12.616777225433516, 9.234552183500577, 13.336267076141908, 13.461740153378038, 15.409796063035124, 14.399175925702068, 12.32478585460413, 11.568925602138641, 11.705259189481604, 9.73148182172859, 10.518139699222413, 10.340323759782864, 10.184819821480033, 10.267859866173783, 9.163877978474442, 8.228781475027233, 10.085746901037597, 9.56261037065576, 9.945013093887507, 10.573418684599648, 8.355653192143533, 7.489047008196033, 8.831587856043486, 9.243498791854762, 7.899791859405868, 7.948776869634101, 8.167661085368842, 8.591449746262468, 7.964795560429807, 7.997626507713476, 8.73053525367385, 7.882803305531689, 9.108972728938417, 7.486328417560084, 8.358746544165566, 
7.3501368133987555, 7.312364691281757, 7.145939172039308, 7.566180102372289, 8.736332347556841, 8.807592977750446, 7.18998285729017, 7.866211944807089, 7.766185842391377, 8.749967011867623, 7.34235823310588, 7.43610428815889, 11.590298332110903, 10.682766742665306, 8.308300044564142, 7.845831118765122, 9.813484056770166, 7.876955617382441, 7.312174779836708, 8.759938285312519, 7.171839228177709, 8.499615134414299, 7.509722002675852, 8.557543892841624, 10.934254213376416, 7.398766901107699, 9.48803778380712, 8.620835490337631, 7.451737187278983, 7.506113667562998, 9.622532082724248, 7.981036342366205, 7.20993447571356, 7.390474283132135, 6.912475677827477, 9.935513837829825, 8.1391905581568, 7.653484659161942, 8.668598919073082, 7.3093307391150555, 7.042841867576427, 7.3547098066608685, 8.436869537863673, 9.242697469694416, 7.9004890747461864, 7.5769601403855145, 7.739169857643963, 7.767203539409437, 8.131669134778706, 7.523153815633725, 7.243530316860227, 7.017534997319622, 7.893922940479337, 7.851475941368426, 11.101369748051242, 7.214014499430652, 7.6427246289029735, 8.060879862630905, 7.16097689167272, 8.51976921955402, 7.426907407706117, 7.117950793658989, 9.61181003072403, 7.805022209950351, 7.192555676979239, 7.290056228132629, 7.74080333198409, 8.487442826710854, 8.076479022745504, 8.787609642883186, 8.046578612355681, 7.105401588418192, 6.844574411490987, 7.6829392612869905, 7.312980895734044, 9.491955518171958, 9.688628921645819, 7.4551245842341824, 7.927041620533395, 6.994347169790121, 10.582764094442553, 9.212100380430446, 8.901361471420842, 7.733593560064719, 7.292646673929886, 7.523124993883956, 8.413483649790573, 6.745971429687285, 6.642352670042393, 6.792652616208365, 7.058424327375997, 6.8197281306589685, 8.10655307764079, 6.685050927429077, 6.659166080268061, 6.878696271049514, 4.719019994186838, 5.603938789504077, 7.621652905756453, 6.145401957453078, 6.677120178796311, 6.703668716347524, 5.997955740160936, 5.980717971896108, 5.567386511698885, 
7.5956198437787075, 5.130131650562907, 4.990221204908122, 5.476391991515819, 6.365308395517907, 6.8850515038891515, 4.655652668201105, 6.334821345904701, 6.436170644437902, 6.242356223431878, 6.170889252614643, 6.133992909267375, 4.418106928723493, 6.411815646774802, 5.7580827946683035, 5.587910081784416, 5.920203466111238, 5.075548487129552, 5.594348434856907, 4.880413518719992, 7.11898695805031, 4.918739668721355, 6.030580773214612, 4.867536812712909, 6.548113260997608, 5.925273555442318, 6.441058132083475, 5.958600235576363, 6.188452131829611, 4.763154852906501, 5.084234230561494, 6.431881492994884, 6.463236472806914, 5.177834809985206, 5.9085440880547795, 4.466481954235352, 5.093821854921718, 5.913804587244972, 6.534469535291809, 6.9831619354062795, 5.694415254676171, 6.043951539053322, 4.3193148608133285, 4.955811360383821, 6.259607203662118, 5.137108402954629, 3.8870179844761457, 5.345411227868419, 6.21392862186227, 5.825652183856423, 6.542955899153829, 4.8654957072963185, 5.464461129441604, 4.81624685568573, 6.6756728243884025],\n [19.032143621634493, 16.194512861190685, 14.070380479727929, 13.198287009534448, 11.658842295966911, 12.845838181580614, 9.92842975069186, 8.340922863931732, 9.438250198119544, 8.826118705346307, 8.977224376220182, 7.82055561653431, 7.435343739299787, 7.339672601209882, 7.4653233362133475, 6.743519454229658, 8.266202954128136, 7.48683369299498, 8.576888544427238, 8.295673048958871, 7.115855331346225, 7.057634470134533, 7.788478167736406, 7.607587660873876, 7.2221224461159785, 10.477549941721739, 9.991173576151768, 7.690475066534521, 8.23076073932386, 9.657234478409052, 7.840326794242612, 8.838089830170091, 7.04361205995808, 8.264255760767675, 6.505283763792735, 8.60415728022085, 8.358983359528077, 7.664200099932535, 7.913448256241944, 7.096651644568762, 9.226432084636743, 7.619133027510663, 8.647014114441758, 6.656057609800202, 7.842104656674251, 6.9600498931736805, 8.10606199375973, 8.466432510958839, 7.40153259109756, 
9.07148808072991, 6.868617353266047, 5.919080506185863, 6.607333429338317, 5.881337036671048, 8.104557840315634, 9.099680294876244, 10.545378372025553, 7.287784875240738, 8.903782943237218, 6.377039884724531, 6.561077651984436, 5.996439560500791, 5.651760682995162, 7.635823099783645, 7.4254597725695515, 9.973080014049327, 7.989821956036739, 8.721159400686329, 7.282684346639685, 7.805577983759187, 5.284734292939751, 10.273475613245477, 7.444084579226282, 7.196012547270868, 7.3266398223111855, 10.169797171369403, 6.910223517266793, 6.360082652565239, 7.0495510384401445, 7.0010005717049735, 8.937516965119448, 11.768753146194, 10.057163535515652, 7.327156519644253, 6.722085215015063, 7.090113411628408, 8.162956894714235, 8.110979207950077, 7.608851841650445, 8.122056232404226, 9.562302671422325, 11.017566376617967, 8.118450543425004, 5.9884809308000175, 6.233408292972936, 9.984399992482755, 5.70660495993625, 7.2688122355625655, 5.923913173807007, 6.653465700788417, 7.205855711433588, 6.175794064046743, 7.239757808907411, 6.265333926257176, 6.5104783001160635, 6.917826888902511, 5.862495884136119, 5.691576891756985, 6.736533864001166, 9.238688646166626, 8.156307595197847, 6.629414785713525, 6.329526538346367, 6.9447206392072705, 6.9656604512037985, 7.050107597873999, 5.478758804560645, 6.619764152386958, 7.464510003667385, 9.862350340134846, 6.8740825491839335, 9.485750025050397, 8.060864772016885, 6.995910875168525, 7.287096821377743, 6.582874516827176, 6.9549358536386565, 6.245012703718529, 6.464432907244605, 6.942876869923922, 6.4294820999540665, 6.416639035776809, 5.862306094262835, 6.861035027547251, 8.658030598046818, 8.2283454407951, 6.375704097382855, 5.2618733745296185, 6.713233812181773, 4.940355800497349, 5.518245519699501, 5.668330331297082, 6.732072646158387, 6.367909293670774, 6.878788996969291, 5.824729671076968, 6.008431125827654, 6.035710274031377, 6.336766900878407, 7.086607673658162, 7.253597248114665, 6.882551592867163, 6.139184438513964, 
7.838798452685489, 6.069580217003175, 5.769755078490886, 5.626697098976681, 6.160514814104392, 5.755309011244466, 4.969237523454341, 5.369400365976718, 8.310323652947192, 7.405701305374025, 5.909910701799533, 6.895873320501355, 6.840050294965555, 5.802374613799513, 10.341108739001449, 6.585551950849652, 6.539564956769238, 6.752751779344392, 8.17107021872274, 7.666285285991902, 7.3680928866493565, 6.478880746202967, 8.126125853956061, 6.119129939334914, 5.231010188035771, 8.305862299761461, 6.1917252076692515, 7.466396126126784, 5.87307105867748, 4.885696561446003, 7.363839219678114, 6.638050798663683, 6.780625938777236, 8.232446378366747, 7.330033076118185, 6.408003895523217, 6.17068764015699, 5.8491337169032525, 5.86570616958171, 5.039792257538728, 5.84107289576245, 7.121454547722266, 6.12431950738183, 6.9254586066742405, 5.966802122983741, 5.6785771903617634, 5.416991705035096]])\nloss_histories_QCNN_FASHION_SU4 = np.array([[17.443278550474297, 17.16577921968382, 14.973819729555512, 12.769693588206378, 12.170809006417198, 12.213970720341004, 12.578425729600427, 12.43466688489016, 12.28081413013992, 12.084410582501091, 16.417011227422222, 11.382395417442334, 11.23266987153646, 10.808642275524221, 11.44149106023741, 11.250913438040923, 11.354851755365916, 10.493035309996236, 12.026813099693047, 13.327785058834287, 12.224603882808099, 10.448678691640882, 8.514232798052115, 10.087586151506727, 14.055361965916951, 10.704870661998351, 8.819600512858635, 8.65161586998424, 9.01391248018459, 11.957452737257798, 8.95047859991031, 12.533314976734397, 10.681533577947004, 9.24561706449855, 10.123411197766806, 9.473144455249843, 9.723407803882262, 9.097953540937064, 9.85921377153185, 11.544707585980971, 8.772316400070213, 11.252331126447098, 9.246875642108929, 9.649229202966579, 9.2401865768133, 9.394988572883396, 8.891406690282578, 12.078512046284784, 9.194837727975251, 14.690684009496746, 9.613170964862485, 9.507603398954949, 6.946137009361685, 8.707550993622393, 
8.386562458978949, 10.005071289225063, 6.407887689258142, 10.813987974435356, 8.040000699194591, 10.138010598983616, 8.123415087845224, 9.458437814532507, 9.852917882877552, 6.176291943848856, 7.392322094340768, 9.050650081644772, 7.476964859920051, 10.33433750995395, 8.833975412654022, 8.817330877418808, 10.670130146768898, 10.897980497878907, 7.617122516565696, 7.326656966103456, 10.606958474983765, 8.18559371957156, 13.015685368714312, 8.8719608656002, 9.505846581987583, 10.512405484416355, 8.999363041440793, 9.184772791531385, 5.798573250785035, 7.981392582673394, 9.31756935301795, 8.915063630539477, 8.591007726315706, 9.45444400380748, 9.3609292696344, 10.9471017500293, 11.13936094829676, 10.223475315704693, 9.006551987103135, 9.090270961958039, 8.922421726300206, 12.088563884983701, 11.681827733253721, 7.365820996829409, 8.928863473368622, 11.079596566356175, 12.262344011727935, 10.338154467810183, 8.965153208564839, 9.88653947019998, 6.286630648590946, 9.615363704358947, 9.053531788709535, 6.652380853687866, 8.603430756756556, 7.309620971388127, 5.990069632188528, 8.255773133810427, 6.223234782856303, 10.068098065457464, 7.1255135729894645, 8.144562826332951, 8.209875086686242, 7.403124228124328, 11.34965631435034, 9.292427533335111, 11.315492801790676, 8.26865178065061, 6.52268513177663, 8.88908134305642, 9.102670483542195, 7.995237335857793, 8.76866477357488, 6.295409266731967, 9.50278897974037, 7.0324839992390675, 8.171210488622197, 8.657996323038251, 7.956625123747362, 7.141582129224361, 7.4795700450991145, 10.726480851033932, 8.35005437427705, 9.055218833986887, 8.768069597004352, 9.791120006604377, 8.559962423409974, 9.59088933031979, 9.315865406590403, 6.986772902361076, 7.614420580548834, 7.082080116254331, 10.189204994959402, 9.2543645459938, 10.496683789427879, 9.598394839058413, 8.227786353895036, 7.3370607818432445, 6.695914271627405, 7.505957575532087, 9.113245991475054, 9.999051230168154, 6.851139161032189, 8.415735053184918, 8.69327473235283, 
5.593946725483097, 8.01710275475752, 10.393806144359647, 7.040634148312258, 6.508163909075995, 7.396239185415195, 8.544287489685281, 9.68378742309977, 7.373278355532548, 9.08886535185232, 10.243824750108477, 8.797869213111484, 8.206948381956693, 7.748551820260261, 6.448055868560193, 8.168043002409197, 7.312013015818958, 7.682641600486561, 10.919740163829418, 9.145221017483832, 10.88597886983067, 7.842471711530599, 9.86407240077582, 10.640131144801318, 8.299362851639405, 7.638175617014868, 8.11947213747128, 9.625292806144286, 8.020953393336297, 10.714511652044507, 7.571986629999169, 10.234590940333108, 10.145618572281032, 9.23022560343612, 7.662598519223538, 8.217837827520823, 9.136499985731215, 8.887040422851442, 8.43982629891468, 8.275747889469732, 8.43634190178734],\n [13.781420507451605, 14.311127197604215, 12.774744763573139, 11.753722856124261, 10.889005467629874, 10.691661174408802, 9.83085760892305, 7.989851610785623, 11.274199906728377, 13.06847515079633, 11.838697511673214, 9.92153553311109, 8.011282475090788, 11.390653061175241, 10.288798018555665, 9.941574581262115, 13.058531715676198, 11.962490073295825, 10.463279380707643, 8.798106609335939, 8.067138687070784, 11.670129028521373, 18.073529986719524, 13.746328390492538, 11.63520215779551, 10.628683240917256, 8.613759148499042, 7.170247436163334, 10.663671868087405, 8.487377466927516, 14.53491903076141, 10.841888369114738, 9.550244749969027, 10.228351446722499, 7.358463467880387, 6.655560138547353, 7.994193017482326, 6.349488414090103, 8.94655572837793, 7.620889890131844, 8.37353601034038, 8.418089646167626, 6.851595145268178, 8.33742248363684, 9.113185469923597, 6.887655249448182, 5.940241202167263, 7.97777056397606, 7.324728088854264, 9.228544273625527, 8.264636819212916, 8.168386436711819, 6.42083493489828, 7.402011828017656, 6.8144456182053315, 6.615156514897394, 6.451463214773481, 7.893624888643488, 5.516363129095715, 6.7565312726783056, 8.944052925906442, 8.307235841051643, 6.230644393449745, 
6.610607583613377, 7.580506771503883, 8.13936677822777, 6.305975214686918, 5.56600615893906, 6.248594588764513, 8.840392483492927, 5.708048995181534, 9.176612770336554, 9.089096945362018, 7.272426902888895, 7.844774359899686, 8.423613512764872, 7.012194888635476, 9.202258183872091, 6.111497084561831, 8.919889060158082, 7.024251017125005, 7.295306226948175, 8.122770313942244, 8.312934415517494, 6.120615740948725, 8.963251905124288, 7.072094947253225, 7.545763475870825, 5.854270622437426, 8.065630725035863, 8.277996438120136, 9.051296582728574, 4.870385952919668, 4.703486777119415, 8.551189697597664, 7.841416919647015, 6.063558372716247, 7.0446746302120244, 5.691395320810918, 5.5146049948052775, 7.1298017503634465, 7.096293201875608, 5.8919100218265426, 7.702804211482065, 6.6897732992185945, 6.288115688895544, 6.545304522565702, 5.2853042400807695, 9.176030355429871, 6.556744416635388, 6.774015310921933, 7.815469363290282, 6.252766798523736, 6.227952751705526, 7.484721458640154, 7.747466397495973, 5.629324993798868, 8.179587408151514, 5.386451634820617, 7.401539408834831, 6.554172618568669, 7.997850920582821, 5.3604276474802, 6.314956752016867, 6.262283527567297, 7.648019794006348, 6.935319494741965, 6.481339470199687, 7.614021551219931, 6.120775224102908, 8.302372946236032, 7.0225306796182965, 6.845661567684474, 6.955930184751324, 7.1651544212323985, 6.310545860083952, 6.134268262578467, 6.401030614706069, 9.270271512042381, 9.326862864973439, 8.642090597093521, 9.582095271753895, 6.169194216514685, 6.213865892738523, 6.705902625681505, 5.955158198297917, 7.137559697501645, 11.329087156824865, 6.629481250275585, 7.697225280482319, 6.845129371998716, 7.474123050281505, 7.406390403358574, 5.9652313785437485, 8.54154545801901, 7.47565563189108, 8.275655756837889, 5.088358917871128, 7.496607622318046, 6.762019810658592, 6.5277946228584485, 9.022069200139695, 5.6406020560743615, 6.331594236621054, 7.7756452914304575, 7.865463412212698, 6.548349849121678, 
5.985852765962835, 7.615193749978773, 6.599934650715535, 7.198543021991292, 7.398748611799334, 9.918361794473224, 10.00734540058611, 5.464991364168524, 7.562537305293145, 7.040523141605568, 6.5341878195358305, 5.7819985050571026, 5.398436138268658, 7.048133953075174, 6.77365635054944, 7.7937715997337795, 9.09185239480586, 8.752180473887293, 7.68825722151781, 5.767056557461325, 6.783106558069271, 5.348694408445115, 8.23587092840715, 5.201460557442625, 5.423242523914949, 6.409563019447731, 8.022071427130506, 6.089218900632388, 7.035957847198441, 6.869056802256081, 7.157803361892751, 5.9119412281151185, 8.11130518816837],\n [17.459975862230365, 16.035749627924975, 17.18838777214957, 14.965891703963191, 15.993014186770322, 14.865135537787218, 14.899235224369932, 14.413672066868134, 13.883837794678469, 12.668394725165031, 12.81644653763046, 12.430129738605853, 14.895598272770604, 12.828667864277463, 12.55165180148722, 12.686341345367868, 12.801550128580457, 13.146552401067662, 13.79568161845721, 12.841269687623006, 11.048741216694062, 11.095098462696997, 11.328646795214407, 14.469995656407928, 11.211728429887838, 10.332971201436699, 12.169767282195094, 12.260646185017386, 11.291980982335032, 11.582883632219728, 10.269562940067868, 12.493563864692193, 10.72883997288575, 11.050630785466156, 9.95344980064411, 11.427008879233378, 10.878908388915027, 10.771004365406146, 9.808425864141345, 9.791109300248536, 11.7113441824611, 11.959048315860688, 11.459908558272819, 11.126054151409766, 9.328252490001935, 11.523924845521126, 10.358743952589199, 10.596677386270969, 10.740609308215678, 9.894176987991175, 10.244173850295189, 7.927475454261797, 6.857694055625578, 10.317153674302336, 7.533170003430211, 10.162245394326915, 12.332967234941394, 7.336550790305615, 7.472371986231043, 8.780432097890694, 11.114931906352684, 6.836466134758279, 7.630377356740579, 7.023103731521618, 7.334859309549973, 7.299430194504444, 9.216839846711192, 7.123759421642965, 8.470537506491658, 
9.38863465647617, 8.030866984336708, 9.185269089577277, 7.503215217249089, 10.99056571841205, 10.304633848571342, 7.3948242000253686, 8.161662982952835, 9.361411402483414, 9.48672407350652, 8.00036925874676, 6.296150070538171, 10.79363478346328, 7.7755094494493, 6.890823225749235, 10.955585376973156, 8.751595053584305, 8.70127964010896, 6.679636911017944, 10.340872416991916, 8.600840935961065, 8.637440988726306, 6.96376371792665, 9.67813752222703, 9.63598205902423, 10.706601935609097, 7.6150036933103955, 7.27381644699862, 6.526409604866082, 7.93604998507248, 8.96111879447386, 9.236528986472686, 9.23164964427008, 7.95594535575086, 9.85250989910494, 8.02710057165307, 5.083218180417743, 7.102064656824691, 9.463149038077571, 8.137033718928052, 6.23708017977305, 11.766565431871733, 11.013107460804878, 6.977587848984027, 8.769441986007832, 10.184766129650757, 9.817065827883836, 8.61951312561197, 8.802006787010635, 7.353706182643092, 7.001784249075218, 9.61892538770961, 6.840337166573375, 13.06990676483989, 7.011100350205812, 11.706063649485246, 10.007334064953163, 7.464914449158837, 8.071202576799415, 8.539718947050654, 6.699773677077703, 7.338162386739442, 10.642426360238414, 8.431371710982715, 5.77880780586605, 7.779492903237399, 7.518873127517724, 7.893062851533497, 6.660855454042156, 6.876835781879192, 8.561387884167551, 4.938483389069482, 7.144558868875467, 6.50069734100023, 11.798343100728069, 8.927807624362318, 11.836105068714826, 9.198312338046648, 7.91822035731483, 6.628910791541805, 10.394574924451714, 8.747555738118846, 6.845293724956791, 8.218875756457745, 7.65932838628157, 9.194888839584037, 6.3416897861327195, 7.137113467047185, 11.842572256944235, 7.811261487005192, 7.783480400863692, 9.476903487779085, 9.064381210148133, 7.994704539234405, 6.072028715158351, 8.594031878176462, 5.955563914806705, 9.350570597725985, 7.5917078119804495, 8.725234843842438, 7.847796913033744, 6.2867181325391766, 8.93512407806589, 7.1842297263118216, 6.339656229823455, 
7.376922738780999, 6.583351581614699, 7.13903912317875, 6.818783191762078, 5.604546207629945, 7.768489513884695, 6.49357285467107, 4.755276166129291, 7.850674943448794, 6.0650976726623576, 8.436575845953756, 7.772024213108239, 8.356490036897103, 8.565732517915286, 6.780772217721273, 9.01068623703282, 9.79022758182365, 8.492731407090883, 7.982328257601873, 9.599615930780557, 11.07277777491831, 6.896958112675264, 8.753323430651404, 8.20736821864374, 9.962535361955954, 9.784613941911987],\n [16.078581915456894, 15.551948645299035, 16.268990336797817, 13.27492132814789, 12.653444495213616, 12.335550815972947, 11.203068009128836, 13.818018471215131, 12.390782655911048, 12.16861851789434, 12.976616762856493, 12.458182979922924, 10.785660467895937, 10.596281134331853, 10.598247597662484, 11.48925757088818, 9.952231956192257, 10.9603301706603, 10.75406079470945, 9.263381118466908, 10.024903906146596, 10.02285979578101, 8.725891050623563, 8.8110447174955, 11.798664965731673, 9.41147135557478, 8.707235102396805, 8.555916611569149, 9.570288163360518, 9.061572574560243, 6.9100234750010205, 9.0975788055241, 11.213705166333545, 13.083047899893907, 18.509887491689366, 10.758937740007648, 8.08682601052003, 9.11088050395127, 6.799606070934344, 8.319445907430541, 8.557758105280833, 6.797690913597723, 10.037661560155012, 8.215913950267181, 7.859440080738447, 7.546248682731447, 9.043388585394974, 7.464499666888328, 7.338301029359666, 5.536251127830057, 6.182019326931957, 4.612544868719036, 8.346804507802606, 8.655119265363009, 7.241711436823657, 6.877766052931581, 6.178995062584081, 7.164902403281522, 7.495071573754694, 7.405141504073983, 7.88320176374737, 6.608399591079871, 8.170312880448332, 7.6606189289608455, 7.43890999312149, 8.050289152228833, 6.41680076884502, 8.40259563039491, 6.877417277430936, 6.625701678854269, 6.195914154714127, 5.833315696655207, 7.519956971187638, 7.890402188591403, 5.885274657347369, 6.85981554043308, 7.0790437203811996, 8.141659719595388, 
9.529073938972124, 9.11286211264871, 9.419867429756625, 10.652487682080594, 8.086142649020843, 7.170283074680485, 7.5758748904795565, 7.49526204054659, 7.5060283593093065, 7.315422999048236, 5.559589583999333, 6.6894183922516355, 6.055665604575956, 13.145355688580487, 6.8847399674583425, 7.337546107599397, 7.644127954802972, 6.790200258156367, 8.962519703882434, 12.821110910677632, 14.533286822513324, 5.578715200431863, 6.269807858963271, 6.2967891792058195, 11.036341426885684, 8.982404270851571, 6.918566850094237, 9.98229913203689, 6.839400628138751, 7.831976546559741, 9.176861832927699, 8.776105854080996, 7.846617288257853, 6.625640994354236, 10.33833003207289, 7.650949646700952, 8.240201744741027, 5.894346405037781, 6.765704844611307, 6.598639240818315, 7.062939085704468, 8.40590909981085, 5.239284065801005, 6.5273449351167745, 6.119921350125418, 7.622793608754128, 7.975389074028713, 9.761172269260438, 9.465028583802415, 8.113140562190294, 8.320008696618505, 6.808008059516669, 6.0090191399573545, 6.907504166434337, 5.8917924335228316, 7.048573077389917, 6.7643803268922085, 4.784088519933732, 6.116980418330318, 6.362383598446602, 6.994277659825698, 6.083560447594546, 6.22829552265675, 4.759146572175497, 8.637608069752496, 7.634430485294694, 7.3327436917585676, 6.129153305128859, 6.7717271120842035, 7.746543145044954, 7.976627480730413, 5.87517443642307, 5.785493605469459, 8.59816239885267, 6.092063650367079, 7.510720554580129, 7.104407066560065, 7.706381005839659, 6.3795316386953305, 8.603190272508067, 7.419666996632783, 6.80853946090193, 6.84390342864538, 7.832188783886975, 5.781370989376079, 5.687974915978132, 5.228708805630709, 5.813110268775446, 8.77126422768062, 6.631411896300097, 6.97375987215497, 5.398943150631865, 6.824674132978643, 6.835718511020224, 7.036416989460253, 6.720358922660684, 6.600828891817823, 9.52104495164815, 6.833227168980575, 5.885606416072369, 9.08868462512898, 7.158801668924259, 6.567896710698629, 7.189953585970182, 5.497172263419137, 
8.88975758713214, 7.884142269205199, 8.180564828799383, 9.159722460144716, 7.539710100801855, 5.672312826072391, 7.128910049569991, 6.344874031685596, 5.361818804417766, 4.836023291425283, 5.428002394465912, 5.508348115737088, 6.658319248890129, 7.960378369266759, 7.712975190142974, 4.710869488703378, 6.683008165593933]])\nloss_histories_QCNN_MNIST_SO4 = np.array([[18.63822742818134, 14.996351756494038, 13.414763568786126, 10.329255663823925, 9.120760457750439, 8.102933852957612, 8.810869354003746, 10.006794023889881, 8.622455729518737, 7.467061678248796, 5.920724352894273, 7.459966178352162, 11.041309513649322, 7.47662233010936, 7.229695349908017, 7.4597916165930975, 9.340903651158031, 7.760242406952571, 6.9303084303708005, 6.777338888591327, 7.9143882391826335, 7.973693583757422, 6.523936001317213, 5.721920320624669, 6.411448467747264, 6.743808258426262, 8.36566536074733, 12.213662344812196, 9.86487882119216, 7.4320976728582036, 7.216063766836224, 5.953614537542056, 7.891266570572217, 9.775016203278446, 7.0168815366447355, 6.814926383198957, 8.07816330468346, 7.974640687711596, 6.915185780000111, 9.47010152197568, 7.101942912860193, 6.783657568036721, 5.249404624801605, 4.367429035519884, 5.320251740486269, 5.336899745584592, 4.317477069873344, 5.568857370287802, 4.920977955283231, 4.684384264386535, 4.553006285364518, 6.0887038581132815, 4.1491547879052275, 4.088186282272099, 5.244694869029588, 7.32992492009621, 8.228756722276643, 8.500450045848522, 8.900388765805115, 4.782393539924687, 4.453707918535589, 4.889501609395316, 3.7503185012881777, 6.173264902405083, 4.585455869803615, 7.977544468005952, 12.285540362630513, 17.202123904275613, 23.657125425177686, 20.76647699730193, 16.786600872716274, 14.920976885555575, 11.862972721304175, 12.38371680287977, 10.751962948697887, 10.622646942674177, 7.436915527080927, 6.9984957327255355, 9.245537060489227, 10.87902779410266, 7.756676325418157, 6.2902948478653915, 9.054715841741446, 10.213394194913555, 
10.713174854514664, 6.016335441879939, 7.032425812051378, 5.889972846638912, 7.993898449819041, 14.957748121261922, 10.828835479078352, 7.282568079388084, 5.824867864544799, 6.061361312353642, 5.871914827628737, 4.957310367289297, 11.347540604193048, 8.968434957621131, 5.276743084970953, 4.738261486776431, 7.316307708548978, 8.567513291943742, 5.760823619869705, 4.984008551171022, 4.423857448855351, 5.040694492783116, 6.2865800097090245, 5.3107772324301115, 6.230352429774949, 6.4879478099807, 6.436442091132201, 3.9321405885942458, 6.354989070262162, 10.323274136262322, 6.6085413468570335, 4.658808263711185, 8.124160196438487, 4.403796314495204, 4.43255455422424, 4.520045902744706, 5.355124642362081, 8.332581786426433, 3.9723265229964175, 4.022760169594096, 4.029083881416903, 3.6011303828367707, 5.403125843885431, 3.732246282488777, 5.508279394390255, 4.40311432382525, 3.8790697897236774, 2.5849851881410575, 7.694104916900663, 11.101308206780002, 8.99908318435956, 6.711285025466133, 5.288220223376012, 3.124915378973183, 5.599927958218902, 8.78052184785724, 3.952932926563706, 5.369579528623957, 6.137370452943451, 5.322566415917456, 5.2392775554232225, 3.7752909935312235, 5.4785278121755825, 5.678241801210183, 5.721985227045817, 10.56429180944238, 11.938870978480443, 5.775736074154998, 6.234471503829917, 4.877195514439433, 4.899416054021516, 6.344045582687692, 4.865890550492679, 4.421535635425384, 5.867269600134156, 5.562142453240082, 3.360754216373563, 4.186431974326074, 4.731268758423836, 4.885922568736199, 7.568025061286378, 4.162483029371922, 5.151035979205273, 3.7086364620557464, 3.561273261586166, 3.692899360160353, 4.829052613610973, 4.425004252281964, 6.3568812323053, 6.240579093340331, 6.7254694546035605, 10.667056583895807, 6.239023279958996, 3.37687322896251, 3.9036095311643524, 4.61061799293459, 6.327346218781266, 7.403986110219671, 4.226736651569875, 6.70632723896253, 5.906006743999679, 3.6763807042074377, 3.2218102204105583, 5.167346171425076, 
5.811388809291511, 6.178815426868625, 4.388779147164115, 4.2638554283047805, 3.6778325042340136, 3.9423847612500262, 4.727252148842417, 7.191028993956458, 8.017471459721563, 6.924090269713143, 4.705890036004035, 4.401884292499541],\n [19.490832395681625, 18.55871620176089, 15.649967314155994, 15.25775021242159, 15.946631312004945, 15.564568027417012, 15.775617802009306, 15.044765155752698, 13.933258098153132, 14.063824448150507, 12.21922352004349, 13.049576831161843, 12.712203213548328, 14.132134612398179, 12.556063713167147, 14.392089877214355, 13.440473111651375, 11.774158422851105, 10.50097655189196, 12.061937948723456, 12.21665381224416, 11.346394460348499, 8.629636878658907, 12.225110027264428, 9.579218885218834, 10.507291741638808, 11.886818768201305, 10.711623311876588, 12.077706930553655, 12.079871034775882, 10.461352678737866, 9.296707069363412, 8.87196431029472, 9.831395076540318, 9.20077237812059, 9.51518046692755, 10.844902472764865, 9.82194055864702, 8.345613554549725, 11.681707199949948, 11.627748820403133, 10.787938240027913, 8.356710854106506, 11.618109472429627, 10.803883976763743, 9.898552999685197, 8.666128456776557, 10.00192769450485, 10.155014381608025, 9.442274312076112, 10.164595326406975, 9.980580618952132, 10.254257704326186, 8.650898911794851, 9.925822401567757, 9.051865719487614, 7.753171230565504, 8.444428209226645, 9.242899780770129, 6.311130292087029, 9.825969502968748, 9.709971347005947, 11.203871054470323, 9.485870242227849, 9.139980205240354, 11.308542870287445, 10.313194124028934, 7.947284927075531, 9.638990963069965, 9.889833787600175, 8.414594325212523, 10.17246798640375, 9.630538313122777, 8.245168760098567, 11.641464349669182, 8.454133311779685, 10.67983864047887, 10.807836689384096, 8.444095801997983, 7.742010863489368, 8.671464960487954, 8.205575545539926, 8.133405059717063, 10.510342651330332, 7.679940560906537, 9.63960338744768, 7.942249457589856, 8.785638039321348, 10.011986435353814, 8.916937178788642, 10.30596575722854, 
11.997141667108748, 17.715626650431954, 13.10843865338633, 11.72984666664915, 9.328112302029268, 9.753737428446168, 7.70135283346627, 8.588180852433954, 8.569610295783594, 7.474243807386273, 12.871161600296347, 26.844603572490467, 15.217840476536267, 15.36468555440585, 14.871990491928525, 14.782901708761406, 13.361935336116192, 13.604591646353025, 18.2271462086095, 14.4921575869624, 13.17298756808403, 13.757652388780645, 12.838897157734005, 12.534857900653275, 12.148956872993773, 12.743835919769873, 10.697705254259567, 10.263618848593444, 12.781232133207615, 9.674463890910015, 9.463841683137922, 9.243864323702923, 9.59537275202494, 8.881529761502213, 6.779813811049111, 8.844013588598528, 7.7767192947358454, 5.911364741194008, 8.228839281598237, 10.710147874346866, 9.590282395255961, 4.469541187332343, 10.682809221824277, 15.374534169996675, 6.518709752345706, 7.652843415578606, 6.860207736101027, 5.6419127458267315, 6.640874542689622, 5.89339677377351, 12.170129017811846, 14.010024538836916, 10.763818670492352, 8.74844120950976, 6.643879267715248, 5.953926144537239, 5.4765722664234175, 6.855202610554837, 5.915324979169266, 6.77289228291636, 8.19553229518862, 7.283430910005893, 5.732140383630094, 6.7169666284704155, 6.944716990826884, 8.222704555050731, 5.77755706716962, 6.605696600089706, 5.4039004019051085, 6.988184392491396, 6.324200673549187, 6.553284736887491, 6.944051674419599, 6.709041808717305, 5.5922144899053725, 6.3905134048626575, 6.957175449289159, 9.009156736137852, 6.256458420004562, 8.60741497751637, 10.112558264377345, 6.260743660849035, 6.565264761685845, 6.843761653996312, 5.302323455553702, 6.563358889368709, 5.990469571531752, 7.868939071658369, 5.115975030286059, 6.95779148327607, 6.676568231943454, 5.865441755153629, 7.848964879786915, 6.036973771766849, 4.982844394460535, 8.788126300069717, 4.954992438070776, 6.844259722427189, 6.163862578102813, 5.723721129395695, 5.416836037830964, 5.416896894672021, 5.670715166845436, 6.487588611397098, 
5.619894619477072, 7.363213897492605, 5.712484199123037, 5.930128420038318, 6.926150805589353],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_FASHION_SU4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_QCNN_FASHION_SU4 = np.array([[17.443278550474297, 17.16577921968382, 14.973819729555512, 12.769693588206378, 12.170809006417198, 12.213970720341004, 12.578425729600427, 12.43466688489016, 12.28081413013992, 12.084410582501091, 16.417011227422222, 11.382395417442334, 11.23266987153646, 10.808642275524221, 11.44149106023741, 11.250913438040923, 11.354851755365916, 10.493035309996236, 12.026813099693047, 13.327785058834287, 12.224603882808099, 10.448678691640882, 8.514232798052115, 10.087586151506727, 14.055361965916951, 10.704870661998351, 8.819600512858635, 8.65161586998424, 9.01391248018459, 11.957452737257798, 8.95047859991031, 12.533314976734397, 10.681533577947004, 9.24561706449855, 10.123411197766806, 9.473144455249843, 9.723407803882262, 9.097953540937064, 9.85921377153185, 11.544707585980971, 8.772316400070213, 11.252331126447098, 9.246875642108929, 9.649229202966579, 9.2401865768133, 9.394988572883396, 8.891406690282578, 12.078512046284784, 9.194837727975251, 14.690684009496746, 9.613170964862485, 9.507603398954949, 6.946137009361685, 8.707550993622393, 8.386562458978949, 10.005071289225063, 6.407887689258142, 10.813987974435356, 8.040000699194591, 10.138010598983616, 8.123415087845224, 9.458437814532507, 9.852917882877552, 6.176291943848856, 7.392322094340768, 9.050650081644772, 7.476964859920051, 10.33433750995395, 8.833975412654022, 8.817330877418808, 10.670130146768898, 10.897980497878907, 7.617122516565696, 7.326656966103456, 10.606958474983765, 
8.18559371957156, 13.015685368714312, 8.8719608656002, 9.505846581987583, 10.512405484416355, 8.999363041440793, 9.184772791531385, 5.798573250785035, 7.981392582673394, 9.31756935301795, 8.915063630539477, 8.591007726315706, 9.45444400380748, 9.3609292696344, 10.9471017500293, 11.13936094829676, 10.223475315704693, 9.006551987103135, 9.090270961958039, 8.922421726300206, 12.088563884983701, 11.681827733253721, 7.365820996829409, 8.928863473368622, 11.079596566356175, 12.262344011727935, 10.338154467810183, 8.965153208564839, 9.88653947019998, 6.286630648590946, 9.615363704358947, 9.053531788709535, 6.652380853687866, 8.603430756756556, 7.309620971388127, 5.990069632188528, 8.255773133810427, 6.223234782856303, 10.068098065457464, 7.1255135729894645, 8.144562826332951, 8.209875086686242, 7.403124228124328, 11.34965631435034, 9.292427533335111, 11.315492801790676, 8.26865178065061, 6.52268513177663, 8.88908134305642, 9.102670483542195, 7.995237335857793, 8.76866477357488, 6.295409266731967, 9.50278897974037, 7.0324839992390675, 8.171210488622197, 8.657996323038251, 7.956625123747362, 7.141582129224361, 7.4795700450991145, 10.726480851033932, 8.35005437427705, 9.055218833986887, 8.768069597004352, 9.791120006604377, 8.559962423409974, 9.59088933031979, 9.315865406590403, 6.986772902361076, 7.614420580548834, 7.082080116254331, 10.189204994959402, 9.2543645459938, 10.496683789427879, 9.598394839058413, 8.227786353895036, 7.3370607818432445, 6.695914271627405, 7.505957575532087, 9.113245991475054, 9.999051230168154, 6.851139161032189, 8.415735053184918, 8.69327473235283, 5.593946725483097, 8.01710275475752, 10.393806144359647, 7.040634148312258, 6.508163909075995, 7.396239185415195, 8.544287489685281, 9.68378742309977, 7.373278355532548, 9.08886535185232, 10.243824750108477, 8.797869213111484, 8.206948381956693, 7.748551820260261, 6.448055868560193, 8.168043002409197, 7.312013015818958, 7.682641600486561, 10.919740163829418, 9.145221017483832, 10.88597886983067, 
7.842471711530599, 9.86407240077582, 10.640131144801318, 8.299362851639405, 7.638175617014868, 8.11947213747128, 9.625292806144286, 8.020953393336297, 10.714511652044507, 7.571986629999169, 10.234590940333108, 10.145618572281032, 9.23022560343612, 7.662598519223538, 8.217837827520823, 9.136499985731215, 8.887040422851442, 8.43982629891468, 8.275747889469732, 8.43634190178734],\n [13.781420507451605, 14.311127197604215, 12.774744763573139, 11.753722856124261, 10.889005467629874, 10.691661174408802, 9.83085760892305, 7.989851610785623, 11.274199906728377, 13.06847515079633, 11.838697511673214, 9.92153553311109, 8.011282475090788, 11.390653061175241, 10.288798018555665, 9.941574581262115, 13.058531715676198, 11.962490073295825, 10.463279380707643, 8.798106609335939, 8.067138687070784, 11.670129028521373, 18.073529986719524, 13.746328390492538, 11.63520215779551, 10.628683240917256, 8.613759148499042, 7.170247436163334, 10.663671868087405, 8.487377466927516, 14.53491903076141, 10.841888369114738, 9.550244749969027, 10.228351446722499, 7.358463467880387, 6.655560138547353, 7.994193017482326, 6.349488414090103, 8.94655572837793, 7.620889890131844, 8.37353601034038, 8.418089646167626, 6.851595145268178, 8.33742248363684, 9.113185469923597, 6.887655249448182, 5.940241202167263, 7.97777056397606, 7.324728088854264, 9.228544273625527, 8.264636819212916, 8.168386436711819, 6.42083493489828, 7.402011828017656, 6.8144456182053315, 6.615156514897394, 6.451463214773481, 7.893624888643488, 5.516363129095715, 6.7565312726783056, 8.944052925906442, 8.307235841051643, 6.230644393449745, 6.610607583613377, 7.580506771503883, 8.13936677822777, 6.305975214686918, 5.56600615893906, 6.248594588764513, 8.840392483492927, 5.708048995181534, 9.176612770336554, 9.089096945362018, 7.272426902888895, 7.844774359899686, 8.423613512764872, 7.012194888635476, 9.202258183872091, 6.111497084561831, 8.919889060158082, 7.024251017125005, 7.295306226948175, 8.122770313942244, 8.312934415517494, 
6.120615740948725, 8.963251905124288, 7.072094947253225, 7.545763475870825, 5.854270622437426, 8.065630725035863, 8.277996438120136, 9.051296582728574, 4.870385952919668, 4.703486777119415, 8.551189697597664, 7.841416919647015, 6.063558372716247, 7.0446746302120244, 5.691395320810918, 5.5146049948052775, 7.1298017503634465, 7.096293201875608, 5.8919100218265426, 7.702804211482065, 6.6897732992185945, 6.288115688895544, 6.545304522565702, 5.2853042400807695, 9.176030355429871, 6.556744416635388, 6.774015310921933, 7.815469363290282, 6.252766798523736, 6.227952751705526, 7.484721458640154, 7.747466397495973, 5.629324993798868, 8.179587408151514, 5.386451634820617, 7.401539408834831, 6.554172618568669, 7.997850920582821, 5.3604276474802, 6.314956752016867, 6.262283527567297, 7.648019794006348, 6.935319494741965, 6.481339470199687, 7.614021551219931, 6.120775224102908, 8.302372946236032, 7.0225306796182965, 6.845661567684474, 6.955930184751324, 7.1651544212323985, 6.310545860083952, 6.134268262578467, 6.401030614706069, 9.270271512042381, 9.326862864973439, 8.642090597093521, 9.582095271753895, 6.169194216514685, 6.213865892738523, 6.705902625681505, 5.955158198297917, 7.137559697501645, 11.329087156824865, 6.629481250275585, 7.697225280482319, 6.845129371998716, 7.474123050281505, 7.406390403358574, 5.9652313785437485, 8.54154545801901, 7.47565563189108, 8.275655756837889, 5.088358917871128, 7.496607622318046, 6.762019810658592, 6.5277946228584485, 9.022069200139695, 5.6406020560743615, 6.331594236621054, 7.7756452914304575, 7.865463412212698, 6.548349849121678, 5.985852765962835, 7.615193749978773, 6.599934650715535, 7.198543021991292, 7.398748611799334, 9.918361794473224, 10.00734540058611, 5.464991364168524, 7.562537305293145, 7.040523141605568, 6.5341878195358305, 5.7819985050571026, 5.398436138268658, 7.048133953075174, 6.77365635054944, 7.7937715997337795, 9.09185239480586, 8.752180473887293, 7.68825722151781, 5.767056557461325, 6.783106558069271, 
5.348694408445115, 8.23587092840715, 5.201460557442625, 5.423242523914949, 6.409563019447731, 8.022071427130506, 6.089218900632388, 7.035957847198441, 6.869056802256081, 7.157803361892751, 5.9119412281151185, 8.11130518816837],\n [17.459975862230365, 16.035749627924975, 17.18838777214957, 14.965891703963191, 15.993014186770322, 14.865135537787218, 14.899235224369932, 14.413672066868134, 13.883837794678469, 12.668394725165031, 12.81644653763046, 12.430129738605853, 14.895598272770604, 12.828667864277463, 12.55165180148722, 12.686341345367868, 12.801550128580457, 13.146552401067662, 13.79568161845721, 12.841269687623006, 11.048741216694062, 11.095098462696997, 11.328646795214407, 14.469995656407928, 11.211728429887838, 10.332971201436699, 12.169767282195094, 12.260646185017386, 11.291980982335032, 11.582883632219728, 10.269562940067868, 12.493563864692193, 10.72883997288575, 11.050630785466156, 9.95344980064411, 11.427008879233378, 10.878908388915027, 10.771004365406146, 9.808425864141345, 9.791109300248536, 11.7113441824611, 11.959048315860688, 11.459908558272819, 11.126054151409766, 9.328252490001935, 11.523924845521126, 10.358743952589199, 10.596677386270969, 10.740609308215678, 9.894176987991175, 10.244173850295189, 7.927475454261797, 6.857694055625578, 10.317153674302336, 7.533170003430211, 10.162245394326915, 12.332967234941394, 7.336550790305615, 7.472371986231043, 8.780432097890694, 11.114931906352684, 6.836466134758279, 7.630377356740579, 7.023103731521618, 7.334859309549973, 7.299430194504444, 9.216839846711192, 7.123759421642965, 8.470537506491658, 9.38863465647617, 8.030866984336708, 9.185269089577277, 7.503215217249089, 10.99056571841205, 10.304633848571342, 7.3948242000253686, 8.161662982952835, 9.361411402483414, 9.48672407350652, 8.00036925874676, 6.296150070538171, 10.79363478346328, 7.7755094494493, 6.890823225749235, 10.955585376973156, 8.751595053584305, 8.70127964010896, 6.679636911017944, 10.340872416991916, 8.600840935961065, 8.637440988726306, 
6.96376371792665, 9.67813752222703, 9.63598205902423, 10.706601935609097, 7.6150036933103955, 7.27381644699862, 6.526409604866082, 7.93604998507248, 8.96111879447386, 9.236528986472686, 9.23164964427008, 7.95594535575086, 9.85250989910494, 8.02710057165307, 5.083218180417743, 7.102064656824691, 9.463149038077571, 8.137033718928052, 6.23708017977305, 11.766565431871733, 11.013107460804878, 6.977587848984027, 8.769441986007832, 10.184766129650757, 9.817065827883836, 8.61951312561197, 8.802006787010635, 7.353706182643092, 7.001784249075218, 9.61892538770961, 6.840337166573375, 13.06990676483989, 7.011100350205812, 11.706063649485246, 10.007334064953163, 7.464914449158837, 8.071202576799415, 8.539718947050654, 6.699773677077703, 7.338162386739442, 10.642426360238414, 8.431371710982715, 5.77880780586605, 7.779492903237399, 7.518873127517724, 7.893062851533497, 6.660855454042156, 6.876835781879192, 8.561387884167551, 4.938483389069482, 7.144558868875467, 6.50069734100023, 11.798343100728069, 8.927807624362318, 11.836105068714826, 9.198312338046648, 7.91822035731483, 6.628910791541805, 10.394574924451714, 8.747555738118846, 6.845293724956791, 8.218875756457745, 7.65932838628157, 9.194888839584037, 6.3416897861327195, 7.137113467047185, 11.842572256944235, 7.811261487005192, 7.783480400863692, 9.476903487779085, 9.064381210148133, 7.994704539234405, 6.072028715158351, 8.594031878176462, 5.955563914806705, 9.350570597725985, 7.5917078119804495, 8.725234843842438, 7.847796913033744, 6.2867181325391766, 8.93512407806589, 7.1842297263118216, 6.339656229823455, 7.376922738780999, 6.583351581614699, 7.13903912317875, 6.818783191762078, 5.604546207629945, 7.768489513884695, 6.49357285467107, 4.755276166129291, 7.850674943448794, 6.0650976726623576, 8.436575845953756, 7.772024213108239, 8.356490036897103, 8.565732517915286, 6.780772217721273, 9.01068623703282, 9.79022758182365, 8.492731407090883, 7.982328257601873, 9.599615930780557, 11.07277777491831, 6.896958112675264, 
8.753323430651404, 8.20736821864374, 9.962535361955954, 9.784613941911987],\n [16.078581915456894, 15.551948645299035, 16.268990336797817, 13.27492132814789, 12.653444495213616, 12.335550815972947, 11.203068009128836, 13.818018471215131, 12.390782655911048, 12.16861851789434, 12.976616762856493, 12.458182979922924, 10.785660467895937, 10.596281134331853, 10.598247597662484, 11.48925757088818, 9.952231956192257, 10.9603301706603, 10.75406079470945, 9.263381118466908, 10.024903906146596, 10.02285979578101, 8.725891050623563, 8.8110447174955, 11.798664965731673, 9.41147135557478, 8.707235102396805, 8.555916611569149, 9.570288163360518, 9.061572574560243, 6.9100234750010205, 9.0975788055241, 11.213705166333545, 13.083047899893907, 18.509887491689366, 10.758937740007648, 8.08682601052003, 9.11088050395127, 6.799606070934344, 8.319445907430541, 8.557758105280833, 6.797690913597723, 10.037661560155012, 8.215913950267181, 7.859440080738447, 7.546248682731447, 9.043388585394974, 7.464499666888328, 7.338301029359666, 5.536251127830057, 6.182019326931957, 4.612544868719036, 8.346804507802606, 8.655119265363009, 7.241711436823657, 6.877766052931581, 6.178995062584081, 7.164902403281522, 7.495071573754694, 7.405141504073983, 7.88320176374737, 6.608399591079871, 8.170312880448332, 7.6606189289608455, 7.43890999312149, 8.050289152228833, 6.41680076884502, 8.40259563039491, 6.877417277430936, 6.625701678854269, 6.195914154714127, 5.833315696655207, 7.519956971187638, 7.890402188591403, 5.885274657347369, 6.85981554043308, 7.0790437203811996, 8.141659719595388, 9.529073938972124, 9.11286211264871, 9.419867429756625, 10.652487682080594, 8.086142649020843, 7.170283074680485, 7.5758748904795565, 7.49526204054659, 7.5060283593093065, 7.315422999048236, 5.559589583999333, 6.6894183922516355, 6.055665604575956, 13.145355688580487, 6.8847399674583425, 7.337546107599397, 7.644127954802972, 6.790200258156367, 8.962519703882434, 12.821110910677632, 14.533286822513324, 5.578715200431863, 
6.269807858963271, 6.2967891792058195, 11.036341426885684, 8.982404270851571, 6.918566850094237, 9.98229913203689, 6.839400628138751, 7.831976546559741, 9.176861832927699, 8.776105854080996, 7.846617288257853, 6.625640994354236, 10.33833003207289, 7.650949646700952, 8.240201744741027, 5.894346405037781, 6.765704844611307, 6.598639240818315, 7.062939085704468, 8.40590909981085, 5.239284065801005, 6.5273449351167745, 6.119921350125418, 7.622793608754128, 7.975389074028713, 9.761172269260438, 9.465028583802415, 8.113140562190294, 8.320008696618505, 6.808008059516669, 6.0090191399573545, 6.907504166434337, 5.8917924335228316, 7.048573077389917, 6.7643803268922085, 4.784088519933732, 6.116980418330318, 6.362383598446602, 6.994277659825698, 6.083560447594546, 6.22829552265675, 4.759146572175497, 8.637608069752496, 7.634430485294694, 7.3327436917585676, 6.129153305128859, 6.7717271120842035, 7.746543145044954, 7.976627480730413, 5.87517443642307, 5.785493605469459, 8.59816239885267, 6.092063650367079, 7.510720554580129, 7.104407066560065, 7.706381005839659, 6.3795316386953305, 8.603190272508067, 7.419666996632783, 6.80853946090193, 6.84390342864538, 7.832188783886975, 5.781370989376079, 5.687974915978132, 5.228708805630709, 5.813110268775446, 8.77126422768062, 6.631411896300097, 6.97375987215497, 5.398943150631865, 6.824674132978643, 6.835718511020224, 7.036416989460253, 6.720358922660684, 6.600828891817823, 9.52104495164815, 6.833227168980575, 5.885606416072369, 9.08868462512898, 7.158801668924259, 6.567896710698629, 7.189953585970182, 5.497172263419137, 8.88975758713214, 7.884142269205199, 8.180564828799383, 9.159722460144716, 7.539710100801855, 5.672312826072391, 7.128910049569991, 6.344874031685596, 5.361818804417766, 4.836023291425283, 5.428002394465912, 5.508348115737088, 6.658319248890129, 7.960378369266759, 7.712975190142974, 4.710869488703378, 6.683008165593933]])\nloss_histories_QCNN_MNIST_SO4 = np.array([[18.63822742818134, 14.996351756494038, 
13.414763568786126, 10.329255663823925, 9.120760457750439, 8.102933852957612, 8.810869354003746, 10.006794023889881, 8.622455729518737, 7.467061678248796, 5.920724352894273, 7.459966178352162, 11.041309513649322, 7.47662233010936, 7.229695349908017, 7.4597916165930975, 9.340903651158031, 7.760242406952571, 6.9303084303708005, 6.777338888591327, 7.9143882391826335, 7.973693583757422, 6.523936001317213, 5.721920320624669, 6.411448467747264, 6.743808258426262, 8.36566536074733, 12.213662344812196, 9.86487882119216, 7.4320976728582036, 7.216063766836224, 5.953614537542056, 7.891266570572217, 9.775016203278446, 7.0168815366447355, 6.814926383198957, 8.07816330468346, 7.974640687711596, 6.915185780000111, 9.47010152197568, 7.101942912860193, 6.783657568036721, 5.249404624801605, 4.367429035519884, 5.320251740486269, 5.336899745584592, 4.317477069873344, 5.568857370287802, 4.920977955283231, 4.684384264386535, 4.553006285364518, 6.0887038581132815, 4.1491547879052275, 4.088186282272099, 5.244694869029588, 7.32992492009621, 8.228756722276643, 8.500450045848522, 8.900388765805115, 4.782393539924687, 4.453707918535589, 4.889501609395316, 3.7503185012881777, 6.173264902405083, 4.585455869803615, 7.977544468005952, 12.285540362630513, 17.202123904275613, 23.657125425177686, 20.76647699730193, 16.786600872716274, 14.920976885555575, 11.862972721304175, 12.38371680287977, 10.751962948697887, 10.622646942674177, 7.436915527080927, 6.9984957327255355, 9.245537060489227, 10.87902779410266, 7.756676325418157, 6.2902948478653915, 9.054715841741446, 10.213394194913555, 10.713174854514664, 6.016335441879939, 7.032425812051378, 5.889972846638912, 7.993898449819041, 14.957748121261922, 10.828835479078352, 7.282568079388084, 5.824867864544799, 6.061361312353642, 5.871914827628737, 4.957310367289297, 11.347540604193048, 8.968434957621131, 5.276743084970953, 4.738261486776431, 7.316307708548978, 8.567513291943742, 5.760823619869705, 4.984008551171022, 4.423857448855351, 5.040694492783116, 
6.2865800097090245, 5.3107772324301115, 6.230352429774949, 6.4879478099807, 6.436442091132201, 3.9321405885942458, 6.354989070262162, 10.323274136262322, 6.6085413468570335, 4.658808263711185, 8.124160196438487, 4.403796314495204, 4.43255455422424, 4.520045902744706, 5.355124642362081, 8.332581786426433, 3.9723265229964175, 4.022760169594096, 4.029083881416903, 3.6011303828367707, 5.403125843885431, 3.732246282488777, 5.508279394390255, 4.40311432382525, 3.8790697897236774, 2.5849851881410575, 7.694104916900663, 11.101308206780002, 8.99908318435956, 6.711285025466133, 5.288220223376012, 3.124915378973183, 5.599927958218902, 8.78052184785724, 3.952932926563706, 5.369579528623957, 6.137370452943451, 5.322566415917456, 5.2392775554232225, 3.7752909935312235, 5.4785278121755825, 5.678241801210183, 5.721985227045817, 10.56429180944238, 11.938870978480443, 5.775736074154998, 6.234471503829917, 4.877195514439433, 4.899416054021516, 6.344045582687692, 4.865890550492679, 4.421535635425384, 5.867269600134156, 5.562142453240082, 3.360754216373563, 4.186431974326074, 4.731268758423836, 4.885922568736199, 7.568025061286378, 4.162483029371922, 5.151035979205273, 3.7086364620557464, 3.561273261586166, 3.692899360160353, 4.829052613610973, 4.425004252281964, 6.3568812323053, 6.240579093340331, 6.7254694546035605, 10.667056583895807, 6.239023279958996, 3.37687322896251, 3.9036095311643524, 4.61061799293459, 6.327346218781266, 7.403986110219671, 4.226736651569875, 6.70632723896253, 5.906006743999679, 3.6763807042074377, 3.2218102204105583, 5.167346171425076, 5.811388809291511, 6.178815426868625, 4.388779147164115, 4.2638554283047805, 3.6778325042340136, 3.9423847612500262, 4.727252148842417, 7.191028993956458, 8.017471459721563, 6.924090269713143, 4.705890036004035, 4.401884292499541],\n [19.490832395681625, 18.55871620176089, 15.649967314155994, 15.25775021242159, 15.946631312004945, 15.564568027417012, 15.775617802009306, 15.044765155752698, 13.933258098153132, 14.063824448150507, 
12.21922352004349, 13.049576831161843, 12.712203213548328, 14.132134612398179, 12.556063713167147, 14.392089877214355, 13.440473111651375, 11.774158422851105, 10.50097655189196, 12.061937948723456, 12.21665381224416, 11.346394460348499, 8.629636878658907, 12.225110027264428, 9.579218885218834, 10.507291741638808, 11.886818768201305, 10.711623311876588, 12.077706930553655, 12.079871034775882, 10.461352678737866, 9.296707069363412, 8.87196431029472, 9.831395076540318, 9.20077237812059, 9.51518046692755, 10.844902472764865, 9.82194055864702, 8.345613554549725, 11.681707199949948, 11.627748820403133, 10.787938240027913, 8.356710854106506, 11.618109472429627, 10.803883976763743, 9.898552999685197, 8.666128456776557, 10.00192769450485, 10.155014381608025, 9.442274312076112, 10.164595326406975, 9.980580618952132, 10.254257704326186, 8.650898911794851, 9.925822401567757, 9.051865719487614, 7.753171230565504, 8.444428209226645, 9.242899780770129, 6.311130292087029, 9.825969502968748, 9.709971347005947, 11.203871054470323, 9.485870242227849, 9.139980205240354, 11.308542870287445, 10.313194124028934, 7.947284927075531, 9.638990963069965, 9.889833787600175, 8.414594325212523, 10.17246798640375, 9.630538313122777, 8.245168760098567, 11.641464349669182, 8.454133311779685, 10.67983864047887, 10.807836689384096, 8.444095801997983, 7.742010863489368, 8.671464960487954, 8.205575545539926, 8.133405059717063, 10.510342651330332, 7.679940560906537, 9.63960338744768, 7.942249457589856, 8.785638039321348, 10.011986435353814, 8.916937178788642, 10.30596575722854, 11.997141667108748, 17.715626650431954, 13.10843865338633, 11.72984666664915, 9.328112302029268, 9.753737428446168, 7.70135283346627, 8.588180852433954, 8.569610295783594, 7.474243807386273, 12.871161600296347, 26.844603572490467, 15.217840476536267, 15.36468555440585, 14.871990491928525, 14.782901708761406, 13.361935336116192, 13.604591646353025, 18.2271462086095, 14.4921575869624, 13.17298756808403, 13.757652388780645, 
12.838897157734005, 12.534857900653275, 12.148956872993773, 12.743835919769873, 10.697705254259567, 10.263618848593444, 12.781232133207615, 9.674463890910015, 9.463841683137922, 9.243864323702923, 9.59537275202494, 8.881529761502213, 6.779813811049111, 8.844013588598528, 7.7767192947358454, 5.911364741194008, 8.228839281598237, 10.710147874346866, 9.590282395255961, 4.469541187332343, 10.682809221824277, 15.374534169996675, 6.518709752345706, 7.652843415578606, 6.860207736101027, 5.6419127458267315, 6.640874542689622, 5.89339677377351, 12.170129017811846, 14.010024538836916, 10.763818670492352, 8.74844120950976, 6.643879267715248, 5.953926144537239, 5.4765722664234175, 6.855202610554837, 5.915324979169266, 6.77289228291636, 8.19553229518862, 7.283430910005893, 5.732140383630094, 6.7169666284704155, 6.944716990826884, 8.222704555050731, 5.77755706716962, 6.605696600089706, 5.4039004019051085, 6.988184392491396, 6.324200673549187, 6.553284736887491, 6.944051674419599, 6.709041808717305, 5.5922144899053725, 6.3905134048626575, 6.957175449289159, 9.009156736137852, 6.256458420004562, 8.60741497751637, 10.112558264377345, 6.260743660849035, 6.565264761685845, 6.843761653996312, 5.302323455553702, 6.563358889368709, 5.990469571531752, 7.868939071658369, 5.115975030286059, 6.95779148327607, 6.676568231943454, 5.865441755153629, 7.848964879786915, 6.036973771766849, 4.982844394460535, 8.788126300069717, 4.954992438070776, 6.844259722427189, 6.163862578102813, 5.723721129395695, 5.416836037830964, 5.416896894672021, 5.670715166845436, 6.487588611397098, 5.619894619477072, 7.363213897492605, 5.712484199123037, 5.930128420038318, 6.926150805589353],\n [18.677674720286532, 17.266856103940327, 17.335559978457628, 16.681866737195644, 16.84223599883026, 16.24488481771055, 16.110298391006523, 15.97027628377141, 15.44983421307251, 15.25638508931612, 15.322972559971118, 14.891564708072965, 15.045553923185338, 14.843479454974213, 13.856895526235808, 14.099419670496843, 
13.549936450408953, 13.740618588037657, 13.302623314655508, 12.344426915680017, 11.797208902820305, 11.04861622498279, 10.46800103858427, 10.306431077570695, 9.188230579289511, 9.556546451397349, 9.206575747404164, 9.69741155057343, 8.591538202755547, 8.003960141820912, 9.252507359346506, 8.793393701603222, 7.928368379818497, 9.302909885593825, 7.608869008761578, 8.236825946017346, 9.9853588900174, 8.678481408055282, 8.349882090330496, 10.076827642715344, 8.090229798086346, 9.189593655088542, 7.936588262844694, 8.211361414033803, 7.407248298512138, 8.0582580377497, 7.4657409438247395, 8.953390748875144, 7.9467690261571144, 8.030070948578933, 8.139389734609296, 8.130138055252724, 7.677927209170414, 8.007218091175163, 9.321459400279426, 8.206262933533521, 7.8280262682637884, 8.656820869194108, 7.692683735774993, 7.566665187757739, 7.836433801576699, 9.435413631665114, 8.625385810839521, 9.002074918292974, 7.9817209095270805, 7.057963702059508, 9.177593220351767, 8.341213755533236, 7.754585459033915, 7.970266013416763, 8.438107414369197, 7.738943423637591, 7.668872922344866, 7.302855971899015, 8.36710050737675, 7.004058468552856, 8.370509095209938, 7.593457575345229, 7.434899394253483, 7.487783949485477, 7.383023196917764, 6.686195610599259, 7.216242980668383, 8.150555332906988, 7.479657821895236, 8.311835234313705, 7.189965692822094, 7.368458760058259, 7.483135451470152, 7.159292130845851, 6.972845358949022, 8.919214213576959, 9.036446256309423, 7.774991064214777, 7.7128854980398875, 7.825968970727324, 6.368090979542896, 7.462224480584682, 7.527037703450457, 7.712451683363823, 8.102218660330882, 7.830024798686506, 7.487319634682742, 9.21224247119141, 7.786345555555848, 8.025375110592737, 6.810305221279158, 7.788384961894594, 7.263648824028164, 6.9009097562351505, 7.796237938612585, 6.936513841038604, 8.28827725313593, 7.7458954032170535, 7.693895442333936, 8.081070942909923, 7.340907065112747, 6.314510878504791, 8.062277784499255, 6.239358186332451, 
7.436231493560881, 8.084840818957016, 8.38098846216493, 7.789691023002653, 7.375115206789374, 6.91744489289211, 8.498982739048309, 7.181489165249464, 6.274272359948774, 7.5591463261909535, 7.656323916228581, 8.345929811495642, 6.531505002902869, 8.908993975283488, 6.529732721314727, 7.265614170403399, 7.719896051564857, 7.332419654217052, 7.71268787930972, 8.14626459853457, 6.7041468616053255, 6.788764437400522, 7.668331273872328, 7.584967052477962, 7.815034354712244, 7.636419333585498, 6.273455360995564, 6.912520987215233, 6.1358772907280255, 5.96560085525717, 6.5821611065575345, 7.530210361671302, 6.899628125185935, 6.676234816085111, 6.245327486324353, 7.434415056921618, 7.006716279875979, 8.183372557550465, 7.600082230556003, 6.782621564044763, 8.050512075932295, 6.713595693483005, 7.907809179875259, 7.957372566718753, 7.50100198807415, 7.914939125942729, 6.120781606557825, 7.114250345958644, 8.839459959489291, 6.450937609091488, 7.504495331851585, 6.310692529028966, 5.800675336875668, 5.102696045908363, 5.6668927376298415, 5.2615702372932684, 4.15902984092072, 6.598748558806056, 5.315736345312888, 4.7710562302473605, 7.627586321608522, 5.011165396093148, 4.324941742554292, 5.297403751037838, 5.850222566377067, 5.546201594228933, 5.1729772438736115, 4.792630453302324, 5.717642735839203, 4.4448504608234245, 4.305340982909913, 4.083892368819292, 5.087554652482536, 4.410274300127815, 4.300784822482181, 2.2462774742757423, 3.779154556536459, 7.679605527681904, 7.012382827839762, 9.497009141065512],\n [17.47229653104087, 16.52845100742326, 17.122798468538008, 16.295788396615954, 17.192619618145415, 15.74410864552886, 15.641333722311673, 15.729160908054453, 14.808834375733063, 14.734324937100821, 15.246273176166863, 14.470302488233042, 13.292638924061885, 13.390587100769629, 13.034219715955404, 12.879221760770431, 13.03321379977208, 12.69023511897895, 11.971062136795513, 11.383014306279488, 12.409418805739675, 11.680500527280792, 11.848131465885647, 
11.10058719095201, 10.967248455483366, 9.931793197526837, 12.862457872364642, 11.642305719669054, 9.926962444242958, 10.444274884463491, 9.714842539841156, 10.886715061901675, 9.69194170162343, 12.37124160214884, 12.239361932692878, 11.227909943335716, 10.529937940962323, 10.466945849849349, 9.503375110985736, 9.21193563602031, 10.345152976017346, 10.13929514372021, 9.334912632559568, 11.20117443146198, 8.435465899928868, 9.306443392232177, 7.674775088739535, 11.094058594508224, 9.806566227764325, 9.753853422781601, 9.703475275521642, 9.338961178237717, 9.147076944032051, 9.969623679725812, 9.768411884397354, 9.94611962537007, 8.315401657960823, 8.423425337825657, 8.872765312401535, 10.882244689873716, 9.972856778617642, 9.3811068355655, 8.2336445192984, 10.436962503139233, 8.982909282471748, 8.813976410282278, 8.601510949049006, 9.953358920509665, 9.141569548320685, 7.387288208812636, 8.893257235238586, 8.537138469964765, 8.511317219755595, 9.677364577412943, 9.57305272773976, 10.722555670865308, 9.960625222486403, 10.949857743327017, 11.044668103113759, 9.181062739205466, 8.735843874765884, 8.412884276554134, 9.074406382485165, 8.007103575507182, 10.297575661129853, 9.537962646291962, 8.043294052839366, 9.406046206964078, 7.463110880775795, 8.216577341771748, 9.993277776229395, 9.641960929222911, 9.825544744254774, 7.961162820395543, 6.833864929841814, 8.119686649664352, 8.04628338513848, 8.725130268919587, 8.291468487722643, 7.601204536561341, 9.366169648453559, 7.746078602205226, 7.215659204103467, 6.70133407213436, 6.933379408499517, 9.200219213998405, 7.7737762826913395, 6.809951684382872, 8.779707732360738, 8.112684741852533, 8.661195081546301, 8.290013946417599, 8.115230804644153, 8.455201625530021, 7.237352606159898, 7.581595063403986, 7.943437197895797, 8.830929777172935, 9.495636849503327, 8.824172442181377, 8.866916335410826, 8.923975771489125, 7.333082117068796, 7.727410788615958, 7.3940216753183154, 8.169734137059535, 7.184251987091261, 
8.149516338090352, 7.9267940412373665, 8.691281119472402, 8.149847896205559, 8.353040491826519, 6.775937272693683, 7.8961515291856434, 8.232009010481493, 7.745717791398985, 8.003948766007799, 8.927976407365597, 9.70473042956134, 9.19185628898125, 7.069397204937394, 6.616356391629773, 6.762746411850479, 6.12783348373553, 9.326037397038913, 8.272094369374019, 7.37179872245548, 7.57378906825723, 7.516188142897698, 7.800201021490489, 9.926911634880025, 7.999858607815883, 11.315279649450078, 9.191934306831268, 9.78564879547748, 8.775693221533093, 8.160015430815088, 7.399321435627195, 8.836504222864635, 8.074077592075158, 7.613055499760482, 8.982368271042896, 8.3566233210652, 8.944472876424268, 8.010135944887468, 6.487273132895113, 8.087074428212611, 7.147311424359065, 6.989552480289897, 8.679612408452398, 7.253769498388075, 7.599358379078584, 8.46247241445163, 6.915147675019893, 7.1380367918799665, 9.168662114410594, 9.567007014514896, 7.918437332323219, 7.484506409632197, 8.857027697147231, 8.019005616942929, 9.490593267527478, 8.440841403058242, 9.014628390498094, 7.4291556237446255, 10.464398772450274, 8.662762455377562, 9.553604082646517, 7.889702224457729, 9.523251715097762, 9.231765712593445, 7.699577303090916, 5.995010901400576, 9.714285989904047, 8.520612562686814, 6.591900319836927, 6.5140865610052, 6.6046229940003105, 6.555066801112216, 8.342359134931744]])\nloss_histories_QCNN_FASHION_SO4 = np.array([[15.555303202579205, 19.504919787996624, 16.974011398817957, 17.108571384885867, 16.468153100970216, 15.75587279071819, 15.31106664813007, 15.510275084448892, 15.48160051328491, 15.464417951912678, 15.21468547592811, 14.604456666514338, 16.618194576615522, 15.572879632393173, 14.717824918319078, 14.386997760135959, 14.433958925251964, 15.613930634505355, 14.657597338611513, 15.66931899051052, 14.834527710138387, 14.72801057161, 14.071797047329568, 14.01364789752765, 14.71289073458298, 14.432009196052622, 14.021694704420167, 14.451180198877216, 13.81502985811841, 
13.857077906883745, 14.467416488282588, 14.74291139464224, 18.83755521564319, 14.327474596510424, 13.457830159177448, 13.370437601021738, 12.606666197690489, 13.259361223576914, 10.965227031427087, 15.85587643864697, 19.96072872803247, 15.99597559452018, 12.105793743935497, 14.385183227062825, 14.994104620905455, 13.565108731528309, 13.06204831085624, 12.039172057665446, 16.24745516822587, 13.429452485074872, 13.54877761686802, 12.114676102076725, 12.27702961667929, 12.16761591449498, 11.362066400874633, 13.341324275337044, 11.402425048712326, 12.223537616030802, 12.288648897995458, 12.380829164571688, 11.691332624672233, 11.434502691936164, 13.089649703108648, 12.288433247103564, 11.139703767239904, 10.449275570013748, 13.536140051771909, 11.876935131209281, 11.072024056197609, 12.48035410024065, 11.039898207936071, 12.068388184906631, 11.297000207542004, 11.509795352783245, 10.50816809299725, 11.893419700959901, 10.916165987629146, 14.590065321835178, 14.003523113387262, 9.674955097625048, 10.343833479385697, 13.34229982478152, 11.552967718343982, 10.30505443674415, 12.22575493421307, 12.409596480890572, 10.203760020086365, 11.910239860089346, 12.077710868087973, 12.050779151162956, 10.085481025342904, 10.508760538459361, 13.02167061714575, 10.969008290293907, 12.222903952283554, 10.177030636539607, 13.281301968063987, 13.160308285525717, 11.236368247275914, 10.893657505868008, 12.734237643057842, 10.783497099781778, 14.478882920666084, 10.333168430951634, 11.474930155484978, 12.482407368250314, 9.380519835782515, 10.741355680809695, 9.900772056018885, 11.37583422002143, 11.621624612002872, 11.244972679049836, 11.397841647027331, 13.395893623763943, 11.980370405851641, 9.001524595359493, 10.774930604235182, 11.321189766101934, 9.976631040563422, 12.226949408487787, 11.225077657927901, 11.012862695376992, 12.26412313610437, 11.098785934715472, 11.89594165408133, 10.508186199240512, 10.627600551300878, 11.526792908072663, 10.243772927793646, 12.671260599002071, 
10.259582778329206, 11.523974496540236, 9.990639976275533, 11.816468100349503, 11.761603894025555, 11.969004025639347, 11.19478320178266, 10.290721290183578, 10.142939188345759, 9.923313637629583, 11.903822138979159, 9.662797524450559, 10.258292675377227, 11.72621282603769, 10.22100231147799, 10.759594235985471, 10.654187077192066, 8.786877857547301, 11.555273188238802, 10.445581745719611, 13.868872540114179, 11.637816309694294, 10.490302769295138, 10.652751636098762, 10.567486946748316, 10.823280250882126, 11.604759302035067, 10.572109217362748, 9.305649423624736, 13.120918411833392, 10.350301870992201, 10.664152450842048, 10.5522160714381, 9.76792440342697, 11.21372342896267, 12.72476540142632, 8.974037841687752, 10.176234933268807, 10.735434072133039, 11.614131727940505, 10.732166814460395, 10.236305916335079, 9.763756002560177, 12.551684786339672, 10.946261572511657, 10.934140724656354, 10.347075295193335, 10.11845009597337, 9.114796498678905, 10.60417434600415, 10.877718037573215, 8.984929919525204, 10.837912462534929, 9.917267902638878, 8.921645868589923, 8.872534953495492, 11.191232181844624, 11.388289756683118, 9.924240369521405, 11.826924835888446, 10.571129599370146, 10.91815752434668, 9.415967478168183, 7.427478307582434, 10.956675987652861, 10.601175428098784, 9.351546011502595, 9.009228800638875, 10.43676354074511, 11.86821422814849],\n [17.62685247943755, 15.921501286919272, 15.871127049053275, 15.239219491091808, 15.546447089828968, 14.600135975597338, 13.725492950779458, 12.795231677743987, 14.53600972191518, 13.237239660766868, 12.604003514991504, 12.35459815425994, 11.164789179173859, 11.844668382525047, 12.277120603329898, 12.024030420080734, 11.294641796127678, 11.24392458273532, 12.310419159019323, 11.345370396317726, 10.80820112790213, 10.815801819515524, 10.213738451315571, 10.710799346201188, 11.043522777680751, 10.88855437587723, 10.47066925380131, 10.031373264086302, 10.897892266182534, 10.646760578727005, 11.041489034733864, 
9.405306961109028, 7.907784678787825, 9.531922065555655, 10.699178154783171, 8.752903439589083, 10.857875815010672, 9.304902995952173, 8.703717438297984, 8.178912002237922, 10.217501124790381, 7.303483190098499, 9.484631787673315, 10.013270960996968, 10.597687667500503, 8.081043712010258, 9.302155981943438, 12.025602788775371, 9.415051467888432, 8.178881929193468, 9.340531840584015, 8.96972859002934, 11.472191757174494, 8.726786333656417, 7.154037295664112, 9.50550426773253, 10.930467472213461, 8.428679058219908, 9.866546522197625, 9.460126225851688, 8.127562042688414, 8.731076847080129, 8.217546660968534, 6.9602964522291755, 8.508673649790321, 7.286550827537333, 8.529257040220635, 8.435019163022856, 8.214697249858581, 7.363161364022071, 8.409295511222089, 8.932938352882193, 9.657748816129136, 9.402933355234115, 8.033625912783963, 9.333287886466355, 6.0071033395362345, 13.52174485846432, 9.474038145187905, 8.41512872154908, 9.17522072968709, 7.188131367611232, 8.338200866485991, 7.700165339614012, 7.186507251196377, 7.990887899100909, 7.189818436149402, 10.48814613313002, 11.861186899967057, 8.407829223946711, 6.3207643885114635, 6.5663250285464825, 9.613737257075872, 9.759378520336458, 8.042083531415381, 8.500238071507258, 8.610623895897982, 8.43959912831457, 6.874162800109263, 8.480459139842097, 9.470975743084217, 7.9683563444049215, 9.396454646581605, 8.22933402121768, 7.349390753563533, 8.902547948207365, 7.828401540030315, 7.5800890133205785, 12.869500383197357, 8.198491859266301, 9.126446518990841, 8.842409464608494, 9.386062024980053, 6.5487939576272804, 12.870060560208461, 8.55166679928873, 7.349618957895044, 16.44127699261698, 9.801742117841222, 8.178429255263799, 9.010502713803481, 8.629907850704752, 9.51897498668676, 9.693104736043775, 8.492951152974834, 10.562769993756223, 7.05344723929231, 7.00204398863723, 9.639628821086191, 8.163686866373416, 8.092652093797048, 8.499333299231205, 9.77103796035867, 12.069450587607312, 7.610739757165238, 
6.462582418006967, 8.762345812630295, 7.23830940712898, 7.836516945259697, 7.850891351777004, 8.651370362895152, 7.846079734368595, 5.590509258660489, 8.464820760806163, 9.590191551836199, 8.007623172552808, 8.98852563509547, 7.465163418995151, 9.000187278737076, 9.309331671788964, 10.848476889147218, 10.251196108002167, 10.730763648271184, 6.948891999549921, 11.513723180442248, 6.561250914709127, 8.01365423201725, 11.416863523154293, 10.197173077338093, 9.18078036599244, 8.43928921481611, 9.647627381367862, 10.282967156656275, 7.462597247340954, 6.606484021022375, 8.906424693863553, 9.729778755284839, 9.715512355668755, 8.248361935106342, 11.095987570237671, 9.905295254228086, 7.062665734310967, 10.210606771766201, 8.893116395322934, 9.784549832640428, 7.642579110635033, 7.390482811679149, 9.805399049017291, 8.287897240443316, 9.127738931994385, 8.715971847618126, 12.200338063282677, 7.921283754575972, 10.580766471236235, 7.14116652543785, 7.122673985144239, 9.20584836605196, 6.631598956365938, 7.504940829521654, 9.024155635326087, 9.120433264971544, 11.788116918526724, 8.436198596617677, 6.841406852637755, 7.917541317226638, 8.967008781145859, 8.101289961122422, 8.493785547965608, 7.570759463158343, 9.23757021605186],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_MNIST_SO4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_QCNN_MNIST_SO4 = np.array([[18.63822742818134, 14.996351756494038, 13.414763568786126, 10.329255663823925, 9.120760457750439, 8.102933852957612, 8.810869354003746, 10.006794023889881, 8.622455729518737, 7.467061678248796, 5.920724352894273, 7.459966178352162, 11.041309513649322, 7.47662233010936, 7.229695349908017, 7.4597916165930975, 
9.340903651158031, 7.760242406952571, 6.9303084303708005, 6.777338888591327, 7.9143882391826335, 7.973693583757422, 6.523936001317213, 5.721920320624669, 6.411448467747264, 6.743808258426262, 8.36566536074733, 12.213662344812196, 9.86487882119216, 7.4320976728582036, 7.216063766836224, 5.953614537542056, 7.891266570572217, 9.775016203278446, 7.0168815366447355, 6.814926383198957, 8.07816330468346, 7.974640687711596, 6.915185780000111, 9.47010152197568, 7.101942912860193, 6.783657568036721, 5.249404624801605, 4.367429035519884, 5.320251740486269, 5.336899745584592, 4.317477069873344, 5.568857370287802, 4.920977955283231, 4.684384264386535, 4.553006285364518, 6.0887038581132815, 4.1491547879052275, 4.088186282272099, 5.244694869029588, 7.32992492009621, 8.228756722276643, 8.500450045848522, 8.900388765805115, 4.782393539924687, 4.453707918535589, 4.889501609395316, 3.7503185012881777, 6.173264902405083, 4.585455869803615, 7.977544468005952, 12.285540362630513, 17.202123904275613, 23.657125425177686, 20.76647699730193, 16.786600872716274, 14.920976885555575, 11.862972721304175, 12.38371680287977, 10.751962948697887, 10.622646942674177, 7.436915527080927, 6.9984957327255355, 9.245537060489227, 10.87902779410266, 7.756676325418157, 6.2902948478653915, 9.054715841741446, 10.213394194913555, 10.713174854514664, 6.016335441879939, 7.032425812051378, 5.889972846638912, 7.993898449819041, 14.957748121261922, 10.828835479078352, 7.282568079388084, 5.824867864544799, 6.061361312353642, 5.871914827628737, 4.957310367289297, 11.347540604193048, 8.968434957621131, 5.276743084970953, 4.738261486776431, 7.316307708548978, 8.567513291943742, 5.760823619869705, 4.984008551171022, 4.423857448855351, 5.040694492783116, 6.2865800097090245, 5.3107772324301115, 6.230352429774949, 6.4879478099807, 6.436442091132201, 3.9321405885942458, 6.354989070262162, 10.323274136262322, 6.6085413468570335, 4.658808263711185, 8.124160196438487, 4.403796314495204, 4.43255455422424, 4.520045902744706, 
5.355124642362081, 8.332581786426433, 3.9723265229964175, 4.022760169594096, 4.029083881416903, 3.6011303828367707, 5.403125843885431, 3.732246282488777, 5.508279394390255, 4.40311432382525, 3.8790697897236774, 2.5849851881410575, 7.694104916900663, 11.101308206780002, 8.99908318435956, 6.711285025466133, 5.288220223376012, 3.124915378973183, 5.599927958218902, 8.78052184785724, 3.952932926563706, 5.369579528623957, 6.137370452943451, 5.322566415917456, 5.2392775554232225, 3.7752909935312235, 5.4785278121755825, 5.678241801210183, 5.721985227045817, 10.56429180944238, 11.938870978480443, 5.775736074154998, 6.234471503829917, 4.877195514439433, 4.899416054021516, 6.344045582687692, 4.865890550492679, 4.421535635425384, 5.867269600134156, 5.562142453240082, 3.360754216373563, 4.186431974326074, 4.731268758423836, 4.885922568736199, 7.568025061286378, 4.162483029371922, 5.151035979205273, 3.7086364620557464, 3.561273261586166, 3.692899360160353, 4.829052613610973, 4.425004252281964, 6.3568812323053, 6.240579093340331, 6.7254694546035605, 10.667056583895807, 6.239023279958996, 3.37687322896251, 3.9036095311643524, 4.61061799293459, 6.327346218781266, 7.403986110219671, 4.226736651569875, 6.70632723896253, 5.906006743999679, 3.6763807042074377, 3.2218102204105583, 5.167346171425076, 5.811388809291511, 6.178815426868625, 4.388779147164115, 4.2638554283047805, 3.6778325042340136, 3.9423847612500262, 4.727252148842417, 7.191028993956458, 8.017471459721563, 6.924090269713143, 4.705890036004035, 4.401884292499541],\n [19.490832395681625, 18.55871620176089, 15.649967314155994, 15.25775021242159, 15.946631312004945, 15.564568027417012, 15.775617802009306, 15.044765155752698, 13.933258098153132, 14.063824448150507, 12.21922352004349, 13.049576831161843, 12.712203213548328, 14.132134612398179, 12.556063713167147, 14.392089877214355, 13.440473111651375, 11.774158422851105, 10.50097655189196, 12.061937948723456, 12.21665381224416, 11.346394460348499, 8.629636878658907, 
12.225110027264428, 9.579218885218834, 10.507291741638808, 11.886818768201305, 10.711623311876588, 12.077706930553655, 12.079871034775882, 10.461352678737866, 9.296707069363412, 8.87196431029472, 9.831395076540318, 9.20077237812059, 9.51518046692755, 10.844902472764865, 9.82194055864702, 8.345613554549725, 11.681707199949948, 11.627748820403133, 10.787938240027913, 8.356710854106506, 11.618109472429627, 10.803883976763743, 9.898552999685197, 8.666128456776557, 10.00192769450485, 10.155014381608025, 9.442274312076112, 10.164595326406975, 9.980580618952132, 10.254257704326186, 8.650898911794851, 9.925822401567757, 9.051865719487614, 7.753171230565504, 8.444428209226645, 9.242899780770129, 6.311130292087029, 9.825969502968748, 9.709971347005947, 11.203871054470323, 9.485870242227849, 9.139980205240354, 11.308542870287445, 10.313194124028934, 7.947284927075531, 9.638990963069965, 9.889833787600175, 8.414594325212523, 10.17246798640375, 9.630538313122777, 8.245168760098567, 11.641464349669182, 8.454133311779685, 10.67983864047887, 10.807836689384096, 8.444095801997983, 7.742010863489368, 8.671464960487954, 8.205575545539926, 8.133405059717063, 10.510342651330332, 7.679940560906537, 9.63960338744768, 7.942249457589856, 8.785638039321348, 10.011986435353814, 8.916937178788642, 10.30596575722854, 11.997141667108748, 17.715626650431954, 13.10843865338633, 11.72984666664915, 9.328112302029268, 9.753737428446168, 7.70135283346627, 8.588180852433954, 8.569610295783594, 7.474243807386273, 12.871161600296347, 26.844603572490467, 15.217840476536267, 15.36468555440585, 14.871990491928525, 14.782901708761406, 13.361935336116192, 13.604591646353025, 18.2271462086095, 14.4921575869624, 13.17298756808403, 13.757652388780645, 12.838897157734005, 12.534857900653275, 12.148956872993773, 12.743835919769873, 10.697705254259567, 10.263618848593444, 12.781232133207615, 9.674463890910015, 9.463841683137922, 9.243864323702923, 9.59537275202494, 8.881529761502213, 6.779813811049111, 
8.844013588598528, 7.7767192947358454, 5.911364741194008, 8.228839281598237, 10.710147874346866, 9.590282395255961, 4.469541187332343, 10.682809221824277, 15.374534169996675, 6.518709752345706, 7.652843415578606, 6.860207736101027, 5.6419127458267315, 6.640874542689622, 5.89339677377351, 12.170129017811846, 14.010024538836916, 10.763818670492352, 8.74844120950976, 6.643879267715248, 5.953926144537239, 5.4765722664234175, 6.855202610554837, 5.915324979169266, 6.77289228291636, 8.19553229518862, 7.283430910005893, 5.732140383630094, 6.7169666284704155, 6.944716990826884, 8.222704555050731, 5.77755706716962, 6.605696600089706, 5.4039004019051085, 6.988184392491396, 6.324200673549187, 6.553284736887491, 6.944051674419599, 6.709041808717305, 5.5922144899053725, 6.3905134048626575, 6.957175449289159, 9.009156736137852, 6.256458420004562, 8.60741497751637, 10.112558264377345, 6.260743660849035, 6.565264761685845, 6.843761653996312, 5.302323455553702, 6.563358889368709, 5.990469571531752, 7.868939071658369, 5.115975030286059, 6.95779148327607, 6.676568231943454, 5.865441755153629, 7.848964879786915, 6.036973771766849, 4.982844394460535, 8.788126300069717, 4.954992438070776, 6.844259722427189, 6.163862578102813, 5.723721129395695, 5.416836037830964, 5.416896894672021, 5.670715166845436, 6.487588611397098, 5.619894619477072, 7.363213897492605, 5.712484199123037, 5.930128420038318, 6.926150805589353],\n [18.677674720286532, 17.266856103940327, 17.335559978457628, 16.681866737195644, 16.84223599883026, 16.24488481771055, 16.110298391006523, 15.97027628377141, 15.44983421307251, 15.25638508931612, 15.322972559971118, 14.891564708072965, 15.045553923185338, 14.843479454974213, 13.856895526235808, 14.099419670496843, 13.549936450408953, 13.740618588037657, 13.302623314655508, 12.344426915680017, 11.797208902820305, 11.04861622498279, 10.46800103858427, 10.306431077570695, 9.188230579289511, 9.556546451397349, 9.206575747404164, 9.69741155057343, 8.591538202755547, 
8.003960141820912, 9.252507359346506, 8.793393701603222, 7.928368379818497, 9.302909885593825, 7.608869008761578, 8.236825946017346, 9.9853588900174, 8.678481408055282, 8.349882090330496, 10.076827642715344, 8.090229798086346, 9.189593655088542, 7.936588262844694, 8.211361414033803, 7.407248298512138, 8.0582580377497, 7.4657409438247395, 8.953390748875144, 7.9467690261571144, 8.030070948578933, 8.139389734609296, 8.130138055252724, 7.677927209170414, 8.007218091175163, 9.321459400279426, 8.206262933533521, 7.8280262682637884, 8.656820869194108, 7.692683735774993, 7.566665187757739, 7.836433801576699, 9.435413631665114, 8.625385810839521, 9.002074918292974, 7.9817209095270805, 7.057963702059508, 9.177593220351767, 8.341213755533236, 7.754585459033915, 7.970266013416763, 8.438107414369197, 7.738943423637591, 7.668872922344866, 7.302855971899015, 8.36710050737675, 7.004058468552856, 8.370509095209938, 7.593457575345229, 7.434899394253483, 7.487783949485477, 7.383023196917764, 6.686195610599259, 7.216242980668383, 8.150555332906988, 7.479657821895236, 8.311835234313705, 7.189965692822094, 7.368458760058259, 7.483135451470152, 7.159292130845851, 6.972845358949022, 8.919214213576959, 9.036446256309423, 7.774991064214777, 7.7128854980398875, 7.825968970727324, 6.368090979542896, 7.462224480584682, 7.527037703450457, 7.712451683363823, 8.102218660330882, 7.830024798686506, 7.487319634682742, 9.21224247119141, 7.786345555555848, 8.025375110592737, 6.810305221279158, 7.788384961894594, 7.263648824028164, 6.9009097562351505, 7.796237938612585, 6.936513841038604, 8.28827725313593, 7.7458954032170535, 7.693895442333936, 8.081070942909923, 7.340907065112747, 6.314510878504791, 8.062277784499255, 6.239358186332451, 7.436231493560881, 8.084840818957016, 8.38098846216493, 7.789691023002653, 7.375115206789374, 6.91744489289211, 8.498982739048309, 7.181489165249464, 6.274272359948774, 7.5591463261909535, 7.656323916228581, 8.345929811495642, 6.531505002902869, 8.908993975283488, 
6.529732721314727, 7.265614170403399, 7.719896051564857, 7.332419654217052, 7.71268787930972, 8.14626459853457, 6.7041468616053255, 6.788764437400522, 7.668331273872328, 7.584967052477962, 7.815034354712244, 7.636419333585498, 6.273455360995564, 6.912520987215233, 6.1358772907280255, 5.96560085525717, 6.5821611065575345, 7.530210361671302, 6.899628125185935, 6.676234816085111, 6.245327486324353, 7.434415056921618, 7.006716279875979, 8.183372557550465, 7.600082230556003, 6.782621564044763, 8.050512075932295, 6.713595693483005, 7.907809179875259, 7.957372566718753, 7.50100198807415, 7.914939125942729, 6.120781606557825, 7.114250345958644, 8.839459959489291, 6.450937609091488, 7.504495331851585, 6.310692529028966, 5.800675336875668, 5.102696045908363, 5.6668927376298415, 5.2615702372932684, 4.15902984092072, 6.598748558806056, 5.315736345312888, 4.7710562302473605, 7.627586321608522, 5.011165396093148, 4.324941742554292, 5.297403751037838, 5.850222566377067, 5.546201594228933, 5.1729772438736115, 4.792630453302324, 5.717642735839203, 4.4448504608234245, 4.305340982909913, 4.083892368819292, 5.087554652482536, 4.410274300127815, 4.300784822482181, 2.2462774742757423, 3.779154556536459, 7.679605527681904, 7.012382827839762, 9.497009141065512],\n [17.47229653104087, 16.52845100742326, 17.122798468538008, 16.295788396615954, 17.192619618145415, 15.74410864552886, 15.641333722311673, 15.729160908054453, 14.808834375733063, 14.734324937100821, 15.246273176166863, 14.470302488233042, 13.292638924061885, 13.390587100769629, 13.034219715955404, 12.879221760770431, 13.03321379977208, 12.69023511897895, 11.971062136795513, 11.383014306279488, 12.409418805739675, 11.680500527280792, 11.848131465885647, 11.10058719095201, 10.967248455483366, 9.931793197526837, 12.862457872364642, 11.642305719669054, 9.926962444242958, 10.444274884463491, 9.714842539841156, 10.886715061901675, 9.69194170162343, 12.37124160214884, 12.239361932692878, 11.227909943335716, 10.529937940962323, 
10.466945849849349, 9.503375110985736, 9.21193563602031, 10.345152976017346, 10.13929514372021, 9.334912632559568, 11.20117443146198, 8.435465899928868, 9.306443392232177, 7.674775088739535, 11.094058594508224, 9.806566227764325, 9.753853422781601, 9.703475275521642, 9.338961178237717, 9.147076944032051, 9.969623679725812, 9.768411884397354, 9.94611962537007, 8.315401657960823, 8.423425337825657, 8.872765312401535, 10.882244689873716, 9.972856778617642, 9.3811068355655, 8.2336445192984, 10.436962503139233, 8.982909282471748, 8.813976410282278, 8.601510949049006, 9.953358920509665, 9.141569548320685, 7.387288208812636, 8.893257235238586, 8.537138469964765, 8.511317219755595, 9.677364577412943, 9.57305272773976, 10.722555670865308, 9.960625222486403, 10.949857743327017, 11.044668103113759, 9.181062739205466, 8.735843874765884, 8.412884276554134, 9.074406382485165, 8.007103575507182, 10.297575661129853, 9.537962646291962, 8.043294052839366, 9.406046206964078, 7.463110880775795, 8.216577341771748, 9.993277776229395, 9.641960929222911, 9.825544744254774, 7.961162820395543, 6.833864929841814, 8.119686649664352, 8.04628338513848, 8.725130268919587, 8.291468487722643, 7.601204536561341, 9.366169648453559, 7.746078602205226, 7.215659204103467, 6.70133407213436, 6.933379408499517, 9.200219213998405, 7.7737762826913395, 6.809951684382872, 8.779707732360738, 8.112684741852533, 8.661195081546301, 8.290013946417599, 8.115230804644153, 8.455201625530021, 7.237352606159898, 7.581595063403986, 7.943437197895797, 8.830929777172935, 9.495636849503327, 8.824172442181377, 8.866916335410826, 8.923975771489125, 7.333082117068796, 7.727410788615958, 7.3940216753183154, 8.169734137059535, 7.184251987091261, 8.149516338090352, 7.9267940412373665, 8.691281119472402, 8.149847896205559, 8.353040491826519, 6.775937272693683, 7.8961515291856434, 8.232009010481493, 7.745717791398985, 8.003948766007799, 8.927976407365597, 9.70473042956134, 9.19185628898125, 7.069397204937394, 6.616356391629773, 
6.762746411850479, 6.12783348373553, 9.326037397038913, 8.272094369374019, 7.37179872245548, 7.57378906825723, 7.516188142897698, 7.800201021490489, 9.926911634880025, 7.999858607815883, 11.315279649450078, 9.191934306831268, 9.78564879547748, 8.775693221533093, 8.160015430815088, 7.399321435627195, 8.836504222864635, 8.074077592075158, 7.613055499760482, 8.982368271042896, 8.3566233210652, 8.944472876424268, 8.010135944887468, 6.487273132895113, 8.087074428212611, 7.147311424359065, 6.989552480289897, 8.679612408452398, 7.253769498388075, 7.599358379078584, 8.46247241445163, 6.915147675019893, 7.1380367918799665, 9.168662114410594, 9.567007014514896, 7.918437332323219, 7.484506409632197, 8.857027697147231, 8.019005616942929, 9.490593267527478, 8.440841403058242, 9.014628390498094, 7.4291556237446255, 10.464398772450274, 8.662762455377562, 9.553604082646517, 7.889702224457729, 9.523251715097762, 9.231765712593445, 7.699577303090916, 5.995010901400576, 9.714285989904047, 8.520612562686814, 6.591900319836927, 6.5140865610052, 6.6046229940003105, 6.555066801112216, 8.342359134931744]])\nloss_histories_QCNN_FASHION_SO4 = np.array([[15.555303202579205, 19.504919787996624, 16.974011398817957, 17.108571384885867, 16.468153100970216, 15.75587279071819, 15.31106664813007, 15.510275084448892, 15.48160051328491, 15.464417951912678, 15.21468547592811, 14.604456666514338, 16.618194576615522, 15.572879632393173, 14.717824918319078, 14.386997760135959, 14.433958925251964, 15.613930634505355, 14.657597338611513, 15.66931899051052, 14.834527710138387, 14.72801057161, 14.071797047329568, 14.01364789752765, 14.71289073458298, 14.432009196052622, 14.021694704420167, 14.451180198877216, 13.81502985811841, 13.857077906883745, 14.467416488282588, 14.74291139464224, 18.83755521564319, 14.327474596510424, 13.457830159177448, 13.370437601021738, 12.606666197690489, 13.259361223576914, 10.965227031427087, 15.85587643864697, 19.96072872803247, 15.99597559452018, 12.105793743935497, 
14.385183227062825, 14.994104620905455, 13.565108731528309, 13.06204831085624, 12.039172057665446, 16.24745516822587, 13.429452485074872, 13.54877761686802, 12.114676102076725, 12.27702961667929, 12.16761591449498, 11.362066400874633, 13.341324275337044, 11.402425048712326, 12.223537616030802, 12.288648897995458, 12.380829164571688, 11.691332624672233, 11.434502691936164, 13.089649703108648, 12.288433247103564, 11.139703767239904, 10.449275570013748, 13.536140051771909, 11.876935131209281, 11.072024056197609, 12.48035410024065, 11.039898207936071, 12.068388184906631, 11.297000207542004, 11.509795352783245, 10.50816809299725, 11.893419700959901, 10.916165987629146, 14.590065321835178, 14.003523113387262, 9.674955097625048, 10.343833479385697, 13.34229982478152, 11.552967718343982, 10.30505443674415, 12.22575493421307, 12.409596480890572, 10.203760020086365, 11.910239860089346, 12.077710868087973, 12.050779151162956, 10.085481025342904, 10.508760538459361, 13.02167061714575, 10.969008290293907, 12.222903952283554, 10.177030636539607, 13.281301968063987, 13.160308285525717, 11.236368247275914, 10.893657505868008, 12.734237643057842, 10.783497099781778, 14.478882920666084, 10.333168430951634, 11.474930155484978, 12.482407368250314, 9.380519835782515, 10.741355680809695, 9.900772056018885, 11.37583422002143, 11.621624612002872, 11.244972679049836, 11.397841647027331, 13.395893623763943, 11.980370405851641, 9.001524595359493, 10.774930604235182, 11.321189766101934, 9.976631040563422, 12.226949408487787, 11.225077657927901, 11.012862695376992, 12.26412313610437, 11.098785934715472, 11.89594165408133, 10.508186199240512, 10.627600551300878, 11.526792908072663, 10.243772927793646, 12.671260599002071, 10.259582778329206, 11.523974496540236, 9.990639976275533, 11.816468100349503, 11.761603894025555, 11.969004025639347, 11.19478320178266, 10.290721290183578, 10.142939188345759, 9.923313637629583, 11.903822138979159, 9.662797524450559, 10.258292675377227, 11.72621282603769, 
10.22100231147799, 10.759594235985471, 10.654187077192066, 8.786877857547301, 11.555273188238802, 10.445581745719611, 13.868872540114179, 11.637816309694294, 10.490302769295138, 10.652751636098762, 10.567486946748316, 10.823280250882126, 11.604759302035067, 10.572109217362748, 9.305649423624736, 13.120918411833392, 10.350301870992201, 10.664152450842048, 10.5522160714381, 9.76792440342697, 11.21372342896267, 12.72476540142632, 8.974037841687752, 10.176234933268807, 10.735434072133039, 11.614131727940505, 10.732166814460395, 10.236305916335079, 9.763756002560177, 12.551684786339672, 10.946261572511657, 10.934140724656354, 10.347075295193335, 10.11845009597337, 9.114796498678905, 10.60417434600415, 10.877718037573215, 8.984929919525204, 10.837912462534929, 9.917267902638878, 8.921645868589923, 8.872534953495492, 11.191232181844624, 11.388289756683118, 9.924240369521405, 11.826924835888446, 10.571129599370146, 10.91815752434668, 9.415967478168183, 7.427478307582434, 10.956675987652861, 10.601175428098784, 9.351546011502595, 9.009228800638875, 10.43676354074511, 11.86821422814849],\n [17.62685247943755, 15.921501286919272, 15.871127049053275, 15.239219491091808, 15.546447089828968, 14.600135975597338, 13.725492950779458, 12.795231677743987, 14.53600972191518, 13.237239660766868, 12.604003514991504, 12.35459815425994, 11.164789179173859, 11.844668382525047, 12.277120603329898, 12.024030420080734, 11.294641796127678, 11.24392458273532, 12.310419159019323, 11.345370396317726, 10.80820112790213, 10.815801819515524, 10.213738451315571, 10.710799346201188, 11.043522777680751, 10.88855437587723, 10.47066925380131, 10.031373264086302, 10.897892266182534, 10.646760578727005, 11.041489034733864, 9.405306961109028, 7.907784678787825, 9.531922065555655, 10.699178154783171, 8.752903439589083, 10.857875815010672, 9.304902995952173, 8.703717438297984, 8.178912002237922, 10.217501124790381, 7.303483190098499, 9.484631787673315, 10.013270960996968, 10.597687667500503, 
8.081043712010258, 9.302155981943438, 12.025602788775371, 9.415051467888432, 8.178881929193468, 9.340531840584015, 8.96972859002934, 11.472191757174494, 8.726786333656417, 7.154037295664112, 9.50550426773253, 10.930467472213461, 8.428679058219908, 9.866546522197625, 9.460126225851688, 8.127562042688414, 8.731076847080129, 8.217546660968534, 6.9602964522291755, 8.508673649790321, 7.286550827537333, 8.529257040220635, 8.435019163022856, 8.214697249858581, 7.363161364022071, 8.409295511222089, 8.932938352882193, 9.657748816129136, 9.402933355234115, 8.033625912783963, 9.333287886466355, 6.0071033395362345, 13.52174485846432, 9.474038145187905, 8.41512872154908, 9.17522072968709, 7.188131367611232, 8.338200866485991, 7.700165339614012, 7.186507251196377, 7.990887899100909, 7.189818436149402, 10.48814613313002, 11.861186899967057, 8.407829223946711, 6.3207643885114635, 6.5663250285464825, 9.613737257075872, 9.759378520336458, 8.042083531415381, 8.500238071507258, 8.610623895897982, 8.43959912831457, 6.874162800109263, 8.480459139842097, 9.470975743084217, 7.9683563444049215, 9.396454646581605, 8.22933402121768, 7.349390753563533, 8.902547948207365, 7.828401540030315, 7.5800890133205785, 12.869500383197357, 8.198491859266301, 9.126446518990841, 8.842409464608494, 9.386062024980053, 6.5487939576272804, 12.870060560208461, 8.55166679928873, 7.349618957895044, 16.44127699261698, 9.801742117841222, 8.178429255263799, 9.010502713803481, 8.629907850704752, 9.51897498668676, 9.693104736043775, 8.492951152974834, 10.562769993756223, 7.05344723929231, 7.00204398863723, 9.639628821086191, 8.163686866373416, 8.092652093797048, 8.499333299231205, 9.77103796035867, 12.069450587607312, 7.610739757165238, 6.462582418006967, 8.762345812630295, 7.23830940712898, 7.836516945259697, 7.850891351777004, 8.651370362895152, 7.846079734368595, 5.590509258660489, 8.464820760806163, 9.590191551836199, 8.007623172552808, 8.98852563509547, 7.465163418995151, 9.000187278737076, 9.309331671788964, 
10.848476889147218, 10.251196108002167, 10.730763648271184, 6.948891999549921, 11.513723180442248, 6.561250914709127, 8.01365423201725, 11.416863523154293, 10.197173077338093, 9.18078036599244, 8.43928921481611, 9.647627381367862, 10.282967156656275, 7.462597247340954, 6.606484021022375, 8.906424693863553, 9.729778755284839, 9.715512355668755, 8.248361935106342, 11.095987570237671, 9.905295254228086, 7.062665734310967, 10.210606771766201, 8.893116395322934, 9.784549832640428, 7.642579110635033, 7.390482811679149, 9.805399049017291, 8.287897240443316, 9.127738931994385, 8.715971847618126, 12.200338063282677, 7.921283754575972, 10.580766471236235, 7.14116652543785, 7.122673985144239, 9.20584836605196, 6.631598956365938, 7.504940829521654, 9.024155635326087, 9.120433264971544, 11.788116918526724, 8.436198596617677, 6.841406852637755, 7.917541317226638, 8.967008781145859, 8.101289961122422, 8.493785547965608, 7.570759463158343, 9.23757021605186],\n [17.62468016949225, 17.792438583890053, 17.838038880100303, 17.360011410521626, 16.647677355797065, 16.582675736986584, 16.38485055301381, 15.7174842525365, 15.490590516811501, 15.352643944389436, 14.92893205299697, 14.56851661025037, 14.511536375111792, 14.628950105430702, 13.224883867875098, 13.854294300958472, 13.422023035306976, 12.284025049740396, 12.921328143239315, 13.184500173755039, 11.994004063793994, 12.922609214835617, 11.656847901033, 12.045552095386036, 10.239497407328853, 10.625018891164016, 11.679340002304818, 11.340329574831342, 9.4137151337736, 9.46199986176211, 8.29244437961657, 10.46692418314566, 11.145497262250817, 8.970813901450436, 11.537673655092243, 10.254133609356499, 9.228345926435182, 9.397130770337457, 11.136506861428513, 6.888429608904613, 8.079590278052613, 7.467989318495502, 10.714439544929968, 7.873329181214508, 8.659942937902782, 7.262812557105666, 7.23815770917138, 9.505493089022027, 7.622265313342531, 9.375747284879402, 8.543870279501094, 7.52421969953532, 7.657178207990818, 
8.805004831607397, 8.891523378403335, 6.539231067403415, 9.836763322546467, 9.64918692893352, 8.06884015593062, 8.979826889691928, 9.672649717462255, 9.53238729460328, 7.304190225780084, 7.272106462770803, 7.787436103105838, 9.442138735945289, 8.903954731557388, 12.021805721262274, 7.78371822451657, 6.46142858406304, 10.260291283912835, 10.415961355957892, 8.24171104911391, 8.9226908506637, 7.978470519827774, 10.374408253258423, 10.115940919318756, 7.818019704787878, 6.860165800286143, 7.103852645936109, 7.062447417354729, 6.686701430677538, 6.761032757867685, 7.316178317228426, 8.823905002800142, 8.257086645355464, 8.492063526293874, 7.673751479492983, 8.40544279237507, 9.156239601700973, 5.3562186474106195, 7.42344721987932, 8.190270734708387, 10.715491296562725, 12.950639052824735, 8.659058860825702, 10.487748696215348, 8.373035434857053, 9.597169187092515, 8.339344712082248, 6.861051102229722, 8.037696938399586, 6.554466971336298, 6.788687707043678, 10.303061668679328, 7.939353753405397, 7.82764990599258, 6.892496865039029, 10.304612303964523, 7.302702520932945, 9.613426006277507, 6.061897833691992, 9.023305269361392, 5.4296929323141665, 8.967793122402107, 6.891621686906863, 8.49315622278125, 12.355338678026117, 8.909757955179726, 8.650829667288185, 6.052344550817281, 8.802399497981455, 5.170301281871506, 9.17986202222602, 9.439363903451572, 6.891974088626554, 10.832766817908803, 8.431104420259686, 7.159789178018122, 10.863249484227786, 7.2388141953994305, 8.128486660128706, 8.602487904950278, 7.801372398907806, 7.406013344098132, 6.24742158307956, 6.903713710018895, 7.032206022233541, 7.7703901681153775, 5.425460960450408, 14.023599126532005, 9.762440105851383, 9.788887742917831, 12.963735026376947, 9.437086121652701, 7.343225221177035, 9.451263404145537, 7.415475200331185, 5.63349503311597, 8.650636831883514, 10.1665660329664, 5.67181920348414, 9.080043789649764, 6.668638185751733, 6.376186970774991, 6.687202852319816, 6.636853099086844, 8.798490025430514, 
7.161251785319214, 9.259887585614685, 8.91175543129141, 11.074919319887947, 8.330048827520601, 12.315792422280897, 8.628067236340287, 11.355416991569383, 6.909074209178678, 6.474581165684212, 5.525205757587205, 8.045967505261881, 8.23581285901292, 10.652722685352613, 7.173869709973668, 7.7069836034934545, 8.466667257144021, 8.979744504471867, 6.0717755693769115, 6.499622937004969, 10.064222390225854, 9.916834487152457, 6.636352993056183, 6.688103233068427, 9.4526237077505, 7.558827508769805, 7.572180237648968, 9.390118837751434, 11.474947875063155, 8.913215122749829, 6.28299499666229, 8.829952374193276, 8.961975646338047, 8.129179369525652, 10.939071109739311, 8.098020239057414, 12.736295556918169, 8.008065564511718, 8.385498293650762, 10.646271748965379, 7.945534549848988, 8.418372393730438],\n [16.985325693507114, 16.686421247177, 16.60664399162121, 16.363606536374924, 16.389725301775535, 15.532585242237792, 15.089783860416986, 14.964299179846586, 15.528321491050946, 15.396405824036188, 15.078057521100803, 13.895597709684854, 15.277467773775363, 15.253165328423163, 15.194306590677305, 13.985619278908498, 14.099605388418269, 14.23146192109241, 14.263253495959049, 14.067545389408268, 13.84183770273229, 13.139073332393481, 13.756079105761025, 13.332123968887183, 13.01753883655313, 12.68316712642569, 12.404637922192933, 12.110180574072377, 13.468399034150414, 12.948180080156112, 11.441852781461728, 11.492470527686576, 11.597920344904654, 11.293283445123588, 10.630150012662137, 10.481145666757946, 12.108997400744574, 10.600948695820303, 10.183592917521258, 9.020199752337817, 10.078771965203709, 8.666603794857238, 8.892053924546142, 12.60174092516368, 9.015991949189692, 8.594242089913909, 9.53040698506372, 8.815475838625472, 8.55092117144595, 8.125894809113923, 10.038419096015753, 9.909179940055504, 7.5720155909831055, 9.141014448037907, 9.21637359275205, 9.14196926349645, 9.169235118275482, 11.566470540384364, 8.00830805130341, 8.20632529420909, 9.424649752140953, 
7.793715326945735, 8.017191203168933, 7.699105983522891, 8.079197887609604, 8.914645852323762, 9.375314701247403, 7.295548345475449, 6.703816697533753, 8.281930945462957, 7.498499156906735, 6.7773264801168, 7.362908334706134, 6.411800651823254, 9.180573177079072, 8.049842641319767, 8.251534879692139, 7.309601730764083, 8.142073314032178, 7.9043922900603105, 8.319227887520363, 8.837048249933705, 7.317279353931614, 7.132462592165005, 6.561302621854409, 8.18568908688175, 7.693552508584421, 8.918260604450571, 7.526265025745191, 8.74304463063513, 6.95806208066376, 7.688505974389126, 7.877970938625335, 6.868282819801885, 9.196637940461153, 7.1085505010089385, 6.456520678168566, 7.887582694981859, 7.209466206381627, 7.018692584126776, 6.70550579175984, 7.628646576531721, 7.070766270872706, 9.028499395068746, 7.305377489790678, 7.972484586016085, 8.15393658824796, 8.152696988674954, 8.569655766949126, 9.091620444186795, 7.467715226582069, 8.5835426581949, 7.594318070739007, 5.831039855855192, 8.421468549598357, 7.263476192795636, 8.236163003909972, 8.719376370858475, 6.188632615364697, 7.245097817835268, 7.375901167883419, 9.720884356195631, 6.626520696526847, 8.14309766351156, 7.830646962851051, 8.385058458564025, 7.276032373966876, 8.573778467605601, 6.7872131107781275, 6.519312116528838, 8.16679003844353, 7.138069016219151, 8.643212907637379, 7.810024594165573, 7.083796030228225, 7.374582800654189, 8.184980724572792, 7.916918931492604, 7.866344753919769, 9.005140522275756, 7.622498632602993, 8.198517119700385, 7.464843702329998, 7.250742153248974, 9.590680647016711, 8.391383372266443, 8.764379769748494, 7.299733437571678, 7.306139740583365, 6.676596443477916, 9.425020926885653, 7.9356697192471035, 7.35414817899485, 8.04188524099539, 6.742571448790994, 7.866279770868147, 6.548047697228785, 9.766661405491915, 7.659160857162992, 6.379533868308403, 8.308770095053541, 7.4867398561169605, 9.117734704780226, 7.723077592179748, 7.798572623440491, 6.472577796779722, 
8.914433191382257, 8.308903473051807, 6.171786070449324, 6.707592984686594, 6.372191274640625, 7.368461099253531, 8.769536912463337, 8.581933371617868, 9.465414011833014, 8.389042014485675, 6.558472464040398, 8.747934584649272, 7.8668440904093675, 8.1235941160348, 9.336105606779242, 7.878693229104847, 7.564484621732083, 5.721196438913833, 10.364000581027605, 9.122705140976514, 7.824850536311662, 8.50747547183777, 8.967260254831217, 8.711110107811225, 7.754576742306416, 8.771903955630224, 8.837752511594896, 6.631696002149619, 6.171294364768627, 6.826267855440247, 9.46372545352008, 7.2510569412635135, 6.680233659296568, 6.684426334164904]])\nloss_histories_QCNN_FASHION_SO4, loss_histories_QCNN_MNIST_SU4, loss_histories_QCNN_FASHION_SU4, loss_histories_QCNN_MNIST_SO4 = loss_histories_QCNN_FASHION_SO4 / 25, loss_histories_QCNN_MNIST_SU4 / 25 , loss_histories_QCNN_FASHION_SU4 / 25, loss_histories_QCNN_MNIST_SO4 / 25\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_FASHION_SO4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "loss_histories_QCNN_FASHION_SO4 = np.array([[15.555303202579205, 19.504919787996624, 16.974011398817957, 17.108571384885867, 16.468153100970216, 15.75587279071819, 15.31106664813007, 15.510275084448892, 15.48160051328491, 15.464417951912678, 15.21468547592811, 14.604456666514338, 16.618194576615522, 15.572879632393173, 14.717824918319078, 14.386997760135959, 14.433958925251964, 15.613930634505355, 14.657597338611513, 15.66931899051052, 14.834527710138387, 14.72801057161, 14.071797047329568, 14.01364789752765, 14.71289073458298, 14.432009196052622, 14.021694704420167, 14.451180198877216, 
13.81502985811841, 13.857077906883745, 14.467416488282588, 14.74291139464224, 18.83755521564319, 14.327474596510424, 13.457830159177448, 13.370437601021738, 12.606666197690489, 13.259361223576914, 10.965227031427087, 15.85587643864697, 19.96072872803247, 15.99597559452018, 12.105793743935497, 14.385183227062825, 14.994104620905455, 13.565108731528309, 13.06204831085624, 12.039172057665446, 16.24745516822587, 13.429452485074872, 13.54877761686802, 12.114676102076725, 12.27702961667929, 12.16761591449498, 11.362066400874633, 13.341324275337044, 11.402425048712326, 12.223537616030802, 12.288648897995458, 12.380829164571688, 11.691332624672233, 11.434502691936164, 13.089649703108648, 12.288433247103564, 11.139703767239904, 10.449275570013748, 13.536140051771909, 11.876935131209281, 11.072024056197609, 12.48035410024065, 11.039898207936071, 12.068388184906631, 11.297000207542004, 11.509795352783245, 10.50816809299725, 11.893419700959901, 10.916165987629146, 14.590065321835178, 14.003523113387262, 9.674955097625048, 10.343833479385697, 13.34229982478152, 11.552967718343982, 10.30505443674415, 12.22575493421307, 12.409596480890572, 10.203760020086365, 11.910239860089346, 12.077710868087973, 12.050779151162956, 10.085481025342904, 10.508760538459361, 13.02167061714575, 10.969008290293907, 12.222903952283554, 10.177030636539607, 13.281301968063987, 13.160308285525717, 11.236368247275914, 10.893657505868008, 12.734237643057842, 10.783497099781778, 14.478882920666084, 10.333168430951634, 11.474930155484978, 12.482407368250314, 9.380519835782515, 10.741355680809695, 9.900772056018885, 11.37583422002143, 11.621624612002872, 11.244972679049836, 11.397841647027331, 13.395893623763943, 11.980370405851641, 9.001524595359493, 10.774930604235182, 11.321189766101934, 9.976631040563422, 12.226949408487787, 11.225077657927901, 11.012862695376992, 12.26412313610437, 11.098785934715472, 11.89594165408133, 10.508186199240512, 10.627600551300878, 11.526792908072663, 10.243772927793646, 
12.671260599002071, 10.259582778329206, 11.523974496540236, 9.990639976275533, 11.816468100349503, 11.761603894025555, 11.969004025639347, 11.19478320178266, 10.290721290183578, 10.142939188345759, 9.923313637629583, 11.903822138979159, 9.662797524450559, 10.258292675377227, 11.72621282603769, 10.22100231147799, 10.759594235985471, 10.654187077192066, 8.786877857547301, 11.555273188238802, 10.445581745719611, 13.868872540114179, 11.637816309694294, 10.490302769295138, 10.652751636098762, 10.567486946748316, 10.823280250882126, 11.604759302035067, 10.572109217362748, 9.305649423624736, 13.120918411833392, 10.350301870992201, 10.664152450842048, 10.5522160714381, 9.76792440342697, 11.21372342896267, 12.72476540142632, 8.974037841687752, 10.176234933268807, 10.735434072133039, 11.614131727940505, 10.732166814460395, 10.236305916335079, 9.763756002560177, 12.551684786339672, 10.946261572511657, 10.934140724656354, 10.347075295193335, 10.11845009597337, 9.114796498678905, 10.60417434600415, 10.877718037573215, 8.984929919525204, 10.837912462534929, 9.917267902638878, 8.921645868589923, 8.872534953495492, 11.191232181844624, 11.388289756683118, 9.924240369521405, 11.826924835888446, 10.571129599370146, 10.91815752434668, 9.415967478168183, 7.427478307582434, 10.956675987652861, 10.601175428098784, 9.351546011502595, 9.009228800638875, 10.43676354074511, 11.86821422814849],\n [17.62685247943755, 15.921501286919272, 15.871127049053275, 15.239219491091808, 15.546447089828968, 14.600135975597338, 13.725492950779458, 12.795231677743987, 14.53600972191518, 13.237239660766868, 12.604003514991504, 12.35459815425994, 11.164789179173859, 11.844668382525047, 12.277120603329898, 12.024030420080734, 11.294641796127678, 11.24392458273532, 12.310419159019323, 11.345370396317726, 10.80820112790213, 10.815801819515524, 10.213738451315571, 10.710799346201188, 11.043522777680751, 10.88855437587723, 10.47066925380131, 10.031373264086302, 10.897892266182534, 10.646760578727005, 
11.041489034733864, 9.405306961109028, 7.907784678787825, 9.531922065555655, 10.699178154783171, 8.752903439589083, 10.857875815010672, 9.304902995952173, 8.703717438297984, 8.178912002237922, 10.217501124790381, 7.303483190098499, 9.484631787673315, 10.013270960996968, 10.597687667500503, 8.081043712010258, 9.302155981943438, 12.025602788775371, 9.415051467888432, 8.178881929193468, 9.340531840584015, 8.96972859002934, 11.472191757174494, 8.726786333656417, 7.154037295664112, 9.50550426773253, 10.930467472213461, 8.428679058219908, 9.866546522197625, 9.460126225851688, 8.127562042688414, 8.731076847080129, 8.217546660968534, 6.9602964522291755, 8.508673649790321, 7.286550827537333, 8.529257040220635, 8.435019163022856, 8.214697249858581, 7.363161364022071, 8.409295511222089, 8.932938352882193, 9.657748816129136, 9.402933355234115, 8.033625912783963, 9.333287886466355, 6.0071033395362345, 13.52174485846432, 9.474038145187905, 8.41512872154908, 9.17522072968709, 7.188131367611232, 8.338200866485991, 7.700165339614012, 7.186507251196377, 7.990887899100909, 7.189818436149402, 10.48814613313002, 11.861186899967057, 8.407829223946711, 6.3207643885114635, 6.5663250285464825, 9.613737257075872, 9.759378520336458, 8.042083531415381, 8.500238071507258, 8.610623895897982, 8.43959912831457, 6.874162800109263, 8.480459139842097, 9.470975743084217, 7.9683563444049215, 9.396454646581605, 8.22933402121768, 7.349390753563533, 8.902547948207365, 7.828401540030315, 7.5800890133205785, 12.869500383197357, 8.198491859266301, 9.126446518990841, 8.842409464608494, 9.386062024980053, 6.5487939576272804, 12.870060560208461, 8.55166679928873, 7.349618957895044, 16.44127699261698, 9.801742117841222, 8.178429255263799, 9.010502713803481, 8.629907850704752, 9.51897498668676, 9.693104736043775, 8.492951152974834, 10.562769993756223, 7.05344723929231, 7.00204398863723, 9.639628821086191, 8.163686866373416, 8.092652093797048, 8.499333299231205, 9.77103796035867, 12.069450587607312, 
7.610739757165238, 6.462582418006967, 8.762345812630295, 7.23830940712898, 7.836516945259697, 7.850891351777004, 8.651370362895152, 7.846079734368595, 5.590509258660489, 8.464820760806163, 9.590191551836199, 8.007623172552808, 8.98852563509547, 7.465163418995151, 9.000187278737076, 9.309331671788964, 10.848476889147218, 10.251196108002167, 10.730763648271184, 6.948891999549921, 11.513723180442248, 6.561250914709127, 8.01365423201725, 11.416863523154293, 10.197173077338093, 9.18078036599244, 8.43928921481611, 9.647627381367862, 10.282967156656275, 7.462597247340954, 6.606484021022375, 8.906424693863553, 9.729778755284839, 9.715512355668755, 8.248361935106342, 11.095987570237671, 9.905295254228086, 7.062665734310967, 10.210606771766201, 8.893116395322934, 9.784549832640428, 7.642579110635033, 7.390482811679149, 9.805399049017291, 8.287897240443316, 9.127738931994385, 8.715971847618126, 12.200338063282677, 7.921283754575972, 10.580766471236235, 7.14116652543785, 7.122673985144239, 9.20584836605196, 6.631598956365938, 7.504940829521654, 9.024155635326087, 9.120433264971544, 11.788116918526724, 8.436198596617677, 6.841406852637755, 7.917541317226638, 8.967008781145859, 8.101289961122422, 8.493785547965608, 7.570759463158343, 9.23757021605186],\n [17.62468016949225, 17.792438583890053, 17.838038880100303, 17.360011410521626, 16.647677355797065, 16.582675736986584, 16.38485055301381, 15.7174842525365, 15.490590516811501, 15.352643944389436, 14.92893205299697, 14.56851661025037, 14.511536375111792, 14.628950105430702, 13.224883867875098, 13.854294300958472, 13.422023035306976, 12.284025049740396, 12.921328143239315, 13.184500173755039, 11.994004063793994, 12.922609214835617, 11.656847901033, 12.045552095386036, 10.239497407328853, 10.625018891164016, 11.679340002304818, 11.340329574831342, 9.4137151337736, 9.46199986176211, 8.29244437961657, 10.46692418314566, 11.145497262250817, 8.970813901450436, 11.537673655092243, 10.254133609356499, 9.228345926435182, 
9.397130770337457, 11.136506861428513, 6.888429608904613, 8.079590278052613, 7.467989318495502, 10.714439544929968, 7.873329181214508, 8.659942937902782, 7.262812557105666, 7.23815770917138, 9.505493089022027, 7.622265313342531, 9.375747284879402, 8.543870279501094, 7.52421969953532, 7.657178207990818, 8.805004831607397, 8.891523378403335, 6.539231067403415, 9.836763322546467, 9.64918692893352, 8.06884015593062, 8.979826889691928, 9.672649717462255, 9.53238729460328, 7.304190225780084, 7.272106462770803, 7.787436103105838, 9.442138735945289, 8.903954731557388, 12.021805721262274, 7.78371822451657, 6.46142858406304, 10.260291283912835, 10.415961355957892, 8.24171104911391, 8.9226908506637, 7.978470519827774, 10.374408253258423, 10.115940919318756, 7.818019704787878, 6.860165800286143, 7.103852645936109, 7.062447417354729, 6.686701430677538, 6.761032757867685, 7.316178317228426, 8.823905002800142, 8.257086645355464, 8.492063526293874, 7.673751479492983, 8.40544279237507, 9.156239601700973, 5.3562186474106195, 7.42344721987932, 8.190270734708387, 10.715491296562725, 12.950639052824735, 8.659058860825702, 10.487748696215348, 8.373035434857053, 9.597169187092515, 8.339344712082248, 6.861051102229722, 8.037696938399586, 6.554466971336298, 6.788687707043678, 10.303061668679328, 7.939353753405397, 7.82764990599258, 6.892496865039029, 10.304612303964523, 7.302702520932945, 9.613426006277507, 6.061897833691992, 9.023305269361392, 5.4296929323141665, 8.967793122402107, 6.891621686906863, 8.49315622278125, 12.355338678026117, 8.909757955179726, 8.650829667288185, 6.052344550817281, 8.802399497981455, 5.170301281871506, 9.17986202222602, 9.439363903451572, 6.891974088626554, 10.832766817908803, 8.431104420259686, 7.159789178018122, 10.863249484227786, 7.2388141953994305, 8.128486660128706, 8.602487904950278, 7.801372398907806, 7.406013344098132, 6.24742158307956, 6.903713710018895, 7.032206022233541, 7.7703901681153775, 5.425460960450408, 14.023599126532005, 9.762440105851383, 
9.788887742917831, 12.963735026376947, 9.437086121652701, 7.343225221177035, 9.451263404145537, 7.415475200331185, 5.63349503311597, 8.650636831883514, 10.1665660329664, 5.67181920348414, 9.080043789649764, 6.668638185751733, 6.376186970774991, 6.687202852319816, 6.636853099086844, 8.798490025430514, 7.161251785319214, 9.259887585614685, 8.91175543129141, 11.074919319887947, 8.330048827520601, 12.315792422280897, 8.628067236340287, 11.355416991569383, 6.909074209178678, 6.474581165684212, 5.525205757587205, 8.045967505261881, 8.23581285901292, 10.652722685352613, 7.173869709973668, 7.7069836034934545, 8.466667257144021, 8.979744504471867, 6.0717755693769115, 6.499622937004969, 10.064222390225854, 9.916834487152457, 6.636352993056183, 6.688103233068427, 9.4526237077505, 7.558827508769805, 7.572180237648968, 9.390118837751434, 11.474947875063155, 8.913215122749829, 6.28299499666229, 8.829952374193276, 8.961975646338047, 8.129179369525652, 10.939071109739311, 8.098020239057414, 12.736295556918169, 8.008065564511718, 8.385498293650762, 10.646271748965379, 7.945534549848988, 8.418372393730438],\n [16.985325693507114, 16.686421247177, 16.60664399162121, 16.363606536374924, 16.389725301775535, 15.532585242237792, 15.089783860416986, 14.964299179846586, 15.528321491050946, 15.396405824036188, 15.078057521100803, 13.895597709684854, 15.277467773775363, 15.253165328423163, 15.194306590677305, 13.985619278908498, 14.099605388418269, 14.23146192109241, 14.263253495959049, 14.067545389408268, 13.84183770273229, 13.139073332393481, 13.756079105761025, 13.332123968887183, 13.01753883655313, 12.68316712642569, 12.404637922192933, 12.110180574072377, 13.468399034150414, 12.948180080156112, 11.441852781461728, 11.492470527686576, 11.597920344904654, 11.293283445123588, 10.630150012662137, 10.481145666757946, 12.108997400744574, 10.600948695820303, 10.183592917521258, 9.020199752337817, 10.078771965203709, 8.666603794857238, 8.892053924546142, 12.60174092516368, 9.015991949189692, 
8.594242089913909, 9.53040698506372, 8.815475838625472, 8.55092117144595, 8.125894809113923, 10.038419096015753, 9.909179940055504, 7.5720155909831055, 9.141014448037907, 9.21637359275205, 9.14196926349645, 9.169235118275482, 11.566470540384364, 8.00830805130341, 8.20632529420909, 9.424649752140953, 7.793715326945735, 8.017191203168933, 7.699105983522891, 8.079197887609604, 8.914645852323762, 9.375314701247403, 7.295548345475449, 6.703816697533753, 8.281930945462957, 7.498499156906735, 6.7773264801168, 7.362908334706134, 6.411800651823254, 9.180573177079072, 8.049842641319767, 8.251534879692139, 7.309601730764083, 8.142073314032178, 7.9043922900603105, 8.319227887520363, 8.837048249933705, 7.317279353931614, 7.132462592165005, 6.561302621854409, 8.18568908688175, 7.693552508584421, 8.918260604450571, 7.526265025745191, 8.74304463063513, 6.95806208066376, 7.688505974389126, 7.877970938625335, 6.868282819801885, 9.196637940461153, 7.1085505010089385, 6.456520678168566, 7.887582694981859, 7.209466206381627, 7.018692584126776, 6.70550579175984, 7.628646576531721, 7.070766270872706, 9.028499395068746, 7.305377489790678, 7.972484586016085, 8.15393658824796, 8.152696988674954, 8.569655766949126, 9.091620444186795, 7.467715226582069, 8.5835426581949, 7.594318070739007, 5.831039855855192, 8.421468549598357, 7.263476192795636, 8.236163003909972, 8.719376370858475, 6.188632615364697, 7.245097817835268, 7.375901167883419, 9.720884356195631, 6.626520696526847, 8.14309766351156, 7.830646962851051, 8.385058458564025, 7.276032373966876, 8.573778467605601, 6.7872131107781275, 6.519312116528838, 8.16679003844353, 7.138069016219151, 8.643212907637379, 7.810024594165573, 7.083796030228225, 7.374582800654189, 8.184980724572792, 7.916918931492604, 7.866344753919769, 9.005140522275756, 7.622498632602993, 8.198517119700385, 7.464843702329998, 7.250742153248974, 9.590680647016711, 8.391383372266443, 8.764379769748494, 7.299733437571678, 7.306139740583365, 6.676596443477916, 
9.425020926885653, 7.9356697192471035, 7.35414817899485, 8.04188524099539, 6.742571448790994, 7.866279770868147, 6.548047697228785, 9.766661405491915, 7.659160857162992, 6.379533868308403, 8.308770095053541, 7.4867398561169605, 9.117734704780226, 7.723077592179748, 7.798572623440491, 6.472577796779722, 8.914433191382257, 8.308903473051807, 6.171786070449324, 6.707592984686594, 6.372191274640625, 7.368461099253531, 8.769536912463337, 8.581933371617868, 9.465414011833014, 8.389042014485675, 6.558472464040398, 8.747934584649272, 7.8668440904093675, 8.1235941160348, 9.336105606779242, 7.878693229104847, 7.564484621732083, 5.721196438913833, 10.364000581027605, 9.122705140976514, 7.824850536311662, 8.50747547183777, 8.967260254831217, 8.711110107811225, 7.754576742306416, 8.771903955630224, 8.837752511594896, 6.631696002149619, 6.171294364768627, 6.826267855440247, 9.46372545352008, 7.2510569412635135, 6.680233659296568, 6.684426334164904]])\nloss_histories_QCNN_FASHION_SO4, loss_histories_QCNN_MNIST_SU4, loss_histories_QCNN_FASHION_SU4, loss_histories_QCNN_MNIST_SO4 = loss_histories_QCNN_FASHION_SO4 / 25, loss_histories_QCNN_MNIST_SU4 / 25 , loss_histories_QCNN_FASHION_SU4 / 25, loss_histories_QCNN_MNIST_SO4 / 25\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\nn = 1\ndef plot_loss_history(Encodings, datasets, params):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "Encodings", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "Encodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\nn = 1\ndef plot_loss_history(Encodings, datasets, params):\n for i in range(len(Encodings)):\n Encoding = 
Encodings[i]\n if datasets == 'mnist':\n if params == 'large':\n loss_history_CNN = loss_histories_CNN_MNIST_3L[i][::n]\n loss_history_QCNN = loss_histories_QCNN_MNIST_SU4[i][::n]\n elif params == 'small':", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "n", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "n = 1\ndef plot_loss_history(Encodings, datasets, params):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n if params == 'large':\n loss_history_CNN = loss_histories_CNN_MNIST_3L[i][::n]\n loss_history_QCNN = loss_histories_QCNN_MNIST_SU4[i][::n]\n elif params == 'small':\n loss_history_CNN = loss_histories_CNN_MNIST_2L[i][::n]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "datasets", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "datasets = 'fashion'\nparams = 'small'\nplot_loss_history(Encodings, datasets, params)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "params", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "peekOfCode": "params = 'small'\nplot_loss_history(Encodings, datasets, params)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history", + "documentation": {} + }, + { + "label": "mean_std", 
+ "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "def mean_std(x):\n x1, x2, x3, x4, x5 = x[0], x[1], x[2], x[3], x[4]\n mean, std = [], []\n for i in range(200):\n group = np.array([x2[i], x2[i], x3[i], x4[i], x5[i]])\n mean.append(group.mean())\n std.append(group.std())\n return np.array([mean]), np.array([std])\nCNN_pca8_MNIST = np.array([[1.0395441055297852, 0.9040911197662354, 0.7967270612716675, 0.8275858163833618, 0.7907562255859375, 0.7990762591362, 0.721591591835022, 0.7166886329650879, 0.7750990390777588, 0.7900590300559998, 0.8035131692886353, 0.6731948256492615, 0.7240055203437805, 0.7377450466156006, 0.734597384929657, 0.7461295127868652, 0.6596748232841492, 0.73154616355896, 0.6872416138648987, 0.7387175559997559, 0.7107906341552734, 0.7203117609024048, 0.7071639895439148, 0.7054979801177979, 0.7488474249839783, 0.7188284993171692, 0.6863250136375427, 0.6860948801040649, 0.6859638094902039, 0.725064754486084, 0.7039306163787842, 0.7188284397125244, 0.6937084197998047, 0.6998209953308105, 0.6812995076179504, 0.6800709366798401, 0.7010917067527771, 0.6932376027107239, 0.7217830419540405, 0.7125279307365417, 0.7091555595397949, 0.6889660358428955, 0.675688624382019, 0.7075445055961609, 0.7049722075462341, 0.7092664241790771, 0.6923767924308777, 0.7058832049369812, 0.6897933483123779, 0.6720492839813232, 0.6966054439544678, 0.6957329511642456, 0.7001269459724426, 0.6889373064041138, 0.7052937150001526, 0.6978541612625122, 0.6966645121574402, 0.692691445350647, 0.6927309632301331, 0.6925039887428284, 0.6975433826446533, 0.6925039887428284, 0.6897934079170227, 0.695732831954956, 0.6975435614585876, 0.6907206177711487, 0.6872100234031677, 0.7058829665184021, 0.6975435614585876, 0.6907206177711487, 0.6949601769447327, 0.7014207243919373, 0.6917478442192078, 
0.6925039887428284, 0.6975435614585876, 0.7032042741775513, 0.6976479291915894, 0.6939749717712402, 0.6922519207000732, 0.6940122842788696, 0.6934850811958313, 0.6934249401092529, 0.6947183012962341, 0.69305819272995, 0.6937144994735718, 0.6932415962219238, 0.6928685903549194, 0.7010954022407532, 0.6904019117355347, 0.6883457899093628, 0.6946393847465515, 0.6855125427246094, 0.690205991268158, 0.6893329620361328, 0.6732932925224304, 0.6786530017852783, 0.6818959712982178, 0.705488383769989, 0.6873126029968262, 0.6930552124977112, 0.7035624980926514, 0.692503035068512, 0.688559889793396, 0.6925029754638672, 0.6694767475128174, 0.6786531209945679, 0.6818959712982178, 0.680621862411499, 0.6864635944366455, 0.7252711653709412, 0.6654136776924133, 0.7018212676048279, 0.7075135707855225, 0.6681886911392212, 0.6934803128242493, 0.6806220412254333, 0.7075135111808777, 0.6744053363800049, 0.6724303364753723, 0.7174545526504517, 0.7145301699638367, 0.6868386268615723, 0.6934803128242493, 0.6992718577384949, 0.6927291750907898, 0.6971196532249451, 0.688559889793396, 0.692503035068512, 0.7152763605117798, 0.7044160962104797, 0.6946394443511963, 0.6883457899093628, 0.6990726590156555, 0.6883458495140076, 0.6924226880073547, 0.6863162517547607, 0.6961930990219116, 0.6772663593292236, 0.6809266805648804, 0.6878864169120789, 0.6873126029968262, 0.6992717981338501, 0.6927292346954346, 0.692503035068512, 0.677109956741333, 0.683269739151001, 0.6818959712982178, 0.6992717981338501, 0.6873126029968262, 0.7054884433746338, 0.6710627675056458, 0.6930552124977112, 0.6927291750907898, 0.6971197724342346, 0.6809266805648804, 0.683269739151001, 0.6927292346954346, 0.6878863573074341, 0.671062707901001, 0.7117050290107727, 0.6656460762023926, 0.6806219220161438, 0.6724303364753723, 0.7018214464187622, 0.6864635944366455, 0.6861881017684937, 0.7549446225166321, 0.7174546718597412, 0.6934803128242493, 0.6930550932884216, 0.6818959712982178, 0.6744052767753601, 0.6794469356536865, 
0.7096381187438965, 0.6794469952583313, 0.717454731464386, 0.7004969716072083, 0.6744052171707153, 0.6654137372970581, 0.6861881017684937, 0.694628119468689, 0.7018214464187622, 0.7145301699638367, 0.7365714907646179, 0.7143958210945129, 0.6878863573074341, 0.671062707901001, 0.7117050290107727, 0.6818959712982178, 0.6744053363800049, 0.6864635944366455, 0.6861881017684937, 0.686011552810669, 0.7141836285591125, 0.6601616740226746, 0.7141836285591125, 0.7204780578613281, 0.6940048336982727, 0.6864635944366455, 0.6627382040023804],\n [0.7148541212081909, 0.6917383074760437, 0.6618366241455078, 0.6686064004898071, 0.6984639763832092, 0.6672256588935852, 0.6541621685028076, 0.6492307782173157, 0.6543554663658142, 0.6441015601158142, 0.631781816482544, 0.6582762002944946, 0.6385629773139954, 0.6361024975776672, 0.6137480139732361, 0.5913801789283752, 0.6222887635231018, 0.6243156790733337, 0.5995888710021973, 0.6076188683509827, 0.568179726600647, 0.5291875004768372, 0.5926321744918823, 0.4744355380535126, 0.5410154461860657, 0.5646359920501709, 0.46340087056159973, 0.5635284185409546, 0.501291036605835, 0.6057778000831604, 0.4884985387325287, 0.5519206523895264, 0.4786244332790375, 0.5201334953308105, 0.4855765402317047, 0.536720871925354, 0.4441765546798706, 0.49751976132392883, 0.48349303007125854, 0.46924060583114624, 0.4291042685508728, 0.42085304856300354, 0.3471009433269501, 0.41016048192977905, 0.373371422290802, 0.42835330963134766, 0.3388358950614929, 0.3145340383052826, 0.27054938673973083, 0.32470786571502686, 0.36382415890693665, 0.36375123262405396, 0.3186143636703491, 0.2888978123664856, 0.2745971083641052, 0.3146398067474365, 0.2334534078836441, 0.23724083602428436, 0.22968396544456482, 0.2925437092781067, 0.21827153861522675, 0.25587406754493713, 0.167545884847641, 0.21072761714458466, 0.20445477962493896, 0.1787356734275818, 0.2026267945766449, 0.17790751159191132, 0.18309608101844788, 0.11933749914169312, 0.1345934420824051, 0.2876547873020172, 
0.17307142913341522, 0.16716937720775604, 0.1456492394208908, 0.19155098497867584, 0.13542434573173523, 0.1446809470653534, 0.10407865792512894, 0.1343240588903427, 0.12431978434324265, 0.1017170250415802, 0.06897597759962082, 0.12904725968837738, 0.1577526032924652, 0.07371388375759125, 0.059341367334127426, 0.04938346520066261, 0.061827197670936584, 0.04146246984601021, 0.042282868176698685, 0.05540478602051735, 0.1542506217956543, 0.027186093851923943, 0.02657618746161461, 0.061568886041641235, 0.08250877261161804, 0.03680648282170296, 0.09798908978700638, 0.03346874564886093, 0.020387882366776466, 0.1632000207901001, 0.025366079062223434, 0.08644671738147736, 0.051234256476163864, 0.021099448204040527, 0.03424559906125069, 0.11417791992425919, 0.11327710747718811, 0.28605711460113525, 0.15895381569862366, 0.11990384757518768, 0.04029577597975731, 0.013613909482955933, 0.1630869358778, 0.02302442491054535, 0.12596093118190765, 0.06723032146692276, 0.04851767420768738, 0.015912547707557678, 0.026426436379551888, 0.024496333673596382, 0.022285975515842438, 0.051586855202913284, 0.14623317122459412, 0.1105586513876915, 0.06907850503921509, 0.06893545389175415, 0.0324433408677578, 0.16024038195610046, 0.23663575947284698, 0.015771381556987762, 0.03755446523427963, 0.07496296614408493, 0.6224384307861328, 0.07651115208864212, 0.0333198718726635, 0.13880574703216553, 0.3186022639274597, 0.03964480012655258, 0.03716016188263893, 0.03814265877008438, 0.017961381003260612, 0.03360458090901375, 0.07304424047470093, 0.026338092982769012, 0.02846469357609749, 0.15662965178489685, 0.026965728029608727, 0.0895264595746994, 0.07516946643590927, 0.022477271035313606, 0.013148901052772999, 0.2060529887676239, 0.20922300219535828, 0.02548394165933132, 0.020299972966313362, 0.2752262055873871, 0.09914732724428177, 0.007716770749539137, 0.024892054498195648, 0.3207026720046997, 0.006016118917614222, 0.022090015932917595, 0.00549943745136261, 0.006164054851979017, 
0.08513738960027695, 0.04149516671895981, 0.02847282961010933, 0.21248625218868256, 0.008204855024814606, 0.05347968637943268, 0.06895387917757034, 0.0024248012341558933, 0.010131459683179855, 0.05448565632104874, 0.2561221718788147, 0.06306648999452591, 0.030811958014965057, 0.039673637598752975, 0.006389930844306946, 0.011076364666223526, 0.12791746854782104, 0.26567888259887695, 0.034085627645254135, 0.3349073827266693, 0.04625650867819786, 0.10651706904172897, 0.004379840102046728, 0.011859510093927383, 0.7038277983665466, 0.44814741611480713, 0.03070094995200634, 0.04700180143117905, 0.2603203058242798, 0.11481533944606781, 0.058733683079481125, 0.2815445363521576, 0.0029853840824216604, 0.24449318647384644],", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "plot_loss_history", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "def plot_loss_history(Encodings, datasets):\n for Encoding in Encodings:\n if Encoding == 'PCA8':\n if datasets == 'MNIST':\n loss_history_QCNN_mean = loss_history_QCNN_pca8_MNIST_mean[::n]\n loss_history_QCNN_std = loss_history_QCNN_pca8_MNIST_std[::n]\n loss_history_CNN_mean = loss_history_CNN_pca8_MNIST_mean[::n]\n loss_history_CNN_std = loss_history_CNN_pca8_MNIST_std[::n]\n else:\n loss_history_QCNN_mean = loss_history_QCNN_pca8_FASHION_mean[::n]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_pca8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_pca8_MNIST = 
np.array([[1.0395441055297852, 0.9040911197662354, 0.7967270612716675, 0.8275858163833618, 0.7907562255859375, 0.7990762591362, 0.721591591835022, 0.7166886329650879, 0.7750990390777588, 0.7900590300559998, 0.8035131692886353, 0.6731948256492615, 0.7240055203437805, 0.7377450466156006, 0.734597384929657, 0.7461295127868652, 0.6596748232841492, 0.73154616355896, 0.6872416138648987, 0.7387175559997559, 0.7107906341552734, 0.7203117609024048, 0.7071639895439148, 0.7054979801177979, 0.7488474249839783, 0.7188284993171692, 0.6863250136375427, 0.6860948801040649, 0.6859638094902039, 0.725064754486084, 0.7039306163787842, 0.7188284397125244, 0.6937084197998047, 0.6998209953308105, 0.6812995076179504, 0.6800709366798401, 0.7010917067527771, 0.6932376027107239, 0.7217830419540405, 0.7125279307365417, 0.7091555595397949, 0.6889660358428955, 0.675688624382019, 0.7075445055961609, 0.7049722075462341, 0.7092664241790771, 0.6923767924308777, 0.7058832049369812, 0.6897933483123779, 0.6720492839813232, 0.6966054439544678, 0.6957329511642456, 0.7001269459724426, 0.6889373064041138, 0.7052937150001526, 0.6978541612625122, 0.6966645121574402, 0.692691445350647, 0.6927309632301331, 0.6925039887428284, 0.6975433826446533, 0.6925039887428284, 0.6897934079170227, 0.695732831954956, 0.6975435614585876, 0.6907206177711487, 0.6872100234031677, 0.7058829665184021, 0.6975435614585876, 0.6907206177711487, 0.6949601769447327, 0.7014207243919373, 0.6917478442192078, 0.6925039887428284, 0.6975435614585876, 0.7032042741775513, 0.6976479291915894, 0.6939749717712402, 0.6922519207000732, 0.6940122842788696, 0.6934850811958313, 0.6934249401092529, 0.6947183012962341, 0.69305819272995, 0.6937144994735718, 0.6932415962219238, 0.6928685903549194, 0.7010954022407532, 0.6904019117355347, 0.6883457899093628, 0.6946393847465515, 0.6855125427246094, 0.690205991268158, 0.6893329620361328, 0.6732932925224304, 0.6786530017852783, 0.6818959712982178, 0.705488383769989, 0.6873126029968262, 0.6930552124977112, 
0.7035624980926514, 0.692503035068512, 0.688559889793396, 0.6925029754638672, 0.6694767475128174, 0.6786531209945679, 0.6818959712982178, 0.680621862411499, 0.6864635944366455, 0.7252711653709412, 0.6654136776924133, 0.7018212676048279, 0.7075135707855225, 0.6681886911392212, 0.6934803128242493, 0.6806220412254333, 0.7075135111808777, 0.6744053363800049, 0.6724303364753723, 0.7174545526504517, 0.7145301699638367, 0.6868386268615723, 0.6934803128242493, 0.6992718577384949, 0.6927291750907898, 0.6971196532249451, 0.688559889793396, 0.692503035068512, 0.7152763605117798, 0.7044160962104797, 0.6946394443511963, 0.6883457899093628, 0.6990726590156555, 0.6883458495140076, 0.6924226880073547, 0.6863162517547607, 0.6961930990219116, 0.6772663593292236, 0.6809266805648804, 0.6878864169120789, 0.6873126029968262, 0.6992717981338501, 0.6927292346954346, 0.692503035068512, 0.677109956741333, 0.683269739151001, 0.6818959712982178, 0.6992717981338501, 0.6873126029968262, 0.7054884433746338, 0.6710627675056458, 0.6930552124977112, 0.6927291750907898, 0.6971197724342346, 0.6809266805648804, 0.683269739151001, 0.6927292346954346, 0.6878863573074341, 0.671062707901001, 0.7117050290107727, 0.6656460762023926, 0.6806219220161438, 0.6724303364753723, 0.7018214464187622, 0.6864635944366455, 0.6861881017684937, 0.7549446225166321, 0.7174546718597412, 0.6934803128242493, 0.6930550932884216, 0.6818959712982178, 0.6744052767753601, 0.6794469356536865, 0.7096381187438965, 0.6794469952583313, 0.717454731464386, 0.7004969716072083, 0.6744052171707153, 0.6654137372970581, 0.6861881017684937, 0.694628119468689, 0.7018214464187622, 0.7145301699638367, 0.7365714907646179, 0.7143958210945129, 0.6878863573074341, 0.671062707901001, 0.7117050290107727, 0.6818959712982178, 0.6744053363800049, 0.6864635944366455, 0.6861881017684937, 0.686011552810669, 0.7141836285591125, 0.6601616740226746, 0.7141836285591125, 0.7204780578613281, 0.6940048336982727, 0.6864635944366455, 0.6627382040023804],\n 
[0.7148541212081909, 0.6917383074760437, 0.6618366241455078, 0.6686064004898071, 0.6984639763832092, 0.6672256588935852, 0.6541621685028076, 0.6492307782173157, 0.6543554663658142, 0.6441015601158142, 0.631781816482544, 0.6582762002944946, 0.6385629773139954, 0.6361024975776672, 0.6137480139732361, 0.5913801789283752, 0.6222887635231018, 0.6243156790733337, 0.5995888710021973, 0.6076188683509827, 0.568179726600647, 0.5291875004768372, 0.5926321744918823, 0.4744355380535126, 0.5410154461860657, 0.5646359920501709, 0.46340087056159973, 0.5635284185409546, 0.501291036605835, 0.6057778000831604, 0.4884985387325287, 0.5519206523895264, 0.4786244332790375, 0.5201334953308105, 0.4855765402317047, 0.536720871925354, 0.4441765546798706, 0.49751976132392883, 0.48349303007125854, 0.46924060583114624, 0.4291042685508728, 0.42085304856300354, 0.3471009433269501, 0.41016048192977905, 0.373371422290802, 0.42835330963134766, 0.3388358950614929, 0.3145340383052826, 0.27054938673973083, 0.32470786571502686, 0.36382415890693665, 0.36375123262405396, 0.3186143636703491, 0.2888978123664856, 0.2745971083641052, 0.3146398067474365, 0.2334534078836441, 0.23724083602428436, 0.22968396544456482, 0.2925437092781067, 0.21827153861522675, 0.25587406754493713, 0.167545884847641, 0.21072761714458466, 0.20445477962493896, 0.1787356734275818, 0.2026267945766449, 0.17790751159191132, 0.18309608101844788, 0.11933749914169312, 0.1345934420824051, 0.2876547873020172, 0.17307142913341522, 0.16716937720775604, 0.1456492394208908, 0.19155098497867584, 0.13542434573173523, 0.1446809470653534, 0.10407865792512894, 0.1343240588903427, 0.12431978434324265, 0.1017170250415802, 0.06897597759962082, 0.12904725968837738, 0.1577526032924652, 0.07371388375759125, 0.059341367334127426, 0.04938346520066261, 0.061827197670936584, 0.04146246984601021, 0.042282868176698685, 0.05540478602051735, 0.1542506217956543, 0.027186093851923943, 0.02657618746161461, 0.061568886041641235, 0.08250877261161804, 0.03680648282170296, 
0.09798908978700638, 0.03346874564886093, 0.020387882366776466, 0.1632000207901001, 0.025366079062223434, 0.08644671738147736, 0.051234256476163864, 0.021099448204040527, 0.03424559906125069, 0.11417791992425919, 0.11327710747718811, 0.28605711460113525, 0.15895381569862366, 0.11990384757518768, 0.04029577597975731, 0.013613909482955933, 0.1630869358778, 0.02302442491054535, 0.12596093118190765, 0.06723032146692276, 0.04851767420768738, 0.015912547707557678, 0.026426436379551888, 0.024496333673596382, 0.022285975515842438, 0.051586855202913284, 0.14623317122459412, 0.1105586513876915, 0.06907850503921509, 0.06893545389175415, 0.0324433408677578, 0.16024038195610046, 0.23663575947284698, 0.015771381556987762, 0.03755446523427963, 0.07496296614408493, 0.6224384307861328, 0.07651115208864212, 0.0333198718726635, 0.13880574703216553, 0.3186022639274597, 0.03964480012655258, 0.03716016188263893, 0.03814265877008438, 0.017961381003260612, 0.03360458090901375, 0.07304424047470093, 0.026338092982769012, 0.02846469357609749, 0.15662965178489685, 0.026965728029608727, 0.0895264595746994, 0.07516946643590927, 0.022477271035313606, 0.013148901052772999, 0.2060529887676239, 0.20922300219535828, 0.02548394165933132, 0.020299972966313362, 0.2752262055873871, 0.09914732724428177, 0.007716770749539137, 0.024892054498195648, 0.3207026720046997, 0.006016118917614222, 0.022090015932917595, 0.00549943745136261, 0.006164054851979017, 0.08513738960027695, 0.04149516671895981, 0.02847282961010933, 0.21248625218868256, 0.008204855024814606, 0.05347968637943268, 0.06895387917757034, 0.0024248012341558933, 0.010131459683179855, 0.05448565632104874, 0.2561221718788147, 0.06306648999452591, 0.030811958014965057, 0.039673637598752975, 0.006389930844306946, 0.011076364666223526, 0.12791746854782104, 0.26567888259887695, 0.034085627645254135, 0.3349073827266693, 0.04625650867819786, 0.10651706904172897, 0.004379840102046728, 0.011859510093927383, 0.7038277983665466, 0.44814741611480713, 
0.03070094995200634, 0.04700180143117905, 0.2603203058242798, 0.11481533944606781, 0.058733683079481125, 0.2815445363521576, 0.0029853840824216604, 0.24449318647384644],\n [0.9105777740478516, 0.7933387756347656, 0.8463192582130432, 0.7918081879615784, 0.7697640061378479, 0.8096768856048584, 0.7466207146644592, 0.8237097859382629, 0.7049685120582581, 0.7438586950302124, 0.6250369548797607, 0.7111080884933472, 0.6680461168289185, 0.7448657751083374, 0.6341871023178101, 0.7077524662017822, 0.6534257531166077, 0.6880569458007812, 0.6377463936805725, 0.7155838012695312, 0.6640317440032959, 0.6274323463439941, 0.6414721608161926, 0.6188722848892212, 0.6524910926818848, 0.6060046553611755, 0.5999923944473267, 0.6282068490982056, 0.5909672379493713, 0.5768530964851379, 0.5694571733474731, 0.5613191723823547, 0.5568458437919617, 0.5302345156669617, 0.5246444940567017, 0.47716397047042847, 0.5153546929359436, 0.4299534857273102, 0.469578355550766, 0.4727879464626312, 0.4463765621185303, 0.46711230278015137, 0.517929196357727, 0.4598591923713684, 0.49111396074295044, 0.39144131541252136, 0.3897601068019867, 0.32509711384773254, 0.4019857347011566, 0.3191187381744385, 0.3483905792236328, 0.3704746961593628, 0.28119757771492004, 0.2912859618663788, 0.37586793303489685, 0.34500065445899963, 0.33489009737968445, 0.22955617308616638, 0.2609083950519562, 0.2515026330947876, 0.31677237153053284, 0.2651301622390747, 0.2738949656486511, 0.2660989761352539, 0.23752112686634064, 0.2194589376449585, 0.20843705534934998, 0.21089714765548706, 0.21840155124664307, 0.155356764793396, 0.11363667249679565, 0.1785474568605423, 0.296301007270813, 0.16268691420555115, 0.16795799136161804, 0.13782688975334167, 0.19445651769638062, 0.18362899124622345, 0.17429359257221222, 0.11898397654294968, 0.11223026365041733, 0.14676909148693085, 0.09017810225486755, 0.08577976375818253, 0.2746151387691498, 0.1550959348678589, 0.13420848548412323, 0.20971326529979706, 0.2705133855342865, 0.11943186819553375, 
0.2460930198431015, 0.18662546575069427, 0.14037273824214935, 0.1729680597782135, 0.07943809777498245, 0.12036006897687912, 0.09650005400180817, 0.13944436609745026, 0.0531146302819252, 0.10643625259399414, 0.0952540710568428, 0.06654691696166992, 0.07406456023454666, 0.10994937270879745, 0.15316030383110046, 0.08061729371547699, 0.17337092757225037, 0.07077796757221222, 0.08082617074251175, 0.13162891566753387, 0.105400949716568, 0.2131708860397339, 0.059671465307474136, 0.06645253300666809, 0.0316561684012413, 0.10006393492221832, 0.05826093629002571, 0.05543264001607895, 0.18798263370990753, 0.3568416237831116, 0.12922847270965576, 0.035779643803834915, 0.14526259899139404, 0.17341206967830658, 0.15513435006141663, 0.15774483978748322, 0.036930616945028305, 0.06614892929792404, 0.42890435457229614, 0.044646792113780975, 0.08113400638103485, 0.1220894455909729, 0.0700322613120079, 0.07644341140985489, 0.06841076165437698, 0.0467863529920578, 0.2685765326023102, 0.19556483626365662, 0.031755395233631134, 0.06840086728334427, 0.024615561589598656, 0.06184867396950722, 0.03523719310760498, 0.03634818270802498, 0.17069339752197266, 0.0898723304271698, 0.10902316868305206, 0.07381788641214371, 0.2124231457710266, 0.08898714184761047, 0.2835373282432556, 0.04782479628920555, 0.07181047648191452, 0.02646937593817711, 0.15210749208927155, 0.0316777266561985, 0.2190173715353012, 0.03188040107488632, 0.0657329335808754, 0.22030557692050934, 0.048764992505311966, 0.01582949608564377, 0.3480939567089081, 0.037865832448005676, 0.04794154688715935, 0.02150934375822544, 0.07883298397064209, 0.36726662516593933, 0.09953947365283966, 0.01883555017411709, 0.08093956857919693, 0.03289433941245079, 0.03686319291591644, 0.029118213802576065, 0.024932408705353737, 0.10363796353340149, 0.08204440027475357, 0.07387284934520721, 0.016122763976454735, 0.020007118582725525, 0.01712413877248764, 0.008530215360224247, 0.04839913547039032, 0.010972325690090656, 0.01936434768140316, 
0.062416922301054, 0.1528264731168747, 0.03210006654262543, 0.03199370205402374, 0.14649109542369843, 0.024310385808348656, 0.10801848769187927, 0.07369007915258408, 0.021832237020134926, 0.46786004304885864, 0.4072805643081665, 0.12573716044425964, 0.21120652556419373, 0.008376529440283775, 0.06598368287086487],\n [0.6903733015060425, 0.7009608745574951, 0.6856290698051453, 0.680499792098999, 0.6878620982170105, 0.6912380456924438, 0.7046892046928406, 0.6892927289009094, 0.6843065619468689, 0.6738888621330261, 0.6771201491355896, 0.7159295678138733, 0.700664222240448, 0.6754940748214722, 0.6951375603675842, 0.6794483065605164, 0.6830631494522095, 0.6754299402236938, 0.6793370842933655, 0.685249924659729, 0.670793354511261, 0.6789981722831726, 0.6728657484054565, 0.6682780385017395, 0.6674402356147766, 0.6694486141204834, 0.6722869277000427, 0.6582105159759521, 0.6440421342849731, 0.6491823792457581, 0.6260234117507935, 0.6528884172439575, 0.6232763528823853, 0.6721002459526062, 0.6289636492729187, 0.5918331742286682, 0.7033864855766296, 0.6339185237884521, 0.6588250994682312, 0.65814208984375, 0.6087459325790405, 0.6441171169281006, 0.6275798678398132, 0.6293297410011292, 0.6081010699272156, 0.6313093304634094, 0.6253479719161987, 0.5859405398368835, 0.5526226162910461, 0.5675443410873413, 0.6374230980873108, 0.6318557858467102, 0.6088757514953613, 0.5734941959381104, 0.5631988644599915, 0.5744077563285828, 0.5525397658348083, 0.53898024559021, 0.5810213088989258, 0.5604568719863892, 0.5983765125274658, 0.529935896396637, 0.5212002396583557, 0.5480972528457642, 0.5961273312568665, 0.511750340461731, 0.5543249249458313, 0.47167980670928955, 0.5529485940933228, 0.46573784947395325, 0.4373716115951538, 0.49984920024871826, 0.5104417204856873, 0.47760239243507385, 0.5419422388076782, 0.5201715230941772, 0.5408003330230713, 0.5562770366668701, 0.44464951753616333, 0.4915640652179718, 0.511140763759613, 0.5533414483070374, 0.564533531665802, 0.44824767112731934, 
0.5545175671577454, 0.4754532277584076, 0.5023362636566162, 0.44336003065109253, 0.43134212493896484, 0.5015333294868469, 0.430467426776886, 0.41022002696990967, 0.37658825516700745, 0.4636012315750122, 0.4122171401977539, 0.40090569853782654, 0.4751781225204468, 0.5089173316955566, 0.4864253103733063, 0.42060932517051697, 0.4453369379043579, 0.4455149173736572, 0.36464351415634155, 0.36796995997428894, 0.3798208236694336, 0.48244336247444153, 0.37801608443260193, 0.3555563986301422, 0.40065819025039673, 0.37217941880226135, 0.406179815530777, 0.4074755609035492, 0.32146933674812317, 0.345880925655365, 0.34395790100097656, 0.34425145387649536, 0.3556276261806488, 0.39746567606925964, 0.4400191903114319, 0.38703295588493347, 0.3225582540035248, 0.27072906494140625, 0.25422433018684387, 0.2962683439254761, 0.3172217309474945, 0.42179375886917114, 0.27199214696884155, 0.34931015968322754, 0.2691118121147156, 0.26660478115081787, 0.25914332270622253, 0.2727429270744324, 0.3199344575405121, 0.2222129851579666, 0.31681475043296814, 0.21025501191616058, 0.2560270130634308, 0.3182787001132965, 0.24565091729164124, 0.27624568343162537, 0.2841939330101013, 0.21906104683876038, 0.295320987701416, 0.27973631024360657, 0.16860458254814148, 0.2736617624759674, 0.2200712412595749, 0.28863945603370667, 0.28115972876548767, 0.2226688712835312, 0.2541147470474243, 0.18971645832061768, 0.23961296677589417, 0.19501592218875885, 0.17080029845237732, 0.18293321132659912, 0.2667332589626312, 0.18675312399864197, 0.20367150008678436, 0.40134796500205994, 0.22463224828243256, 0.2175285518169403, 0.32450851798057556, 0.2738719880580902, 0.22148656845092773, 0.16982124745845795, 0.353025883436203, 0.1996312141418457, 0.28466102480888367, 0.21140815317630768, 0.26543599367141724, 0.19926422834396362, 0.16624823212623596, 0.1786045879125595, 0.1413394808769226, 0.15884806215763092, 0.20119567215442657, 0.21787424385547638, 0.21151727437973022, 0.10772288590669632, 0.12995272874832153, 
0.2674959599971771, 0.13663990795612335, 0.1810697764158249, 0.21397095918655396, 0.15514032542705536, 0.2994544208049774, 0.22076086699962616, 0.13207988440990448, 0.22355210781097412, 0.23873157799243927, 0.13247328996658325, 0.12091957777738571, 0.23083831369876862, 0.201210618019104, 0.1394393891096115, 0.18561014533042908, 0.2678418457508087, 0.23880578577518463, 0.23782308399677277],\n [0.6950972676277161, 0.624514102935791, 0.583101212978363, 0.6993337869644165, 0.6476080417633057, 0.6364650130271912, 0.6012523174285889, 0.6161369681358337, 0.6181449890136719, 0.6342973113059998, 0.5804245471954346, 0.5431748032569885, 0.5727656483650208, 0.549979031085968, 0.5044635534286499, 0.512463390827179, 0.5102589726448059, 0.5334477424621582, 0.45759502053260803, 0.45609891414642334, 0.4384606182575226, 0.5343804955482483, 0.41797107458114624, 0.4342503845691681, 0.3890399634838104, 0.41044917702674866, 0.35132548213005066, 0.3810461759567261, 0.3425566554069519, 0.3171461820602417, 0.2762487828731537, 0.29926612973213196, 0.27376917004585266, 0.28830668330192566, 0.24913270771503448, 0.19938430190086365, 0.2352171540260315, 0.2669123411178589, 0.20059126615524292, 0.22783060371875763, 0.26497378945350647, 0.23277893662452698, 0.23508895933628082, 0.17620548605918884, 0.140126571059227, 0.17897853255271912, 0.17362214624881744, 0.10912153124809265, 0.20787329971790314, 0.09750537574291229, 0.24669703841209412, 0.09363225847482681, 0.08267596364021301, 0.10493816435337067, 0.19090412557125092, 0.09467125684022903, 0.09239176660776138, 0.05489746481180191, 0.08461172133684158, 0.19685931503772736, 0.18854761123657227, 0.05868925154209137, 0.05014041066169739, 0.15637528896331787, 0.06508491933345795, 0.06407226622104645, 0.035801321268081665, 0.16898909211158752, 0.24060499668121338, 0.2960735559463501, 0.11876606196165085, 0.11272123456001282, 0.2833302617073059, 0.17450375854969025, 0.11714029312133789, 0.066585473716259, 0.357699990272522, 0.053224291652441025, 
0.12500178813934326, 0.07176703214645386, 0.06332272291183472, 0.049026332795619965, 0.06790094077587128, 0.07368071377277374, 0.08223270624876022, 0.017215171828866005, 0.12109950929880142, 0.014817332848906517, 0.014553045853972435, 0.13761082291603088, 0.020390180870890617, 0.031047077849507332, 0.2506447732448578, 0.1129269227385521, 0.06206779554486275, 0.2958768308162689, 0.024755340069532394, 0.026094689965248108, 0.10279291123151779, 0.013493400067090988, 0.018021859228610992, 0.25203627347946167, 0.056582916527986526, 0.03349141776561737, 0.1758762001991272, 0.12884046137332916, 0.23406201601028442, 0.04947153478860855, 0.08455410599708557, 0.2010369449853897, 0.07859295606613159, 0.05398073047399521, 0.018245480954647064, 0.0462871678173542, 0.04117013141512871, 0.07244301587343216, 0.012532993219792843, 0.022867579013109207, 0.21418723464012146, 0.04215965420007706, 0.017704876139760017, 0.033693164587020874, 0.025307342410087585, 0.2049824297428131, 0.1619843691587448, 0.07375242561101913, 0.05191435664892197, 0.03824467211961746, 0.03036719746887684, 0.03482874482870102, 0.11714939028024673, 0.059163156896829605, 0.007508269976824522, 0.5880686044692993, 0.00375778297893703, 0.3006771504878998, 0.14734655618667603, 0.17704173922538757, 0.011697666719555855, 0.0760924369096756, 0.044274553656578064, 0.03366928920149803, 0.01301832590252161, 0.06542199850082397, 0.016044197604060173, 0.2634228467941284, 0.0016423964407294989, 0.07154283672571182, 0.0497405044734478, 0.014087897725403309, 0.2702277898788452, 0.0058559393510222435, 0.001999075058847666, 0.20934492349624634, 0.027578163892030716, 0.0185542069375515, 0.009298637509346008, 0.2260131686925888, 0.01682950370013714, 0.049877066165208817, 0.1795051246881485, 0.06075964495539665, 0.1485597938299179, 0.005703788250684738, 0.014600261114537716, 0.27203476428985596, 0.003919422160834074, 0.004767566919326782, 0.09969690442085266, 0.010243923403322697, 0.1011170893907547, 0.07468743622303009, 
0.03205803036689758, 0.008248867467045784, 0.00845771562308073, 0.2290336936712265, 0.004072263836860657, 0.09669051319360733, 0.08218424022197723, 0.13435952365398407, 0.02292836830019951, 0.030399208888411522, 0.012336748652160168, 0.010568873025476933, 0.08546885848045349, 0.012400435283780098, 0.16309873759746552, 0.017225008457899094, 0.07175344973802567, 0.02251896634697914, 0.020442284643650055, 0.0015783198177814484, 0.01836889237165451, 0.002289178315550089, 0.10199715942144394, 0.08479230850934982, 0.25068947672843933, 0.0004926148685626686, 0.17583408951759338, 0.0029860532376915216]])\nCNN_ae8_MNIST = np.array([[0.6952692270278931, 0.6954190135002136, 0.6935893297195435, 0.6936798691749573, 0.6925517916679382, 0.6879379153251648, 0.6905089020729065, 0.681292712688446, 0.6959484219551086, 0.6895558834075928, 0.6956443786621094, 0.689063310623169, 0.6808215975761414, 0.6917092204093933, 0.6780702471733093, 0.6866255402565002, 0.690582811832428, 0.6955596804618835, 0.6937610507011414, 0.6873553991317749, 0.6924179792404175, 0.6872708797454834, 0.6776237487792969, 0.690467119216919, 0.6931454539299011, 0.6814525723457336, 0.6873436570167542, 0.6808752417564392, 0.6824637055397034, 0.6889563202857971, 0.6789340972900391, 0.6761460304260254, 0.6845279932022095, 0.675183117389679, 0.660584568977356, 0.6687278151512146, 0.6910607814788818, 0.6525632739067078, 0.6704840660095215, 0.6578121185302734, 0.6599693298339844, 0.6949693560600281, 0.6427035331726074, 0.6697946786880493, 0.6576442718505859, 0.6947981119155884, 0.7222045063972473, 0.6798616051673889, 0.6535014510154724, 0.6712633371353149, 0.6631866693496704, 0.6535589098930359, 0.6592772603034973, 0.6435713171958923, 0.6538013219833374, 0.6575586795806885, 0.6395674347877502, 0.6808083057403564, 0.6622372269630432, 0.631817102432251, 0.6445865631103516, 0.6544660925865173, 0.6499393582344055, 0.6389854550361633, 0.6526691913604736, 0.6433974504470825, 0.6317873597145081, 0.6179752945899963, 
0.6405547857284546, 0.6444377899169922, 0.6398964524269104, 0.6609359979629517, 0.6097671389579773, 0.6156068444252014, 0.6116361618041992, 0.6082613468170166, 0.5778841972351074, 0.6008344888687134, 0.6064746379852295, 0.5954804420471191, 0.620853841304779, 0.610678493976593, 0.5571263432502747, 0.5849411487579346, 0.5651984810829163, 0.5866627097129822, 0.5654610991477966, 0.5963013172149658, 0.6225815415382385, 0.5773814916610718, 0.5964592695236206, 0.5686890482902527, 0.5940383672714233, 0.5925736427307129, 0.5829817652702332, 0.5169069170951843, 0.551339864730835, 0.5882797837257385, 0.5258330702781677, 0.5541460514068604, 0.5718514323234558, 0.5180520415306091, 0.5013167262077332, 0.5128101110458374, 0.47474464774131775, 0.5375312566757202, 0.4833562970161438, 0.5939033627510071, 0.5356352925300598, 0.5085546374320984, 0.5322512984275818, 0.4153386652469635, 0.4883955419063568, 0.48993396759033203, 0.5314427614212036, 0.48800715804100037, 0.39985597133636475, 0.5726277828216553, 0.3892905116081238, 0.4296627342700958, 0.5702428221702576, 0.5117926597595215, 0.5170469284057617, 0.46425777673721313, 0.4908629357814789, 0.3509584367275238, 0.546364426612854, 0.5176830887794495, 0.3958730697631836, 0.48073384165763855, 0.4480891823768616, 0.5009954571723938, 0.4895556569099426, 0.3627477288246155, 0.4711475670337677, 0.4472874701023102, 0.36391791701316833, 0.4501035809516907, 0.3550823926925659, 0.5257269740104675, 0.38061392307281494, 0.3826988637447357, 0.48065435886383057, 0.40830862522125244, 0.38660115003585815, 0.5415598750114441, 0.3522301912307739, 0.3868362307548523, 0.4260152578353882, 0.3827424645423889, 0.46325793862342834, 0.39867186546325684, 0.2635889947414398, 0.44326362013816833, 0.4126530587673187, 0.3090335428714752, 0.49344363808631897, 0.3341295123100281, 0.31646403670310974, 0.3464512228965759, 0.3438747525215149, 0.30379265546798706, 0.40022483468055725, 0.46279671788215637, 0.29108646512031555, 0.4979743957519531, 0.44132956862449646, 
0.38173991441726685, 0.3476773202419281, 0.3728725016117096, 0.3789258599281311, 0.3827957510948181, 0.4120387136936188, 0.37812188267707825, 0.34417665004730225, 0.30209752917289734, 0.36531829833984375, 0.45270678400993347, 0.4122479557991028, 0.3110288381576538, 0.43685051798820496, 0.3300723731517792, 0.44571414589881897, 0.3947547972202301, 0.2749323546886444, 0.3647018373012543, 0.3170792758464813, 0.2588762044906616, 0.30455315113067627, 0.41304025053977966, 0.34445467591285706, 0.2621881067752838, 0.466246634721756, 0.3696736991405487, 0.29275214672088623, 0.42953723669052124, 0.2403566688299179, 0.3985254764556885, 0.2967580258846283, 0.33705490827560425],\n [0.7446916699409485, 0.7462478876113892, 0.7147709727287292, 0.705237865447998, 0.7096515893936157, 0.7287588715553284, 0.7136187553405762, 0.7170451283454895, 0.6939293146133423, 0.7106002569198608, 0.7021846771240234, 0.6791289448738098, 0.7007320523262024, 0.6980238556861877, 0.6858439445495605, 0.6750596761703491, 0.6874801516532898, 0.6840221285820007, 0.6748316287994385, 0.6757656931877136, 0.6846880316734314, 0.674010157585144, 0.672813892364502, 0.6830368041992188, 0.6639166474342346, 0.6662044525146484, 0.7023374438285828, 0.6715332865715027, 0.656481146812439, 0.6704840660095215, 0.6631489396095276, 0.664164125919342, 0.6603219509124756, 0.6583611369132996, 0.6359708309173584, 0.6877472400665283, 0.6578577160835266, 0.6487371921539307, 0.6248115301132202, 0.6286807656288147, 0.6506132483482361, 0.6587142944335938, 0.6374536752700806, 0.6321330666542053, 0.5927815437316895, 0.6693291664123535, 0.6456887722015381, 0.6525410413742065, 0.616619348526001, 0.6420068144798279, 0.6638038158416748, 0.6551480889320374, 0.6187013387680054, 0.5929698348045349, 0.5747960805892944, 0.6257189512252808, 0.5963592529296875, 0.6335296630859375, 0.622265636920929, 0.6301565766334534, 0.6104306578636169, 0.5972219705581665, 0.6028149127960205, 0.5698716640472412, 0.5918635725975037, 0.6053493022918701, 
0.5951831936836243, 0.6108173727989197, 0.6113954782485962, 0.5676735043525696, 0.5552043318748474, 0.5645337104797363, 0.5583030581474304, 0.48770222067832947, 0.5372879505157471, 0.5783659219741821, 0.5623982548713684, 0.5815510749816895, 0.5337636470794678, 0.5576398372650146, 0.5712721347808838, 0.5593597888946533, 0.5536128282546997, 0.5201967358589172, 0.5749572515487671, 0.5507311820983887, 0.5134822130203247, 0.5547167062759399, 0.5380232930183411, 0.48977071046829224, 0.5138996839523315, 0.5049607157707214, 0.5116373300552368, 0.5705171823501587, 0.4911816418170929, 0.4676399230957031, 0.5912642478942871, 0.4205586910247803, 0.599958598613739, 0.48908206820487976, 0.43613678216934204, 0.47337791323661804, 0.47866958379745483, 0.5198819637298584, 0.4819088280200958, 0.4444981515407562, 0.4665561616420746, 0.5249155163764954, 0.4230271577835083, 0.43675461411476135, 0.48525139689445496, 0.47924432158470154, 0.42495813965797424, 0.46942460536956787, 0.45737117528915405, 0.45412537455558777, 0.3893934488296509, 0.3433205485343933, 0.3675767779350281, 0.48472899198532104, 0.4318505823612213, 0.45801854133605957, 0.39207911491394043, 0.4271838665008545, 0.3814970850944519, 0.5019781589508057, 0.3884572684764862, 0.3802814781665802, 0.4898897409439087, 0.49095290899276733, 0.3928977847099304, 0.41787633299827576, 0.49212905764579773, 0.4603811502456665, 0.47808635234832764, 0.42825067043304443, 0.36483070254325867, 0.39486002922058105, 0.45685309171676636, 0.43509194254875183, 0.3852032423019409, 0.46865108609199524, 0.4736804962158203, 0.34893593192100525, 0.3780778646469116, 0.4639703035354614, 0.3833337128162384, 0.3788967430591583, 0.3570201098918915, 0.3822886347770691, 0.41992926597595215, 0.3957677483558655, 0.40686050057411194, 0.31562888622283936, 0.4186554253101349, 0.2743915021419525, 0.4664614796638489, 0.4338654577732086, 0.36355477571487427, 0.576821506023407, 0.37429752945899963, 0.3489258289337158, 0.45112136006355286, 0.33845701813697815, 
0.3156965672969818, 0.3582088053226471, 0.39915138483047485, 0.37103140354156494, 0.3530580997467041, 0.3424665331840515, 0.34336671233177185, 0.43329107761383057, 0.2769938111305237, 0.4251658320426941, 0.3638579547405243, 0.3758576512336731, 0.3676673173904419, 0.43007832765579224, 0.3190065622329712, 0.30812758207321167, 0.3147698640823364, 0.33803772926330566, 0.30822011828422546, 0.397549033164978, 0.3043270409107208, 0.34162911772727966, 0.33627060055732727, 0.28034132719039917, 0.37074539065361023, 0.38271018862724304, 0.3714558482170105, 0.3501148223876953, 0.2879908084869385, 0.27289846539497375, 0.3092705011367798, 0.34151899814605713, 0.23836749792099, 0.2805699408054352, 0.28617772459983826, 0.36297568678855896],\n [0.7300472855567932, 0.6867007613182068, 0.6846880316734314, 0.6840613484382629, 0.7379503846168518, 0.7141649723052979, 0.6768766641616821, 0.7168253064155579, 0.6865659356117249, 0.7158349752426147, 0.6955016255378723, 0.7160823345184326, 0.6909663677215576, 0.6839632987976074, 0.7168858051300049, 0.7112577557563782, 0.6876682043075562, 0.6938645243644714, 0.6912950277328491, 0.7029373645782471, 0.6912244558334351, 0.6886566877365112, 0.6692138910293579, 0.6587687730789185, 0.6587306261062622, 0.6906790137290955, 0.6571764945983887, 0.7349249124526978, 0.6769081354141235, 0.6851152181625366, 0.6883368492126465, 0.6931599378585815, 0.660807728767395, 0.6832686066627502, 0.663866400718689, 0.7190250158309937, 0.6782967448234558, 0.6647338271141052, 0.7190255522727966, 0.6869436502456665, 0.7135319709777832, 0.6910357475280762, 0.6790058016777039, 0.6729632019996643, 0.6573574542999268, 0.6717900037765503, 0.6499390602111816, 0.6566978693008423, 0.6533600091934204, 0.6694302558898926, 0.7413253784179688, 0.6782134175300598, 0.651166558265686, 0.6957483887672424, 0.6816896200180054, 0.6836461424827576, 0.65700364112854, 0.6469296813011169, 0.6859912872314453, 0.6694807410240173, 0.6653313636779785, 0.6578412652015686, 0.6503434181213379, 
0.6439347863197327, 0.6405288577079773, 0.6545844078063965, 0.6400285363197327, 0.6392137408256531, 0.662876307964325, 0.6274128556251526, 0.611276388168335, 0.6296300888061523, 0.6971974968910217, 0.6152403354644775, 0.6399407386779785, 0.6099597811698914, 0.614037275314331, 0.6306928396224976, 0.6233912110328674, 0.6071301102638245, 0.5422117710113525, 0.6034693121910095, 0.5755348801612854, 0.6107156276702881, 0.5766172409057617, 0.6092644333839417, 0.5880871415138245, 0.6234793663024902, 0.5463911890983582, 0.5608245134353638, 0.5888428092002869, 0.5628297924995422, 0.6291128396987915, 0.5931711792945862, 0.5677962303161621, 0.6117801070213318, 0.5791870355606079, 0.5750445127487183, 0.5487401485443115, 0.5867695212364197, 0.6092665791511536, 0.5819268226623535, 0.5454160571098328, 0.5633161067962646, 0.5455551743507385, 0.5346310138702393, 0.5471535325050354, 0.5041360259056091, 0.5242226123809814, 0.5793042182922363, 0.5374759435653687, 0.538278341293335, 0.5183054208755493, 0.5400028824806213, 0.5124049186706543, 0.549633264541626, 0.4841218590736389, 0.46203166246414185, 0.5005708932876587, 0.48604172468185425, 0.49221688508987427, 0.4566079378128052, 0.46951553225517273, 0.43924057483673096, 0.5289251208305359, 0.4628008008003235, 0.41659680008888245, 0.43333157896995544, 0.43089747428894043, 0.4631856679916382, 0.4540521204471588, 0.5134605169296265, 0.4546918570995331, 0.3948281407356262, 0.4207550287246704, 0.41652747988700867, 0.41493555903434753, 0.37897631525993347, 0.35793769359588623, 0.38966673612594604, 0.4038204550743103, 0.35448944568634033, 0.38130858540534973, 0.4311788082122803, 0.39160025119781494, 0.3783036470413208, 0.3963787853717804, 0.3914090692996979, 0.3896610140800476, 0.30679458379745483, 0.32349467277526855, 0.31617602705955505, 0.28154852986335754, 0.28761547803878784, 0.2818848788738251, 0.2809687852859497, 0.3206581175327301, 0.27352452278137207, 0.25297045707702637, 0.30204370617866516, 0.21382515132427216, 0.3405051827430725, 
0.3692021071910858, 0.25636792182922363, 0.37404823303222656, 0.30002561211586, 0.31114882230758667, 0.33810797333717346, 0.40628308057785034, 0.26683157682418823, 0.3439508080482483, 0.27956536412239075, 0.29898345470428467, 0.1934766173362732, 0.44087517261505127, 0.31433001160621643, 0.2813761830329895, 0.19954250752925873, 0.27304938435554504, 0.2746534049510956, 0.2640751600265503, 0.3004697561264038, 0.2624701261520386, 0.31487491726875305, 0.2908383011817932, 0.17720551788806915, 0.26453515887260437, 0.2599099278450012, 0.1590421348810196, 0.21811221539974213, 0.21677884459495544, 0.28607749938964844, 0.19262443482875824, 0.35018521547317505, 0.20188915729522705, 0.19327974319458008, 0.19713938236236572, 0.24497658014297485, 0.219795823097229, 0.24567201733589172],\n [0.6855006217956543, 0.6683534383773804, 0.6976334452629089, 0.7014869451522827, 0.6894210577011108, 0.6801190376281738, 0.6752902865409851, 0.6663470268249512, 0.6576423645019531, 0.688560426235199, 0.6956480145454407, 0.6744001507759094, 0.6609869599342346, 0.7014440894126892, 0.6882161498069763, 0.6909891366958618, 0.6773833632469177, 0.6732174754142761, 0.6793199181556702, 0.6728932857513428, 0.6587716937065125, 0.672234058380127, 0.6357423663139343, 0.7170616388320923, 0.6830359697341919, 0.683824360370636, 0.6609387397766113, 0.6609007120132446, 0.7004918456077576, 0.6633859872817993, 0.6673099994659424, 0.6681905388832092, 0.6512619256973267, 0.6680063009262085, 0.644162654876709, 0.6711700558662415, 0.6526409983634949, 0.6572614908218384, 0.6210101842880249, 0.6481871008872986, 0.631973385810852, 0.6269660592079163, 0.6380199193954468, 0.6666997671127319, 0.6170499324798584, 0.6310739517211914, 0.6544679403305054, 0.6101044416427612, 0.6150673031806946, 0.6122120022773743, 0.6137775182723999, 0.6526362895965576, 0.6059761643409729, 0.643004834651947, 0.5651429891586304, 0.5998908877372742, 0.6135813593864441, 0.6074697375297546, 0.5752083659172058, 0.6223533153533936, 0.6050518155097961, 
0.5963067412376404, 0.5565653443336487, 0.508618950843811, 0.5940349698066711, 0.5791954398155212, 0.607430100440979, 0.5531137585639954, 0.6016480922698975, 0.611286997795105, 0.5905721187591553, 0.5562214851379395, 0.6107726693153381, 0.6119919419288635, 0.5428463220596313, 0.5302413702011108, 0.5703117847442627, 0.6247423887252808, 0.5531580448150635, 0.5578283667564392, 0.5733267068862915, 0.4968901574611664, 0.5486629605293274, 0.5990302562713623, 0.4940904974937439, 0.5626466870307922, 0.5192036628723145, 0.5489379167556763, 0.508159339427948, 0.48375093936920166, 0.5948370099067688, 0.5229615569114685, 0.5163667798042297, 0.5738478302955627, 0.562227189540863, 0.5080826878547668, 0.4606781303882599, 0.5100054144859314, 0.5573685169219971, 0.4546825885772705, 0.5117080211639404, 0.4994901716709137, 0.6420841217041016, 0.45629116892814636, 0.4510968029499054, 0.5476057529449463, 0.6092849373817444, 0.5381268858909607, 0.5068832635879517, 0.5333427786827087, 0.45233798027038574, 0.5086703300476074, 0.6088682413101196, 0.4161103069782257, 0.4438551664352417, 0.40600454807281494, 0.4537449777126312, 0.6216455698013306, 0.4722939729690552, 0.49088165163993835, 0.4874024987220764, 0.47236594557762146, 0.42606791853904724, 0.5494129657745361, 0.43068259954452515, 0.476716548204422, 0.45469146966934204, 0.407479852437973, 0.4520869851112366, 0.4798360764980316, 0.5087173581123352, 0.48703113198280334, 0.4840613901615143, 0.38822245597839355, 0.4944747984409332, 0.42739400267601013, 0.4855794906616211, 0.4680446982383728, 0.5466461777687073, 0.43323612213134766, 0.3971397280693054, 0.43370264768600464, 0.3504500091075897, 0.45511358976364136, 0.47195035219192505, 0.5434073805809021, 0.5047140121459961, 0.5063510537147522, 0.4441201388835907, 0.4191301763057709, 0.3612808287143707, 0.46317043900489807, 0.41102489829063416, 0.467690646648407, 0.33494076132774353, 0.4871577322483063, 0.4719245135784149, 0.40738558769226074, 0.5017144680023193, 0.39158982038497925, 
0.3715653717517853, 0.3963679075241089, 0.40490463376045227, 0.6033336520195007, 0.38330721855163574, 0.5863525867462158, 0.475930392742157, 0.4671630859375, 0.37604689598083496, 0.5735698938369751, 0.40167009830474854, 0.4351791441440582, 0.4286106824874878, 0.3088095784187317, 0.3441198468208313, 0.4076881408691406, 0.42151668667793274, 0.4043821394443512, 0.5563496947288513, 0.36643096804618835, 0.568260908126831, 0.44196057319641113, 0.4549090266227722, 0.40986168384552, 0.34101513028144836, 0.2839103043079376, 0.34913575649261475, 0.37095099687576294, 0.40869227051734924, 0.4013737738132477, 0.3731957972049713, 0.31773507595062256, 0.3644598722457886, 0.38408324122428894, 0.5548095107078552, 0.36554211378097534, 0.38086527585983276, 0.5022920370101929, 0.3946997821331024, 0.47319257259368896],\n [0.6944005489349365, 0.6628090739250183, 0.6946060061454773, 0.7173568606376648, 0.7004551887512207, 0.6992331743240356, 0.671159029006958, 0.674471914768219, 0.6864744424819946, 0.7017762660980225, 0.6934648156166077, 0.6992331743240356, 0.671159029006958, 0.674471914768219, 0.7004551887512207, 0.7116137742996216, 0.692720353603363, 0.7108584642410278, 0.6772125363349915, 0.6924972534179688, 0.6734221577644348, 0.6970875263214111, 0.6961641907691956, 0.6953407526016235, 0.6858556866645813, 0.692350447177887, 0.6999544501304626, 0.683379590511322, 0.6885834336280823, 0.7062681317329407, 0.6772125363349915, 0.7016777992248535, 0.6847931146621704, 0.692497193813324, 0.6847932934761047, 0.664955198764801, 0.6603783965110779, 0.6992331743240356, 0.7035009860992432, 0.6879068613052368, 0.6873300075531006, 0.7178041338920593, 0.6981106400489807, 0.7016778588294983, 0.6961641907691956, 0.6893602013587952, 0.699954628944397, 0.6833794116973877, 0.6999545097351074, 0.686369776725769, 0.6847931742668152, 0.7016777992248535, 0.6847932934761047, 0.692497193813324, 0.6923738121986389, 0.686369776725769, 0.6734222173690796, 0.6924972534179688, 0.6961641907691956, 0.6893602013587952, 
0.6847931742668152, 0.6787262558937073, 0.7035009860992432, 0.6787262558937073, 0.7088912725448608, 0.6924972534179688, 0.6772125959396362, 0.7016780376434326, 0.6999545097351074, 0.6714181303977966, 0.6961641907691956, 0.6953407526016235, 0.6968075037002563, 0.6926029324531555, 0.6946170926094055, 0.6898223161697388, 0.6924266815185547, 0.6803891062736511, 0.6885837316513062, 0.7016779184341431, 0.6885834336280823, 0.6970875263214111, 0.6885835528373718, 0.7016778588294983, 0.7037448287010193, 0.6893602013587952, 0.696164071559906, 0.7013214826583862, 0.6924266815185547, 0.692350447177887, 0.6810030937194824, 0.6879068613052368, 0.6927202343940735, 0.6970876455307007, 0.6885836720466614, 0.687906801700592, 0.6657686829566956, 0.7178040146827698, 0.6819396018981934, 0.6806621551513672, 0.7144356369972229, 0.6868523955345154, 0.6864744424819946, 0.6939859986305237, 0.6794841289520264, 0.6784052848815918, 0.7031963467597961, 0.7017762064933777, 0.7004550099372864, 0.6930428147315979, 0.7035009860992432, 0.6787263751029968, 0.6657687425613403, 0.6744717955589294, 0.6864744424819946, 0.6861956119537354, 0.6688350439071655, 0.6671543121337891, 0.7063332200050354, 0.6859349012374878, 0.6961429119110107, 0.7047154903411865, 0.6860157251358032, 0.6577639579772949, 0.6961429119110107, 0.6953252553939819, 0.6688351631164551, 0.7328864932060242, 0.6946060657501221, 0.6939857602119446, 0.7074453830718994, 0.7116137146949768, 0.6819397211074829, 0.7054235339164734, 0.6873300075531006, 0.6930428147315979, 0.6765493750572205, 0.7178040146827698, 0.6981105804443359, 0.6924972534179688, 0.6734225749969482, 0.692497193813324, 0.6885836720466614, 0.6970876455307007, 0.6772127747535706, 0.6924972534179688, 0.6961643099784851, 0.7043116092681885, 0.6924269199371338, 0.6983310580253601, 0.6858559250831604, 0.6923503875732422, 0.692374050617218, 0.6773987412452698, 0.7075351476669312, 0.6923503279685974, 0.6772127747535706, 0.6787264347076416, 0.692720353603363, 0.6970877051353455, 
0.6923739910125732, 0.6923503279685974, 0.7075350880622864, 0.6863697171211243, 0.6885836720466614, 0.683316707611084, 0.7088912129402161, 0.683316707611084, 0.6981105804443359, 0.7108585238456726, 0.6961642503738403, 0.6893600225448608, 0.6999545097351074, 0.6893600225448608, 0.6923739910125732, 0.7013212442398071, 0.6814754009246826, 0.6923503279685974, 0.6734225749969482, 0.6787265539169312, 0.6873299479484558, 0.674471914768219, 0.7284162044525146, 0.6682815551757812, 0.6864743828773499, 0.7017760276794434, 0.7004550099372864, 0.6868523955345154, 0.6794841289520264, 0.6939857602119446, 0.6934646368026733, 0.7116135954856873, 0.6711589694023132, 0.7054233551025391, 0.7035009860992432, 0.6970876455307007, 0.681003212928772, 0.6833168268203735, 0.6711589694023132, 0.7054234147071838]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_ae8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_ae8_MNIST = np.array([[0.6952692270278931, 0.6954190135002136, 0.6935893297195435, 0.6936798691749573, 0.6925517916679382, 0.6879379153251648, 0.6905089020729065, 0.681292712688446, 0.6959484219551086, 0.6895558834075928, 0.6956443786621094, 0.689063310623169, 0.6808215975761414, 0.6917092204093933, 0.6780702471733093, 0.6866255402565002, 0.690582811832428, 0.6955596804618835, 0.6937610507011414, 0.6873553991317749, 0.6924179792404175, 0.6872708797454834, 0.6776237487792969, 0.690467119216919, 0.6931454539299011, 0.6814525723457336, 0.6873436570167542, 0.6808752417564392, 0.6824637055397034, 0.6889563202857971, 0.6789340972900391, 0.6761460304260254, 0.6845279932022095, 0.675183117389679, 0.660584568977356, 0.6687278151512146, 0.6910607814788818, 0.6525632739067078, 0.6704840660095215, 
0.6578121185302734, 0.6599693298339844, 0.6949693560600281, 0.6427035331726074, 0.6697946786880493, 0.6576442718505859, 0.6947981119155884, 0.7222045063972473, 0.6798616051673889, 0.6535014510154724, 0.6712633371353149, 0.6631866693496704, 0.6535589098930359, 0.6592772603034973, 0.6435713171958923, 0.6538013219833374, 0.6575586795806885, 0.6395674347877502, 0.6808083057403564, 0.6622372269630432, 0.631817102432251, 0.6445865631103516, 0.6544660925865173, 0.6499393582344055, 0.6389854550361633, 0.6526691913604736, 0.6433974504470825, 0.6317873597145081, 0.6179752945899963, 0.6405547857284546, 0.6444377899169922, 0.6398964524269104, 0.6609359979629517, 0.6097671389579773, 0.6156068444252014, 0.6116361618041992, 0.6082613468170166, 0.5778841972351074, 0.6008344888687134, 0.6064746379852295, 0.5954804420471191, 0.620853841304779, 0.610678493976593, 0.5571263432502747, 0.5849411487579346, 0.5651984810829163, 0.5866627097129822, 0.5654610991477966, 0.5963013172149658, 0.6225815415382385, 0.5773814916610718, 0.5964592695236206, 0.5686890482902527, 0.5940383672714233, 0.5925736427307129, 0.5829817652702332, 0.5169069170951843, 0.551339864730835, 0.5882797837257385, 0.5258330702781677, 0.5541460514068604, 0.5718514323234558, 0.5180520415306091, 0.5013167262077332, 0.5128101110458374, 0.47474464774131775, 0.5375312566757202, 0.4833562970161438, 0.5939033627510071, 0.5356352925300598, 0.5085546374320984, 0.5322512984275818, 0.4153386652469635, 0.4883955419063568, 0.48993396759033203, 0.5314427614212036, 0.48800715804100037, 0.39985597133636475, 0.5726277828216553, 0.3892905116081238, 0.4296627342700958, 0.5702428221702576, 0.5117926597595215, 0.5170469284057617, 0.46425777673721313, 0.4908629357814789, 0.3509584367275238, 0.546364426612854, 0.5176830887794495, 0.3958730697631836, 0.48073384165763855, 0.4480891823768616, 0.5009954571723938, 0.4895556569099426, 0.3627477288246155, 0.4711475670337677, 0.4472874701023102, 0.36391791701316833, 0.4501035809516907, 
0.3550823926925659, 0.5257269740104675, 0.38061392307281494, 0.3826988637447357, 0.48065435886383057, 0.40830862522125244, 0.38660115003585815, 0.5415598750114441, 0.3522301912307739, 0.3868362307548523, 0.4260152578353882, 0.3827424645423889, 0.46325793862342834, 0.39867186546325684, 0.2635889947414398, 0.44326362013816833, 0.4126530587673187, 0.3090335428714752, 0.49344363808631897, 0.3341295123100281, 0.31646403670310974, 0.3464512228965759, 0.3438747525215149, 0.30379265546798706, 0.40022483468055725, 0.46279671788215637, 0.29108646512031555, 0.4979743957519531, 0.44132956862449646, 0.38173991441726685, 0.3476773202419281, 0.3728725016117096, 0.3789258599281311, 0.3827957510948181, 0.4120387136936188, 0.37812188267707825, 0.34417665004730225, 0.30209752917289734, 0.36531829833984375, 0.45270678400993347, 0.4122479557991028, 0.3110288381576538, 0.43685051798820496, 0.3300723731517792, 0.44571414589881897, 0.3947547972202301, 0.2749323546886444, 0.3647018373012543, 0.3170792758464813, 0.2588762044906616, 0.30455315113067627, 0.41304025053977966, 0.34445467591285706, 0.2621881067752838, 0.466246634721756, 0.3696736991405487, 0.29275214672088623, 0.42953723669052124, 0.2403566688299179, 0.3985254764556885, 0.2967580258846283, 0.33705490827560425],\n [0.7446916699409485, 0.7462478876113892, 0.7147709727287292, 0.705237865447998, 0.7096515893936157, 0.7287588715553284, 0.7136187553405762, 0.7170451283454895, 0.6939293146133423, 0.7106002569198608, 0.7021846771240234, 0.6791289448738098, 0.7007320523262024, 0.6980238556861877, 0.6858439445495605, 0.6750596761703491, 0.6874801516532898, 0.6840221285820007, 0.6748316287994385, 0.6757656931877136, 0.6846880316734314, 0.674010157585144, 0.672813892364502, 0.6830368041992188, 0.6639166474342346, 0.6662044525146484, 0.7023374438285828, 0.6715332865715027, 0.656481146812439, 0.6704840660095215, 0.6631489396095276, 0.664164125919342, 0.6603219509124756, 0.6583611369132996, 0.6359708309173584, 0.6877472400665283, 
0.6578577160835266, 0.6487371921539307, 0.6248115301132202, 0.6286807656288147, 0.6506132483482361, 0.6587142944335938, 0.6374536752700806, 0.6321330666542053, 0.5927815437316895, 0.6693291664123535, 0.6456887722015381, 0.6525410413742065, 0.616619348526001, 0.6420068144798279, 0.6638038158416748, 0.6551480889320374, 0.6187013387680054, 0.5929698348045349, 0.5747960805892944, 0.6257189512252808, 0.5963592529296875, 0.6335296630859375, 0.622265636920929, 0.6301565766334534, 0.6104306578636169, 0.5972219705581665, 0.6028149127960205, 0.5698716640472412, 0.5918635725975037, 0.6053493022918701, 0.5951831936836243, 0.6108173727989197, 0.6113954782485962, 0.5676735043525696, 0.5552043318748474, 0.5645337104797363, 0.5583030581474304, 0.48770222067832947, 0.5372879505157471, 0.5783659219741821, 0.5623982548713684, 0.5815510749816895, 0.5337636470794678, 0.5576398372650146, 0.5712721347808838, 0.5593597888946533, 0.5536128282546997, 0.5201967358589172, 0.5749572515487671, 0.5507311820983887, 0.5134822130203247, 0.5547167062759399, 0.5380232930183411, 0.48977071046829224, 0.5138996839523315, 0.5049607157707214, 0.5116373300552368, 0.5705171823501587, 0.4911816418170929, 0.4676399230957031, 0.5912642478942871, 0.4205586910247803, 0.599958598613739, 0.48908206820487976, 0.43613678216934204, 0.47337791323661804, 0.47866958379745483, 0.5198819637298584, 0.4819088280200958, 0.4444981515407562, 0.4665561616420746, 0.5249155163764954, 0.4230271577835083, 0.43675461411476135, 0.48525139689445496, 0.47924432158470154, 0.42495813965797424, 0.46942460536956787, 0.45737117528915405, 0.45412537455558777, 0.3893934488296509, 0.3433205485343933, 0.3675767779350281, 0.48472899198532104, 0.4318505823612213, 0.45801854133605957, 0.39207911491394043, 0.4271838665008545, 0.3814970850944519, 0.5019781589508057, 0.3884572684764862, 0.3802814781665802, 0.4898897409439087, 0.49095290899276733, 0.3928977847099304, 0.41787633299827576, 0.49212905764579773, 0.4603811502456665, 0.47808635234832764, 
0.42825067043304443, 0.36483070254325867, 0.39486002922058105, 0.45685309171676636, 0.43509194254875183, 0.3852032423019409, 0.46865108609199524, 0.4736804962158203, 0.34893593192100525, 0.3780778646469116, 0.4639703035354614, 0.3833337128162384, 0.3788967430591583, 0.3570201098918915, 0.3822886347770691, 0.41992926597595215, 0.3957677483558655, 0.40686050057411194, 0.31562888622283936, 0.4186554253101349, 0.2743915021419525, 0.4664614796638489, 0.4338654577732086, 0.36355477571487427, 0.576821506023407, 0.37429752945899963, 0.3489258289337158, 0.45112136006355286, 0.33845701813697815, 0.3156965672969818, 0.3582088053226471, 0.39915138483047485, 0.37103140354156494, 0.3530580997467041, 0.3424665331840515, 0.34336671233177185, 0.43329107761383057, 0.2769938111305237, 0.4251658320426941, 0.3638579547405243, 0.3758576512336731, 0.3676673173904419, 0.43007832765579224, 0.3190065622329712, 0.30812758207321167, 0.3147698640823364, 0.33803772926330566, 0.30822011828422546, 0.397549033164978, 0.3043270409107208, 0.34162911772727966, 0.33627060055732727, 0.28034132719039917, 0.37074539065361023, 0.38271018862724304, 0.3714558482170105, 0.3501148223876953, 0.2879908084869385, 0.27289846539497375, 0.3092705011367798, 0.34151899814605713, 0.23836749792099, 0.2805699408054352, 0.28617772459983826, 0.36297568678855896],\n [0.7300472855567932, 0.6867007613182068, 0.6846880316734314, 0.6840613484382629, 0.7379503846168518, 0.7141649723052979, 0.6768766641616821, 0.7168253064155579, 0.6865659356117249, 0.7158349752426147, 0.6955016255378723, 0.7160823345184326, 0.6909663677215576, 0.6839632987976074, 0.7168858051300049, 0.7112577557563782, 0.6876682043075562, 0.6938645243644714, 0.6912950277328491, 0.7029373645782471, 0.6912244558334351, 0.6886566877365112, 0.6692138910293579, 0.6587687730789185, 0.6587306261062622, 0.6906790137290955, 0.6571764945983887, 0.7349249124526978, 0.6769081354141235, 0.6851152181625366, 0.6883368492126465, 0.6931599378585815, 0.660807728767395, 
0.6832686066627502, 0.663866400718689, 0.7190250158309937, 0.6782967448234558, 0.6647338271141052, 0.7190255522727966, 0.6869436502456665, 0.7135319709777832, 0.6910357475280762, 0.6790058016777039, 0.6729632019996643, 0.6573574542999268, 0.6717900037765503, 0.6499390602111816, 0.6566978693008423, 0.6533600091934204, 0.6694302558898926, 0.7413253784179688, 0.6782134175300598, 0.651166558265686, 0.6957483887672424, 0.6816896200180054, 0.6836461424827576, 0.65700364112854, 0.6469296813011169, 0.6859912872314453, 0.6694807410240173, 0.6653313636779785, 0.6578412652015686, 0.6503434181213379, 0.6439347863197327, 0.6405288577079773, 0.6545844078063965, 0.6400285363197327, 0.6392137408256531, 0.662876307964325, 0.6274128556251526, 0.611276388168335, 0.6296300888061523, 0.6971974968910217, 0.6152403354644775, 0.6399407386779785, 0.6099597811698914, 0.614037275314331, 0.6306928396224976, 0.6233912110328674, 0.6071301102638245, 0.5422117710113525, 0.6034693121910095, 0.5755348801612854, 0.6107156276702881, 0.5766172409057617, 0.6092644333839417, 0.5880871415138245, 0.6234793663024902, 0.5463911890983582, 0.5608245134353638, 0.5888428092002869, 0.5628297924995422, 0.6291128396987915, 0.5931711792945862, 0.5677962303161621, 0.6117801070213318, 0.5791870355606079, 0.5750445127487183, 0.5487401485443115, 0.5867695212364197, 0.6092665791511536, 0.5819268226623535, 0.5454160571098328, 0.5633161067962646, 0.5455551743507385, 0.5346310138702393, 0.5471535325050354, 0.5041360259056091, 0.5242226123809814, 0.5793042182922363, 0.5374759435653687, 0.538278341293335, 0.5183054208755493, 0.5400028824806213, 0.5124049186706543, 0.549633264541626, 0.4841218590736389, 0.46203166246414185, 0.5005708932876587, 0.48604172468185425, 0.49221688508987427, 0.4566079378128052, 0.46951553225517273, 0.43924057483673096, 0.5289251208305359, 0.4628008008003235, 0.41659680008888245, 0.43333157896995544, 0.43089747428894043, 0.4631856679916382, 0.4540521204471588, 0.5134605169296265, 0.4546918570995331, 
0.3948281407356262, 0.4207550287246704, 0.41652747988700867, 0.41493555903434753, 0.37897631525993347, 0.35793769359588623, 0.38966673612594604, 0.4038204550743103, 0.35448944568634033, 0.38130858540534973, 0.4311788082122803, 0.39160025119781494, 0.3783036470413208, 0.3963787853717804, 0.3914090692996979, 0.3896610140800476, 0.30679458379745483, 0.32349467277526855, 0.31617602705955505, 0.28154852986335754, 0.28761547803878784, 0.2818848788738251, 0.2809687852859497, 0.3206581175327301, 0.27352452278137207, 0.25297045707702637, 0.30204370617866516, 0.21382515132427216, 0.3405051827430725, 0.3692021071910858, 0.25636792182922363, 0.37404823303222656, 0.30002561211586, 0.31114882230758667, 0.33810797333717346, 0.40628308057785034, 0.26683157682418823, 0.3439508080482483, 0.27956536412239075, 0.29898345470428467, 0.1934766173362732, 0.44087517261505127, 0.31433001160621643, 0.2813761830329895, 0.19954250752925873, 0.27304938435554504, 0.2746534049510956, 0.2640751600265503, 0.3004697561264038, 0.2624701261520386, 0.31487491726875305, 0.2908383011817932, 0.17720551788806915, 0.26453515887260437, 0.2599099278450012, 0.1590421348810196, 0.21811221539974213, 0.21677884459495544, 0.28607749938964844, 0.19262443482875824, 0.35018521547317505, 0.20188915729522705, 0.19327974319458008, 0.19713938236236572, 0.24497658014297485, 0.219795823097229, 0.24567201733589172],\n [0.6855006217956543, 0.6683534383773804, 0.6976334452629089, 0.7014869451522827, 0.6894210577011108, 0.6801190376281738, 0.6752902865409851, 0.6663470268249512, 0.6576423645019531, 0.688560426235199, 0.6956480145454407, 0.6744001507759094, 0.6609869599342346, 0.7014440894126892, 0.6882161498069763, 0.6909891366958618, 0.6773833632469177, 0.6732174754142761, 0.6793199181556702, 0.6728932857513428, 0.6587716937065125, 0.672234058380127, 0.6357423663139343, 0.7170616388320923, 0.6830359697341919, 0.683824360370636, 0.6609387397766113, 0.6609007120132446, 0.7004918456077576, 0.6633859872817993, 0.6673099994659424, 
0.6681905388832092, 0.6512619256973267, 0.6680063009262085, 0.644162654876709, 0.6711700558662415, 0.6526409983634949, 0.6572614908218384, 0.6210101842880249, 0.6481871008872986, 0.631973385810852, 0.6269660592079163, 0.6380199193954468, 0.6666997671127319, 0.6170499324798584, 0.6310739517211914, 0.6544679403305054, 0.6101044416427612, 0.6150673031806946, 0.6122120022773743, 0.6137775182723999, 0.6526362895965576, 0.6059761643409729, 0.643004834651947, 0.5651429891586304, 0.5998908877372742, 0.6135813593864441, 0.6074697375297546, 0.5752083659172058, 0.6223533153533936, 0.6050518155097961, 0.5963067412376404, 0.5565653443336487, 0.508618950843811, 0.5940349698066711, 0.5791954398155212, 0.607430100440979, 0.5531137585639954, 0.6016480922698975, 0.611286997795105, 0.5905721187591553, 0.5562214851379395, 0.6107726693153381, 0.6119919419288635, 0.5428463220596313, 0.5302413702011108, 0.5703117847442627, 0.6247423887252808, 0.5531580448150635, 0.5578283667564392, 0.5733267068862915, 0.4968901574611664, 0.5486629605293274, 0.5990302562713623, 0.4940904974937439, 0.5626466870307922, 0.5192036628723145, 0.5489379167556763, 0.508159339427948, 0.48375093936920166, 0.5948370099067688, 0.5229615569114685, 0.5163667798042297, 0.5738478302955627, 0.562227189540863, 0.5080826878547668, 0.4606781303882599, 0.5100054144859314, 0.5573685169219971, 0.4546825885772705, 0.5117080211639404, 0.4994901716709137, 0.6420841217041016, 0.45629116892814636, 0.4510968029499054, 0.5476057529449463, 0.6092849373817444, 0.5381268858909607, 0.5068832635879517, 0.5333427786827087, 0.45233798027038574, 0.5086703300476074, 0.6088682413101196, 0.4161103069782257, 0.4438551664352417, 0.40600454807281494, 0.4537449777126312, 0.6216455698013306, 0.4722939729690552, 0.49088165163993835, 0.4874024987220764, 0.47236594557762146, 0.42606791853904724, 0.5494129657745361, 0.43068259954452515, 0.476716548204422, 0.45469146966934204, 0.407479852437973, 0.4520869851112366, 0.4798360764980316, 0.5087173581123352, 
0.48703113198280334, 0.4840613901615143, 0.38822245597839355, 0.4944747984409332, 0.42739400267601013, 0.4855794906616211, 0.4680446982383728, 0.5466461777687073, 0.43323612213134766, 0.3971397280693054, 0.43370264768600464, 0.3504500091075897, 0.45511358976364136, 0.47195035219192505, 0.5434073805809021, 0.5047140121459961, 0.5063510537147522, 0.4441201388835907, 0.4191301763057709, 0.3612808287143707, 0.46317043900489807, 0.41102489829063416, 0.467690646648407, 0.33494076132774353, 0.4871577322483063, 0.4719245135784149, 0.40738558769226074, 0.5017144680023193, 0.39158982038497925, 0.3715653717517853, 0.3963679075241089, 0.40490463376045227, 0.6033336520195007, 0.38330721855163574, 0.5863525867462158, 0.475930392742157, 0.4671630859375, 0.37604689598083496, 0.5735698938369751, 0.40167009830474854, 0.4351791441440582, 0.4286106824874878, 0.3088095784187317, 0.3441198468208313, 0.4076881408691406, 0.42151668667793274, 0.4043821394443512, 0.5563496947288513, 0.36643096804618835, 0.568260908126831, 0.44196057319641113, 0.4549090266227722, 0.40986168384552, 0.34101513028144836, 0.2839103043079376, 0.34913575649261475, 0.37095099687576294, 0.40869227051734924, 0.4013737738132477, 0.3731957972049713, 0.31773507595062256, 0.3644598722457886, 0.38408324122428894, 0.5548095107078552, 0.36554211378097534, 0.38086527585983276, 0.5022920370101929, 0.3946997821331024, 0.47319257259368896],\n [0.6944005489349365, 0.6628090739250183, 0.6946060061454773, 0.7173568606376648, 0.7004551887512207, 0.6992331743240356, 0.671159029006958, 0.674471914768219, 0.6864744424819946, 0.7017762660980225, 0.6934648156166077, 0.6992331743240356, 0.671159029006958, 0.674471914768219, 0.7004551887512207, 0.7116137742996216, 0.692720353603363, 0.7108584642410278, 0.6772125363349915, 0.6924972534179688, 0.6734221577644348, 0.6970875263214111, 0.6961641907691956, 0.6953407526016235, 0.6858556866645813, 0.692350447177887, 0.6999544501304626, 0.683379590511322, 0.6885834336280823, 0.7062681317329407, 
0.6772125363349915, 0.7016777992248535, 0.6847931146621704, 0.692497193813324, 0.6847932934761047, 0.664955198764801, 0.6603783965110779, 0.6992331743240356, 0.7035009860992432, 0.6879068613052368, 0.6873300075531006, 0.7178041338920593, 0.6981106400489807, 0.7016778588294983, 0.6961641907691956, 0.6893602013587952, 0.699954628944397, 0.6833794116973877, 0.6999545097351074, 0.686369776725769, 0.6847931742668152, 0.7016777992248535, 0.6847932934761047, 0.692497193813324, 0.6923738121986389, 0.686369776725769, 0.6734222173690796, 0.6924972534179688, 0.6961641907691956, 0.6893602013587952, 0.6847931742668152, 0.6787262558937073, 0.7035009860992432, 0.6787262558937073, 0.7088912725448608, 0.6924972534179688, 0.6772125959396362, 0.7016780376434326, 0.6999545097351074, 0.6714181303977966, 0.6961641907691956, 0.6953407526016235, 0.6968075037002563, 0.6926029324531555, 0.6946170926094055, 0.6898223161697388, 0.6924266815185547, 0.6803891062736511, 0.6885837316513062, 0.7016779184341431, 0.6885834336280823, 0.6970875263214111, 0.6885835528373718, 0.7016778588294983, 0.7037448287010193, 0.6893602013587952, 0.696164071559906, 0.7013214826583862, 0.6924266815185547, 0.692350447177887, 0.6810030937194824, 0.6879068613052368, 0.6927202343940735, 0.6970876455307007, 0.6885836720466614, 0.687906801700592, 0.6657686829566956, 0.7178040146827698, 0.6819396018981934, 0.6806621551513672, 0.7144356369972229, 0.6868523955345154, 0.6864744424819946, 0.6939859986305237, 0.6794841289520264, 0.6784052848815918, 0.7031963467597961, 0.7017762064933777, 0.7004550099372864, 0.6930428147315979, 0.7035009860992432, 0.6787263751029968, 0.6657687425613403, 0.6744717955589294, 0.6864744424819946, 0.6861956119537354, 0.6688350439071655, 0.6671543121337891, 0.7063332200050354, 0.6859349012374878, 0.6961429119110107, 0.7047154903411865, 0.6860157251358032, 0.6577639579772949, 0.6961429119110107, 0.6953252553939819, 0.6688351631164551, 0.7328864932060242, 0.6946060657501221, 0.6939857602119446, 
0.7074453830718994, 0.7116137146949768, 0.6819397211074829, 0.7054235339164734, 0.6873300075531006, 0.6930428147315979, 0.6765493750572205, 0.7178040146827698, 0.6981105804443359, 0.6924972534179688, 0.6734225749969482, 0.692497193813324, 0.6885836720466614, 0.6970876455307007, 0.6772127747535706, 0.6924972534179688, 0.6961643099784851, 0.7043116092681885, 0.6924269199371338, 0.6983310580253601, 0.6858559250831604, 0.6923503875732422, 0.692374050617218, 0.6773987412452698, 0.7075351476669312, 0.6923503279685974, 0.6772127747535706, 0.6787264347076416, 0.692720353603363, 0.6970877051353455, 0.6923739910125732, 0.6923503279685974, 0.7075350880622864, 0.6863697171211243, 0.6885836720466614, 0.683316707611084, 0.7088912129402161, 0.683316707611084, 0.6981105804443359, 0.7108585238456726, 0.6961642503738403, 0.6893600225448608, 0.6999545097351074, 0.6893600225448608, 0.6923739910125732, 0.7013212442398071, 0.6814754009246826, 0.6923503279685974, 0.6734225749969482, 0.6787265539169312, 0.6873299479484558, 0.674471914768219, 0.7284162044525146, 0.6682815551757812, 0.6864743828773499, 0.7017760276794434, 0.7004550099372864, 0.6868523955345154, 0.6794841289520264, 0.6939857602119446, 0.6934646368026733, 0.7116135954856873, 0.6711589694023132, 0.7054233551025391, 0.7035009860992432, 0.6970876455307007, 0.681003212928772, 0.6833168268203735, 0.6711589694023132, 0.7054234147071838]])\nCNN_pca16_MNIST = np.array([[0.6806145310401917, 0.6891177892684937, 0.6810808777809143, 0.681459367275238, 0.677582859992981, 0.6844503283500671, 0.67902672290802, 0.6765640377998352, 0.6553121209144592, 0.6765022873878479, 0.6750913262367249, 0.6459337472915649, 0.6562581062316895, 0.6212171316146851, 0.6898217797279358, 0.6673468947410583, 0.6731657385826111, 0.6452456116676331, 0.6621044278144836, 0.6463764905929565, 0.6492829322814941, 0.6068952679634094, 0.652141273021698, 0.6378786563873291, 0.5709349513053894, 0.5982591509819031, 0.6228333115577698, 0.5726571083068848, 0.6219260692596436, 
0.5821561217308044, 0.5760451555252075, 0.6092997193336487, 0.5991590619087219, 0.5411335229873657, 0.6268000602722168, 0.5387431383132935, 0.5482476949691772, 0.5561688542366028, 0.5522549152374268, 0.5078320503234863, 0.516356348991394, 0.45860224962234497, 0.5300887227058411, 0.43249356746673584, 0.47263625264167786, 0.4376852512359619, 0.5084747672080994, 0.4813513457775116, 0.39343154430389404, 0.41196948289871216, 0.39305344223976135, 0.29378262162208557, 0.3937959671020508, 0.4503069818019867, 0.4290500581264496, 0.3154439926147461, 0.45116299390792847, 0.4852575659751892, 0.26925888657569885, 0.3362044394016266, 0.34540772438049316, 0.3762468099594116, 0.3919201195240021, 0.3082425594329834, 0.39521321654319763, 0.2762869596481323, 0.23747150599956512, 0.24453525245189667, 0.30715182423591614, 0.2399640530347824, 0.2911878526210785, 0.2603612244129181, 0.2213561236858368, 0.1843515783548355, 0.36367589235305786, 0.27601704001426697, 0.3306904137134552, 0.30314236879348755, 0.36693233251571655, 0.16952678561210632, 0.29714977741241455, 0.2678932249546051, 0.21657207608222961, 0.31770190596580505, 0.16845642030239105, 0.17133162915706635, 0.20860345661640167, 0.241547092795372, 0.16557984054088593, 0.13195481896400452, 0.12042249739170074, 0.20844042301177979, 0.16757270693778992, 0.13524217903614044, 0.26231148838996887, 0.14653021097183228, 0.10943739861249924, 0.1613474041223526, 0.1389721781015396, 0.12163649499416351, 0.07840366661548615, 0.054724883288145065, 0.25314319133758545, 0.08236272633075714, 0.1612531691789627, 0.09361427277326584, 0.06425020098686218, 0.08957542479038239, 0.1381072998046875, 0.15880292654037476, 0.17611543834209442, 0.11794227361679077, 0.46597373485565186, 0.1963619738817215, 0.17961978912353516, 0.06808473914861679, 0.09268160909414291, 0.1391472965478897, 0.06637661159038544, 0.15846601128578186, 0.1327289640903473, 0.2182898074388504, 0.1058807298541069, 0.10732986778020859, 0.08478998392820358, 0.10124629735946655, 
0.14144763350486755, 0.07451979070901871, 0.06936513632535934, 0.0612974688410759, 0.30444279313087463, 0.06056065484881401, 0.06175949424505234, 0.040860239416360855, 0.06308306008577347, 0.04872123897075653, 0.09641899168491364, 0.038198236376047134, 0.02865372598171234, 0.13943105936050415, 0.11848892271518707, 0.046460047364234924, 0.16574667394161224, 0.09097571671009064, 0.12407471984624863, 0.060693301260471344, 0.027036622166633606, 0.09020046144723892, 0.0703904777765274, 0.06513619422912598, 0.052513524889945984, 0.03958103060722351, 0.027356235310435295, 0.0737571269273758, 0.05449620261788368, 0.027848217636346817, 0.04617871344089508, 0.06447454541921616, 0.024810027331113815, 0.06698387861251831, 0.1253947764635086, 0.04443974420428276, 0.03353843837976456, 0.04199234023690224, 0.025889378041028976, 0.013814088888466358, 0.02019309438765049, 0.08621712774038315, 0.15111301839351654, 0.029435500502586365, 0.31726235151290894, 0.018413979560136795, 0.015832992270588875, 0.040774665772914886, 0.0763082504272461, 0.0244026780128479, 0.032695330679416656, 0.022717302665114403, 0.073860764503479, 0.06612318754196167, 0.03921329602599144, 0.1316828578710556, 0.02125360816717148, 0.05114375054836273, 0.18408994376659393, 0.4109771251678467, 0.08513437956571579, 0.10579138994216919, 0.03613586723804474, 0.09319301694631577, 0.057633984833955765, 0.015234878286719322, 0.03201673924922943, 0.05869147181510925, 0.045520443469285965, 0.15026597678661346, 0.27123087644577026, 0.08878064900636673, 0.04769028723239899, 0.07738181948661804],\n [0.6483059525489807, 0.6563783884048462, 0.6850159168243408, 0.6411230564117432, 0.6380452513694763, 0.6505370140075684, 0.6264790296554565, 0.6292885541915894, 0.596828281879425, 0.5569097995758057, 0.5843960046768188, 0.6377838850021362, 0.5724678635597229, 0.5427122712135315, 0.49737539887428284, 0.5103490948677063, 0.46374934911727905, 0.5569535493850708, 0.5538923740386963, 0.496002197265625, 0.5204284191131592, 
0.48952487111091614, 0.47405749559402466, 0.4267624020576477, 0.4289661645889282, 0.43901026248931885, 0.5648939609527588, 0.4655061960220337, 0.4698445200920105, 0.4614178538322449, 0.44124358892440796, 0.32259997725486755, 0.3483031392097473, 0.45954185724258423, 0.3501482903957367, 0.37872546911239624, 0.29290395975112915, 0.28891175985336304, 0.3467039465904236, 0.25331732630729675, 0.2541784942150116, 0.3113395571708679, 0.28468358516693115, 0.22865089774131775, 0.22395992279052734, 0.3142634928226471, 0.2739853858947754, 0.20418451726436615, 0.21552890539169312, 0.19829422235488892, 0.16342101991176605, 0.19272656738758087, 0.1497659683227539, 0.3798947036266327, 0.0730527937412262, 0.157745361328125, 0.13460098206996918, 0.12959285080432892, 0.0926138311624527, 0.1781025379896164, 0.17468847334384918, 0.1651872992515564, 0.0967901200056076, 0.044743023812770844, 0.19361089169979095, 0.06055908277630806, 0.10038939118385315, 0.046859148889780045, 0.08200687170028687, 0.048592112958431244, 0.1743631511926651, 0.3577481806278229, 0.04464635252952576, 0.07165711373090744, 0.06481656432151794, 0.02404109388589859, 0.06484148651361465, 0.09828437864780426, 0.021815918385982513, 0.07678967714309692, 0.08977449685335159, 0.04380042478442192, 0.19307667016983032, 0.021066617220640182, 0.03211529180407524, 0.11584176123142242, 0.2807219624519348, 0.016751103103160858, 0.04332469403743744, 0.007093906868249178, 0.07024945318698883, 0.09839142113924026, 0.08572863787412643, 0.0621042475104332, 0.20664802193641663, 0.03727364167571068, 0.08116946369409561, 0.034467652440071106, 0.0623333565890789, 0.026000985875725746, 0.007840643636882305, 0.013071736320853233, 0.048665374517440796, 0.03948287293314934, 0.024740396067500114, 0.15374822914600372, 0.007785605266690254, 0.012651367112994194, 0.009137640707194805, 0.08676660805940628, 0.005300299730151892, 0.14843745529651642, 0.08412238210439682, 0.010875853709876537, 0.07558389008045197, 0.003561086254194379, 
0.006547960452735424, 0.04602324590086937, 0.047185491770505905, 0.29085931181907654, 0.24853603541851044, 0.03257639333605766, 0.018488867208361626, 0.01640714332461357, 0.008174451999366283, 0.055812906473875046, 0.01203722134232521, 0.007658732123672962, 0.12586677074432373, 0.5034170746803284, 0.010695054195821285, 0.06965052336454391, 0.01679280586540699, 0.002653795760124922, 0.04166708514094353, 0.07821805030107498, 0.7347344756126404, 0.04188626632094383, 0.016318675130605698, 0.37407955527305603, 0.22020810842514038, 0.008696100674569607, 0.15934669971466064, 0.11055561155080795, 0.02056865207850933, 0.02888043411076069, 0.026288440451025963, 0.3896704912185669, 0.18966376781463623, 0.10935170203447342, 0.0884619653224945, 0.0037616356275975704, 0.030014991760253906, 0.004645687527954578, 0.015064898878335953, 0.019719423726201057, 0.008488441817462444, 0.1289932131767273, 0.0023506588768213987, 0.027266468852758408, 0.00494847446680069, 0.0010905511444434524, 0.006871162448078394, 0.0013412274420261383, 0.013323437422513962, 0.2185526341199875, 0.26384472846984863, 0.5073553919792175, 0.009816479869186878, 0.023475145921111107, 1.0379524230957031, 0.06343718618154526, 0.000646052067168057, 0.001310146413743496, 0.0015178965404629707, 0.0019234382780268788, 0.04544300585985184, 0.01408923789858818, 0.0064981672912836075, 0.004513423889875412, 0.01304874662309885, 0.0001350530656054616, 0.15383785963058472, 0.27203622460365295, 0.0007774900877848268, 0.17590706050395966, 0.0004588304436765611, 0.0009490635711699724, 0.19386570155620575, 0.006364853587001562, 0.11958801001310349, 0.005988326855003834, 0.1914440095424652, 0.20153051614761353, 0.0010578660294413567, 0.07399755716323853, 0.0011458895169198513, 0.12265504896640778, 0.00024639678304083645, 0.0046604098752141],\n [0.6869201064109802, 0.6689411401748657, 0.7047302722930908, 0.6602210998535156, 0.7235260009765625, 0.6946125626564026, 0.6705976128578186, 0.6688188910484314, 0.7047302722930908, 
0.6946125626564026, 0.7173848152160645, 0.6934691667556763, 0.6806504726409912, 0.7074649333953857, 0.7116398811340332, 0.6927230358123779, 0.7016947269439697, 0.6885767579078674, 0.6879010796546936, 0.6765294075012207, 0.6744526028633118, 0.7284586429595947, 0.6682547926902771, 0.6654778122901917, 0.6861932873725891, 0.7032104730606079, 0.6705976724624634, 0.6774168610572815, 0.695332407951355, 0.6946125626564026, 0.6939913034439087, 0.6864714026451111, 0.6783956289291382, 0.6946125626564026, 0.717384934425354, 0.6584799289703369, 0.7017890810966492, 0.7004672288894653, 0.6930462718009949, 0.6927230358123779, 0.6833032369613647, 0.6927230358123779, 0.6879011392593384, 0.687325119972229, 0.705441951751709, 0.6819272637367249, 0.6992441415786743, 0.6981208920478821, 0.6970969438552856, 0.6847788095474243, 0.7154884338378906, 0.6847789287567139, 0.6970969438552856, 0.6885767579078674, 0.7108905911445618, 0.6885767579078674, 0.6787053942680359, 0.7035187482833862, 0.6879010796546936, 0.7035187482833862, 0.6787053942680359, 0.687325119972229, 0.6992440819740295, 0.6927230358123779, 0.710890531539917, 0.6733852624893188, 0.6879011392593384, 0.6819272637367249, 0.6682546734809875, 0.6654778122901917, 0.6550018787384033, 0.6774168610572815, 0.6859344244003296, 0.7369425892829895, 0.6859345436096191, 0.6757553219795227, 0.7300616502761841, 0.6553595662117004, 0.6860702037811279, 0.6655573844909668, 0.675072431564331, 0.7216790914535522, 0.708065927028656, 0.6859530806541443, 0.6577409505844116, 0.6859531998634338, 0.6953323483467102, 0.6946125030517578, 0.6861934661865234, 0.6946125030517578, 0.7173848748207092, 0.6724755764007568, 0.6861934065818787, 0.6946125626564026, 0.6939913034439087, 0.6724755764007568, 0.6705976724624634, 0.6516231298446655, 0.7141281366348267, 0.6774167418479919, 0.6859345436096191, 0.7165467739105225, 0.7141281962394714, 0.6688189506530762, 0.6859345436096191, 0.6553597450256348, 0.7410594820976257, 0.6859533190727234, 0.7047301530838013, 
0.6774168610572815, 0.6859343647956848, 0.7165467739105225, 0.6671386957168579, 0.6961510181427002, 0.6859344244003296, 0.7063490152359009, 0.6577412486076355, 0.6553597450256348, 0.7190638184547424, 0.6859531998634338, 0.6671387553215027, 0.6859531998634338, 0.7141281366348267, 0.6860145330429077, 0.6859345436096191, 0.6961511373519897, 0.6765368580818176, 0.726744532585144, 0.7235260605812073, 0.694612443447113, 0.6783955097198486, 0.6774166822433472, 0.6859346032142639, 0.6553597450256348, 0.6420789361000061, 0.6626895666122437, 0.7117941379547119, 0.6980831027030945, 0.6970681548118591, 0.7165467739105225, 0.7141281366348267, 0.7032101154327393, 0.6939912438392639, 0.6934691667556763, 0.6744528412818909, 0.7074648141860962, 0.6992443203926086, 0.6873250007629395, 0.662057101726532, 0.6864713430404663, 0.7017890810966492, 0.7004669904708862, 0.711639940738678, 0.7035185098648071, 0.6924991011619568, 0.6885769367218018, 0.6833033561706543, 0.6657336354255676, 0.7054421305656433, 0.6927227973937988, 0.7062925100326538, 0.6771834492683411, 0.6787055134773254, 0.7089163064956665, 0.6787055134773254, 0.7035185098648071, 0.7154881358146667, 0.6923748254776001, 0.6893523335456848, 0.6961726546287537, 0.6923501491546631, 0.6771834492683411, 0.7108903527259827, 0.6923748254776001, 0.6833566427230835, 0.6961726546287537, 0.6983458995819092, 0.6902278661727905, 0.710337221622467, 0.6946234703063965, 0.6939986944198608, 0.6934741139411926, 0.6936558485031128, 0.6916806101799011, 0.6939986944198608, 0.6922784447669983, 0.6926009654998779, 0.6946234703063965, 0.6953966021537781, 0.6922784447669983, 0.6939988136291504, 0.6904849410057068, 0.7009879350662231, 0.6904849410057068, 0.6939988136291504, 0.6952676177024841, 0.6932514905929565, 0.6916806101799011, 0.6926009654998779, 0.6924256682395935],\n [0.6845834851264954, 0.714557409286499, 0.6417245268821716, 0.6426114439964294, 0.676908016204834, 0.6596363186836243, 0.6010047793388367, 0.6583335399627686, 0.6423051953315735, 
0.6252385973930359, 0.5903713703155518, 0.6268495321273804, 0.589474618434906, 0.5797603130340576, 0.5605499148368835, 0.561373233795166, 0.571896493434906, 0.5515089631080627, 0.53475022315979, 0.5062263607978821, 0.5230075716972351, 0.510095477104187, 0.404560923576355, 0.4952531158924103, 0.5080372095108032, 0.4546820819377899, 0.3762242794036865, 0.4545120298862457, 0.4356318712234497, 0.42067408561706543, 0.3556440770626068, 0.37861964106559753, 0.39252662658691406, 0.40825438499450684, 0.32834678888320923, 0.348328173160553, 0.24916067719459534, 0.34087973833084106, 0.29008814692497253, 0.27842581272125244, 0.2763988971710205, 0.2681778371334076, 0.2402539700269699, 0.2275475263595581, 0.24937856197357178, 0.24112485349178314, 0.2519858479499817, 0.17717988789081573, 0.18434566259384155, 0.1777828186750412, 0.2565017342567444, 0.19697146117687225, 0.17142488062381744, 0.12871775031089783, 0.11260759085416794, 0.2114068865776062, 0.1914333701133728, 0.11530886590480804, 0.167332723736763, 0.16041263937950134, 0.12866191565990448, 0.15597859025001526, 0.2081473022699356, 0.0956139788031578, 0.14390972256660461, 0.08720321953296661, 0.16051031649112701, 0.07090023905038834, 0.12660036981105804, 0.06036912277340889, 0.07868777215480804, 0.07437726855278015, 0.03706624358892441, 0.08708903193473816, 0.048048458993434906, 0.08304107934236526, 0.13906075060367584, 0.08239659667015076, 0.04523441940546036, 0.14315050840377808, 0.06168464571237564, 0.02829252928495407, 0.06346246600151062, 0.09711773693561554, 0.0713791698217392, 0.030042534694075584, 0.020309817045927048, 0.017385778948664665, 0.1023128479719162, 0.033943045884370804, 0.15105409920215607, 0.0875217467546463, 0.0241351630538702, 0.021511144936084747, 0.43396979570388794, 0.05194273963570595, 0.018932990729808807, 0.30235084891319275, 0.014932023361325264, 0.035737793892621994, 0.07673761248588562, 0.3509562611579895, 0.049613747745752335, 0.07966945320367813, 0.004679353907704353, 
0.044478461146354675, 0.043420229107141495, 0.02522539161145687, 0.38835403323173523, 0.017609840258955956, 0.016579898074269295, 0.03199652582406998, 0.09706789255142212, 0.005945832934230566, 0.23662501573562622, 0.03121006488800049, 0.037620846182107925, 0.013692362233996391, 0.022500745952129364, 0.011120911687612534, 0.027049649506807327, 0.027720334008336067, 0.0038731980603188276, 0.004106536507606506, 0.11524190753698349, 0.08156333118677139, 0.09217409044504166, 0.26894962787628174, 0.02597770281136036, 0.01886932924389839, 0.04878406599164009, 0.5152551531791687, 0.36342406272888184, 0.004652795847505331, 0.005375933833420277, 0.012198200449347496, 0.019326575100421906, 0.04520021751523018, 0.23542580008506775, 0.090694859623909, 0.00967296864837408, 0.28795763850212097, 0.026030009612441063, 0.06408455967903137, 0.08752475678920746, 0.0022947590332478285, 0.004767645616084337, 0.025067416951060295, 0.16141098737716675, 0.09186969697475433, 0.09171997010707855, 0.027097012847661972, 0.11574391275644302, 0.18159152567386627, 0.019621726125478745, 0.23002763092517853, 0.004370448179543018, 0.007135648746043444, 0.019487056881189346, 0.01647159270942211, 0.22402292490005493, 0.0019302433356642723, 0.014990568161010742, 0.06524307280778885, 0.025793546810746193, 0.0039999703876674175, 0.21573284268379211, 0.01643890142440796, 0.003423704532906413, 0.006153523921966553, 0.003971239551901817, 0.06890764087438583, 0.0006971824914216995, 0.11413619667291641, 0.06743254512548447, 0.07301442325115204, 0.01851137913763523, 0.022294579073786736, 0.014995374716818333, 0.005857307929545641, 0.021342884749174118, 0.0018055700929835439, 0.000623525062110275, 0.018472924828529358, 0.22669300436973572, 0.0006525359931401908, 0.29654935002326965, 0.21874898672103882, 0.0008165992330759764, 0.24519266188144684, 0.009697365574538708, 0.18859906494617462, 0.13186874985694885, 0.1245938315987587, 0.0009449649369344115, 0.0004151250177528709, 0.013996547088027, 
0.0066335154697299, 0.10195517539978027, 0.05289201810956001],\n [0.745194673538208, 0.6906776428222656, 0.687400758266449, 0.6717780828475952, 0.7604597210884094, 0.752467930316925, 0.6976206302642822, 0.704662561416626, 0.6916715502738953, 0.6934672594070435, 0.6470069289207458, 0.7148513793945312, 0.6763886213302612, 0.6802818775177002, 0.6672106385231018, 0.6484056711196899, 0.6637589931488037, 0.6583610773086548, 0.6557639837265015, 0.6428236365318298, 0.642822265625, 0.6243969202041626, 0.6294710040092468, 0.6075966358184814, 0.6001461744308472, 0.6056424379348755, 0.6350275278091431, 0.5835784673690796, 0.5997673273086548, 0.5480678081512451, 0.5856139659881592, 0.5785164833068848, 0.5884747505187988, 0.5700724124908447, 0.4928150177001953, 0.5337228775024414, 0.5427252054214478, 0.5247185230255127, 0.5774044394493103, 0.5185093879699707, 0.5197265148162842, 0.4411318302154541, 0.5511063933372498, 0.5263891220092773, 0.45463821291923523, 0.5314087271690369, 0.47028473019599915, 0.4092979431152344, 0.5777102112770081, 0.5306834578514099, 0.42406684160232544, 0.4687281847000122, 0.5070981383323669, 0.47439032793045044, 0.37494319677352905, 0.4947170913219452, 0.4206700921058655, 0.4383554756641388, 0.409442663192749, 0.44664183259010315, 0.4921582043170929, 0.3810812830924988, 0.4118553102016449, 0.39941662549972534, 0.43318724632263184, 0.31750908493995667, 0.34022581577301025, 0.3589905798435211, 0.3815973997116089, 0.3389066755771637, 0.32979461550712585, 0.32433709502220154, 0.28116682171821594, 0.31867024302482605, 0.29788222908973694, 0.31168806552886963, 0.3142642080783844, 0.25582820177078247, 0.32176223397254944, 0.3123543858528137, 0.2679462432861328, 0.2546009123325348, 0.24331651628017426, 0.402679443359375, 0.21918657422065735, 0.23834611475467682, 0.36965858936309814, 0.24380004405975342, 0.22266092896461487, 0.28505799174308777, 0.33410510420799255, 0.2647447884082794, 0.23237332701683044, 0.22678805887699127, 0.2724705934524536, 
0.22559556365013123, 0.200675368309021, 0.2558078169822693, 0.36495375633239746, 0.1662682741880417, 0.26781201362609863, 0.22160503268241882, 0.24419258534908295, 0.2391909956932068, 0.1725086271762848, 0.19450058043003082, 0.13128599524497986, 0.15883126854896545, 0.08883234858512878, 0.22445882856845856, 0.19227375090122223, 0.12227177619934082, 0.14730407297611237, 0.10880385339260101, 0.15898071229457855, 0.14434635639190674, 0.10622251778841019, 0.13339510560035706, 0.12552213668823242, 0.0874275416135788, 0.21024318039417267, 0.05612509697675705, 0.2596701383590698, 0.06474778801202774, 0.21115590631961823, 0.08705268055200577, 0.2965729236602783, 0.11845093965530396, 0.3113614022731781, 0.0852881520986557, 0.057083774358034134, 0.05873868614435196, 0.18759706616401672, 0.1588224172592163, 0.2532350420951843, 0.06962744891643524, 0.07326000928878784, 0.07850103080272675, 0.07247595489025116, 0.05099700763821602, 0.09197735041379929, 0.0621706023812294, 0.13775122165679932, 0.03626300394535065, 0.09360489994287491, 0.08558189123868942, 0.0900314450263977, 0.06604598462581635, 0.05721459537744522, 0.12379155308008194, 0.07557165622711182, 0.18699273467063904, 0.04573093727231026, 0.04888857528567314, 0.07472683489322662, 0.1503097116947174, 0.13833612203598022, 0.036567263305187225, 0.09546559303998947, 0.17797045409679413, 0.20176424086093903, 0.06245369091629982, 0.20733888447284698, 0.07638655602931976, 0.22693926095962524, 0.09672175347805023, 0.18307197093963623, 0.2140817642211914, 0.024585099890828133, 0.047724127769470215, 0.03778181970119476, 0.035520292818546295, 0.06756170094013214, 0.025606723502278328, 0.0960211381316185, 0.055418919771909714, 0.08863727748394012, 0.11362741142511368, 0.09254298359155655, 0.3518190383911133, 0.0551731139421463, 0.12171444296836853, 0.12081704288721085, 0.08439555764198303, 0.027323825284838676, 0.10690995305776596, 0.1244480237364769, 0.0726374164223671, 0.06508301943540573, 0.17972348630428314, 
0.10436990112066269, 0.09515547752380371, 0.09833931922912598, 0.1103367730975151, 0.09164026379585266, 0.03350993990898132, 0.05515069514513016, 0.1332990527153015, 0.027255253866314888, 0.05536457523703575]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_pca16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_pca16_MNIST = np.array([[0.6806145310401917, 0.6891177892684937, 0.6810808777809143, 0.681459367275238, 0.677582859992981, 0.6844503283500671, 0.67902672290802, 0.6765640377998352, 0.6553121209144592, 0.6765022873878479, 0.6750913262367249, 0.6459337472915649, 0.6562581062316895, 0.6212171316146851, 0.6898217797279358, 0.6673468947410583, 0.6731657385826111, 0.6452456116676331, 0.6621044278144836, 0.6463764905929565, 0.6492829322814941, 0.6068952679634094, 0.652141273021698, 0.6378786563873291, 0.5709349513053894, 0.5982591509819031, 0.6228333115577698, 0.5726571083068848, 0.6219260692596436, 0.5821561217308044, 0.5760451555252075, 0.6092997193336487, 0.5991590619087219, 0.5411335229873657, 0.6268000602722168, 0.5387431383132935, 0.5482476949691772, 0.5561688542366028, 0.5522549152374268, 0.5078320503234863, 0.516356348991394, 0.45860224962234497, 0.5300887227058411, 0.43249356746673584, 0.47263625264167786, 0.4376852512359619, 0.5084747672080994, 0.4813513457775116, 0.39343154430389404, 0.41196948289871216, 0.39305344223976135, 0.29378262162208557, 0.3937959671020508, 0.4503069818019867, 0.4290500581264496, 0.3154439926147461, 0.45116299390792847, 0.4852575659751892, 0.26925888657569885, 0.3362044394016266, 0.34540772438049316, 0.3762468099594116, 0.3919201195240021, 0.3082425594329834, 0.39521321654319763, 0.2762869596481323, 0.23747150599956512, 
0.24453525245189667, 0.30715182423591614, 0.2399640530347824, 0.2911878526210785, 0.2603612244129181, 0.2213561236858368, 0.1843515783548355, 0.36367589235305786, 0.27601704001426697, 0.3306904137134552, 0.30314236879348755, 0.36693233251571655, 0.16952678561210632, 0.29714977741241455, 0.2678932249546051, 0.21657207608222961, 0.31770190596580505, 0.16845642030239105, 0.17133162915706635, 0.20860345661640167, 0.241547092795372, 0.16557984054088593, 0.13195481896400452, 0.12042249739170074, 0.20844042301177979, 0.16757270693778992, 0.13524217903614044, 0.26231148838996887, 0.14653021097183228, 0.10943739861249924, 0.1613474041223526, 0.1389721781015396, 0.12163649499416351, 0.07840366661548615, 0.054724883288145065, 0.25314319133758545, 0.08236272633075714, 0.1612531691789627, 0.09361427277326584, 0.06425020098686218, 0.08957542479038239, 0.1381072998046875, 0.15880292654037476, 0.17611543834209442, 0.11794227361679077, 0.46597373485565186, 0.1963619738817215, 0.17961978912353516, 0.06808473914861679, 0.09268160909414291, 0.1391472965478897, 0.06637661159038544, 0.15846601128578186, 0.1327289640903473, 0.2182898074388504, 0.1058807298541069, 0.10732986778020859, 0.08478998392820358, 0.10124629735946655, 0.14144763350486755, 0.07451979070901871, 0.06936513632535934, 0.0612974688410759, 0.30444279313087463, 0.06056065484881401, 0.06175949424505234, 0.040860239416360855, 0.06308306008577347, 0.04872123897075653, 0.09641899168491364, 0.038198236376047134, 0.02865372598171234, 0.13943105936050415, 0.11848892271518707, 0.046460047364234924, 0.16574667394161224, 0.09097571671009064, 0.12407471984624863, 0.060693301260471344, 0.027036622166633606, 0.09020046144723892, 0.0703904777765274, 0.06513619422912598, 0.052513524889945984, 0.03958103060722351, 0.027356235310435295, 0.0737571269273758, 0.05449620261788368, 0.027848217636346817, 0.04617871344089508, 0.06447454541921616, 0.024810027331113815, 0.06698387861251831, 0.1253947764635086, 0.04443974420428276, 
0.03353843837976456, 0.04199234023690224, 0.025889378041028976, 0.013814088888466358, 0.02019309438765049, 0.08621712774038315, 0.15111301839351654, 0.029435500502586365, 0.31726235151290894, 0.018413979560136795, 0.015832992270588875, 0.040774665772914886, 0.0763082504272461, 0.0244026780128479, 0.032695330679416656, 0.022717302665114403, 0.073860764503479, 0.06612318754196167, 0.03921329602599144, 0.1316828578710556, 0.02125360816717148, 0.05114375054836273, 0.18408994376659393, 0.4109771251678467, 0.08513437956571579, 0.10579138994216919, 0.03613586723804474, 0.09319301694631577, 0.057633984833955765, 0.015234878286719322, 0.03201673924922943, 0.05869147181510925, 0.045520443469285965, 0.15026597678661346, 0.27123087644577026, 0.08878064900636673, 0.04769028723239899, 0.07738181948661804],\n [0.6483059525489807, 0.6563783884048462, 0.6850159168243408, 0.6411230564117432, 0.6380452513694763, 0.6505370140075684, 0.6264790296554565, 0.6292885541915894, 0.596828281879425, 0.5569097995758057, 0.5843960046768188, 0.6377838850021362, 0.5724678635597229, 0.5427122712135315, 0.49737539887428284, 0.5103490948677063, 0.46374934911727905, 0.5569535493850708, 0.5538923740386963, 0.496002197265625, 0.5204284191131592, 0.48952487111091614, 0.47405749559402466, 0.4267624020576477, 0.4289661645889282, 0.43901026248931885, 0.5648939609527588, 0.4655061960220337, 0.4698445200920105, 0.4614178538322449, 0.44124358892440796, 0.32259997725486755, 0.3483031392097473, 0.45954185724258423, 0.3501482903957367, 0.37872546911239624, 0.29290395975112915, 0.28891175985336304, 0.3467039465904236, 0.25331732630729675, 0.2541784942150116, 0.3113395571708679, 0.28468358516693115, 0.22865089774131775, 0.22395992279052734, 0.3142634928226471, 0.2739853858947754, 0.20418451726436615, 0.21552890539169312, 0.19829422235488892, 0.16342101991176605, 0.19272656738758087, 0.1497659683227539, 0.3798947036266327, 0.0730527937412262, 0.157745361328125, 0.13460098206996918, 0.12959285080432892, 
0.0926138311624527, 0.1781025379896164, 0.17468847334384918, 0.1651872992515564, 0.0967901200056076, 0.044743023812770844, 0.19361089169979095, 0.06055908277630806, 0.10038939118385315, 0.046859148889780045, 0.08200687170028687, 0.048592112958431244, 0.1743631511926651, 0.3577481806278229, 0.04464635252952576, 0.07165711373090744, 0.06481656432151794, 0.02404109388589859, 0.06484148651361465, 0.09828437864780426, 0.021815918385982513, 0.07678967714309692, 0.08977449685335159, 0.04380042478442192, 0.19307667016983032, 0.021066617220640182, 0.03211529180407524, 0.11584176123142242, 0.2807219624519348, 0.016751103103160858, 0.04332469403743744, 0.007093906868249178, 0.07024945318698883, 0.09839142113924026, 0.08572863787412643, 0.0621042475104332, 0.20664802193641663, 0.03727364167571068, 0.08116946369409561, 0.034467652440071106, 0.0623333565890789, 0.026000985875725746, 0.007840643636882305, 0.013071736320853233, 0.048665374517440796, 0.03948287293314934, 0.024740396067500114, 0.15374822914600372, 0.007785605266690254, 0.012651367112994194, 0.009137640707194805, 0.08676660805940628, 0.005300299730151892, 0.14843745529651642, 0.08412238210439682, 0.010875853709876537, 0.07558389008045197, 0.003561086254194379, 0.006547960452735424, 0.04602324590086937, 0.047185491770505905, 0.29085931181907654, 0.24853603541851044, 0.03257639333605766, 0.018488867208361626, 0.01640714332461357, 0.008174451999366283, 0.055812906473875046, 0.01203722134232521, 0.007658732123672962, 0.12586677074432373, 0.5034170746803284, 0.010695054195821285, 0.06965052336454391, 0.01679280586540699, 0.002653795760124922, 0.04166708514094353, 0.07821805030107498, 0.7347344756126404, 0.04188626632094383, 0.016318675130605698, 0.37407955527305603, 0.22020810842514038, 0.008696100674569607, 0.15934669971466064, 0.11055561155080795, 0.02056865207850933, 0.02888043411076069, 0.026288440451025963, 0.3896704912185669, 0.18966376781463623, 0.10935170203447342, 0.0884619653224945, 0.0037616356275975704, 
0.030014991760253906, 0.004645687527954578, 0.015064898878335953, 0.019719423726201057, 0.008488441817462444, 0.1289932131767273, 0.0023506588768213987, 0.027266468852758408, 0.00494847446680069, 0.0010905511444434524, 0.006871162448078394, 0.0013412274420261383, 0.013323437422513962, 0.2185526341199875, 0.26384472846984863, 0.5073553919792175, 0.009816479869186878, 0.023475145921111107, 1.0379524230957031, 0.06343718618154526, 0.000646052067168057, 0.001310146413743496, 0.0015178965404629707, 0.0019234382780268788, 0.04544300585985184, 0.01408923789858818, 0.0064981672912836075, 0.004513423889875412, 0.01304874662309885, 0.0001350530656054616, 0.15383785963058472, 0.27203622460365295, 0.0007774900877848268, 0.17590706050395966, 0.0004588304436765611, 0.0009490635711699724, 0.19386570155620575, 0.006364853587001562, 0.11958801001310349, 0.005988326855003834, 0.1914440095424652, 0.20153051614761353, 0.0010578660294413567, 0.07399755716323853, 0.0011458895169198513, 0.12265504896640778, 0.00024639678304083645, 0.0046604098752141],\n [0.6869201064109802, 0.6689411401748657, 0.7047302722930908, 0.6602210998535156, 0.7235260009765625, 0.6946125626564026, 0.6705976128578186, 0.6688188910484314, 0.7047302722930908, 0.6946125626564026, 0.7173848152160645, 0.6934691667556763, 0.6806504726409912, 0.7074649333953857, 0.7116398811340332, 0.6927230358123779, 0.7016947269439697, 0.6885767579078674, 0.6879010796546936, 0.6765294075012207, 0.6744526028633118, 0.7284586429595947, 0.6682547926902771, 0.6654778122901917, 0.6861932873725891, 0.7032104730606079, 0.6705976724624634, 0.6774168610572815, 0.695332407951355, 0.6946125626564026, 0.6939913034439087, 0.6864714026451111, 0.6783956289291382, 0.6946125626564026, 0.717384934425354, 0.6584799289703369, 0.7017890810966492, 0.7004672288894653, 0.6930462718009949, 0.6927230358123779, 0.6833032369613647, 0.6927230358123779, 0.6879011392593384, 0.687325119972229, 0.705441951751709, 0.6819272637367249, 0.6992441415786743, 
0.6981208920478821, 0.6970969438552856, 0.6847788095474243, 0.7154884338378906, 0.6847789287567139, 0.6970969438552856, 0.6885767579078674, 0.7108905911445618, 0.6885767579078674, 0.6787053942680359, 0.7035187482833862, 0.6879010796546936, 0.7035187482833862, 0.6787053942680359, 0.687325119972229, 0.6992440819740295, 0.6927230358123779, 0.710890531539917, 0.6733852624893188, 0.6879011392593384, 0.6819272637367249, 0.6682546734809875, 0.6654778122901917, 0.6550018787384033, 0.6774168610572815, 0.6859344244003296, 0.7369425892829895, 0.6859345436096191, 0.6757553219795227, 0.7300616502761841, 0.6553595662117004, 0.6860702037811279, 0.6655573844909668, 0.675072431564331, 0.7216790914535522, 0.708065927028656, 0.6859530806541443, 0.6577409505844116, 0.6859531998634338, 0.6953323483467102, 0.6946125030517578, 0.6861934661865234, 0.6946125030517578, 0.7173848748207092, 0.6724755764007568, 0.6861934065818787, 0.6946125626564026, 0.6939913034439087, 0.6724755764007568, 0.6705976724624634, 0.6516231298446655, 0.7141281366348267, 0.6774167418479919, 0.6859345436096191, 0.7165467739105225, 0.7141281962394714, 0.6688189506530762, 0.6859345436096191, 0.6553597450256348, 0.7410594820976257, 0.6859533190727234, 0.7047301530838013, 0.6774168610572815, 0.6859343647956848, 0.7165467739105225, 0.6671386957168579, 0.6961510181427002, 0.6859344244003296, 0.7063490152359009, 0.6577412486076355, 0.6553597450256348, 0.7190638184547424, 0.6859531998634338, 0.6671387553215027, 0.6859531998634338, 0.7141281366348267, 0.6860145330429077, 0.6859345436096191, 0.6961511373519897, 0.6765368580818176, 0.726744532585144, 0.7235260605812073, 0.694612443447113, 0.6783955097198486, 0.6774166822433472, 0.6859346032142639, 0.6553597450256348, 0.6420789361000061, 0.6626895666122437, 0.7117941379547119, 0.6980831027030945, 0.6970681548118591, 0.7165467739105225, 0.7141281366348267, 0.7032101154327393, 0.6939912438392639, 0.6934691667556763, 0.6744528412818909, 0.7074648141860962, 0.6992443203926086, 
0.6873250007629395, 0.662057101726532, 0.6864713430404663, 0.7017890810966492, 0.7004669904708862, 0.711639940738678, 0.7035185098648071, 0.6924991011619568, 0.6885769367218018, 0.6833033561706543, 0.6657336354255676, 0.7054421305656433, 0.6927227973937988, 0.7062925100326538, 0.6771834492683411, 0.6787055134773254, 0.7089163064956665, 0.6787055134773254, 0.7035185098648071, 0.7154881358146667, 0.6923748254776001, 0.6893523335456848, 0.6961726546287537, 0.6923501491546631, 0.6771834492683411, 0.7108903527259827, 0.6923748254776001, 0.6833566427230835, 0.6961726546287537, 0.6983458995819092, 0.6902278661727905, 0.710337221622467, 0.6946234703063965, 0.6939986944198608, 0.6934741139411926, 0.6936558485031128, 0.6916806101799011, 0.6939986944198608, 0.6922784447669983, 0.6926009654998779, 0.6946234703063965, 0.6953966021537781, 0.6922784447669983, 0.6939988136291504, 0.6904849410057068, 0.7009879350662231, 0.6904849410057068, 0.6939988136291504, 0.6952676177024841, 0.6932514905929565, 0.6916806101799011, 0.6926009654998779, 0.6924256682395935],\n [0.6845834851264954, 0.714557409286499, 0.6417245268821716, 0.6426114439964294, 0.676908016204834, 0.6596363186836243, 0.6010047793388367, 0.6583335399627686, 0.6423051953315735, 0.6252385973930359, 0.5903713703155518, 0.6268495321273804, 0.589474618434906, 0.5797603130340576, 0.5605499148368835, 0.561373233795166, 0.571896493434906, 0.5515089631080627, 0.53475022315979, 0.5062263607978821, 0.5230075716972351, 0.510095477104187, 0.404560923576355, 0.4952531158924103, 0.5080372095108032, 0.4546820819377899, 0.3762242794036865, 0.4545120298862457, 0.4356318712234497, 0.42067408561706543, 0.3556440770626068, 0.37861964106559753, 0.39252662658691406, 0.40825438499450684, 0.32834678888320923, 0.348328173160553, 0.24916067719459534, 0.34087973833084106, 0.29008814692497253, 0.27842581272125244, 0.2763988971710205, 0.2681778371334076, 0.2402539700269699, 0.2275475263595581, 0.24937856197357178, 0.24112485349178314, 
0.2519858479499817, 0.17717988789081573, 0.18434566259384155, 0.1777828186750412, 0.2565017342567444, 0.19697146117687225, 0.17142488062381744, 0.12871775031089783, 0.11260759085416794, 0.2114068865776062, 0.1914333701133728, 0.11530886590480804, 0.167332723736763, 0.16041263937950134, 0.12866191565990448, 0.15597859025001526, 0.2081473022699356, 0.0956139788031578, 0.14390972256660461, 0.08720321953296661, 0.16051031649112701, 0.07090023905038834, 0.12660036981105804, 0.06036912277340889, 0.07868777215480804, 0.07437726855278015, 0.03706624358892441, 0.08708903193473816, 0.048048458993434906, 0.08304107934236526, 0.13906075060367584, 0.08239659667015076, 0.04523441940546036, 0.14315050840377808, 0.06168464571237564, 0.02829252928495407, 0.06346246600151062, 0.09711773693561554, 0.0713791698217392, 0.030042534694075584, 0.020309817045927048, 0.017385778948664665, 0.1023128479719162, 0.033943045884370804, 0.15105409920215607, 0.0875217467546463, 0.0241351630538702, 0.021511144936084747, 0.43396979570388794, 0.05194273963570595, 0.018932990729808807, 0.30235084891319275, 0.014932023361325264, 0.035737793892621994, 0.07673761248588562, 0.3509562611579895, 0.049613747745752335, 0.07966945320367813, 0.004679353907704353, 0.044478461146354675, 0.043420229107141495, 0.02522539161145687, 0.38835403323173523, 0.017609840258955956, 0.016579898074269295, 0.03199652582406998, 0.09706789255142212, 0.005945832934230566, 0.23662501573562622, 0.03121006488800049, 0.037620846182107925, 0.013692362233996391, 0.022500745952129364, 0.011120911687612534, 0.027049649506807327, 0.027720334008336067, 0.0038731980603188276, 0.004106536507606506, 0.11524190753698349, 0.08156333118677139, 0.09217409044504166, 0.26894962787628174, 0.02597770281136036, 0.01886932924389839, 0.04878406599164009, 0.5152551531791687, 0.36342406272888184, 0.004652795847505331, 0.005375933833420277, 0.012198200449347496, 0.019326575100421906, 0.04520021751523018, 0.23542580008506775, 0.090694859623909, 
0.00967296864837408, 0.28795763850212097, 0.026030009612441063, 0.06408455967903137, 0.08752475678920746, 0.0022947590332478285, 0.004767645616084337, 0.025067416951060295, 0.16141098737716675, 0.09186969697475433, 0.09171997010707855, 0.027097012847661972, 0.11574391275644302, 0.18159152567386627, 0.019621726125478745, 0.23002763092517853, 0.004370448179543018, 0.007135648746043444, 0.019487056881189346, 0.01647159270942211, 0.22402292490005493, 0.0019302433356642723, 0.014990568161010742, 0.06524307280778885, 0.025793546810746193, 0.0039999703876674175, 0.21573284268379211, 0.01643890142440796, 0.003423704532906413, 0.006153523921966553, 0.003971239551901817, 0.06890764087438583, 0.0006971824914216995, 0.11413619667291641, 0.06743254512548447, 0.07301442325115204, 0.01851137913763523, 0.022294579073786736, 0.014995374716818333, 0.005857307929545641, 0.021342884749174118, 0.0018055700929835439, 0.000623525062110275, 0.018472924828529358, 0.22669300436973572, 0.0006525359931401908, 0.29654935002326965, 0.21874898672103882, 0.0008165992330759764, 0.24519266188144684, 0.009697365574538708, 0.18859906494617462, 0.13186874985694885, 0.1245938315987587, 0.0009449649369344115, 0.0004151250177528709, 0.013996547088027, 0.0066335154697299, 0.10195517539978027, 0.05289201810956001],\n [0.745194673538208, 0.6906776428222656, 0.687400758266449, 0.6717780828475952, 0.7604597210884094, 0.752467930316925, 0.6976206302642822, 0.704662561416626, 0.6916715502738953, 0.6934672594070435, 0.6470069289207458, 0.7148513793945312, 0.6763886213302612, 0.6802818775177002, 0.6672106385231018, 0.6484056711196899, 0.6637589931488037, 0.6583610773086548, 0.6557639837265015, 0.6428236365318298, 0.642822265625, 0.6243969202041626, 0.6294710040092468, 0.6075966358184814, 0.6001461744308472, 0.6056424379348755, 0.6350275278091431, 0.5835784673690796, 0.5997673273086548, 0.5480678081512451, 0.5856139659881592, 0.5785164833068848, 0.5884747505187988, 0.5700724124908447, 0.4928150177001953, 
0.5337228775024414, 0.5427252054214478, 0.5247185230255127, 0.5774044394493103, 0.5185093879699707, 0.5197265148162842, 0.4411318302154541, 0.5511063933372498, 0.5263891220092773, 0.45463821291923523, 0.5314087271690369, 0.47028473019599915, 0.4092979431152344, 0.5777102112770081, 0.5306834578514099, 0.42406684160232544, 0.4687281847000122, 0.5070981383323669, 0.47439032793045044, 0.37494319677352905, 0.4947170913219452, 0.4206700921058655, 0.4383554756641388, 0.409442663192749, 0.44664183259010315, 0.4921582043170929, 0.3810812830924988, 0.4118553102016449, 0.39941662549972534, 0.43318724632263184, 0.31750908493995667, 0.34022581577301025, 0.3589905798435211, 0.3815973997116089, 0.3389066755771637, 0.32979461550712585, 0.32433709502220154, 0.28116682171821594, 0.31867024302482605, 0.29788222908973694, 0.31168806552886963, 0.3142642080783844, 0.25582820177078247, 0.32176223397254944, 0.3123543858528137, 0.2679462432861328, 0.2546009123325348, 0.24331651628017426, 0.402679443359375, 0.21918657422065735, 0.23834611475467682, 0.36965858936309814, 0.24380004405975342, 0.22266092896461487, 0.28505799174308777, 0.33410510420799255, 0.2647447884082794, 0.23237332701683044, 0.22678805887699127, 0.2724705934524536, 0.22559556365013123, 0.200675368309021, 0.2558078169822693, 0.36495375633239746, 0.1662682741880417, 0.26781201362609863, 0.22160503268241882, 0.24419258534908295, 0.2391909956932068, 0.1725086271762848, 0.19450058043003082, 0.13128599524497986, 0.15883126854896545, 0.08883234858512878, 0.22445882856845856, 0.19227375090122223, 0.12227177619934082, 0.14730407297611237, 0.10880385339260101, 0.15898071229457855, 0.14434635639190674, 0.10622251778841019, 0.13339510560035706, 0.12552213668823242, 0.0874275416135788, 0.21024318039417267, 0.05612509697675705, 0.2596701383590698, 0.06474778801202774, 0.21115590631961823, 0.08705268055200577, 0.2965729236602783, 0.11845093965530396, 0.3113614022731781, 0.0852881520986557, 0.057083774358034134, 0.05873868614435196, 
0.18759706616401672, 0.1588224172592163, 0.2532350420951843, 0.06962744891643524, 0.07326000928878784, 0.07850103080272675, 0.07247595489025116, 0.05099700763821602, 0.09197735041379929, 0.0621706023812294, 0.13775122165679932, 0.03626300394535065, 0.09360489994287491, 0.08558189123868942, 0.0900314450263977, 0.06604598462581635, 0.05721459537744522, 0.12379155308008194, 0.07557165622711182, 0.18699273467063904, 0.04573093727231026, 0.04888857528567314, 0.07472683489322662, 0.1503097116947174, 0.13833612203598022, 0.036567263305187225, 0.09546559303998947, 0.17797045409679413, 0.20176424086093903, 0.06245369091629982, 0.20733888447284698, 0.07638655602931976, 0.22693926095962524, 0.09672175347805023, 0.18307197093963623, 0.2140817642211914, 0.024585099890828133, 0.047724127769470215, 0.03778181970119476, 0.035520292818546295, 0.06756170094013214, 0.025606723502278328, 0.0960211381316185, 0.055418919771909714, 0.08863727748394012, 0.11362741142511368, 0.09254298359155655, 0.3518190383911133, 0.0551731139421463, 0.12171444296836853, 0.12081704288721085, 0.08439555764198303, 0.027323825284838676, 0.10690995305776596, 0.1244480237364769, 0.0726374164223671, 0.06508301943540573, 0.17972348630428314, 0.10436990112066269, 0.09515547752380371, 0.09833931922912598, 0.1103367730975151, 0.09164026379585266, 0.03350993990898132, 0.05515069514513016, 0.1332990527153015, 0.027255253866314888, 0.05536457523703575]])\nCNN_ae16_MNIST = np.array([[0.7065368890762329, 0.7003437876701355, 0.7149812579154968, 0.702370822429657, 0.6912967562675476, 0.6774630546569824, 0.7349916100502014, 0.6901237368583679, 0.6894038915634155, 0.6863296031951904, 0.69279944896698, 0.6839656233787537, 0.6706349849700928, 0.6464716196060181, 0.7043266296386719, 0.6820791363716125, 0.6842585206031799, 0.6803882122039795, 0.6784244775772095, 0.665507972240448, 0.6820626854896545, 0.6624901294708252, 0.6764769554138184, 0.66874098777771, 0.6772385239601135, 0.6694998741149902, 0.6600099802017212, 
0.6915762424468994, 0.6615751385688782, 0.6696990728378296, 0.6297839283943176, 0.6528367400169373, 0.6536012887954712, 0.6452928781509399, 0.6481162309646606, 0.6480607390403748, 0.6370586156845093, 0.6382020711898804, 0.6140261888504028, 0.6135526299476624, 0.6092220544815063, 0.6079503297805786, 0.5887094736099243, 0.5322624444961548, 0.6122974753379822, 0.5561879277229309, 0.6315677762031555, 0.6449221968650818, 0.5963243246078491, 0.5868348479270935, 0.6084422469139099, 0.5791459083557129, 0.6085484027862549, 0.5580623745918274, 0.5514646768569946, 0.5585185885429382, 0.551949679851532, 0.5090385675430298, 0.48625242710113525, 0.5502084493637085, 0.4771082401275635, 0.4774615168571472, 0.48718807101249695, 0.4917224943637848, 0.4732765257358551, 0.48384198546409607, 0.43808743357658386, 0.4454682171344757, 0.4767683446407318, 0.482342004776001, 0.45924878120422363, 0.45319855213165283, 0.48693686723709106, 0.3850550949573517, 0.410541832447052, 0.4543793499469757, 0.366267591714859, 0.4305279552936554, 0.34036755561828613, 0.36081624031066895, 0.3392334282398224, 0.37473422288894653, 0.28155049681663513, 0.3425044119358063, 0.2705245912075043, 0.31728631258010864, 0.2448488026857376, 0.31782156229019165, 0.3272281587123871, 0.37958672642707825, 0.29934489727020264, 0.21500541269779205, 0.29144471883773804, 0.30118197202682495, 0.16127417981624603, 0.371753454208374, 0.21204784512519836, 0.2533915042877197, 0.18653224408626556, 0.20099136233329773, 0.15585556626319885, 0.15119512379169464, 0.2260332703590393, 0.2896896302700043, 0.1453980952501297, 0.20472170412540436, 0.16929101943969727, 0.20646162331104279, 0.14845998585224152, 0.10794796794652939, 0.09016146510839462, 0.12105012685060501, 0.23024918138980865, 0.14298059046268463, 0.1215953603386879, 0.29008549451828003, 0.13764692842960358, 0.1657395362854004, 0.15904945135116577, 0.09157120436429977, 0.13966745138168335, 0.16450193524360657, 0.09266634285449982, 0.16199813783168793, 0.22420541942119598, 
0.1499921977519989, 0.08530532568693161, 0.20761361718177795, 0.07668470591306686, 0.2324078232049942, 0.04722387343645096, 0.2246658354997635, 0.10235711932182312, 0.16499486565589905, 0.04430126026272774, 0.22800308465957642, 0.07830563187599182, 0.11839699000120163, 0.07917902618646622, 0.09214942902326584, 0.08970518410205841, 0.14069652557373047, 0.0409579798579216, 0.0552971251308918, 0.10124063491821289, 0.02533102221786976, 0.16206085681915283, 0.03471945971250534, 0.10241880267858505, 0.1285448670387268, 0.12653371691703796, 0.12206773459911346, 0.06682068109512329, 0.050219982862472534, 0.10264866054058075, 0.041177548468112946, 0.047057829797267914, 0.5175573825836182, 0.03898908570408821, 0.16199985146522522, 0.04879048466682434, 0.029836969450116158, 0.553887128829956, 0.06854133307933807, 0.021930286660790443, 0.02233067713677883, 0.02433081343770027, 0.02149748057126999, 0.017385564744472504, 0.006968638394027948, 0.3127046227455139, 0.007114108186215162, 0.0656125620007515, 0.35484519600868225, 0.013066131621599197, 0.056551698595285416, 0.07355687767267227, 0.02248852699995041, 0.07129108905792236, 0.029671985656023026, 0.06515096873044968, 0.02897089160978794, 0.15118078887462616, 0.22743676602840424, 0.14168672263622284, 0.02568655088543892, 0.031071646139025688, 0.01346743106842041, 0.004205648321658373, 0.004720211960375309, 0.15807323157787323, 0.3110294044017792, 0.2914554178714752, 0.014225986786186695, 0.047441449016332626, 0.051407698541879654, 0.0412781722843647, 0.01342132780700922, 0.012588735669851303, 0.06328532099723816],\n [0.6780185103416443, 0.6644222736358643, 0.7100399136543274, 0.6734641790390015, 0.718930721282959, 0.6536064743995667, 0.6319339871406555, 0.6494995951652527, 0.6571738719940186, 0.628243625164032, 0.6847190856933594, 0.6382237076759338, 0.6548020839691162, 0.6372569799423218, 0.611177384853363, 0.6946464776992798, 0.6374369859695435, 0.5625322461128235, 0.5938064455986023, 0.6619278192520142, 0.6325113773345947, 
0.6251735091209412, 0.584563672542572, 0.5929437279701233, 0.5849477052688599, 0.578557014465332, 0.5738715529441833, 0.5618525147438049, 0.5726300477981567, 0.5653887987136841, 0.5166035890579224, 0.5080764889717102, 0.5085264444351196, 0.5040621161460876, 0.5203070640563965, 0.47883617877960205, 0.44310492277145386, 0.5014610290527344, 0.5264825224876404, 0.44693753123283386, 0.4781651198863983, 0.49643418192863464, 0.39393559098243713, 0.3919772803783417, 0.38888055086135864, 0.4487069845199585, 0.40776526927948, 0.39846813678741455, 0.40925952792167664, 0.42793193459510803, 0.35712873935699463, 0.3715760111808777, 0.40747466683387756, 0.4689115285873413, 0.4186822474002838, 0.35661789774894714, 0.4552532434463501, 0.4150001108646393, 0.40666961669921875, 0.3998912572860718, 0.40753278136253357, 0.4202790856361389, 0.33726823329925537, 0.27870211005210876, 0.32782912254333496, 0.3981657922267914, 0.36285293102264404, 0.3723219931125641, 0.29885634779930115, 0.3615414798259735, 0.26683101058006287, 0.28359338641166687, 0.2416490614414215, 0.28949710726737976, 0.3177807927131653, 0.25654059648513794, 0.24744893610477448, 0.266760915517807, 0.23680643737316132, 0.23136383295059204, 0.3128264546394348, 0.20581257343292236, 0.32397106289863586, 0.21248655021190643, 0.28707897663116455, 0.1919068545103073, 0.25955668091773987, 0.22676771879196167, 0.1780906617641449, 0.15193215012550354, 0.2890777289867401, 0.2031192183494568, 0.2267063707113266, 0.2370833158493042, 0.1690015345811844, 0.2356700450181961, 0.21451929211616516, 0.1733465939760208, 0.18371181190013885, 0.18114548921585083, 0.19573748111724854, 0.20266081392765045, 0.20864766836166382, 0.1162150502204895, 0.1900711953639984, 0.19181284308433533, 0.1968395859003067, 0.17900457978248596, 0.15975268185138702, 0.09224855154752731, 0.23955699801445007, 0.4099787473678589, 0.250662237405777, 0.16017885506153107, 0.12024583667516708, 0.20037534832954407, 0.13225311040878296, 0.12927724421024323, 
0.14735716581344604, 0.1250939518213272, 0.16668175160884857, 0.07290227711200714, 0.08595309406518936, 0.10609139502048492, 0.20080703496932983, 0.1366996169090271, 0.18962329626083374, 0.1065688207745552, 0.14510667324066162, 0.13127821683883667, 0.16892145574092865, 0.2109244167804718, 0.10463409125804901, 0.16487818956375122, 0.17136803269386292, 0.2373611479997635, 0.15197767317295074, 0.17975015938282013, 0.06766042858362198, 0.15626747906208038, 0.1367357224225998, 0.2459682822227478, 0.2758179306983948, 0.17076078057289124, 0.17697373032569885, 0.20246128737926483, 0.20905643701553345, 0.18853744864463806, 0.10223725438117981, 0.06854937225580215, 0.08883953094482422, 0.12384413927793503, 0.08326254785060883, 0.10077401995658875, 0.09738656878471375, 0.15612775087356567, 0.2510952949523926, 0.08482635766267776, 0.13705775141716003, 0.22087755799293518, 0.1654137372970581, 0.11067379266023636, 0.153641015291214, 0.10795620828866959, 0.12058870494365692, 0.0814756453037262, 0.05282342806458473, 0.10233134031295776, 0.19444438815116882, 0.04038533195853233, 0.08606348186731339, 0.13369415700435638, 0.1537339985370636, 0.03687490522861481, 0.11885270476341248, 0.21197648346424103, 0.09066817909479141, 0.2732146382331848, 0.036027535796165466, 0.15028147399425507, 0.10358303040266037, 0.2559829652309418, 0.17508073151111603, 0.1235189139842987, 0.12764960527420044, 0.08290048688650131, 0.0637039840221405, 0.05582984909415245, 0.18901661038398743, 0.1595655381679535, 0.10146508365869522, 0.05268688499927521, 0.080548495054245, 0.04376934468746185, 0.3382478356361389, 0.22849732637405396, 0.03953217715024948, 0.40877434611320496, 0.10987094789743423, 0.08727411180734634],\n [0.6770550608634949, 0.7237497568130493, 0.6947844624519348, 0.7067459225654602, 0.6957970261573792, 0.6787075996398926, 0.70314621925354, 0.6880691647529602, 0.7025871276855469, 0.7005719542503357, 0.6837099194526672, 0.6818492412567139, 0.6908509135246277, 0.6804149746894836, 
0.6801525354385376, 0.6744945645332336, 0.6976860761642456, 0.6703171730041504, 0.6541774868965149, 0.6656630635261536, 0.6684213876724243, 0.6587322950363159, 0.6615912914276123, 0.658138632774353, 0.6534469723701477, 0.6312496066093445, 0.6428653597831726, 0.6649488210678101, 0.6348531246185303, 0.6151833534240723, 0.6282292604446411, 0.6142253279685974, 0.5753376483917236, 0.5842522978782654, 0.5632873177528381, 0.6573774218559265, 0.588456928730011, 0.5157027840614319, 0.5911018252372742, 0.6025866866111755, 0.5390349626541138, 0.6093457937240601, 0.5013173818588257, 0.5636354684829712, 0.5328893065452576, 0.5500198602676392, 0.5299379825592041, 0.5293447971343994, 0.4289722740650177, 0.5439169406890869, 0.46730121970176697, 0.5336241722106934, 0.490133136510849, 0.472728967666626, 0.46983009576797485, 0.4608429968357086, 0.35383450984954834, 0.5250436067581177, 0.3721282184123993, 0.4489944577217102, 0.43230074644088745, 0.39253008365631104, 0.4301861822605133, 0.40975403785705566, 0.3801082968711853, 0.3886154890060425, 0.4382496774196625, 0.4278712570667267, 0.4541921615600586, 0.3530365228652954, 0.4126523733139038, 0.30551573634147644, 0.38659581542015076, 0.3840978145599365, 0.33606183528900146, 0.33827731013298035, 0.25422403216362, 0.38028475642204285, 0.4306930601596832, 0.312373548746109, 0.2753980755805969, 0.4396611154079437, 0.34227100014686584, 0.2695944905281067, 0.2943863272666931, 0.3525199592113495, 0.3128001093864441, 0.22347086668014526, 0.29590070247650146, 0.3093019723892212, 0.25390151143074036, 0.2948029637336731, 0.3667975664138794, 0.244979590177536, 0.23593711853027344, 0.2099091112613678, 0.27848178148269653, 0.16480441391468048, 0.24051722884178162, 0.20824038982391357, 0.31749868392944336, 0.23730355501174927, 0.34100231528282166, 0.16794155538082123, 0.19178971648216248, 0.21412892639636993, 0.2682236433029175, 0.21702884137630463, 0.2093488723039627, 0.22882378101348877, 0.32993048429489136, 0.26206931471824646, 
0.22616299986839294, 0.17785926163196564, 0.16299331188201904, 0.1902775913476944, 0.19327259063720703, 0.1913856565952301, 0.240230992436409, 0.12252030521631241, 0.2209087759256363, 0.11843746900558472, 0.21306882798671722, 0.16986201703548431, 0.0772564634680748, 0.22831083834171295, 0.09920819103717804, 0.1827843338251114, 0.12026525288820267, 0.20385046303272247, 0.23395688831806183, 0.1167125403881073, 0.12339767813682556, 0.18948201835155487, 0.13558858633041382, 0.17519596219062805, 0.19959506392478943, 0.1766834855079651, 0.19575665891170502, 0.07383810728788376, 0.12596653401851654, 0.355495423078537, 0.22941932082176208, 0.13620354235172272, 0.15461966395378113, 0.0815073773264885, 0.16304072737693787, 0.11182961612939835, 0.09898733347654343, 0.1867913007736206, 0.11733875423669815, 0.20732858777046204, 0.07889830321073532, 0.060346633195877075, 0.13220614194869995, 0.21017083525657654, 0.1633220911026001, 0.22407913208007812, 0.1629914790391922, 0.07849102467298508, 0.0634986013174057, 0.08346734195947647, 0.08234986662864685, 0.07101165503263474, 0.06477346271276474, 0.0874917134642601, 0.16463083028793335, 0.07359597831964493, 0.30306488275527954, 0.0926952064037323, 0.057987429201602936, 0.10901831835508347, 0.08484792709350586, 0.07345649600028992, 0.0626828670501709, 0.0643729493021965, 0.03987567126750946, 0.11905873566865921, 0.047831740230321884, 0.06862327456474304, 0.0418168306350708, 0.08677629381418228, 0.08008544892072678, 0.12643030285835266, 0.02508743852376938, 0.15694452822208405, 0.021024145185947418, 0.04301491752266884, 0.03548141196370125, 0.019000593572854996, 0.2740986943244934, 0.05691000819206238, 0.13156375288963318, 0.07106415182352066, 0.05433608964085579, 0.08655295521020889, 0.13069750368595123, 0.07799041271209717, 0.13537554442882538, 0.13349904119968414],\n [0.7577635049819946, 0.678739070892334, 0.7259731292724609, 0.7489646673202515, 0.6658592820167542, 0.7305440306663513, 0.6511856317520142, 0.6305023431777954, 
0.6440613269805908, 0.704413652420044, 0.6304953098297119, 0.6533329486846924, 0.6710024476051331, 0.657980740070343, 0.6494162678718567, 0.6259380578994751, 0.6381387114524841, 0.6340850591659546, 0.6146249175071716, 0.5878856182098389, 0.6152567267417908, 0.5630015730857849, 0.590792715549469, 0.5971272587776184, 0.5606115460395813, 0.5727154016494751, 0.5503643155097961, 0.5492763519287109, 0.5123627185821533, 0.5328343510627747, 0.529090940952301, 0.4617682695388794, 0.5146098136901855, 0.4771166145801544, 0.4632205069065094, 0.4295308291912079, 0.4490692615509033, 0.44627848267555237, 0.4484800100326538, 0.444498747587204, 0.3908357620239258, 0.4689673185348511, 0.4632866382598877, 0.4115380346775055, 0.39370858669281006, 0.30355367064476013, 0.41898539662361145, 0.4747936725616455, 0.40032410621643066, 0.3521833121776581, 0.3285800516605377, 0.3449598550796509, 0.3267514109611511, 0.27666252851486206, 0.40049445629119873, 0.28862252831459045, 0.2854417860507965, 0.2501191198825836, 0.41706961393356323, 0.39194434881210327, 0.2826034426689148, 0.30905184149742126, 0.2411859929561615, 0.2892632782459259, 0.26024991273880005, 0.3531080484390259, 0.2718490660190582, 0.2617754638195038, 0.30297356843948364, 0.23234951496124268, 0.2546401619911194, 0.2674880921840668, 0.17543582618236542, 0.29156941175460815, 0.21771784126758575, 0.19351263344287872, 0.2697051167488098, 0.2424647957086563, 0.30030739307403564, 0.3032171130180359, 0.22974607348442078, 0.22927767038345337, 0.3299483060836792, 0.31792691349983215, 0.1899193823337555, 0.34642454981803894, 0.16717128455638885, 0.23219604790210724, 0.31647077202796936, 0.16464035212993622, 0.2792598009109497, 0.3845776617527008, 0.22498831152915955, 0.368135005235672, 0.3216128945350647, 0.4005036950111389, 0.11494707316160202, 0.27637749910354614, 0.2742801904678345, 0.2814248502254486, 0.12983664870262146, 0.18431724607944489, 0.3716731369495392, 0.1450338512659073, 0.1519049108028412, 0.08916042000055313, 
0.5924513339996338, 0.3596770763397217, 0.1730351597070694, 0.24228975176811218, 0.32532668113708496, 0.18476377427577972, 0.3176874816417694, 0.22536367177963257, 0.10236401855945587, 0.1522556096315384, 0.25119826197624207, 0.24328704178333282, 0.15541183948516846, 0.17297859489917755, 0.3045123219490051, 0.3897930085659027, 0.09784858673810959, 0.1992620974779129, 0.16695831716060638, 0.11487163603305817, 0.18504993617534637, 0.3366742730140686, 0.2548673748970032, 0.08781551569700241, 0.10056190192699432, 0.23175100982189178, 0.17468896508216858, 0.10740732401609421, 0.2690909504890442, 0.2774558663368225, 0.24492861330509186, 0.14368529617786407, 0.09874387085437775, 0.3076358437538147, 0.12575972080230713, 0.10234867036342621, 0.0761861652135849, 0.3204321265220642, 0.1882958561182022, 0.5237845778465271, 0.22675678133964539, 0.23402005434036255, 0.36445730924606323, 0.25543537735939026, 0.3169364035129547, 0.2608758211135864, 0.10393598675727844, 0.11738705635070801, 0.17619460821151733, 0.1662699580192566, 0.3865358233451843, 0.1978747546672821, 0.19265756011009216, 0.15979154407978058, 0.14881408214569092, 0.2497793436050415, 0.18908999860286713, 0.24054080247879028, 0.1965220868587494, 0.18746213614940643, 0.25249794125556946, 0.11708733439445496, 0.11060264706611633, 0.06545650213956833, 0.06839670985937119, 0.13645055890083313, 0.16494742035865784, 0.23327822983264923, 0.2458374798297882, 0.26116079092025757, 0.04233985394239426, 0.15902374684810638, 0.11706103384494781, 0.21211761236190796, 0.15592440962791443, 0.27285754680633545, 0.2595636248588562, 0.13461200892925262, 0.12773233652114868, 0.23858562111854553, 0.09488450735807419, 0.2547829747200012, 0.06679581850767136, 0.1866726130247116, 0.22343210875988007, 0.08384843170642853, 0.45484665036201477, 0.18964597582817078, 0.1164105013012886, 0.20695441961288452, 0.44840899109840393, 0.2154744565486908, 0.1576719731092453, 0.066319040954113],\n [0.8061190247535706, 0.7576419711112976, 
0.7872461676597595, 0.7264105081558228, 0.7155753970146179, 0.708035945892334, 0.7211142778396606, 0.6964155435562134, 0.7199532985687256, 0.7073280811309814, 0.7112325429916382, 0.694496750831604, 0.6934433579444885, 0.6954359412193298, 0.6844663023948669, 0.6808801293373108, 0.7078092098236084, 0.6868506073951721, 0.6697349548339844, 0.6985956430435181, 0.663241446018219, 0.6235044002532959, 0.6443854570388794, 0.690306544303894, 0.6288673281669617, 0.6155658960342407, 0.733472466468811, 0.6430254578590393, 0.6549240946769714, 0.6049500107765198, 0.6359832286834717, 0.6050975918769836, 0.6063121557235718, 0.6351621150970459, 0.579750120639801, 0.5867777466773987, 0.59004807472229, 0.5693145990371704, 0.5568662881851196, 0.5739469528198242, 0.5782334804534912, 0.5429723858833313, 0.5128393769264221, 0.5466747283935547, 0.48293545842170715, 0.48196735978126526, 0.5065867900848389, 0.49852344393730164, 0.45901215076446533, 0.40931978821754456, 0.428974449634552, 0.47155138850212097, 0.43810999393463135, 0.46254390478134155, 0.4710542559623718, 0.4128185510635376, 0.4943501949310303, 0.42550089955329895, 0.4105769395828247, 0.3495163321495056, 0.3751329481601715, 0.36509761214256287, 0.4177945852279663, 0.3158710300922394, 0.33697348833084106, 0.3092765808105469, 0.2896036207675934, 0.37296169996261597, 0.25482386350631714, 0.3853759467601776, 0.254815936088562, 0.23426395654678345, 0.31131431460380554, 0.3108240067958832, 0.14274778962135315, 0.2358514368534088, 0.18734636902809143, 0.21697750687599182, 0.22164548933506012, 0.3447626233100891, 0.17762762308120728, 0.19720008969306946, 0.1745360940694809, 0.2939648926258087, 0.18006889522075653, 0.15123696625232697, 0.16843503713607788, 0.2367013692855835, 0.13912208378314972, 0.16171474754810333, 0.2124139815568924, 0.28575873374938965, 0.14535123109817505, 0.2045067399740219, 0.17241224646568298, 0.14510378241539001, 0.125568687915802, 0.13140064477920532, 0.1751890629529953, 0.06767867505550385, 
0.1862007975578308, 0.20667432248592377, 0.14108432829380035, 0.2074945867061615, 0.06632974743843079, 0.18803325295448303, 0.13561967015266418, 0.20918512344360352, 0.06811677664518356, 0.2285996824502945, 0.1469579041004181, 0.15120923519134521, 0.1583244651556015, 0.08857985585927963, 0.14347146451473236, 0.22411327064037323, 0.17649629712104797, 0.0809209942817688, 0.13232378661632538, 0.12987901270389557, 0.19623716175556183, 0.18164461851119995, 0.10701523721218109, 0.16451211273670197, 0.1826278567314148, 0.16277055442333221, 0.12477701157331467, 0.14174526929855347, 0.13562481105327606, 0.13789580762386322, 0.08075360953807831, 0.1419735550880432, 0.1007152646780014, 0.08524568378925323, 0.08857472240924835, 0.20039601624011993, 0.10289433598518372, 0.06806305050849915, 0.14893199503421783, 0.0533442497253418, 0.08558276295661926, 0.17814120650291443, 0.08843161910772324, 0.07493478059768677, 0.07206104695796967, 0.04764445126056671, 0.2933162748813629, 0.20048680901527405, 0.09041830152273178, 0.15401454269886017, 0.1780223697423935, 0.27529364824295044, 0.27035948634147644, 0.14288881421089172, 0.04312678799033165, 0.27407005429267883, 0.07684631645679474, 0.12959372997283936, 0.09508629143238068, 0.0811772346496582, 0.09898901730775833, 0.029180357232689857, 0.03322149068117142, 0.036205098032951355, 0.031379591673612595, 0.2282934933900833, 0.08287832885980606, 0.03673117607831955, 0.019469337537884712, 0.1488036960363388, 0.24263115227222443, 0.054281048476696014, 0.13069437444210052, 0.05626692622900009, 0.35922014713287354, 0.12846451997756958, 0.03870527818799019, 0.024457039311528206, 0.06294092535972595, 0.046115029603242874, 0.028667038306593895, 0.1919068545103073, 0.11461522430181503, 0.07431669533252716, 0.08665861934423447, 0.030386319383978844, 0.04411404952406883, 0.03287091106176376, 0.053779855370521545, 0.2850929796695709, 0.16664470732212067, 0.18365567922592163, 0.17537373304367065, 0.05938854068517685, 0.041627515107393265, 
0.048675309866666794, 0.1356741487979889, 0.10164114087820053, 0.07315782457590103, 0.12214832007884979]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_ae16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_ae16_MNIST = np.array([[0.7065368890762329, 0.7003437876701355, 0.7149812579154968, 0.702370822429657, 0.6912967562675476, 0.6774630546569824, 0.7349916100502014, 0.6901237368583679, 0.6894038915634155, 0.6863296031951904, 0.69279944896698, 0.6839656233787537, 0.6706349849700928, 0.6464716196060181, 0.7043266296386719, 0.6820791363716125, 0.6842585206031799, 0.6803882122039795, 0.6784244775772095, 0.665507972240448, 0.6820626854896545, 0.6624901294708252, 0.6764769554138184, 0.66874098777771, 0.6772385239601135, 0.6694998741149902, 0.6600099802017212, 0.6915762424468994, 0.6615751385688782, 0.6696990728378296, 0.6297839283943176, 0.6528367400169373, 0.6536012887954712, 0.6452928781509399, 0.6481162309646606, 0.6480607390403748, 0.6370586156845093, 0.6382020711898804, 0.6140261888504028, 0.6135526299476624, 0.6092220544815063, 0.6079503297805786, 0.5887094736099243, 0.5322624444961548, 0.6122974753379822, 0.5561879277229309, 0.6315677762031555, 0.6449221968650818, 0.5963243246078491, 0.5868348479270935, 0.6084422469139099, 0.5791459083557129, 0.6085484027862549, 0.5580623745918274, 0.5514646768569946, 0.5585185885429382, 0.551949679851532, 0.5090385675430298, 0.48625242710113525, 0.5502084493637085, 0.4771082401275635, 0.4774615168571472, 0.48718807101249695, 0.4917224943637848, 0.4732765257358551, 0.48384198546409607, 0.43808743357658386, 0.4454682171344757, 0.4767683446407318, 0.482342004776001, 0.45924878120422363, 0.45319855213165283, 0.48693686723709106, 
0.3850550949573517, 0.410541832447052, 0.4543793499469757, 0.366267591714859, 0.4305279552936554, 0.34036755561828613, 0.36081624031066895, 0.3392334282398224, 0.37473422288894653, 0.28155049681663513, 0.3425044119358063, 0.2705245912075043, 0.31728631258010864, 0.2448488026857376, 0.31782156229019165, 0.3272281587123871, 0.37958672642707825, 0.29934489727020264, 0.21500541269779205, 0.29144471883773804, 0.30118197202682495, 0.16127417981624603, 0.371753454208374, 0.21204784512519836, 0.2533915042877197, 0.18653224408626556, 0.20099136233329773, 0.15585556626319885, 0.15119512379169464, 0.2260332703590393, 0.2896896302700043, 0.1453980952501297, 0.20472170412540436, 0.16929101943969727, 0.20646162331104279, 0.14845998585224152, 0.10794796794652939, 0.09016146510839462, 0.12105012685060501, 0.23024918138980865, 0.14298059046268463, 0.1215953603386879, 0.29008549451828003, 0.13764692842960358, 0.1657395362854004, 0.15904945135116577, 0.09157120436429977, 0.13966745138168335, 0.16450193524360657, 0.09266634285449982, 0.16199813783168793, 0.22420541942119598, 0.1499921977519989, 0.08530532568693161, 0.20761361718177795, 0.07668470591306686, 0.2324078232049942, 0.04722387343645096, 0.2246658354997635, 0.10235711932182312, 0.16499486565589905, 0.04430126026272774, 0.22800308465957642, 0.07830563187599182, 0.11839699000120163, 0.07917902618646622, 0.09214942902326584, 0.08970518410205841, 0.14069652557373047, 0.0409579798579216, 0.0552971251308918, 0.10124063491821289, 0.02533102221786976, 0.16206085681915283, 0.03471945971250534, 0.10241880267858505, 0.1285448670387268, 0.12653371691703796, 0.12206773459911346, 0.06682068109512329, 0.050219982862472534, 0.10264866054058075, 0.041177548468112946, 0.047057829797267914, 0.5175573825836182, 0.03898908570408821, 0.16199985146522522, 0.04879048466682434, 0.029836969450116158, 0.553887128829956, 0.06854133307933807, 0.021930286660790443, 0.02233067713677883, 0.02433081343770027, 0.02149748057126999, 0.017385564744472504, 
0.006968638394027948, 0.3127046227455139, 0.007114108186215162, 0.0656125620007515, 0.35484519600868225, 0.013066131621599197, 0.056551698595285416, 0.07355687767267227, 0.02248852699995041, 0.07129108905792236, 0.029671985656023026, 0.06515096873044968, 0.02897089160978794, 0.15118078887462616, 0.22743676602840424, 0.14168672263622284, 0.02568655088543892, 0.031071646139025688, 0.01346743106842041, 0.004205648321658373, 0.004720211960375309, 0.15807323157787323, 0.3110294044017792, 0.2914554178714752, 0.014225986786186695, 0.047441449016332626, 0.051407698541879654, 0.0412781722843647, 0.01342132780700922, 0.012588735669851303, 0.06328532099723816],\n [0.6780185103416443, 0.6644222736358643, 0.7100399136543274, 0.6734641790390015, 0.718930721282959, 0.6536064743995667, 0.6319339871406555, 0.6494995951652527, 0.6571738719940186, 0.628243625164032, 0.6847190856933594, 0.6382237076759338, 0.6548020839691162, 0.6372569799423218, 0.611177384853363, 0.6946464776992798, 0.6374369859695435, 0.5625322461128235, 0.5938064455986023, 0.6619278192520142, 0.6325113773345947, 0.6251735091209412, 0.584563672542572, 0.5929437279701233, 0.5849477052688599, 0.578557014465332, 0.5738715529441833, 0.5618525147438049, 0.5726300477981567, 0.5653887987136841, 0.5166035890579224, 0.5080764889717102, 0.5085264444351196, 0.5040621161460876, 0.5203070640563965, 0.47883617877960205, 0.44310492277145386, 0.5014610290527344, 0.5264825224876404, 0.44693753123283386, 0.4781651198863983, 0.49643418192863464, 0.39393559098243713, 0.3919772803783417, 0.38888055086135864, 0.4487069845199585, 0.40776526927948, 0.39846813678741455, 0.40925952792167664, 0.42793193459510803, 0.35712873935699463, 0.3715760111808777, 0.40747466683387756, 0.4689115285873413, 0.4186822474002838, 0.35661789774894714, 0.4552532434463501, 0.4150001108646393, 0.40666961669921875, 0.3998912572860718, 0.40753278136253357, 0.4202790856361389, 0.33726823329925537, 0.27870211005210876, 0.32782912254333496, 0.3981657922267914, 
0.36285293102264404, 0.3723219931125641, 0.29885634779930115, 0.3615414798259735, 0.26683101058006287, 0.28359338641166687, 0.2416490614414215, 0.28949710726737976, 0.3177807927131653, 0.25654059648513794, 0.24744893610477448, 0.266760915517807, 0.23680643737316132, 0.23136383295059204, 0.3128264546394348, 0.20581257343292236, 0.32397106289863586, 0.21248655021190643, 0.28707897663116455, 0.1919068545103073, 0.25955668091773987, 0.22676771879196167, 0.1780906617641449, 0.15193215012550354, 0.2890777289867401, 0.2031192183494568, 0.2267063707113266, 0.2370833158493042, 0.1690015345811844, 0.2356700450181961, 0.21451929211616516, 0.1733465939760208, 0.18371181190013885, 0.18114548921585083, 0.19573748111724854, 0.20266081392765045, 0.20864766836166382, 0.1162150502204895, 0.1900711953639984, 0.19181284308433533, 0.1968395859003067, 0.17900457978248596, 0.15975268185138702, 0.09224855154752731, 0.23955699801445007, 0.4099787473678589, 0.250662237405777, 0.16017885506153107, 0.12024583667516708, 0.20037534832954407, 0.13225311040878296, 0.12927724421024323, 0.14735716581344604, 0.1250939518213272, 0.16668175160884857, 0.07290227711200714, 0.08595309406518936, 0.10609139502048492, 0.20080703496932983, 0.1366996169090271, 0.18962329626083374, 0.1065688207745552, 0.14510667324066162, 0.13127821683883667, 0.16892145574092865, 0.2109244167804718, 0.10463409125804901, 0.16487818956375122, 0.17136803269386292, 0.2373611479997635, 0.15197767317295074, 0.17975015938282013, 0.06766042858362198, 0.15626747906208038, 0.1367357224225998, 0.2459682822227478, 0.2758179306983948, 0.17076078057289124, 0.17697373032569885, 0.20246128737926483, 0.20905643701553345, 0.18853744864463806, 0.10223725438117981, 0.06854937225580215, 0.08883953094482422, 0.12384413927793503, 0.08326254785060883, 0.10077401995658875, 0.09738656878471375, 0.15612775087356567, 0.2510952949523926, 0.08482635766267776, 0.13705775141716003, 0.22087755799293518, 0.1654137372970581, 0.11067379266023636, 
0.153641015291214, 0.10795620828866959, 0.12058870494365692, 0.0814756453037262, 0.05282342806458473, 0.10233134031295776, 0.19444438815116882, 0.04038533195853233, 0.08606348186731339, 0.13369415700435638, 0.1537339985370636, 0.03687490522861481, 0.11885270476341248, 0.21197648346424103, 0.09066817909479141, 0.2732146382331848, 0.036027535796165466, 0.15028147399425507, 0.10358303040266037, 0.2559829652309418, 0.17508073151111603, 0.1235189139842987, 0.12764960527420044, 0.08290048688650131, 0.0637039840221405, 0.05582984909415245, 0.18901661038398743, 0.1595655381679535, 0.10146508365869522, 0.05268688499927521, 0.080548495054245, 0.04376934468746185, 0.3382478356361389, 0.22849732637405396, 0.03953217715024948, 0.40877434611320496, 0.10987094789743423, 0.08727411180734634],\n [0.6770550608634949, 0.7237497568130493, 0.6947844624519348, 0.7067459225654602, 0.6957970261573792, 0.6787075996398926, 0.70314621925354, 0.6880691647529602, 0.7025871276855469, 0.7005719542503357, 0.6837099194526672, 0.6818492412567139, 0.6908509135246277, 0.6804149746894836, 0.6801525354385376, 0.6744945645332336, 0.6976860761642456, 0.6703171730041504, 0.6541774868965149, 0.6656630635261536, 0.6684213876724243, 0.6587322950363159, 0.6615912914276123, 0.658138632774353, 0.6534469723701477, 0.6312496066093445, 0.6428653597831726, 0.6649488210678101, 0.6348531246185303, 0.6151833534240723, 0.6282292604446411, 0.6142253279685974, 0.5753376483917236, 0.5842522978782654, 0.5632873177528381, 0.6573774218559265, 0.588456928730011, 0.5157027840614319, 0.5911018252372742, 0.6025866866111755, 0.5390349626541138, 0.6093457937240601, 0.5013173818588257, 0.5636354684829712, 0.5328893065452576, 0.5500198602676392, 0.5299379825592041, 0.5293447971343994, 0.4289722740650177, 0.5439169406890869, 0.46730121970176697, 0.5336241722106934, 0.490133136510849, 0.472728967666626, 0.46983009576797485, 0.4608429968357086, 0.35383450984954834, 0.5250436067581177, 0.3721282184123993, 0.4489944577217102, 
0.43230074644088745, 0.39253008365631104, 0.4301861822605133, 0.40975403785705566, 0.3801082968711853, 0.3886154890060425, 0.4382496774196625, 0.4278712570667267, 0.4541921615600586, 0.3530365228652954, 0.4126523733139038, 0.30551573634147644, 0.38659581542015076, 0.3840978145599365, 0.33606183528900146, 0.33827731013298035, 0.25422403216362, 0.38028475642204285, 0.4306930601596832, 0.312373548746109, 0.2753980755805969, 0.4396611154079437, 0.34227100014686584, 0.2695944905281067, 0.2943863272666931, 0.3525199592113495, 0.3128001093864441, 0.22347086668014526, 0.29590070247650146, 0.3093019723892212, 0.25390151143074036, 0.2948029637336731, 0.3667975664138794, 0.244979590177536, 0.23593711853027344, 0.2099091112613678, 0.27848178148269653, 0.16480441391468048, 0.24051722884178162, 0.20824038982391357, 0.31749868392944336, 0.23730355501174927, 0.34100231528282166, 0.16794155538082123, 0.19178971648216248, 0.21412892639636993, 0.2682236433029175, 0.21702884137630463, 0.2093488723039627, 0.22882378101348877, 0.32993048429489136, 0.26206931471824646, 0.22616299986839294, 0.17785926163196564, 0.16299331188201904, 0.1902775913476944, 0.19327259063720703, 0.1913856565952301, 0.240230992436409, 0.12252030521631241, 0.2209087759256363, 0.11843746900558472, 0.21306882798671722, 0.16986201703548431, 0.0772564634680748, 0.22831083834171295, 0.09920819103717804, 0.1827843338251114, 0.12026525288820267, 0.20385046303272247, 0.23395688831806183, 0.1167125403881073, 0.12339767813682556, 0.18948201835155487, 0.13558858633041382, 0.17519596219062805, 0.19959506392478943, 0.1766834855079651, 0.19575665891170502, 0.07383810728788376, 0.12596653401851654, 0.355495423078537, 0.22941932082176208, 0.13620354235172272, 0.15461966395378113, 0.0815073773264885, 0.16304072737693787, 0.11182961612939835, 0.09898733347654343, 0.1867913007736206, 0.11733875423669815, 0.20732858777046204, 0.07889830321073532, 0.060346633195877075, 0.13220614194869995, 0.21017083525657654, 0.1633220911026001, 
0.22407913208007812, 0.1629914790391922, 0.07849102467298508, 0.0634986013174057, 0.08346734195947647, 0.08234986662864685, 0.07101165503263474, 0.06477346271276474, 0.0874917134642601, 0.16463083028793335, 0.07359597831964493, 0.30306488275527954, 0.0926952064037323, 0.057987429201602936, 0.10901831835508347, 0.08484792709350586, 0.07345649600028992, 0.0626828670501709, 0.0643729493021965, 0.03987567126750946, 0.11905873566865921, 0.047831740230321884, 0.06862327456474304, 0.0418168306350708, 0.08677629381418228, 0.08008544892072678, 0.12643030285835266, 0.02508743852376938, 0.15694452822208405, 0.021024145185947418, 0.04301491752266884, 0.03548141196370125, 0.019000593572854996, 0.2740986943244934, 0.05691000819206238, 0.13156375288963318, 0.07106415182352066, 0.05433608964085579, 0.08655295521020889, 0.13069750368595123, 0.07799041271209717, 0.13537554442882538, 0.13349904119968414],\n [0.7577635049819946, 0.678739070892334, 0.7259731292724609, 0.7489646673202515, 0.6658592820167542, 0.7305440306663513, 0.6511856317520142, 0.6305023431777954, 0.6440613269805908, 0.704413652420044, 0.6304953098297119, 0.6533329486846924, 0.6710024476051331, 0.657980740070343, 0.6494162678718567, 0.6259380578994751, 0.6381387114524841, 0.6340850591659546, 0.6146249175071716, 0.5878856182098389, 0.6152567267417908, 0.5630015730857849, 0.590792715549469, 0.5971272587776184, 0.5606115460395813, 0.5727154016494751, 0.5503643155097961, 0.5492763519287109, 0.5123627185821533, 0.5328343510627747, 0.529090940952301, 0.4617682695388794, 0.5146098136901855, 0.4771166145801544, 0.4632205069065094, 0.4295308291912079, 0.4490692615509033, 0.44627848267555237, 0.4484800100326538, 0.444498747587204, 0.3908357620239258, 0.4689673185348511, 0.4632866382598877, 0.4115380346775055, 0.39370858669281006, 0.30355367064476013, 0.41898539662361145, 0.4747936725616455, 0.40032410621643066, 0.3521833121776581, 0.3285800516605377, 0.3449598550796509, 0.3267514109611511, 0.27666252851486206, 
0.40049445629119873, 0.28862252831459045, 0.2854417860507965, 0.2501191198825836, 0.41706961393356323, 0.39194434881210327, 0.2826034426689148, 0.30905184149742126, 0.2411859929561615, 0.2892632782459259, 0.26024991273880005, 0.3531080484390259, 0.2718490660190582, 0.2617754638195038, 0.30297356843948364, 0.23234951496124268, 0.2546401619911194, 0.2674880921840668, 0.17543582618236542, 0.29156941175460815, 0.21771784126758575, 0.19351263344287872, 0.2697051167488098, 0.2424647957086563, 0.30030739307403564, 0.3032171130180359, 0.22974607348442078, 0.22927767038345337, 0.3299483060836792, 0.31792691349983215, 0.1899193823337555, 0.34642454981803894, 0.16717128455638885, 0.23219604790210724, 0.31647077202796936, 0.16464035212993622, 0.2792598009109497, 0.3845776617527008, 0.22498831152915955, 0.368135005235672, 0.3216128945350647, 0.4005036950111389, 0.11494707316160202, 0.27637749910354614, 0.2742801904678345, 0.2814248502254486, 0.12983664870262146, 0.18431724607944489, 0.3716731369495392, 0.1450338512659073, 0.1519049108028412, 0.08916042000055313, 0.5924513339996338, 0.3596770763397217, 0.1730351597070694, 0.24228975176811218, 0.32532668113708496, 0.18476377427577972, 0.3176874816417694, 0.22536367177963257, 0.10236401855945587, 0.1522556096315384, 0.25119826197624207, 0.24328704178333282, 0.15541183948516846, 0.17297859489917755, 0.3045123219490051, 0.3897930085659027, 0.09784858673810959, 0.1992620974779129, 0.16695831716060638, 0.11487163603305817, 0.18504993617534637, 0.3366742730140686, 0.2548673748970032, 0.08781551569700241, 0.10056190192699432, 0.23175100982189178, 0.17468896508216858, 0.10740732401609421, 0.2690909504890442, 0.2774558663368225, 0.24492861330509186, 0.14368529617786407, 0.09874387085437775, 0.3076358437538147, 0.12575972080230713, 0.10234867036342621, 0.0761861652135849, 0.3204321265220642, 0.1882958561182022, 0.5237845778465271, 0.22675678133964539, 0.23402005434036255, 0.36445730924606323, 0.25543537735939026, 0.3169364035129547, 
0.2608758211135864, 0.10393598675727844, 0.11738705635070801, 0.17619460821151733, 0.1662699580192566, 0.3865358233451843, 0.1978747546672821, 0.19265756011009216, 0.15979154407978058, 0.14881408214569092, 0.2497793436050415, 0.18908999860286713, 0.24054080247879028, 0.1965220868587494, 0.18746213614940643, 0.25249794125556946, 0.11708733439445496, 0.11060264706611633, 0.06545650213956833, 0.06839670985937119, 0.13645055890083313, 0.16494742035865784, 0.23327822983264923, 0.2458374798297882, 0.26116079092025757, 0.04233985394239426, 0.15902374684810638, 0.11706103384494781, 0.21211761236190796, 0.15592440962791443, 0.27285754680633545, 0.2595636248588562, 0.13461200892925262, 0.12773233652114868, 0.23858562111854553, 0.09488450735807419, 0.2547829747200012, 0.06679581850767136, 0.1866726130247116, 0.22343210875988007, 0.08384843170642853, 0.45484665036201477, 0.18964597582817078, 0.1164105013012886, 0.20695441961288452, 0.44840899109840393, 0.2154744565486908, 0.1576719731092453, 0.066319040954113],\n [0.8061190247535706, 0.7576419711112976, 0.7872461676597595, 0.7264105081558228, 0.7155753970146179, 0.708035945892334, 0.7211142778396606, 0.6964155435562134, 0.7199532985687256, 0.7073280811309814, 0.7112325429916382, 0.694496750831604, 0.6934433579444885, 0.6954359412193298, 0.6844663023948669, 0.6808801293373108, 0.7078092098236084, 0.6868506073951721, 0.6697349548339844, 0.6985956430435181, 0.663241446018219, 0.6235044002532959, 0.6443854570388794, 0.690306544303894, 0.6288673281669617, 0.6155658960342407, 0.733472466468811, 0.6430254578590393, 0.6549240946769714, 0.6049500107765198, 0.6359832286834717, 0.6050975918769836, 0.6063121557235718, 0.6351621150970459, 0.579750120639801, 0.5867777466773987, 0.59004807472229, 0.5693145990371704, 0.5568662881851196, 0.5739469528198242, 0.5782334804534912, 0.5429723858833313, 0.5128393769264221, 0.5466747283935547, 0.48293545842170715, 0.48196735978126526, 0.5065867900848389, 0.49852344393730164, 0.45901215076446533, 
0.40931978821754456, 0.428974449634552, 0.47155138850212097, 0.43810999393463135, 0.46254390478134155, 0.4710542559623718, 0.4128185510635376, 0.4943501949310303, 0.42550089955329895, 0.4105769395828247, 0.3495163321495056, 0.3751329481601715, 0.36509761214256287, 0.4177945852279663, 0.3158710300922394, 0.33697348833084106, 0.3092765808105469, 0.2896036207675934, 0.37296169996261597, 0.25482386350631714, 0.3853759467601776, 0.254815936088562, 0.23426395654678345, 0.31131431460380554, 0.3108240067958832, 0.14274778962135315, 0.2358514368534088, 0.18734636902809143, 0.21697750687599182, 0.22164548933506012, 0.3447626233100891, 0.17762762308120728, 0.19720008969306946, 0.1745360940694809, 0.2939648926258087, 0.18006889522075653, 0.15123696625232697, 0.16843503713607788, 0.2367013692855835, 0.13912208378314972, 0.16171474754810333, 0.2124139815568924, 0.28575873374938965, 0.14535123109817505, 0.2045067399740219, 0.17241224646568298, 0.14510378241539001, 0.125568687915802, 0.13140064477920532, 0.1751890629529953, 0.06767867505550385, 0.1862007975578308, 0.20667432248592377, 0.14108432829380035, 0.2074945867061615, 0.06632974743843079, 0.18803325295448303, 0.13561967015266418, 0.20918512344360352, 0.06811677664518356, 0.2285996824502945, 0.1469579041004181, 0.15120923519134521, 0.1583244651556015, 0.08857985585927963, 0.14347146451473236, 0.22411327064037323, 0.17649629712104797, 0.0809209942817688, 0.13232378661632538, 0.12987901270389557, 0.19623716175556183, 0.18164461851119995, 0.10701523721218109, 0.16451211273670197, 0.1826278567314148, 0.16277055442333221, 0.12477701157331467, 0.14174526929855347, 0.13562481105327606, 0.13789580762386322, 0.08075360953807831, 0.1419735550880432, 0.1007152646780014, 0.08524568378925323, 0.08857472240924835, 0.20039601624011993, 0.10289433598518372, 0.06806305050849915, 0.14893199503421783, 0.0533442497253418, 0.08558276295661926, 0.17814120650291443, 0.08843161910772324, 0.07493478059768677, 0.07206104695796967, 
0.04764445126056671, 0.2933162748813629, 0.20048680901527405, 0.09041830152273178, 0.15401454269886017, 0.1780223697423935, 0.27529364824295044, 0.27035948634147644, 0.14288881421089172, 0.04312678799033165, 0.27407005429267883, 0.07684631645679474, 0.12959372997283936, 0.09508629143238068, 0.0811772346496582, 0.09898901730775833, 0.029180357232689857, 0.03322149068117142, 0.036205098032951355, 0.031379591673612595, 0.2282934933900833, 0.08287832885980606, 0.03673117607831955, 0.019469337537884712, 0.1488036960363388, 0.24263115227222443, 0.054281048476696014, 0.13069437444210052, 0.05626692622900009, 0.35922014713287354, 0.12846451997756958, 0.03870527818799019, 0.024457039311528206, 0.06294092535972595, 0.046115029603242874, 0.028667038306593895, 0.1919068545103073, 0.11461522430181503, 0.07431669533252716, 0.08665861934423447, 0.030386319383978844, 0.04411404952406883, 0.03287091106176376, 0.053779855370521545, 0.2850929796695709, 0.16664470732212067, 0.18365567922592163, 0.17537373304367065, 0.05938854068517685, 0.041627515107393265, 0.048675309866666794, 0.1356741487979889, 0.10164114087820053, 0.07315782457590103, 0.12214832007884979]])\nCNN_pca8_FASHION = np.array([[0.7024438381195068, 0.701317310333252, 0.6876907348632812, 0.6847043037414551, 0.6933199167251587, 0.7034915089607239, 0.6764124035835266, 0.6747254729270935, 0.7082949280738831, 0.708881676197052, 0.6698758006095886, 0.719658613204956, 0.6659788489341736, 0.6932814121246338, 0.7149137854576111, 0.6934373378753662, 0.6875498294830322, 0.680873692035675, 0.6850370168685913, 0.6768752932548523, 0.6873149275779724, 0.681423544883728, 0.6941041350364685, 0.6781513094902039, 0.6840881109237671, 0.6746655106544495, 0.6761900186538696, 0.700124204158783, 0.6598073840141296, 0.6508470773696899, 0.6769809126853943, 0.6652989387512207, 0.6651935577392578, 0.6551887392997742, 0.6170886158943176, 0.634204626083374, 0.6916629672050476, 0.6379752159118652, 0.6714327931404114, 0.6315291523933411, 
0.5938360095024109, 0.6737152934074402, 0.6550662517547607, 0.6163589358329773, 0.6557160019874573, 0.5911333560943604, 0.6381785869598389, 0.6171723008155823, 0.591861367225647, 0.5763111710548401, 0.5719621777534485, 0.5295212864875793, 0.5759477019309998, 0.601412832736969, 0.5296574831008911, 0.5451980829238892, 0.605498194694519, 0.5693132281303406, 0.6204257011413574, 0.5838996171951294, 0.5469677448272705, 0.5217484831809998, 0.5119621157646179, 0.4840445816516876, 0.48638203740119934, 0.4502175450325012, 0.47423630952835083, 0.504289448261261, 0.49905872344970703, 0.7024049162864685, 0.5467042326927185, 0.4570502042770386, 0.5044639110565186, 0.42137786746025085, 0.6284716129302979, 0.5955803394317627, 0.5164405703544617, 0.41035547852516174, 0.5792757868766785, 0.3985511064529419, 0.47310149669647217, 0.41142207384109497, 0.5692822933197021, 0.4134823679924011, 0.4085578918457031, 0.5547319650650024, 0.4686039090156555, 0.4193034768104553, 0.4465419054031372, 0.3757850229740143, 0.31999069452285767, 0.4855785071849823, 0.312707781791687, 0.3045315146446228, 0.39933907985687256, 0.36081263422966003, 0.298431396484375, 0.5087718963623047, 0.3218119442462921, 0.5063819289207458, 0.3105223476886749, 0.5032217502593994, 0.40607917308807373, 0.569314181804657, 0.44149184226989746, 0.4622828960418701, 0.47339510917663574, 0.503004789352417, 0.3218116760253906, 0.373760461807251, 0.5830525755882263, 0.33768123388290405, 0.3200608789920807, 0.3584187626838684, 0.5484757423400879, 0.2849207818508148, 0.38294509053230286, 0.26305460929870605, 0.42440125346183777, 0.31314557790756226, 0.27498316764831543, 0.45281466841697693, 0.20203173160552979, 0.5584656000137329, 0.4944888949394226, 0.2684931755065918, 0.3663282096385956, 0.4298987090587616, 0.6349244713783264, 0.30895593762397766, 0.21519573032855988, 0.5118795037269592, 0.22751285135746002, 0.47238999605178833, 0.34088703989982605, 0.29520055651664734, 0.283777117729187, 0.75107741355896, 0.3200981616973877, 
0.4257638156414032, 0.31904301047325134, 0.21819691359996796, 0.41086483001708984, 0.31581297516822815, 0.34817051887512207, 0.31119513511657715, 0.4424825608730316, 0.26970574259757996, 0.3470630943775177, 0.3130325376987457, 0.34305381774902344, 0.5418984293937683, 0.2891307771205902, 0.21262122690677643, 0.34585750102996826, 0.3669237196445465, 0.2340727597475052, 0.4622315466403961, 0.23753364384174347, 0.3647005558013916, 0.32093876600265503, 0.22319766879081726, 0.39732033014297485, 0.37264034152030945, 0.42672955989837646, 0.1754530370235443, 0.36036014556884766, 0.19132505357265472, 0.4249000549316406, 0.1446569263935089, 0.4729052782058716, 0.5277539491653442, 0.36327388882637024, 0.16382533311843872, 0.5792173147201538, 0.34819790720939636, 0.25707876682281494, 0.48427093029022217, 0.3475936949253082, 0.2773706316947937, 0.3499819040298462, 0.21925830841064453, 0.14740735292434692, 0.21108940243721008, 0.27492037415504456, 0.2885427176952362, 0.5016036629676819, 0.3607233762741089, 0.21803052723407745, 0.3336958587169647, 0.2177143692970276, 0.13436760008335114, 0.32131683826446533, 0.34203651547431946, 0.2509497404098511, 0.2636679410934448, 0.2886880040168762, 0.3715705871582031, 0.22568900883197784, 0.42904385924339294],\n [0.7185938358306885, 0.6604992747306824, 0.6967400312423706, 0.6770632266998291, 0.7065595984458923, 0.7104727029800415, 0.716935932636261, 0.6715752482414246, 0.6795244812965393, 0.6829398274421692, 0.6919105052947998, 0.6911031603813171, 0.6959630846977234, 0.6710406541824341, 0.6888791918754578, 0.6843737959861755, 0.6749078631401062, 0.7137250304222107, 0.6732360124588013, 0.6779041290283203, 0.7363390326499939, 0.6784221529960632, 0.6690196394920349, 0.659263551235199, 0.6786253452301025, 0.6711382269859314, 0.6844541430473328, 0.6591963171958923, 0.6696309447288513, 0.6563599109649658, 0.6677483320236206, 0.6503051519393921, 0.659803032875061, 0.6344764828681946, 0.6299199461936951, 0.6587289571762085, 0.6597774624824524, 
0.6346151232719421, 0.6273216605186462, 0.5881815552711487, 0.6003080606460571, 0.5798957347869873, 0.6204898953437805, 0.6316098570823669, 0.6258500218391418, 0.5693320631980896, 0.6216154098510742, 0.6092434525489807, 0.56674724817276, 0.523102343082428, 0.6059455275535583, 0.6056551337242126, 0.5810922384262085, 0.5900036692619324, 0.561721920967102, 0.5596206784248352, 0.5735964179039001, 0.5286743640899658, 0.5743589401245117, 0.5271419882774353, 0.5284360647201538, 0.5176397562026978, 0.5148100852966309, 0.5217126607894897, 0.5180081725120544, 0.4918561577796936, 0.4408998191356659, 0.5110407471656799, 0.4879508912563324, 0.46306025981903076, 0.46404024958610535, 0.4086320102214813, 0.5053701996803284, 0.3534437417984009, 0.3630962073802948, 0.3462725579738617, 0.4419665038585663, 0.4270862936973572, 0.48466911911964417, 0.42236822843551636, 0.5155899524688721, 0.3582645356655121, 0.3383621275424957, 0.4068378210067749, 0.4942166805267334, 0.3816855549812317, 0.39382854104042053, 0.31292039155960083, 0.3597201406955719, 0.33144763112068176, 0.3735921084880829, 0.37888598442077637, 0.392160028219223, 0.3043500483036041, 0.41463106870651245, 0.36486178636550903, 0.4119946360588074, 0.3629731237888336, 0.373713880777359, 0.4745834767818451, 0.30028828978538513, 0.26262903213500977, 0.4429750442504883, 0.426891028881073, 0.32811039686203003, 0.3924146294593811, 0.3213866353034973, 0.48806655406951904, 0.2627648413181305, 0.34866753220558167, 0.40109702944755554, 0.18872395157814026, 0.39481040835380554, 0.2632366418838501, 0.3209681808948517, 0.3966921865940094, 0.4742789566516876, 0.32062554359436035, 0.43540799617767334, 0.2358834147453308, 0.35373878479003906, 0.21102140843868256, 0.34872835874557495, 0.45766204595565796, 0.27182236313819885, 0.252424418926239, 0.17766869068145752, 0.31417953968048096, 0.26415348052978516, 0.2501792311668396, 0.21763435006141663, 0.30250853300094604, 0.28825610876083374, 0.2638463079929352, 0.4555850327014923, 
0.21854054927825928, 0.4027482271194458, 0.2644772231578827, 0.18938909471035004, 0.18993347883224487, 0.1994181126356125, 0.2564356029033661, 0.34466439485549927, 0.44632112979888916, 0.1714322417974472, 0.25900983810424805, 0.2889957129955292, 0.3652031719684601, 0.3336139917373657, 0.2878737449645996, 0.3079991042613983, 0.15895380079746246, 0.5084056258201599, 0.2624356746673584, 0.10251109302043915, 0.41438597440719604, 0.17054475843906403, 0.3204120695590973, 0.29824286699295044, 0.2611217200756073, 0.4017789363861084, 0.4717103838920593, 0.35568371415138245, 0.3665515184402466, 0.17177030444145203, 0.11903706192970276, 0.38929831981658936, 0.42342454195022583, 0.24814686179161072, 0.1272808313369751, 0.32529786229133606, 0.2594032287597656, 0.19042231142520905, 0.2852585017681122, 0.353363037109375, 0.353385329246521, 0.3859407901763916, 0.1847701221704483, 0.3008573353290558, 0.2820724546909332, 0.24565532803535461, 0.1900235116481781, 0.18949896097183228, 0.3231547474861145, 0.2127327173948288, 0.1944090723991394, 0.3178519904613495, 0.2710948884487152, 0.16719381511211395, 0.18091560900211334, 0.2940332293510437, 0.2849684953689575, 0.1908695101737976, 0.26715561747550964, 0.21560430526733398, 0.17860615253448486, 0.15859295427799225, 0.29036495089530945, 0.11836032569408417, 0.20900192856788635],\n [0.7272396087646484, 0.7295688390731812, 0.7570897936820984, 0.7087957262992859, 0.7274313569068909, 0.7022408843040466, 0.6864199042320251, 0.699434220790863, 0.6982783675193787, 0.6959832906723022, 0.6976252794265747, 0.6931410431861877, 0.6930719614028931, 0.6893796324729919, 0.6975778341293335, 0.6893512010574341, 0.6978790760040283, 0.7049446105957031, 0.6925809979438782, 0.6892191171646118, 0.7070729732513428, 0.7048653364181519, 0.6851789951324463, 0.6766936779022217, 0.702186107635498, 0.6891911029815674, 0.6973949670791626, 0.6923474073410034, 0.7022337317466736, 0.6829305291175842, 0.6827681064605713, 0.6996974349021912, 0.6973342299461365, 
0.6984550952911377, 0.6910497546195984, 0.6889910101890564, 0.6827915906906128, 0.6663788557052612, 0.7112968564033508, 0.6854071021080017, 0.6761595010757446, 0.7062471508979797, 0.6855767965316772, 0.7055447101593018, 0.6941657066345215, 0.6775194406509399, 0.6755390167236328, 0.7036972641944885, 0.6627815365791321, 0.6511377096176147, 0.6956942081451416, 0.6593714952468872, 0.7439809441566467, 0.6913168430328369, 0.6690685153007507, 0.6907264590263367, 0.6702031493186951, 0.678770899772644, 0.7161229848861694, 0.6777403950691223, 0.642853319644928, 0.7137056589126587, 0.6518657207489014, 0.6615353226661682, 0.714718759059906, 0.6691334247589111, 0.7253159880638123, 0.6783788800239563, 0.7046250700950623, 0.6893599033355713, 0.6771127581596375, 0.6625726222991943, 0.6623744368553162, 0.6594424247741699, 0.7303259372711182, 0.6832818388938904, 0.6950793266296387, 0.6705347299575806, 0.6765984892845154, 0.6506450772285461, 0.6619067192077637, 0.6445882320404053, 0.6630840301513672, 0.6295471787452698, 0.6536338329315186, 0.6425454020500183, 0.6671119928359985, 0.666012704372406, 0.6505255699157715, 0.6488088369369507, 0.6459009051322937, 0.6408039331436157, 0.5989455580711365, 0.6081981062889099, 0.6189438104629517, 0.6539271473884583, 0.6395933032035828, 0.6286918520927429, 0.5886989235877991, 0.6259792447090149, 0.588421642780304, 0.5792858600616455, 0.5914623141288757, 0.6219715476036072, 0.5813407301902771, 0.5458603501319885, 0.6520585417747498, 0.659098207950592, 0.5533766746520996, 0.5545827746391296, 0.5535733103752136, 0.5933940410614014, 0.5356205701828003, 0.6077077984809875, 0.6699368357658386, 0.5931479930877686, 0.6122397780418396, 0.5298587083816528, 0.5677626132965088, 0.5550642013549805, 0.6128010749816895, 0.6263720989227295, 0.5651345252990723, 0.5553138852119446, 0.5478275418281555, 0.5822321176528931, 0.5386983156204224, 0.4926390051841736, 0.5206858515739441, 0.5107952952384949, 0.509948194026947, 0.4823342263698578, 0.5333036184310913, 
0.47406020760536194, 0.42698532342910767, 0.503563642501831, 0.4918605387210846, 0.48852309584617615, 0.5684753656387329, 0.45037415623664856, 0.523054301738739, 0.40684664249420166, 0.48148486018180847, 0.48747944831848145, 0.5040246248245239, 0.5649238228797913, 0.3981987237930298, 0.4773406982421875, 0.4948986768722534, 0.5817337036132812, 0.48667991161346436, 0.5322973728179932, 0.571322500705719, 0.48079100251197815, 0.4295814633369446, 0.5479949712753296, 0.46066680550575256, 0.4725620746612549, 0.42795437574386597, 0.42624568939208984, 0.5229694843292236, 0.5307640433311462, 0.3672819435596466, 0.40361347794532776, 0.4325066804885864, 0.49274033308029175, 0.5536319613456726, 0.4658926725387573, 0.6071437001228333, 0.48129138350486755, 0.5220579504966736, 0.5487117767333984, 0.43383896350860596, 0.4137386679649353, 0.4771384298801422, 0.5129045844078064, 0.5398615002632141, 0.47063007950782776, 0.3663185238838196, 0.40931767225265503, 0.3742317855358124, 0.5270165801048279, 0.3159030079841614, 0.4720088243484497, 0.5110960006713867, 0.5423228144645691, 0.48916095495224, 0.3241633176803589, 0.32349222898483276, 0.5001606345176697, 0.3952382802963257, 0.38832223415374756, 0.6230665445327759, 0.5061535835266113, 0.42162856459617615, 0.3890904486179352, 0.4138750433921814, 0.4940250813961029, 0.4154129922389984, 0.3442753851413727],\n [0.681438684463501, 0.6889891028404236, 0.6886380910873413, 0.6888549327850342, 0.7001448273658752, 0.6896989345550537, 0.6827958822250366, 0.6809604167938232, 0.6802992820739746, 0.6798832416534424, 0.6873130202293396, 0.6706038117408752, 0.6825727820396423, 0.6711708307266235, 0.6692336797714233, 0.6626574993133545, 0.6598449945449829, 0.6661273241043091, 0.6690754890441895, 0.6628720164299011, 0.6578156352043152, 0.6536375284194946, 0.6395933032035828, 0.6494665741920471, 0.6431573629379272, 0.6422117352485657, 0.6304263472557068, 0.6317248940467834, 0.6545873284339905, 0.6310784816741943, 0.6355032920837402, 0.6170492768287659, 
0.6209865808486938, 0.6031823754310608, 0.5973383188247681, 0.5987150073051453, 0.5997783541679382, 0.5827844142913818, 0.5649464130401611, 0.5976262092590332, 0.5651332139968872, 0.5687211155891418, 0.5581858158111572, 0.5616911053657532, 0.5290145874023438, 0.5284591317176819, 0.5671270489692688, 0.47875893115997314, 0.5753456950187683, 0.5510994791984558, 0.5518543720245361, 0.4903143048286438, 0.48829033970832825, 0.5358155965805054, 0.530174732208252, 0.5644151568412781, 0.4735930562019348, 0.4666731357574463, 0.4378604590892792, 0.470310240983963, 0.4612236022949219, 0.5205991268157959, 0.4360387921333313, 0.44255444407463074, 0.5049978494644165, 0.40847957134246826, 0.4495522379875183, 0.4902504086494446, 0.4367920160293579, 0.4487551152706146, 0.4421207010746002, 0.37032660841941833, 0.3559698164463043, 0.42548468708992004, 0.4063631296157837, 0.45850908756256104, 0.38015541434288025, 0.3413124084472656, 0.4414171576499939, 0.3528973460197449, 0.3753133714199066, 0.4682203233242035, 0.38733014464378357, 0.3464372158050537, 0.3330712914466858, 0.38146668672561646, 0.37155666947364807, 0.3937838077545166, 0.3397389352321625, 0.2911732792854309, 0.3185632824897766, 0.34439948201179504, 0.37463000416755676, 0.2941756844520569, 0.3446425497531891, 0.2543509304523468, 0.44017115235328674, 0.2634207010269165, 0.32429203391075134, 0.3531845808029175, 0.4209728538990021, 0.2728058993816376, 0.29612597823143005, 0.36481037735939026, 0.23760972917079926, 0.31274184584617615, 0.29005640745162964, 0.3057081699371338, 0.3319462239742279, 0.4440755546092987, 0.3070727586746216, 0.3044629395008087, 0.32753700017929077, 0.2656729221343994, 0.422760009765625, 0.2262178659439087, 0.34336617588996887, 0.28752654790878296, 0.22466769814491272, 0.19892671704292297, 0.32547518610954285, 0.3219960033893585, 0.2053043395280838, 0.33121222257614136, 0.24561984837055206, 0.18318641185760498, 0.3192058503627777, 0.30691757798194885, 0.18206992745399475, 0.22018828988075256, 
0.38999852538108826, 0.17489850521087646, 0.30276167392730713, 0.21708929538726807, 0.35204121470451355, 0.20095224678516388, 0.26634764671325684, 0.1619335412979126, 0.17421387135982513, 0.17063964903354645, 0.23774376511573792, 0.21871469914913177, 0.28005892038345337, 0.14502578973770142, 0.2649957239627838, 0.31908106803894043, 0.22305874526500702, 0.19752325117588043, 0.22612883150577545, 0.2095600664615631, 0.3497889041900635, 0.24804994463920593, 0.18516618013381958, 0.19408148527145386, 0.39791882038116455, 0.2503810226917267, 0.13216803967952728, 0.1892985850572586, 0.2676987946033478, 0.1970151662826538, 0.2100503146648407, 0.14209169149398804, 0.1863841563463211, 0.14308282732963562, 0.3121347725391388, 0.2626650333404541, 0.12991006672382355, 0.16739721596240997, 0.11467546224594116, 0.32184040546417236, 0.11009383946657181, 0.19579708576202393, 0.13897623121738434, 0.1434190273284912, 0.31635451316833496, 0.2833975851535797, 0.16934165358543396, 0.14960110187530518, 0.31888577342033386, 0.21167083084583282, 0.30760568380355835, 0.41453269124031067, 0.229119211435318, 0.1743178516626358, 0.2994597852230072, 0.24154801666736603, 0.09829962998628616, 0.1747366487979889, 0.29539310932159424, 0.2035258263349533, 0.30196595191955566, 0.30807745456695557, 0.3411264419555664, 0.15731027722358704, 0.12981931865215302, 0.2966948449611664, 0.21025419235229492, 0.15241996943950653, 0.07024487853050232, 0.23431310057640076],\n [0.7926881909370422, 0.6943184733390808, 0.724916398525238, 0.725375771522522, 0.7073952555656433, 0.6848509907722473, 0.6693649291992188, 0.6957626342773438, 0.6717135906219482, 0.673644483089447, 0.6972193121910095, 0.7146568894386292, 0.6860081553459167, 0.7142075896263123, 0.6870988607406616, 0.6949147582054138, 0.6425225734710693, 0.6958219408988953, 0.6599481701850891, 0.7045291066169739, 0.6925467848777771, 0.7045005559921265, 0.686108410358429, 0.6769193410873413, 0.682338535785675, 0.6690472960472107, 0.6917040348052979, 
0.6668921709060669, 0.7072445750236511, 0.7044053673744202, 0.718180239200592, 0.6744627356529236, 0.7052643299102783, 0.7005559802055359, 0.6911374926567078, 0.6969266533851624, 0.6899242997169495, 0.6865996718406677, 0.6805969476699829, 0.704833447933197, 0.6821359992027283, 0.6927057504653931, 0.677013635635376, 0.6619738936424255, 0.695767343044281, 0.6727330088615417, 0.688576877117157, 0.6700695157051086, 0.6829382181167603, 0.6628155708312988, 0.6508015990257263, 0.6543688178062439, 0.667511522769928, 0.6499519944190979, 0.6432656049728394, 0.6761520504951477, 0.7019364237785339, 0.6382509469985962, 0.636896550655365, 0.6385931968688965, 0.620511531829834, 0.6146273612976074, 0.6234485507011414, 0.5818889737129211, 0.5527447462081909, 0.5901055932044983, 0.5930953621864319, 0.5530558228492737, 0.5988510251045227, 0.5692178606987, 0.5622672438621521, 0.5097872614860535, 0.5923498272895813, 0.5072301030158997, 0.6285861730575562, 0.5253883600234985, 0.506665825843811, 0.6181252002716064, 0.5479302406311035, 0.4898171126842499, 0.4825979173183441, 0.5096496343612671, 0.47084715962409973, 0.38744720816612244, 0.5614428520202637, 0.4947513937950134, 0.3650178611278534, 0.4312703013420105, 0.4307132065296173, 0.3908964991569519, 0.49059590697288513, 0.4534572660923004, 0.440878689289093, 0.4681662321090698, 0.43900272250175476, 0.4869661033153534, 0.47630172967910767, 0.3929968774318695, 0.3844168484210968, 0.5419467091560364, 0.33349668979644775, 0.3804889917373657, 0.4053753614425659, 0.3849819600582123, 0.5149462819099426, 0.4681555926799774, 0.39894184470176697, 0.4161919355392456, 0.5927491188049316, 0.39550545811653137, 0.4547486901283264, 0.5225046277046204, 0.34957191348075867, 0.45948532223701477, 0.37251201272010803, 0.4916895031929016, 0.295067697763443, 0.4786352217197418, 0.421528697013855, 0.31180649995803833, 0.28369444608688354, 0.2648847699165344, 0.35451003909111023, 0.43641865253448486, 0.36730897426605225, 0.2720189392566681, 
0.33152616024017334, 0.2540571093559265, 0.22555945813655853, 0.46975404024124146, 0.3541400134563446, 0.3469198942184448, 0.2294202744960785, 0.36853235960006714, 0.3760600984096527, 0.34043994545936584, 0.2598644196987152, 0.24092306196689606, 0.2868442237377167, 0.18181255459785461, 0.26207858324050903, 0.2916255593299866, 0.19571438431739807, 0.35200226306915283, 0.496856153011322, 0.3683459162712097, 0.27670279145240784, 0.1787511110305786, 0.5051004886627197, 0.3936381936073303, 0.15478552877902985, 0.5155048370361328, 0.3085480034351349, 0.4393378794193268, 0.26719388365745544, 0.33330830931663513, 0.39656341075897217, 0.31738728284835815, 0.33005809783935547, 0.2064027637243271, 0.15768247842788696, 0.35187432169914246, 0.2247389853000641, 0.43645012378692627, 0.6171329021453857, 0.3514723479747772, 0.31207093596458435, 0.236627995967865, 0.28743186593055725, 0.19466204941272736, 0.30667591094970703, 0.2142365574836731, 0.2447064220905304, 0.402235746383667, 0.4022824168205261, 0.409971684217453, 0.2473350167274475, 0.26397138833999634, 0.31409120559692383, 0.34682631492614746, 0.26574432849884033, 0.4209563136100769, 0.23204396665096283, 0.1841460019350052, 0.3346189260482788, 0.2229692041873932, 0.21979694068431854, 0.1793777048587799, 0.43134182691574097, 0.23858889937400818, 0.15659116208553314, 0.22294828295707703, 0.1925022304058075, 0.31423401832580566, 0.09654498845338821, 0.3890306055545807, 0.17093206942081451, 0.18873277306556702, 0.2963242828845978, 0.17343668639659882]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_pca8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_pca8_FASHION = np.array([[0.7024438381195068, 0.701317310333252, 
0.6876907348632812, 0.6847043037414551, 0.6933199167251587, 0.7034915089607239, 0.6764124035835266, 0.6747254729270935, 0.7082949280738831, 0.708881676197052, 0.6698758006095886, 0.719658613204956, 0.6659788489341736, 0.6932814121246338, 0.7149137854576111, 0.6934373378753662, 0.6875498294830322, 0.680873692035675, 0.6850370168685913, 0.6768752932548523, 0.6873149275779724, 0.681423544883728, 0.6941041350364685, 0.6781513094902039, 0.6840881109237671, 0.6746655106544495, 0.6761900186538696, 0.700124204158783, 0.6598073840141296, 0.6508470773696899, 0.6769809126853943, 0.6652989387512207, 0.6651935577392578, 0.6551887392997742, 0.6170886158943176, 0.634204626083374, 0.6916629672050476, 0.6379752159118652, 0.6714327931404114, 0.6315291523933411, 0.5938360095024109, 0.6737152934074402, 0.6550662517547607, 0.6163589358329773, 0.6557160019874573, 0.5911333560943604, 0.6381785869598389, 0.6171723008155823, 0.591861367225647, 0.5763111710548401, 0.5719621777534485, 0.5295212864875793, 0.5759477019309998, 0.601412832736969, 0.5296574831008911, 0.5451980829238892, 0.605498194694519, 0.5693132281303406, 0.6204257011413574, 0.5838996171951294, 0.5469677448272705, 0.5217484831809998, 0.5119621157646179, 0.4840445816516876, 0.48638203740119934, 0.4502175450325012, 0.47423630952835083, 0.504289448261261, 0.49905872344970703, 0.7024049162864685, 0.5467042326927185, 0.4570502042770386, 0.5044639110565186, 0.42137786746025085, 0.6284716129302979, 0.5955803394317627, 0.5164405703544617, 0.41035547852516174, 0.5792757868766785, 0.3985511064529419, 0.47310149669647217, 0.41142207384109497, 0.5692822933197021, 0.4134823679924011, 0.4085578918457031, 0.5547319650650024, 0.4686039090156555, 0.4193034768104553, 0.4465419054031372, 0.3757850229740143, 0.31999069452285767, 0.4855785071849823, 0.312707781791687, 0.3045315146446228, 0.39933907985687256, 0.36081263422966003, 0.298431396484375, 0.5087718963623047, 0.3218119442462921, 0.5063819289207458, 0.3105223476886749, 0.5032217502593994, 
0.40607917308807373, 0.569314181804657, 0.44149184226989746, 0.4622828960418701, 0.47339510917663574, 0.503004789352417, 0.3218116760253906, 0.373760461807251, 0.5830525755882263, 0.33768123388290405, 0.3200608789920807, 0.3584187626838684, 0.5484757423400879, 0.2849207818508148, 0.38294509053230286, 0.26305460929870605, 0.42440125346183777, 0.31314557790756226, 0.27498316764831543, 0.45281466841697693, 0.20203173160552979, 0.5584656000137329, 0.4944888949394226, 0.2684931755065918, 0.3663282096385956, 0.4298987090587616, 0.6349244713783264, 0.30895593762397766, 0.21519573032855988, 0.5118795037269592, 0.22751285135746002, 0.47238999605178833, 0.34088703989982605, 0.29520055651664734, 0.283777117729187, 0.75107741355896, 0.3200981616973877, 0.4257638156414032, 0.31904301047325134, 0.21819691359996796, 0.41086483001708984, 0.31581297516822815, 0.34817051887512207, 0.31119513511657715, 0.4424825608730316, 0.26970574259757996, 0.3470630943775177, 0.3130325376987457, 0.34305381774902344, 0.5418984293937683, 0.2891307771205902, 0.21262122690677643, 0.34585750102996826, 0.3669237196445465, 0.2340727597475052, 0.4622315466403961, 0.23753364384174347, 0.3647005558013916, 0.32093876600265503, 0.22319766879081726, 0.39732033014297485, 0.37264034152030945, 0.42672955989837646, 0.1754530370235443, 0.36036014556884766, 0.19132505357265472, 0.4249000549316406, 0.1446569263935089, 0.4729052782058716, 0.5277539491653442, 0.36327388882637024, 0.16382533311843872, 0.5792173147201538, 0.34819790720939636, 0.25707876682281494, 0.48427093029022217, 0.3475936949253082, 0.2773706316947937, 0.3499819040298462, 0.21925830841064453, 0.14740735292434692, 0.21108940243721008, 0.27492037415504456, 0.2885427176952362, 0.5016036629676819, 0.3607233762741089, 0.21803052723407745, 0.3336958587169647, 0.2177143692970276, 0.13436760008335114, 0.32131683826446533, 0.34203651547431946, 0.2509497404098511, 0.2636679410934448, 0.2886880040168762, 0.3715705871582031, 0.22568900883197784, 
0.42904385924339294],\n [0.7185938358306885, 0.6604992747306824, 0.6967400312423706, 0.6770632266998291, 0.7065595984458923, 0.7104727029800415, 0.716935932636261, 0.6715752482414246, 0.6795244812965393, 0.6829398274421692, 0.6919105052947998, 0.6911031603813171, 0.6959630846977234, 0.6710406541824341, 0.6888791918754578, 0.6843737959861755, 0.6749078631401062, 0.7137250304222107, 0.6732360124588013, 0.6779041290283203, 0.7363390326499939, 0.6784221529960632, 0.6690196394920349, 0.659263551235199, 0.6786253452301025, 0.6711382269859314, 0.6844541430473328, 0.6591963171958923, 0.6696309447288513, 0.6563599109649658, 0.6677483320236206, 0.6503051519393921, 0.659803032875061, 0.6344764828681946, 0.6299199461936951, 0.6587289571762085, 0.6597774624824524, 0.6346151232719421, 0.6273216605186462, 0.5881815552711487, 0.6003080606460571, 0.5798957347869873, 0.6204898953437805, 0.6316098570823669, 0.6258500218391418, 0.5693320631980896, 0.6216154098510742, 0.6092434525489807, 0.56674724817276, 0.523102343082428, 0.6059455275535583, 0.6056551337242126, 0.5810922384262085, 0.5900036692619324, 0.561721920967102, 0.5596206784248352, 0.5735964179039001, 0.5286743640899658, 0.5743589401245117, 0.5271419882774353, 0.5284360647201538, 0.5176397562026978, 0.5148100852966309, 0.5217126607894897, 0.5180081725120544, 0.4918561577796936, 0.4408998191356659, 0.5110407471656799, 0.4879508912563324, 0.46306025981903076, 0.46404024958610535, 0.4086320102214813, 0.5053701996803284, 0.3534437417984009, 0.3630962073802948, 0.3462725579738617, 0.4419665038585663, 0.4270862936973572, 0.48466911911964417, 0.42236822843551636, 0.5155899524688721, 0.3582645356655121, 0.3383621275424957, 0.4068378210067749, 0.4942166805267334, 0.3816855549812317, 0.39382854104042053, 0.31292039155960083, 0.3597201406955719, 0.33144763112068176, 0.3735921084880829, 0.37888598442077637, 0.392160028219223, 0.3043500483036041, 0.41463106870651245, 0.36486178636550903, 0.4119946360588074, 0.3629731237888336, 
0.373713880777359, 0.4745834767818451, 0.30028828978538513, 0.26262903213500977, 0.4429750442504883, 0.426891028881073, 0.32811039686203003, 0.3924146294593811, 0.3213866353034973, 0.48806655406951904, 0.2627648413181305, 0.34866753220558167, 0.40109702944755554, 0.18872395157814026, 0.39481040835380554, 0.2632366418838501, 0.3209681808948517, 0.3966921865940094, 0.4742789566516876, 0.32062554359436035, 0.43540799617767334, 0.2358834147453308, 0.35373878479003906, 0.21102140843868256, 0.34872835874557495, 0.45766204595565796, 0.27182236313819885, 0.252424418926239, 0.17766869068145752, 0.31417953968048096, 0.26415348052978516, 0.2501792311668396, 0.21763435006141663, 0.30250853300094604, 0.28825610876083374, 0.2638463079929352, 0.4555850327014923, 0.21854054927825928, 0.4027482271194458, 0.2644772231578827, 0.18938909471035004, 0.18993347883224487, 0.1994181126356125, 0.2564356029033661, 0.34466439485549927, 0.44632112979888916, 0.1714322417974472, 0.25900983810424805, 0.2889957129955292, 0.3652031719684601, 0.3336139917373657, 0.2878737449645996, 0.3079991042613983, 0.15895380079746246, 0.5084056258201599, 0.2624356746673584, 0.10251109302043915, 0.41438597440719604, 0.17054475843906403, 0.3204120695590973, 0.29824286699295044, 0.2611217200756073, 0.4017789363861084, 0.4717103838920593, 0.35568371415138245, 0.3665515184402466, 0.17177030444145203, 0.11903706192970276, 0.38929831981658936, 0.42342454195022583, 0.24814686179161072, 0.1272808313369751, 0.32529786229133606, 0.2594032287597656, 0.19042231142520905, 0.2852585017681122, 0.353363037109375, 0.353385329246521, 0.3859407901763916, 0.1847701221704483, 0.3008573353290558, 0.2820724546909332, 0.24565532803535461, 0.1900235116481781, 0.18949896097183228, 0.3231547474861145, 0.2127327173948288, 0.1944090723991394, 0.3178519904613495, 0.2710948884487152, 0.16719381511211395, 0.18091560900211334, 0.2940332293510437, 0.2849684953689575, 0.1908695101737976, 0.26715561747550964, 0.21560430526733398, 
0.17860615253448486, 0.15859295427799225, 0.29036495089530945, 0.11836032569408417, 0.20900192856788635],\n [0.7272396087646484, 0.7295688390731812, 0.7570897936820984, 0.7087957262992859, 0.7274313569068909, 0.7022408843040466, 0.6864199042320251, 0.699434220790863, 0.6982783675193787, 0.6959832906723022, 0.6976252794265747, 0.6931410431861877, 0.6930719614028931, 0.6893796324729919, 0.6975778341293335, 0.6893512010574341, 0.6978790760040283, 0.7049446105957031, 0.6925809979438782, 0.6892191171646118, 0.7070729732513428, 0.7048653364181519, 0.6851789951324463, 0.6766936779022217, 0.702186107635498, 0.6891911029815674, 0.6973949670791626, 0.6923474073410034, 0.7022337317466736, 0.6829305291175842, 0.6827681064605713, 0.6996974349021912, 0.6973342299461365, 0.6984550952911377, 0.6910497546195984, 0.6889910101890564, 0.6827915906906128, 0.6663788557052612, 0.7112968564033508, 0.6854071021080017, 0.6761595010757446, 0.7062471508979797, 0.6855767965316772, 0.7055447101593018, 0.6941657066345215, 0.6775194406509399, 0.6755390167236328, 0.7036972641944885, 0.6627815365791321, 0.6511377096176147, 0.6956942081451416, 0.6593714952468872, 0.7439809441566467, 0.6913168430328369, 0.6690685153007507, 0.6907264590263367, 0.6702031493186951, 0.678770899772644, 0.7161229848861694, 0.6777403950691223, 0.642853319644928, 0.7137056589126587, 0.6518657207489014, 0.6615353226661682, 0.714718759059906, 0.6691334247589111, 0.7253159880638123, 0.6783788800239563, 0.7046250700950623, 0.6893599033355713, 0.6771127581596375, 0.6625726222991943, 0.6623744368553162, 0.6594424247741699, 0.7303259372711182, 0.6832818388938904, 0.6950793266296387, 0.6705347299575806, 0.6765984892845154, 0.6506450772285461, 0.6619067192077637, 0.6445882320404053, 0.6630840301513672, 0.6295471787452698, 0.6536338329315186, 0.6425454020500183, 0.6671119928359985, 0.666012704372406, 0.6505255699157715, 0.6488088369369507, 0.6459009051322937, 0.6408039331436157, 0.5989455580711365, 0.6081981062889099, 
0.6189438104629517, 0.6539271473884583, 0.6395933032035828, 0.6286918520927429, 0.5886989235877991, 0.6259792447090149, 0.588421642780304, 0.5792858600616455, 0.5914623141288757, 0.6219715476036072, 0.5813407301902771, 0.5458603501319885, 0.6520585417747498, 0.659098207950592, 0.5533766746520996, 0.5545827746391296, 0.5535733103752136, 0.5933940410614014, 0.5356205701828003, 0.6077077984809875, 0.6699368357658386, 0.5931479930877686, 0.6122397780418396, 0.5298587083816528, 0.5677626132965088, 0.5550642013549805, 0.6128010749816895, 0.6263720989227295, 0.5651345252990723, 0.5553138852119446, 0.5478275418281555, 0.5822321176528931, 0.5386983156204224, 0.4926390051841736, 0.5206858515739441, 0.5107952952384949, 0.509948194026947, 0.4823342263698578, 0.5333036184310913, 0.47406020760536194, 0.42698532342910767, 0.503563642501831, 0.4918605387210846, 0.48852309584617615, 0.5684753656387329, 0.45037415623664856, 0.523054301738739, 0.40684664249420166, 0.48148486018180847, 0.48747944831848145, 0.5040246248245239, 0.5649238228797913, 0.3981987237930298, 0.4773406982421875, 0.4948986768722534, 0.5817337036132812, 0.48667991161346436, 0.5322973728179932, 0.571322500705719, 0.48079100251197815, 0.4295814633369446, 0.5479949712753296, 0.46066680550575256, 0.4725620746612549, 0.42795437574386597, 0.42624568939208984, 0.5229694843292236, 0.5307640433311462, 0.3672819435596466, 0.40361347794532776, 0.4325066804885864, 0.49274033308029175, 0.5536319613456726, 0.4658926725387573, 0.6071437001228333, 0.48129138350486755, 0.5220579504966736, 0.5487117767333984, 0.43383896350860596, 0.4137386679649353, 0.4771384298801422, 0.5129045844078064, 0.5398615002632141, 0.47063007950782776, 0.3663185238838196, 0.40931767225265503, 0.3742317855358124, 0.5270165801048279, 0.3159030079841614, 0.4720088243484497, 0.5110960006713867, 0.5423228144645691, 0.48916095495224, 0.3241633176803589, 0.32349222898483276, 0.5001606345176697, 0.3952382802963257, 0.38832223415374756, 0.6230665445327759, 
0.5061535835266113, 0.42162856459617615, 0.3890904486179352, 0.4138750433921814, 0.4940250813961029, 0.4154129922389984, 0.3442753851413727],\n [0.681438684463501, 0.6889891028404236, 0.6886380910873413, 0.6888549327850342, 0.7001448273658752, 0.6896989345550537, 0.6827958822250366, 0.6809604167938232, 0.6802992820739746, 0.6798832416534424, 0.6873130202293396, 0.6706038117408752, 0.6825727820396423, 0.6711708307266235, 0.6692336797714233, 0.6626574993133545, 0.6598449945449829, 0.6661273241043091, 0.6690754890441895, 0.6628720164299011, 0.6578156352043152, 0.6536375284194946, 0.6395933032035828, 0.6494665741920471, 0.6431573629379272, 0.6422117352485657, 0.6304263472557068, 0.6317248940467834, 0.6545873284339905, 0.6310784816741943, 0.6355032920837402, 0.6170492768287659, 0.6209865808486938, 0.6031823754310608, 0.5973383188247681, 0.5987150073051453, 0.5997783541679382, 0.5827844142913818, 0.5649464130401611, 0.5976262092590332, 0.5651332139968872, 0.5687211155891418, 0.5581858158111572, 0.5616911053657532, 0.5290145874023438, 0.5284591317176819, 0.5671270489692688, 0.47875893115997314, 0.5753456950187683, 0.5510994791984558, 0.5518543720245361, 0.4903143048286438, 0.48829033970832825, 0.5358155965805054, 0.530174732208252, 0.5644151568412781, 0.4735930562019348, 0.4666731357574463, 0.4378604590892792, 0.470310240983963, 0.4612236022949219, 0.5205991268157959, 0.4360387921333313, 0.44255444407463074, 0.5049978494644165, 0.40847957134246826, 0.4495522379875183, 0.4902504086494446, 0.4367920160293579, 0.4487551152706146, 0.4421207010746002, 0.37032660841941833, 0.3559698164463043, 0.42548468708992004, 0.4063631296157837, 0.45850908756256104, 0.38015541434288025, 0.3413124084472656, 0.4414171576499939, 0.3528973460197449, 0.3753133714199066, 0.4682203233242035, 0.38733014464378357, 0.3464372158050537, 0.3330712914466858, 0.38146668672561646, 0.37155666947364807, 0.3937838077545166, 0.3397389352321625, 0.2911732792854309, 0.3185632824897766, 0.34439948201179504, 
0.37463000416755676, 0.2941756844520569, 0.3446425497531891, 0.2543509304523468, 0.44017115235328674, 0.2634207010269165, 0.32429203391075134, 0.3531845808029175, 0.4209728538990021, 0.2728058993816376, 0.29612597823143005, 0.36481037735939026, 0.23760972917079926, 0.31274184584617615, 0.29005640745162964, 0.3057081699371338, 0.3319462239742279, 0.4440755546092987, 0.3070727586746216, 0.3044629395008087, 0.32753700017929077, 0.2656729221343994, 0.422760009765625, 0.2262178659439087, 0.34336617588996887, 0.28752654790878296, 0.22466769814491272, 0.19892671704292297, 0.32547518610954285, 0.3219960033893585, 0.2053043395280838, 0.33121222257614136, 0.24561984837055206, 0.18318641185760498, 0.3192058503627777, 0.30691757798194885, 0.18206992745399475, 0.22018828988075256, 0.38999852538108826, 0.17489850521087646, 0.30276167392730713, 0.21708929538726807, 0.35204121470451355, 0.20095224678516388, 0.26634764671325684, 0.1619335412979126, 0.17421387135982513, 0.17063964903354645, 0.23774376511573792, 0.21871469914913177, 0.28005892038345337, 0.14502578973770142, 0.2649957239627838, 0.31908106803894043, 0.22305874526500702, 0.19752325117588043, 0.22612883150577545, 0.2095600664615631, 0.3497889041900635, 0.24804994463920593, 0.18516618013381958, 0.19408148527145386, 0.39791882038116455, 0.2503810226917267, 0.13216803967952728, 0.1892985850572586, 0.2676987946033478, 0.1970151662826538, 0.2100503146648407, 0.14209169149398804, 0.1863841563463211, 0.14308282732963562, 0.3121347725391388, 0.2626650333404541, 0.12991006672382355, 0.16739721596240997, 0.11467546224594116, 0.32184040546417236, 0.11009383946657181, 0.19579708576202393, 0.13897623121738434, 0.1434190273284912, 0.31635451316833496, 0.2833975851535797, 0.16934165358543396, 0.14960110187530518, 0.31888577342033386, 0.21167083084583282, 0.30760568380355835, 0.41453269124031067, 0.229119211435318, 0.1743178516626358, 0.2994597852230072, 0.24154801666736603, 0.09829962998628616, 0.1747366487979889, 0.29539310932159424, 
0.2035258263349533, 0.30196595191955566, 0.30807745456695557, 0.3411264419555664, 0.15731027722358704, 0.12981931865215302, 0.2966948449611664, 0.21025419235229492, 0.15241996943950653, 0.07024487853050232, 0.23431310057640076],\n [0.7926881909370422, 0.6943184733390808, 0.724916398525238, 0.725375771522522, 0.7073952555656433, 0.6848509907722473, 0.6693649291992188, 0.6957626342773438, 0.6717135906219482, 0.673644483089447, 0.6972193121910095, 0.7146568894386292, 0.6860081553459167, 0.7142075896263123, 0.6870988607406616, 0.6949147582054138, 0.6425225734710693, 0.6958219408988953, 0.6599481701850891, 0.7045291066169739, 0.6925467848777771, 0.7045005559921265, 0.686108410358429, 0.6769193410873413, 0.682338535785675, 0.6690472960472107, 0.6917040348052979, 0.6668921709060669, 0.7072445750236511, 0.7044053673744202, 0.718180239200592, 0.6744627356529236, 0.7052643299102783, 0.7005559802055359, 0.6911374926567078, 0.6969266533851624, 0.6899242997169495, 0.6865996718406677, 0.6805969476699829, 0.704833447933197, 0.6821359992027283, 0.6927057504653931, 0.677013635635376, 0.6619738936424255, 0.695767343044281, 0.6727330088615417, 0.688576877117157, 0.6700695157051086, 0.6829382181167603, 0.6628155708312988, 0.6508015990257263, 0.6543688178062439, 0.667511522769928, 0.6499519944190979, 0.6432656049728394, 0.6761520504951477, 0.7019364237785339, 0.6382509469985962, 0.636896550655365, 0.6385931968688965, 0.620511531829834, 0.6146273612976074, 0.6234485507011414, 0.5818889737129211, 0.5527447462081909, 0.5901055932044983, 0.5930953621864319, 0.5530558228492737, 0.5988510251045227, 0.5692178606987, 0.5622672438621521, 0.5097872614860535, 0.5923498272895813, 0.5072301030158997, 0.6285861730575562, 0.5253883600234985, 0.506665825843811, 0.6181252002716064, 0.5479302406311035, 0.4898171126842499, 0.4825979173183441, 0.5096496343612671, 0.47084715962409973, 0.38744720816612244, 0.5614428520202637, 0.4947513937950134, 0.3650178611278534, 0.4312703013420105, 0.4307132065296173, 
0.3908964991569519, 0.49059590697288513, 0.4534572660923004, 0.440878689289093, 0.4681662321090698, 0.43900272250175476, 0.4869661033153534, 0.47630172967910767, 0.3929968774318695, 0.3844168484210968, 0.5419467091560364, 0.33349668979644775, 0.3804889917373657, 0.4053753614425659, 0.3849819600582123, 0.5149462819099426, 0.4681555926799774, 0.39894184470176697, 0.4161919355392456, 0.5927491188049316, 0.39550545811653137, 0.4547486901283264, 0.5225046277046204, 0.34957191348075867, 0.45948532223701477, 0.37251201272010803, 0.4916895031929016, 0.295067697763443, 0.4786352217197418, 0.421528697013855, 0.31180649995803833, 0.28369444608688354, 0.2648847699165344, 0.35451003909111023, 0.43641865253448486, 0.36730897426605225, 0.2720189392566681, 0.33152616024017334, 0.2540571093559265, 0.22555945813655853, 0.46975404024124146, 0.3541400134563446, 0.3469198942184448, 0.2294202744960785, 0.36853235960006714, 0.3760600984096527, 0.34043994545936584, 0.2598644196987152, 0.24092306196689606, 0.2868442237377167, 0.18181255459785461, 0.26207858324050903, 0.2916255593299866, 0.19571438431739807, 0.35200226306915283, 0.496856153011322, 0.3683459162712097, 0.27670279145240784, 0.1787511110305786, 0.5051004886627197, 0.3936381936073303, 0.15478552877902985, 0.5155048370361328, 0.3085480034351349, 0.4393378794193268, 0.26719388365745544, 0.33330830931663513, 0.39656341075897217, 0.31738728284835815, 0.33005809783935547, 0.2064027637243271, 0.15768247842788696, 0.35187432169914246, 0.2247389853000641, 0.43645012378692627, 0.6171329021453857, 0.3514723479747772, 0.31207093596458435, 0.236627995967865, 0.28743186593055725, 0.19466204941272736, 0.30667591094970703, 0.2142365574836731, 0.2447064220905304, 0.402235746383667, 0.4022824168205261, 0.409971684217453, 0.2473350167274475, 0.26397138833999634, 0.31409120559692383, 0.34682631492614746, 0.26574432849884033, 0.4209563136100769, 0.23204396665096283, 0.1841460019350052, 0.3346189260482788, 0.2229692041873932, 0.21979694068431854, 
0.1793777048587799, 0.43134182691574097, 0.23858889937400818, 0.15659116208553314, 0.22294828295707703, 0.1925022304058075, 0.31423401832580566, 0.09654498845338821, 0.3890306055545807, 0.17093206942081451, 0.18873277306556702, 0.2963242828845978, 0.17343668639659882]])\nCNN_ae8_FASHION = np.array([[0.7195901274681091, 0.6704943180084229, 0.7085641622543335, 0.6748741269111633, 0.7423946857452393, 0.7234722375869751, 0.6986007690429688, 0.6960390210151672, 0.7012070417404175, 0.6947640180587769, 0.6792826056480408, 0.7239813804626465, 0.6761874556541443, 0.6902806758880615, 0.6771406531333923, 0.6850265264511108, 0.692349910736084, 0.6808741092681885, 0.6753465533256531, 0.6633909344673157, 0.6686793565750122, 0.6637576222419739, 0.680237889289856, 0.654888391494751, 0.6780327558517456, 0.6408708095550537, 0.6315723061561584, 0.6431190371513367, 0.6341533064842224, 0.6376373767852783, 0.6409605145454407, 0.6432655453681946, 0.6013216972351074, 0.6401882171630859, 0.6355386972427368, 0.5561983585357666, 0.5904024839401245, 0.6024128198623657, 0.5541431903839111, 0.575777530670166, 0.5260757803916931, 0.5354135036468506, 0.44182831048965454, 0.4903651475906372, 0.4765366017818451, 0.4635029733181, 0.5017414093017578, 0.4560497999191284, 0.4651477336883545, 0.4632211923599243, 0.5207407474517822, 0.47012707591056824, 0.35573041439056396, 0.40080904960632324, 0.5226706266403198, 0.3614789843559265, 0.3670787811279297, 0.4320698082447052, 0.4483892321586609, 0.3988180160522461, 0.44299742579460144, 0.4347727596759796, 0.2980192303657532, 0.3726247549057007, 0.42988693714141846, 0.39973509311676025, 0.34497734904289246, 0.39063209295272827, 0.3743012249469757, 0.3100280165672302, 0.3662980794906616, 0.3414981961250305, 0.32001861929893494, 0.3834022283554077, 0.23791366815567017, 0.30651259422302246, 0.2968301773071289, 0.3293622136116028, 0.1965251863002777, 0.3719707429409027, 0.3272671401500702, 0.24777932465076447, 0.3863266110420227, 0.24999620020389557, 
0.21851596236228943, 0.21008063852787018, 0.3156169652938843, 0.3615129590034485, 0.2176506221294403, 0.19833393394947052, 0.3163139820098877, 0.3545341491699219, 0.19693829119205475, 0.28649449348449707, 0.31108948588371277, 0.2429426908493042, 0.20192016661167145, 0.21015271544456482, 0.2626788020133972, 0.3365514278411865, 0.2673414647579193, 0.09546613693237305, 0.12053285539150238, 0.17347104847431183, 0.23780611157417297, 0.24000667035579681, 0.24722233414649963, 0.23631177842617035, 0.24386261403560638, 0.16348625719547272, 0.18626368045806885, 0.23263885080814362, 0.12030680477619171, 0.18763351440429688, 0.06662426143884659, 0.2510358393192291, 0.35354670882225037, 0.22257286310195923, 0.24602772295475006, 0.10453713685274124, 0.185865119099617, 0.120824433863163, 0.16575820744037628, 0.16989396512508392, 0.06567985564470291, 0.4476766586303711, 0.13566845655441284, 0.2541764974594116, 0.16894878447055817, 0.31662842631340027, 0.20940165221691132, 0.20982550084590912, 0.2824331521987915, 0.2067907750606537, 0.1581425666809082, 0.2816847264766693, 0.20116807520389557, 0.1924077868461609, 0.17162162065505981, 0.1839713156223297, 0.13327762484550476, 0.12191877514123917, 0.08460020273923874, 0.142286017537117, 0.22156421840190887, 0.1368444859981537, 0.08621849864721298, 0.427219033241272, 0.0529838465154171, 0.09679907560348511, 0.1012415811419487, 0.10245517641305923, 0.31773877143859863, 0.21166706085205078, 0.06600738316774368, 0.12189425528049469, 0.1269007921218872, 0.19244657456874847, 0.27885836362838745, 0.05483828857541084, 0.21648481488227844, 0.20490846037864685, 0.07530874758958817, 0.2320171296596527, 0.02713399939239025, 0.13403251767158508, 0.2914361357688904, 0.07058202475309372, 0.19688743352890015, 0.14317165315151215, 0.3330090343952179, 0.0591675229370594, 0.19346661865711212, 0.13119332492351532, 0.07487566769123077, 0.16602414846420288, 0.016024962067604065, 0.1277586966753006, 0.6630570292472839, 0.2762067914009094, 
0.11343107372522354, 0.2670533359050751, 0.11601337790489197, 0.32291847467422485, 0.1735117882490158, 0.11186012625694275, 0.0449039451777935, 0.12063135951757431, 0.09390923380851746, 0.08387088775634766, 0.07510893046855927, 0.11414384096860886, 0.15171335637569427, 0.08825315535068512, 0.050439488142728806, 0.34374308586120605, 0.21407325565814972, 0.18398113548755646, 0.10786985605955124, 0.1447535753250122],\n [0.7516704797744751, 0.7065739631652832, 0.7177786231040955, 0.6988161206245422, 0.6873100996017456, 0.6915654540061951, 0.6718673706054688, 0.6979731917381287, 0.7006716728210449, 0.6793920993804932, 0.7063331007957458, 0.7130346894264221, 0.7032034993171692, 0.6853010058403015, 0.7071249485015869, 0.6858034729957581, 0.6778329610824585, 0.6852532029151917, 0.6957749724388123, 0.7008402347564697, 0.6859275102615356, 0.6807461380958557, 0.7049537897109985, 0.6784515976905823, 0.6917150616645813, 0.6885970234870911, 0.6869394779205322, 0.6769051551818848, 0.6768056750297546, 0.6757507920265198, 0.6661334037780762, 0.6716984510421753, 0.6569916605949402, 0.6858950853347778, 0.6887277364730835, 0.6717148423194885, 0.6754504442214966, 0.653988778591156, 0.6826973557472229, 0.6768375635147095, 0.6716998815536499, 0.6565698981285095, 0.654717206954956, 0.652632474899292, 0.6251044273376465, 0.6333938241004944, 0.671023964881897, 0.6248869895935059, 0.6434370279312134, 0.6406496167182922, 0.6442137360572815, 0.6408711075782776, 0.6192270517349243, 0.6223974823951721, 0.6360308527946472, 0.6417527198791504, 0.620495617389679, 0.6222422122955322, 0.6289806962013245, 0.6184882521629333, 0.6182474493980408, 0.6034700274467468, 0.5960604548454285, 0.6084900498390198, 0.6381043791770935, 0.5495508313179016, 0.6009147763252258, 0.6157949566841125, 0.6300976872444153, 0.5679442882537842, 0.6345940232276917, 0.5627537369728088, 0.5420206189155579, 0.6045297384262085, 0.5437117218971252, 0.6067989468574524, 0.5513220429420471, 0.5556031465530396, 0.5533859133720398, 
0.5201352834701538, 0.5419917106628418, 0.5466240644454956, 0.6238994002342224, 0.5992093682289124, 0.5498059988021851, 0.5422661900520325, 0.539126992225647, 0.5893562436103821, 0.5736145973205566, 0.5510547161102295, 0.5538250207901001, 0.5196876525878906, 0.5331659317016602, 0.4861413538455963, 0.5020370483398438, 0.5092563033103943, 0.5084990859031677, 0.5075024962425232, 0.5118969678878784, 0.5855584144592285, 0.5174964070320129, 0.5934208631515503, 0.4917559325695038, 0.5471442937850952, 0.5069800615310669, 0.46246537566185, 0.5390971899032593, 0.44594845175743103, 0.42060670256614685, 0.443085253238678, 0.43953749537467957, 0.42830604314804077, 0.4746414124965668, 0.3671223521232605, 0.4509015679359436, 0.5123873353004456, 0.40507256984710693, 0.5037357211112976, 0.3678974211215973, 0.5298925638198853, 0.5347847938537598, 0.3994475305080414, 0.4885033369064331, 0.41977837681770325, 0.4458164870738983, 0.36221379041671753, 0.49601128697395325, 0.3235497772693634, 0.37650465965270996, 0.4012485444545746, 0.3742760419845581, 0.3742140233516693, 0.345962792634964, 0.33293259143829346, 0.3427349030971527, 0.38441479206085205, 0.42992955446243286, 0.43352600932121277, 0.35998740792274475, 0.3864070773124695, 0.2941230535507202, 0.362151563167572, 0.391042023897171, 0.4633826017379761, 0.3288975656032562, 0.49078360199928284, 0.3693424165248871, 0.3894713521003723, 0.40827667713165283, 0.3419622480869293, 0.4581927955150604, 0.667818009853363, 0.3283355236053467, 0.3829954266548157, 0.5511351227760315, 0.3844967186450958, 0.2906494438648224, 0.34944263100624084, 0.34513264894485474, 0.44616225361824036, 0.4089508354663849, 0.28983834385871887, 0.3065376281738281, 0.30819642543792725, 0.25706416368484497, 0.31127095222473145, 0.3219928443431854, 0.4300624132156372, 0.2786441743373871, 0.25064653158187866, 0.4192938506603241, 0.3501150906085968, 0.3980068266391754, 0.2929675579071045, 0.40567848086357117, 0.3111266791820526, 0.2616802155971527, 0.22785893082618713, 
0.20667234063148499, 0.2801305651664734, 0.45084649324417114, 0.3886106610298157, 0.33633705973625183, 0.467225581407547, 0.22317172586917877, 0.3375663757324219, 0.26255837082862854, 0.4577648937702179, 0.3446107506752014, 0.5412831902503967, 0.11619064211845398, 0.31177616119384766, 0.27549871802330017, 0.16478247940540314, 0.42012864351272583, 0.22515465319156647, 0.3135913610458374, 0.42616692185401917, 0.22686177492141724, 0.4207249581813812],\n [0.6817137002944946, 0.6615646481513977, 0.6366598010063171, 0.6411684155464172, 0.645469069480896, 0.6509441137313843, 0.6438326239585876, 0.6066687107086182, 0.6154254674911499, 0.554307758808136, 0.5890083312988281, 0.6674041152000427, 0.6036184430122375, 0.5637800693511963, 0.5705496668815613, 0.5692859888076782, 0.5476226210594177, 0.5290272831916809, 0.5432417392730713, 0.4941267669200897, 0.4748789966106415, 0.5766087770462036, 0.5181878805160522, 0.5513178706169128, 0.4901775121688843, 0.42888548970222473, 0.5532817840576172, 0.4343584477901459, 0.42852726578712463, 0.43104323744773865, 0.39308100938796997, 0.39998260140419006, 0.389909029006958, 0.3633889853954315, 0.49101540446281433, 0.32019343972206116, 0.499344140291214, 0.34450027346611023, 0.35798752307891846, 0.41341838240623474, 0.3434102237224579, 0.4260443150997162, 0.30714330077171326, 0.2929210364818573, 0.3473846912384033, 0.31607702374458313, 0.36865103244781494, 0.4452957808971405, 0.24939660727977753, 0.3227460980415344, 0.2124432772397995, 0.47893908619880676, 0.24765804409980774, 0.2755025029182434, 0.2895154654979706, 0.3780994415283203, 0.22080828249454498, 0.2891670763492584, 0.46621865034103394, 0.14434616267681122, 0.27163875102996826, 0.2163638472557068, 0.18264782428741455, 0.20815429091453552, 0.27392908930778503, 0.14814500510692596, 0.22589942812919617, 0.2194739580154419, 0.17249436676502228, 0.1415621042251587, 0.20570795238018036, 0.11196053773164749, 0.10553021728992462, 0.1396767646074295, 0.21743622422218323, 
0.20948007702827454, 0.22955062985420227, 0.2612748146057129, 0.20618678629398346, 0.2837976813316345, 0.3249105215072632, 0.1430315375328064, 0.14132623374462128, 0.19811300933361053, 0.11860231310129166, 0.09477400779724121, 0.10301287472248077, 0.16514348983764648, 0.19341720640659332, 0.10124733299016953, 0.07803380489349365, 0.1750197410583496, 0.116635262966156, 0.22294554114341736, 0.12197713553905487, 0.20078353583812714, 0.28982535004615784, 0.12285526096820831, 0.17577600479125977, 0.10296636819839478, 0.30473577976226807, 0.1885652393102646, 0.21657711267471313, 0.19357189536094666, 0.28389960527420044, 0.1601661890745163, 0.12174772471189499, 0.10253459960222244, 0.2284347116947174, 0.2323991060256958, 0.24133162200450897, 0.04143475368618965, 0.06370050460100174, 0.1283499300479889, 0.15657056868076324, 0.2104494273662567, 0.09984733909368515, 0.42283058166503906, 0.0659141018986702, 0.07644255459308624, 0.232825368642807, 0.19313445687294006, 0.07657952606678009, 0.11364288628101349, 0.26721248030662537, 0.07296297699213028, 0.0989551842212677, 0.11556176841259003, 0.09409289062023163, 0.10922737419605255, 0.10793822258710861, 0.15487068891525269, 0.2401411235332489, 0.15316109359264374, 0.1573168933391571, 0.04519626125693321, 0.03717436641454697, 0.17760318517684937, 0.03483760729432106, 0.08435384929180145, 0.05051611736416817, 0.06360103189945221, 0.04908892139792442, 0.057705726474523544, 0.01984848640859127, 0.06584611535072327, 0.020403260365128517, 0.15538044273853302, 0.11682464927434921, 0.12702828645706177, 0.06731581687927246, 0.06398048996925354, 0.0800272524356842, 0.137851282954216, 0.07310399413108826, 0.07777485251426697, 0.038942813873291016, 0.13675454258918762, 0.2398657202720642, 0.11021780222654343, 0.24431468546390533, 0.10099349915981293, 0.09677542746067047, 0.3201201558113098, 0.0940764993429184, 0.1450691521167755, 0.06352393329143524, 0.3598504960536957, 0.14286644756793976, 0.10099989920854568, 0.03265047073364258, 
0.19711226224899292, 0.19432342052459717, 0.0424191914498806, 0.3160055875778198, 0.1956709623336792, 0.1950051188468933, 0.24435091018676758, 0.08932845294475555, 0.33373069763183594, 0.26101240515708923, 0.16131366789340973, 0.37541016936302185, 0.20479139685630798, 0.2147551029920578, 0.07293548434972763, 0.03892720490694046, 0.06481657922267914, 0.264176607131958, 0.06397152692079544, 0.13802307844161987, 0.03463400527834892, 0.07504448294639587, 0.04797868803143501, 0.08116436004638672, 0.053655315190553665, 0.07981380820274353, 0.06644740700721741, 0.03914301097393036, 0.06399258971214294],\n [0.6746343374252319, 0.5994893908500671, 0.7110544443130493, 0.7393292188644409, 0.6624982953071594, 0.6827334761619568, 0.6996858716011047, 0.6961559057235718, 0.6780551671981812, 0.6631280779838562, 0.6838871240615845, 0.6593284010887146, 0.6504503488540649, 0.6337440609931946, 0.6859282851219177, 0.6528726816177368, 0.6486431360244751, 0.6363474130630493, 0.6233773827552795, 0.6510074138641357, 0.6497734785079956, 0.6415407061576843, 0.6150392293930054, 0.5840941071510315, 0.5873358249664307, 0.5971943140029907, 0.6007674932479858, 0.5888298153877258, 0.5983092784881592, 0.5701997876167297, 0.5564712285995483, 0.5428546667098999, 0.5997887253761292, 0.5544871687889099, 0.5505860447883606, 0.4994127154350281, 0.5049074292182922, 0.5528979301452637, 0.5944012999534607, 0.5127501487731934, 0.4849454462528229, 0.4318809509277344, 0.4249861240386963, 0.45798733830451965, 0.5179145932197571, 0.4615478515625, 0.45832541584968567, 0.441960871219635, 0.46994778513908386, 0.4249405264854431, 0.40832093358039856, 0.41338250041007996, 0.41194993257522583, 0.344819575548172, 0.3802470266819, 0.355091392993927, 0.3808969259262085, 0.4201855957508087, 0.4025615453720093, 0.377044141292572, 0.4238446056842804, 0.30919671058654785, 0.3147760033607483, 0.48465773463249207, 0.3286767899990082, 0.35047444701194763, 0.36755234003067017, 0.34414613246917725, 0.38067111372947693, 
0.28744155168533325, 0.26492413878440857, 0.416656494140625, 0.2676748037338257, 0.3535442650318146, 0.3660116493701935, 0.33142322301864624, 0.38479140400886536, 0.3556807041168213, 0.28546205163002014, 0.20903387665748596, 0.37447938323020935, 0.26823097467422485, 0.2812305986881256, 0.20874153077602386, 0.23323333263397217, 0.25920745730400085, 0.26707255840301514, 0.18040616810321808, 0.1934993714094162, 0.27906760573387146, 0.17230893671512604, 0.18538755178451538, 0.23255807161331177, 0.23377887904644012, 0.2035377472639084, 0.12283509224653244, 0.22045131027698517, 0.35115188360214233, 0.31356412172317505, 0.19090427458286285, 0.24820321798324585, 0.30505114793777466, 0.2598440647125244, 0.30029401183128357, 0.31699877977371216, 0.2731929123401642, 0.25009480118751526, 0.28404226899147034, 0.15194882452487946, 0.31830933690071106, 0.24852900207042694, 0.33830708265304565, 0.13149519264698029, 0.22396619617938995, 0.25475791096687317, 0.31433913111686707, 0.16240423917770386, 0.26831722259521484, 0.2824966013431549, 0.3126300871372223, 0.19898945093154907, 0.2144409716129303, 0.2768673598766327, 0.2780621349811554, 0.3201328217983246, 0.3169085383415222, 0.2929736077785492, 0.21536393463611603, 0.16173334419727325, 0.24256230890750885, 0.175984188914299, 0.10654361546039581, 0.22850938141345978, 0.18687604367733002, 0.2144414782524109, 0.23117868602275848, 0.25243547558784485, 0.11712697148323059, 0.19230245053768158, 0.2400604635477066, 0.2569529116153717, 0.312290221452713, 0.16537722945213318, 0.179532989859581, 0.3347550630569458, 0.1307370960712433, 0.2694743573665619, 0.11377780884504318, 0.1052650734782219, 0.16692954301834106, 0.1155405342578888, 0.07838231325149536, 0.4032217860221863, 0.1905570775270462, 0.19953244924545288, 0.20159876346588135, 0.142192080616951, 0.1171998530626297, 0.19267511367797852, 0.14102491736412048, 0.13060443103313446, 0.12704327702522278, 0.20596915483474731, 0.12436442077159882, 0.12645657360553741, 0.1509505808353424, 
0.12074847519397736, 0.1492425501346588, 0.21165935695171356, 0.19639554619789124, 0.08970772475004196, 0.095691978931427, 0.35680338740348816, 0.05773213878273964, 0.13692854344844818, 0.14343683421611786, 0.12966778874397278, 0.06586255878210068, 0.07469162344932556, 0.06036107987165451, 0.11594424396753311, 0.4168913960456848, 0.16323938965797424, 0.14433780312538147, 0.1365155428647995, 0.376469224691391, 0.27118414640426636, 0.14511579275131226, 0.1872788369655609, 0.09430745244026184, 0.3316349685192108, 0.23601409792900085, 0.308277428150177, 0.20528706908226013, 0.21814756095409393, 0.12882357835769653, 0.12432215362787247, 0.19387319684028625, 0.2740398049354553, 0.08346080780029297],\n [0.7153546810150146, 0.7030813694000244, 0.7277228832244873, 0.7142347693443298, 0.7085127234458923, 0.6949998736381531, 0.692003607749939, 0.6754096746444702, 0.657611608505249, 0.6963468194007874, 0.6677248477935791, 0.6900421380996704, 0.661736249923706, 0.6443219184875488, 0.7006863355636597, 0.6857239603996277, 0.6794114112854004, 0.6731691956520081, 0.6473671793937683, 0.6960208415985107, 0.6825878620147705, 0.6500006318092346, 0.6574584245681763, 0.6501126289367676, 0.6131012439727783, 0.6935343146324158, 0.7094688415527344, 0.6681602001190186, 0.6454523205757141, 0.636630654335022, 0.6244427561759949, 0.6185072064399719, 0.6164110898971558, 0.6186311841011047, 0.5919642448425293, 0.5645604133605957, 0.5882812738418579, 0.5667659044265747, 0.5640703439712524, 0.549278736114502, 0.5574508905410767, 0.6015141010284424, 0.501664400100708, 0.5850852727890015, 0.5755879878997803, 0.536745548248291, 0.5546498894691467, 0.5764511823654175, 0.5864372849464417, 0.5200899839401245, 0.49705734848976135, 0.5372902750968933, 0.48795628547668457, 0.5725132822990417, 0.5382624864578247, 0.47224634885787964, 0.4909862279891968, 0.5289753675460815, 0.4255463778972626, 0.3725813627243042, 0.33090078830718994, 0.45349574089050293, 0.42186012864112854, 0.47551968693733215, 
0.43544894456863403, 0.4177694618701935, 0.5214508771896362, 0.4032882750034332, 0.4547790586948395, 0.3960264325141907, 0.3787967562675476, 0.37195834517478943, 0.42335158586502075, 0.4452804625034332, 0.335038959980011, 0.3983038663864136, 0.3214837908744812, 0.30753886699676514, 0.3403218388557434, 0.43443939089775085, 0.3027626872062683, 0.29049965739250183, 0.44440555572509766, 0.36881133913993835, 0.2666590213775635, 0.2836593985557556, 0.28922998905181885, 0.37258923053741455, 0.34263792634010315, 0.460660845041275, 0.29289671778678894, 0.29883694648742676, 0.41615164279937744, 0.3813245892524719, 0.3746056854724884, 0.2689972519874573, 0.20711852610111237, 0.3412376046180725, 0.2648549973964691, 0.3766975402832031, 0.2729026675224304, 0.17174845933914185, 0.29543593525886536, 0.2749519646167755, 0.20897561311721802, 0.2930591106414795, 0.21833495795726776, 0.1300906240940094, 0.49851280450820923, 0.4526282250881195, 0.1681995838880539, 0.5442366600036621, 0.38133761286735535, 0.1732625961303711, 0.24919794499874115, 0.33048442006111145, 0.4457699954509735, 0.17145465314388275, 0.24569128453731537, 0.3565487563610077, 0.14851173758506775, 0.3678271770477295, 0.16271790862083435, 0.3586886525154114, 0.11294788122177124, 0.22315840423107147, 0.18739932775497437, 0.2987673580646515, 0.40438830852508545, 0.2042427957057953, 0.3004828095436096, 0.21913978457450867, 0.489410936832428, 0.2686103880405426, 0.3169401288032532, 0.24966201186180115, 0.3921944499015808, 0.26718953251838684, 0.2858505845069885, 0.30226385593414307, 0.18251407146453857, 0.2986615002155304, 0.2402917891740799, 0.3263099789619446, 0.44293174147605896, 0.250706285238266, 0.2317279428243637, 0.15525202453136444, 0.31748539209365845, 0.39082562923431396, 0.25867778062820435, 0.32304462790489197, 0.2843267619609833, 0.2764100730419159, 0.25813835859298706, 0.21175424754619598, 0.29446712136268616, 0.3892250955104828, 0.24740539491176605, 0.1892499327659607, 0.15800794959068298, 
0.3565944731235504, 0.2948969900608063, 0.17167317867279053, 0.27770310640335083, 0.28338414430618286, 0.20137113332748413, 0.31037619709968567, 0.18456566333770752, 0.20611052215099335, 0.20140905678272247, 0.3342708945274353, 0.21534372866153717, 0.17464739084243774, 0.38221195340156555, 0.3044288158416748, 0.23055267333984375, 0.1360432654619217, 0.40757209062576294, 0.25235721468925476, 0.2324773073196411, 0.41178658604621887, 0.5970068573951721, 0.16127561032772064, 0.17991848289966583, 0.1899176836013794, 0.23723144829273224, 0.16540923714637756, 0.24532297253608704, 0.3357166647911072, 0.24552012979984283, 0.193105086684227, 0.2767459750175476, 0.16189640760421753, 0.2370912879705429, 0.32571518421173096, 0.34836989641189575, 0.2187020480632782, 0.4109748899936676, 0.16257040202617645]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_ae8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_ae8_FASHION = np.array([[0.7195901274681091, 0.6704943180084229, 0.7085641622543335, 0.6748741269111633, 0.7423946857452393, 0.7234722375869751, 0.6986007690429688, 0.6960390210151672, 0.7012070417404175, 0.6947640180587769, 0.6792826056480408, 0.7239813804626465, 0.6761874556541443, 0.6902806758880615, 0.6771406531333923, 0.6850265264511108, 0.692349910736084, 0.6808741092681885, 0.6753465533256531, 0.6633909344673157, 0.6686793565750122, 0.6637576222419739, 0.680237889289856, 0.654888391494751, 0.6780327558517456, 0.6408708095550537, 0.6315723061561584, 0.6431190371513367, 0.6341533064842224, 0.6376373767852783, 0.6409605145454407, 0.6432655453681946, 0.6013216972351074, 0.6401882171630859, 0.6355386972427368, 0.5561983585357666, 0.5904024839401245, 0.6024128198623657, 
0.5541431903839111, 0.575777530670166, 0.5260757803916931, 0.5354135036468506, 0.44182831048965454, 0.4903651475906372, 0.4765366017818451, 0.4635029733181, 0.5017414093017578, 0.4560497999191284, 0.4651477336883545, 0.4632211923599243, 0.5207407474517822, 0.47012707591056824, 0.35573041439056396, 0.40080904960632324, 0.5226706266403198, 0.3614789843559265, 0.3670787811279297, 0.4320698082447052, 0.4483892321586609, 0.3988180160522461, 0.44299742579460144, 0.4347727596759796, 0.2980192303657532, 0.3726247549057007, 0.42988693714141846, 0.39973509311676025, 0.34497734904289246, 0.39063209295272827, 0.3743012249469757, 0.3100280165672302, 0.3662980794906616, 0.3414981961250305, 0.32001861929893494, 0.3834022283554077, 0.23791366815567017, 0.30651259422302246, 0.2968301773071289, 0.3293622136116028, 0.1965251863002777, 0.3719707429409027, 0.3272671401500702, 0.24777932465076447, 0.3863266110420227, 0.24999620020389557, 0.21851596236228943, 0.21008063852787018, 0.3156169652938843, 0.3615129590034485, 0.2176506221294403, 0.19833393394947052, 0.3163139820098877, 0.3545341491699219, 0.19693829119205475, 0.28649449348449707, 0.31108948588371277, 0.2429426908493042, 0.20192016661167145, 0.21015271544456482, 0.2626788020133972, 0.3365514278411865, 0.2673414647579193, 0.09546613693237305, 0.12053285539150238, 0.17347104847431183, 0.23780611157417297, 0.24000667035579681, 0.24722233414649963, 0.23631177842617035, 0.24386261403560638, 0.16348625719547272, 0.18626368045806885, 0.23263885080814362, 0.12030680477619171, 0.18763351440429688, 0.06662426143884659, 0.2510358393192291, 0.35354670882225037, 0.22257286310195923, 0.24602772295475006, 0.10453713685274124, 0.185865119099617, 0.120824433863163, 0.16575820744037628, 0.16989396512508392, 0.06567985564470291, 0.4476766586303711, 0.13566845655441284, 0.2541764974594116, 0.16894878447055817, 0.31662842631340027, 0.20940165221691132, 0.20982550084590912, 0.2824331521987915, 0.2067907750606537, 0.1581425666809082, 
0.2816847264766693, 0.20116807520389557, 0.1924077868461609, 0.17162162065505981, 0.1839713156223297, 0.13327762484550476, 0.12191877514123917, 0.08460020273923874, 0.142286017537117, 0.22156421840190887, 0.1368444859981537, 0.08621849864721298, 0.427219033241272, 0.0529838465154171, 0.09679907560348511, 0.1012415811419487, 0.10245517641305923, 0.31773877143859863, 0.21166706085205078, 0.06600738316774368, 0.12189425528049469, 0.1269007921218872, 0.19244657456874847, 0.27885836362838745, 0.05483828857541084, 0.21648481488227844, 0.20490846037864685, 0.07530874758958817, 0.2320171296596527, 0.02713399939239025, 0.13403251767158508, 0.2914361357688904, 0.07058202475309372, 0.19688743352890015, 0.14317165315151215, 0.3330090343952179, 0.0591675229370594, 0.19346661865711212, 0.13119332492351532, 0.07487566769123077, 0.16602414846420288, 0.016024962067604065, 0.1277586966753006, 0.6630570292472839, 0.2762067914009094, 0.11343107372522354, 0.2670533359050751, 0.11601337790489197, 0.32291847467422485, 0.1735117882490158, 0.11186012625694275, 0.0449039451777935, 0.12063135951757431, 0.09390923380851746, 0.08387088775634766, 0.07510893046855927, 0.11414384096860886, 0.15171335637569427, 0.08825315535068512, 0.050439488142728806, 0.34374308586120605, 0.21407325565814972, 0.18398113548755646, 0.10786985605955124, 0.1447535753250122],\n [0.7516704797744751, 0.7065739631652832, 0.7177786231040955, 0.6988161206245422, 0.6873100996017456, 0.6915654540061951, 0.6718673706054688, 0.6979731917381287, 0.7006716728210449, 0.6793920993804932, 0.7063331007957458, 0.7130346894264221, 0.7032034993171692, 0.6853010058403015, 0.7071249485015869, 0.6858034729957581, 0.6778329610824585, 0.6852532029151917, 0.6957749724388123, 0.7008402347564697, 0.6859275102615356, 0.6807461380958557, 0.7049537897109985, 0.6784515976905823, 0.6917150616645813, 0.6885970234870911, 0.6869394779205322, 0.6769051551818848, 0.6768056750297546, 0.6757507920265198, 0.6661334037780762, 0.6716984510421753, 
0.6569916605949402, 0.6858950853347778, 0.6887277364730835, 0.6717148423194885, 0.6754504442214966, 0.653988778591156, 0.6826973557472229, 0.6768375635147095, 0.6716998815536499, 0.6565698981285095, 0.654717206954956, 0.652632474899292, 0.6251044273376465, 0.6333938241004944, 0.671023964881897, 0.6248869895935059, 0.6434370279312134, 0.6406496167182922, 0.6442137360572815, 0.6408711075782776, 0.6192270517349243, 0.6223974823951721, 0.6360308527946472, 0.6417527198791504, 0.620495617389679, 0.6222422122955322, 0.6289806962013245, 0.6184882521629333, 0.6182474493980408, 0.6034700274467468, 0.5960604548454285, 0.6084900498390198, 0.6381043791770935, 0.5495508313179016, 0.6009147763252258, 0.6157949566841125, 0.6300976872444153, 0.5679442882537842, 0.6345940232276917, 0.5627537369728088, 0.5420206189155579, 0.6045297384262085, 0.5437117218971252, 0.6067989468574524, 0.5513220429420471, 0.5556031465530396, 0.5533859133720398, 0.5201352834701538, 0.5419917106628418, 0.5466240644454956, 0.6238994002342224, 0.5992093682289124, 0.5498059988021851, 0.5422661900520325, 0.539126992225647, 0.5893562436103821, 0.5736145973205566, 0.5510547161102295, 0.5538250207901001, 0.5196876525878906, 0.5331659317016602, 0.4861413538455963, 0.5020370483398438, 0.5092563033103943, 0.5084990859031677, 0.5075024962425232, 0.5118969678878784, 0.5855584144592285, 0.5174964070320129, 0.5934208631515503, 0.4917559325695038, 0.5471442937850952, 0.5069800615310669, 0.46246537566185, 0.5390971899032593, 0.44594845175743103, 0.42060670256614685, 0.443085253238678, 0.43953749537467957, 0.42830604314804077, 0.4746414124965668, 0.3671223521232605, 0.4509015679359436, 0.5123873353004456, 0.40507256984710693, 0.5037357211112976, 0.3678974211215973, 0.5298925638198853, 0.5347847938537598, 0.3994475305080414, 0.4885033369064331, 0.41977837681770325, 0.4458164870738983, 0.36221379041671753, 0.49601128697395325, 0.3235497772693634, 0.37650465965270996, 0.4012485444545746, 0.3742760419845581, 0.3742140233516693, 
0.345962792634964, 0.33293259143829346, 0.3427349030971527, 0.38441479206085205, 0.42992955446243286, 0.43352600932121277, 0.35998740792274475, 0.3864070773124695, 0.2941230535507202, 0.362151563167572, 0.391042023897171, 0.4633826017379761, 0.3288975656032562, 0.49078360199928284, 0.3693424165248871, 0.3894713521003723, 0.40827667713165283, 0.3419622480869293, 0.4581927955150604, 0.667818009853363, 0.3283355236053467, 0.3829954266548157, 0.5511351227760315, 0.3844967186450958, 0.2906494438648224, 0.34944263100624084, 0.34513264894485474, 0.44616225361824036, 0.4089508354663849, 0.28983834385871887, 0.3065376281738281, 0.30819642543792725, 0.25706416368484497, 0.31127095222473145, 0.3219928443431854, 0.4300624132156372, 0.2786441743373871, 0.25064653158187866, 0.4192938506603241, 0.3501150906085968, 0.3980068266391754, 0.2929675579071045, 0.40567848086357117, 0.3111266791820526, 0.2616802155971527, 0.22785893082618713, 0.20667234063148499, 0.2801305651664734, 0.45084649324417114, 0.3886106610298157, 0.33633705973625183, 0.467225581407547, 0.22317172586917877, 0.3375663757324219, 0.26255837082862854, 0.4577648937702179, 0.3446107506752014, 0.5412831902503967, 0.11619064211845398, 0.31177616119384766, 0.27549871802330017, 0.16478247940540314, 0.42012864351272583, 0.22515465319156647, 0.3135913610458374, 0.42616692185401917, 0.22686177492141724, 0.4207249581813812],\n [0.6817137002944946, 0.6615646481513977, 0.6366598010063171, 0.6411684155464172, 0.645469069480896, 0.6509441137313843, 0.6438326239585876, 0.6066687107086182, 0.6154254674911499, 0.554307758808136, 0.5890083312988281, 0.6674041152000427, 0.6036184430122375, 0.5637800693511963, 0.5705496668815613, 0.5692859888076782, 0.5476226210594177, 0.5290272831916809, 0.5432417392730713, 0.4941267669200897, 0.4748789966106415, 0.5766087770462036, 0.5181878805160522, 0.5513178706169128, 0.4901775121688843, 0.42888548970222473, 0.5532817840576172, 0.4343584477901459, 0.42852726578712463, 0.43104323744773865, 
0.39308100938796997, 0.39998260140419006, 0.389909029006958, 0.3633889853954315, 0.49101540446281433, 0.32019343972206116, 0.499344140291214, 0.34450027346611023, 0.35798752307891846, 0.41341838240623474, 0.3434102237224579, 0.4260443150997162, 0.30714330077171326, 0.2929210364818573, 0.3473846912384033, 0.31607702374458313, 0.36865103244781494, 0.4452957808971405, 0.24939660727977753, 0.3227460980415344, 0.2124432772397995, 0.47893908619880676, 0.24765804409980774, 0.2755025029182434, 0.2895154654979706, 0.3780994415283203, 0.22080828249454498, 0.2891670763492584, 0.46621865034103394, 0.14434616267681122, 0.27163875102996826, 0.2163638472557068, 0.18264782428741455, 0.20815429091453552, 0.27392908930778503, 0.14814500510692596, 0.22589942812919617, 0.2194739580154419, 0.17249436676502228, 0.1415621042251587, 0.20570795238018036, 0.11196053773164749, 0.10553021728992462, 0.1396767646074295, 0.21743622422218323, 0.20948007702827454, 0.22955062985420227, 0.2612748146057129, 0.20618678629398346, 0.2837976813316345, 0.3249105215072632, 0.1430315375328064, 0.14132623374462128, 0.19811300933361053, 0.11860231310129166, 0.09477400779724121, 0.10301287472248077, 0.16514348983764648, 0.19341720640659332, 0.10124733299016953, 0.07803380489349365, 0.1750197410583496, 0.116635262966156, 0.22294554114341736, 0.12197713553905487, 0.20078353583812714, 0.28982535004615784, 0.12285526096820831, 0.17577600479125977, 0.10296636819839478, 0.30473577976226807, 0.1885652393102646, 0.21657711267471313, 0.19357189536094666, 0.28389960527420044, 0.1601661890745163, 0.12174772471189499, 0.10253459960222244, 0.2284347116947174, 0.2323991060256958, 0.24133162200450897, 0.04143475368618965, 0.06370050460100174, 0.1283499300479889, 0.15657056868076324, 0.2104494273662567, 0.09984733909368515, 0.42283058166503906, 0.0659141018986702, 0.07644255459308624, 0.232825368642807, 0.19313445687294006, 0.07657952606678009, 0.11364288628101349, 0.26721248030662537, 0.07296297699213028, 0.0989551842212677, 
0.11556176841259003, 0.09409289062023163, 0.10922737419605255, 0.10793822258710861, 0.15487068891525269, 0.2401411235332489, 0.15316109359264374, 0.1573168933391571, 0.04519626125693321, 0.03717436641454697, 0.17760318517684937, 0.03483760729432106, 0.08435384929180145, 0.05051611736416817, 0.06360103189945221, 0.04908892139792442, 0.057705726474523544, 0.01984848640859127, 0.06584611535072327, 0.020403260365128517, 0.15538044273853302, 0.11682464927434921, 0.12702828645706177, 0.06731581687927246, 0.06398048996925354, 0.0800272524356842, 0.137851282954216, 0.07310399413108826, 0.07777485251426697, 0.038942813873291016, 0.13675454258918762, 0.2398657202720642, 0.11021780222654343, 0.24431468546390533, 0.10099349915981293, 0.09677542746067047, 0.3201201558113098, 0.0940764993429184, 0.1450691521167755, 0.06352393329143524, 0.3598504960536957, 0.14286644756793976, 0.10099989920854568, 0.03265047073364258, 0.19711226224899292, 0.19432342052459717, 0.0424191914498806, 0.3160055875778198, 0.1956709623336792, 0.1950051188468933, 0.24435091018676758, 0.08932845294475555, 0.33373069763183594, 0.26101240515708923, 0.16131366789340973, 0.37541016936302185, 0.20479139685630798, 0.2147551029920578, 0.07293548434972763, 0.03892720490694046, 0.06481657922267914, 0.264176607131958, 0.06397152692079544, 0.13802307844161987, 0.03463400527834892, 0.07504448294639587, 0.04797868803143501, 0.08116436004638672, 0.053655315190553665, 0.07981380820274353, 0.06644740700721741, 0.03914301097393036, 0.06399258971214294],\n [0.6746343374252319, 0.5994893908500671, 0.7110544443130493, 0.7393292188644409, 0.6624982953071594, 0.6827334761619568, 0.6996858716011047, 0.6961559057235718, 0.6780551671981812, 0.6631280779838562, 0.6838871240615845, 0.6593284010887146, 0.6504503488540649, 0.6337440609931946, 0.6859282851219177, 0.6528726816177368, 0.6486431360244751, 0.6363474130630493, 0.6233773827552795, 0.6510074138641357, 0.6497734785079956, 0.6415407061576843, 0.6150392293930054, 
0.5840941071510315, 0.5873358249664307, 0.5971943140029907, 0.6007674932479858, 0.5888298153877258, 0.5983092784881592, 0.5701997876167297, 0.5564712285995483, 0.5428546667098999, 0.5997887253761292, 0.5544871687889099, 0.5505860447883606, 0.4994127154350281, 0.5049074292182922, 0.5528979301452637, 0.5944012999534607, 0.5127501487731934, 0.4849454462528229, 0.4318809509277344, 0.4249861240386963, 0.45798733830451965, 0.5179145932197571, 0.4615478515625, 0.45832541584968567, 0.441960871219635, 0.46994778513908386, 0.4249405264854431, 0.40832093358039856, 0.41338250041007996, 0.41194993257522583, 0.344819575548172, 0.3802470266819, 0.355091392993927, 0.3808969259262085, 0.4201855957508087, 0.4025615453720093, 0.377044141292572, 0.4238446056842804, 0.30919671058654785, 0.3147760033607483, 0.48465773463249207, 0.3286767899990082, 0.35047444701194763, 0.36755234003067017, 0.34414613246917725, 0.38067111372947693, 0.28744155168533325, 0.26492413878440857, 0.416656494140625, 0.2676748037338257, 0.3535442650318146, 0.3660116493701935, 0.33142322301864624, 0.38479140400886536, 0.3556807041168213, 0.28546205163002014, 0.20903387665748596, 0.37447938323020935, 0.26823097467422485, 0.2812305986881256, 0.20874153077602386, 0.23323333263397217, 0.25920745730400085, 0.26707255840301514, 0.18040616810321808, 0.1934993714094162, 0.27906760573387146, 0.17230893671512604, 0.18538755178451538, 0.23255807161331177, 0.23377887904644012, 0.2035377472639084, 0.12283509224653244, 0.22045131027698517, 0.35115188360214233, 0.31356412172317505, 0.19090427458286285, 0.24820321798324585, 0.30505114793777466, 0.2598440647125244, 0.30029401183128357, 0.31699877977371216, 0.2731929123401642, 0.25009480118751526, 0.28404226899147034, 0.15194882452487946, 0.31830933690071106, 0.24852900207042694, 0.33830708265304565, 0.13149519264698029, 0.22396619617938995, 0.25475791096687317, 0.31433913111686707, 0.16240423917770386, 0.26831722259521484, 0.2824966013431549, 0.3126300871372223, 
0.19898945093154907, 0.2144409716129303, 0.2768673598766327, 0.2780621349811554, 0.3201328217983246, 0.3169085383415222, 0.2929736077785492, 0.21536393463611603, 0.16173334419727325, 0.24256230890750885, 0.175984188914299, 0.10654361546039581, 0.22850938141345978, 0.18687604367733002, 0.2144414782524109, 0.23117868602275848, 0.25243547558784485, 0.11712697148323059, 0.19230245053768158, 0.2400604635477066, 0.2569529116153717, 0.312290221452713, 0.16537722945213318, 0.179532989859581, 0.3347550630569458, 0.1307370960712433, 0.2694743573665619, 0.11377780884504318, 0.1052650734782219, 0.16692954301834106, 0.1155405342578888, 0.07838231325149536, 0.4032217860221863, 0.1905570775270462, 0.19953244924545288, 0.20159876346588135, 0.142192080616951, 0.1171998530626297, 0.19267511367797852, 0.14102491736412048, 0.13060443103313446, 0.12704327702522278, 0.20596915483474731, 0.12436442077159882, 0.12645657360553741, 0.1509505808353424, 0.12074847519397736, 0.1492425501346588, 0.21165935695171356, 0.19639554619789124, 0.08970772475004196, 0.095691978931427, 0.35680338740348816, 0.05773213878273964, 0.13692854344844818, 0.14343683421611786, 0.12966778874397278, 0.06586255878210068, 0.07469162344932556, 0.06036107987165451, 0.11594424396753311, 0.4168913960456848, 0.16323938965797424, 0.14433780312538147, 0.1365155428647995, 0.376469224691391, 0.27118414640426636, 0.14511579275131226, 0.1872788369655609, 0.09430745244026184, 0.3316349685192108, 0.23601409792900085, 0.308277428150177, 0.20528706908226013, 0.21814756095409393, 0.12882357835769653, 0.12432215362787247, 0.19387319684028625, 0.2740398049354553, 0.08346080780029297],\n [0.7153546810150146, 0.7030813694000244, 0.7277228832244873, 0.7142347693443298, 0.7085127234458923, 0.6949998736381531, 0.692003607749939, 0.6754096746444702, 0.657611608505249, 0.6963468194007874, 0.6677248477935791, 0.6900421380996704, 0.661736249923706, 0.6443219184875488, 0.7006863355636597, 0.6857239603996277, 0.6794114112854004, 
0.6731691956520081, 0.6473671793937683, 0.6960208415985107, 0.6825878620147705, 0.6500006318092346, 0.6574584245681763, 0.6501126289367676, 0.6131012439727783, 0.6935343146324158, 0.7094688415527344, 0.6681602001190186, 0.6454523205757141, 0.636630654335022, 0.6244427561759949, 0.6185072064399719, 0.6164110898971558, 0.6186311841011047, 0.5919642448425293, 0.5645604133605957, 0.5882812738418579, 0.5667659044265747, 0.5640703439712524, 0.549278736114502, 0.5574508905410767, 0.6015141010284424, 0.501664400100708, 0.5850852727890015, 0.5755879878997803, 0.536745548248291, 0.5546498894691467, 0.5764511823654175, 0.5864372849464417, 0.5200899839401245, 0.49705734848976135, 0.5372902750968933, 0.48795628547668457, 0.5725132822990417, 0.5382624864578247, 0.47224634885787964, 0.4909862279891968, 0.5289753675460815, 0.4255463778972626, 0.3725813627243042, 0.33090078830718994, 0.45349574089050293, 0.42186012864112854, 0.47551968693733215, 0.43544894456863403, 0.4177694618701935, 0.5214508771896362, 0.4032882750034332, 0.4547790586948395, 0.3960264325141907, 0.3787967562675476, 0.37195834517478943, 0.42335158586502075, 0.4452804625034332, 0.335038959980011, 0.3983038663864136, 0.3214837908744812, 0.30753886699676514, 0.3403218388557434, 0.43443939089775085, 0.3027626872062683, 0.29049965739250183, 0.44440555572509766, 0.36881133913993835, 0.2666590213775635, 0.2836593985557556, 0.28922998905181885, 0.37258923053741455, 0.34263792634010315, 0.460660845041275, 0.29289671778678894, 0.29883694648742676, 0.41615164279937744, 0.3813245892524719, 0.3746056854724884, 0.2689972519874573, 0.20711852610111237, 0.3412376046180725, 0.2648549973964691, 0.3766975402832031, 0.2729026675224304, 0.17174845933914185, 0.29543593525886536, 0.2749519646167755, 0.20897561311721802, 0.2930591106414795, 0.21833495795726776, 0.1300906240940094, 0.49851280450820923, 0.4526282250881195, 0.1681995838880539, 0.5442366600036621, 0.38133761286735535, 0.1732625961303711, 0.24919794499874115, 
0.33048442006111145, 0.4457699954509735, 0.17145465314388275, 0.24569128453731537, 0.3565487563610077, 0.14851173758506775, 0.3678271770477295, 0.16271790862083435, 0.3586886525154114, 0.11294788122177124, 0.22315840423107147, 0.18739932775497437, 0.2987673580646515, 0.40438830852508545, 0.2042427957057953, 0.3004828095436096, 0.21913978457450867, 0.489410936832428, 0.2686103880405426, 0.3169401288032532, 0.24966201186180115, 0.3921944499015808, 0.26718953251838684, 0.2858505845069885, 0.30226385593414307, 0.18251407146453857, 0.2986615002155304, 0.2402917891740799, 0.3263099789619446, 0.44293174147605896, 0.250706285238266, 0.2317279428243637, 0.15525202453136444, 0.31748539209365845, 0.39082562923431396, 0.25867778062820435, 0.32304462790489197, 0.2843267619609833, 0.2764100730419159, 0.25813835859298706, 0.21175424754619598, 0.29446712136268616, 0.3892250955104828, 0.24740539491176605, 0.1892499327659607, 0.15800794959068298, 0.3565944731235504, 0.2948969900608063, 0.17167317867279053, 0.27770310640335083, 0.28338414430618286, 0.20137113332748413, 0.31037619709968567, 0.18456566333770752, 0.20611052215099335, 0.20140905678272247, 0.3342708945274353, 0.21534372866153717, 0.17464739084243774, 0.38221195340156555, 0.3044288158416748, 0.23055267333984375, 0.1360432654619217, 0.40757209062576294, 0.25235721468925476, 0.2324773073196411, 0.41178658604621887, 0.5970068573951721, 0.16127561032772064, 0.17991848289966583, 0.1899176836013794, 0.23723144829273224, 0.16540923714637756, 0.24532297253608704, 0.3357166647911072, 0.24552012979984283, 0.193105086684227, 0.2767459750175476, 0.16189640760421753, 0.2370912879705429, 0.32571518421173096, 0.34836989641189575, 0.2187020480632782, 0.4109748899936676, 0.16257040202617645]])\nCNN_pca16_FASHION = np.array([[0.6731927394866943, 0.7146048545837402, 0.7230467200279236, 0.6912106275558472, 0.6896969676017761, 0.6904683709144592, 0.7217395901679993, 0.6933130621910095, 0.674804151058197, 0.6758207678794861, 0.7124243378639221, 
0.6624137759208679, 0.6685926914215088, 0.6979824304580688, 0.6782591938972473, 0.7127623558044434, 0.6770104765892029, 0.6767700910568237, 0.6640782356262207, 0.6772103309631348, 0.6772346496582031, 0.6630069017410278, 0.6676750183105469, 0.6695066690444946, 0.6444168090820312, 0.650453507900238, 0.6427541375160217, 0.6385244131088257, 0.630479633808136, 0.6545811295509338, 0.6039774417877197, 0.6195574402809143, 0.6535524129867554, 0.662842869758606, 0.6519677042961121, 0.641146183013916, 0.6377530694007874, 0.6153199672698975, 0.6133739948272705, 0.6175796389579773, 0.6322136521339417, 0.6327928900718689, 0.6279188394546509, 0.6057998538017273, 0.5918271541595459, 0.5787946581840515, 0.5489441156387329, 0.5286718010902405, 0.553946852684021, 0.5940749645233154, 0.5976575613021851, 0.5534763932228088, 0.5435949563980103, 0.5652422904968262, 0.5287343859672546, 0.5400424599647522, 0.5558791160583496, 0.46408379077911377, 0.5044425129890442, 0.501401960849762, 0.4914953112602234, 0.48421379923820496, 0.503724992275238, 0.5585326552391052, 0.5284748077392578, 0.4420931339263916, 0.38656747341156006, 0.4064899682998657, 0.4788673520088196, 0.44518935680389404, 0.44881802797317505, 0.4787812829017639, 0.3793487250804901, 0.38220471143722534, 0.5208271145820618, 0.41611140966415405, 0.3540223240852356, 0.4896760880947113, 0.5182095170021057, 0.37803736329078674, 0.41520553827285767, 0.48707449436187744, 0.3895787000656128, 0.3696741461753845, 0.4299297332763672, 0.31142738461494446, 0.40215638279914856, 0.3027937412261963, 0.3300667107105255, 0.4679208993911743, 0.3062335252761841, 0.30544963479042053, 0.3699619174003601, 0.3423232138156891, 0.4148320257663727, 0.26448342204093933, 0.25111880898475647, 0.370278000831604, 0.3359261453151703, 0.3404152989387512, 0.22789347171783447, 0.27344048023223877, 0.16363538801670074, 0.2594521641731262, 0.3091864287853241, 0.29142042994499207, 0.3417450189590454, 0.23091702163219452, 0.2521711587905884, 0.2761750817298889, 
0.3453601598739624, 0.25372835993766785, 0.3214697539806366, 0.2513537108898163, 0.4535459876060486, 0.21347159147262573, 0.3437364101409912, 0.4174922704696655, 0.3632654845714569, 0.2421703189611435, 0.3371511399745941, 0.30026817321777344, 0.4300374984741211, 0.27588149905204773, 0.28645434975624084, 0.40643027424812317, 0.36599281430244446, 0.35138460993766785, 0.2540344297885895, 0.34348198771476746, 0.18901720643043518, 0.2661953866481781, 0.26320555806159973, 0.2392750382423401, 0.43855077028274536, 0.10793644189834595, 0.34170421957969666, 0.42567962408065796, 0.34721696376800537, 0.41165363788604736, 0.20691877603530884, 0.30683374404907227, 0.2551877796649933, 0.28764501214027405, 0.39302679896354675, 0.27500835061073303, 0.28974899649620056, 0.2715095281600952, 0.24993008375167847, 0.1962193101644516, 0.27541664242744446, 0.19081838428974152, 0.12181317061185837, 0.31095078587532043, 0.2776842415332794, 0.324067085981369, 0.3013809323310852, 0.274642676115036, 0.13004633784294128, 0.17618465423583984, 0.47217032313346863, 0.47584113478660583, 0.3582492470741272, 0.257831871509552, 0.2891808748245239, 0.5542411208152771, 0.3348042964935303, 0.2161121815443039, 0.17372289299964905, 0.1992451250553131, 0.2041819989681244, 0.20778600871562958, 0.294563889503479, 0.23810671269893646, 0.13483597338199615, 0.2081194519996643, 0.2094554752111435, 0.3220142722129822, 0.21733644604682922, 0.2521625757217407, 0.19243156909942627, 0.2082565724849701, 0.34712842106819153, 0.4203895330429077, 0.25077930092811584, 0.22349044680595398, 0.15413586795330048, 0.3142079710960388, 0.272480845451355, 0.22095875442028046, 0.16435635089874268, 0.20021413266658783, 0.276922345161438, 0.2321307510137558, 0.2074410766363144, 0.1829778254032135, 0.30715394020080566, 0.2518363296985626, 0.34063780307769775, 0.17795924842357635],\n [0.7776962518692017, 0.7024533748626709, 0.6616161465644836, 0.7086943984031677, 0.7002975940704346, 0.6972193121910095, 0.6912907361984253, 
0.6819018721580505, 0.681541383266449, 0.663880467414856, 0.69146728515625, 0.664833664894104, 0.6442254781723022, 0.7529103755950928, 0.6681588888168335, 0.6561442613601685, 0.6413994431495667, 0.7046892046928406, 0.6271281242370605, 0.6647970676422119, 0.653058648109436, 0.6430742144584656, 0.6455214619636536, 0.609468400478363, 0.6612895727157593, 0.6007395386695862, 0.6073524355888367, 0.6056865453720093, 0.5805904865264893, 0.6196667551994324, 0.5274646282196045, 0.6620069146156311, 0.5743842124938965, 0.5139610767364502, 0.6728946566581726, 0.6222364902496338, 0.5798086524009705, 0.5696758031845093, 0.6324554681777954, 0.585455596446991, 0.5590006709098816, 0.585722029209137, 0.567783772945404, 0.5522187352180481, 0.5296674966812134, 0.5424594879150391, 0.49772632122039795, 0.4818328619003296, 0.5226886868476868, 0.5111159682273865, 0.5239538550376892, 0.4668481945991516, 0.5357046723365784, 0.5097754001617432, 0.47646191716194153, 0.5100411176681519, 0.45254385471343994, 0.45006805658340454, 0.44648903608322144, 0.4015660881996155, 0.36558303236961365, 0.37721091508865356, 0.4359946548938751, 0.4040745496749878, 0.38954976201057434, 0.3225783407688141, 0.3661939203739166, 0.3853372633457184, 0.40720880031585693, 0.32728949189186096, 0.5035262703895569, 0.2781963646411896, 0.2824750244617462, 0.5335702896118164, 0.4222668707370758, 0.32674676179885864, 0.4378054141998291, 0.20145408809185028, 0.3182588219642639, 0.23528333008289337, 0.2364351600408554, 0.39959025382995605, 0.2738944888114929, 0.3101866543292999, 0.36514800786972046, 0.3208860754966736, 0.22734232246875763, 0.2748006582260132, 0.25937265157699585, 0.3914828896522522, 0.21276125311851501, 0.22113056480884552, 0.23010629415512085, 0.17088782787322998, 0.23419725894927979, 0.25591832399368286, 0.435272753238678, 0.4288102090358734, 0.21290430426597595, 0.27550458908081055, 0.23382903635501862, 0.30018436908721924, 0.22520501911640167, 0.16911767423152924, 0.23771461844444275, 0.34546372294425964, 
0.2548006474971771, 0.19480571150779724, 0.16613878309726715, 0.35566484928131104, 0.1943793147802353, 0.18300104141235352, 0.1289244145154953, 0.10467776656150818, 0.1429228037595749, 0.2912957966327667, 0.415142297744751, 0.1284041404724121, 0.24282966554164886, 0.2508395314216614, 0.38141047954559326, 0.16505132615566254, 0.16033269464969635, 0.11552358418703079, 0.281181663274765, 0.21095173060894012, 0.3551029562950134, 0.1406410187482834, 0.15599660575389862, 0.3708155155181885, 0.10208328068256378, 0.4262029230594635, 0.15235154330730438, 0.25080421566963196, 0.2236374318599701, 0.24949046969413757, 0.22718992829322815, 0.13715048134326935, 0.24889034032821655, 0.3105410635471344, 0.40713176131248474, 0.13212108612060547, 0.20100905001163483, 0.1726756989955902, 0.15574191510677338, 0.27156731486320496, 0.14885951578617096, 0.12997134029865265, 0.1387035995721817, 0.4399465322494507, 0.08663991093635559, 0.20867086946964264, 0.3240082859992981, 0.3183644413948059, 0.17814907431602478, 0.21885176002979279, 0.1466669738292694, 0.2804082930088043, 0.20367920398712158, 0.1655610054731369, 0.1448660045862198, 0.20291000604629517, 0.11567503958940506, 0.1730584353208542, 0.5053353309631348, 0.23758955299854279, 0.3345824182033539, 0.17121362686157227, 0.18723468482494354, 0.2904627025127411, 0.34380507469177246, 0.17175103724002838, 0.06253214180469513, 0.07159151881933212, 0.3055977523326874, 0.17898565530776978, 0.1098790168762207, 0.35575467348098755, 0.12187526375055313, 0.076075479388237, 0.21234320104122162, 0.0713818371295929, 0.18627002835273743, 0.13102112710475922, 0.2600930333137512, 0.24102959036827087, 0.17136278748512268, 0.14351695775985718, 0.23299483954906464, 0.19734010100364685, 0.19969944655895233, 0.1171194389462471, 0.19051872193813324, 0.1307632029056549, 0.3404226005077362, 0.10315635055303574, 0.21490246057510376, 0.09562312811613083, 0.07051696628332138, 0.027527909725904465],\n [0.5596721768379211, 0.6825861930847168, 0.6937751173973083, 
0.727416455745697, 0.6812612414360046, 0.6762853264808655, 0.654773473739624, 0.6948181986808777, 0.6656332612037659, 0.6898285746574402, 0.6832861304283142, 0.6590405106544495, 0.6602960824966431, 0.6606273055076599, 0.667948305606842, 0.6505627632141113, 0.6484672427177429, 0.6460197567939758, 0.6338105201721191, 0.6177550554275513, 0.6051806807518005, 0.6250348091125488, 0.6204485893249512, 0.6320631504058838, 0.6180384159088135, 0.6242111325263977, 0.5959724187850952, 0.5940643548965454, 0.571523129940033, 0.5618717074394226, 0.5667926669120789, 0.6059849262237549, 0.5934391617774963, 0.5307382941246033, 0.5641266107559204, 0.5314565896987915, 0.5524114370346069, 0.5425953269004822, 0.6042528748512268, 0.47421795129776, 0.5280907154083252, 0.5248279571533203, 0.5352306365966797, 0.43632954359054565, 0.45401448011398315, 0.4029659330844879, 0.4238537549972534, 0.4721142649650574, 0.37654152512550354, 0.38760706782341003, 0.4089690148830414, 0.4502021372318268, 0.39007604122161865, 0.3963930606842041, 0.4672841727733612, 0.4095068871974945, 0.39259079098701477, 0.3385017514228821, 0.3756580650806427, 0.3292919397354126, 0.38361695408821106, 0.37147045135498047, 0.3623533248901367, 0.4357340931892395, 0.33929795026779175, 0.33469679951667786, 0.35791027545928955, 0.31688252091407776, 0.27637192606925964, 0.2742409110069275, 0.2912769019603729, 0.2215559184551239, 0.30191826820373535, 0.29059991240501404, 0.19704224169254303, 0.2902638912200928, 0.34331822395324707, 0.2614462673664093, 0.3460378348827362, 0.32718124985694885, 0.19910001754760742, 0.21092285215854645, 0.34823039174079895, 0.2628342807292938, 0.21826288104057312, 0.290345162153244, 0.21115823090076447, 0.27557122707366943, 0.17980043590068817, 0.2155681550502777, 0.14875055849552155, 0.41619113087654114, 0.27073824405670166, 0.20098990201950073, 0.220502570271492, 0.24909473955631256, 0.3425520956516266, 0.22847171127796173, 0.2391130030155182, 0.25364312529563904, 0.2764328420162201, 
0.37475040555000305, 0.15679128468036652, 0.3856890797615051, 0.15024790167808533, 0.19684170186519623, 0.22423887252807617, 0.2029688060283661, 0.2481517791748047, 0.2430395931005478, 0.16846199333667755, 0.2874332368373871, 0.147287517786026, 0.2672891914844513, 0.1690724790096283, 0.2421690970659256, 0.13037465512752533, 0.18803200125694275, 0.3254629373550415, 0.07855711877346039, 0.17467620968818665, 0.09351401031017303, 0.1837845742702484, 0.18689562380313873, 0.22990648448467255, 0.19773021340370178, 0.152849018573761, 0.39969849586486816, 0.25362545251846313, 0.21083036065101624, 0.13875387609004974, 0.1095811277627945, 0.2786869704723358, 0.07803401350975037, 0.4641839861869812, 0.24665574729442596, 0.1509629786014557, 0.48766863346099854, 0.28171080350875854, 0.188472181558609, 0.12815599143505096, 0.3165074288845062, 0.08023007214069366, 0.3565349578857422, 0.11062094569206238, 0.1665385216474533, 0.22961339354515076, 0.22054892778396606, 0.09526553004980087, 0.0523594431579113, 0.3053101599216461, 0.40128013491630554, 0.3229648172855377, 0.19846411049365997, 0.2845112979412079, 0.1426791548728943, 0.15526601672172546, 0.3185018301010132, 0.3760956823825836, 0.1614726483821869, 0.22962817549705505, 0.08864698559045792, 0.269163578748703, 0.052639998495578766, 0.43359073996543884, 0.13091707229614258, 0.24067094922065735, 0.32142922282218933, 0.06477908790111542, 0.41858792304992676, 0.12447334080934525, 0.06844282895326614, 0.1843792349100113, 0.3155791163444519, 0.1977083534002304, 0.1190323457121849, 0.2916918098926544, 0.11469092220067978, 0.14936953783035278, 0.1795898824930191, 0.07825055718421936, 0.05803743377327919, 0.01583971455693245, 0.20558494329452515, 0.1375061273574829, 0.27224886417388916, 0.10133395344018936, 0.14099961519241333, 0.3119117021560669, 0.17952686548233032, 0.19760002195835114, 0.21311526000499725, 0.12002325057983398, 0.40670186281204224, 0.201133131980896, 0.0947975441813469, 0.27615290880203247, 0.12309908121824265, 
0.073157399892807, 0.45130202174186707],\n [0.6444740295410156, 0.6480551958084106, 0.7680001258850098, 0.6938083171844482, 0.6945194005966187, 0.6434619426727295, 0.6478080153465271, 0.6552700996398926, 0.6474529504776001, 0.6316699385643005, 0.6500007510185242, 0.5896865725517273, 0.7054746150970459, 0.5965741872787476, 0.5899113416671753, 0.5865727066993713, 0.5499936938285828, 0.5815310478210449, 0.5552288293838501, 0.5780386924743652, 0.600023090839386, 0.4392690360546112, 0.5045123100280762, 0.5101278424263, 0.48246777057647705, 0.4923814833164215, 0.443683385848999, 0.3944355249404907, 0.4880301356315613, 0.35789451003074646, 0.36372992396354675, 0.47596442699432373, 0.4648134112358093, 0.31122469902038574, 0.30873405933380127, 0.36700308322906494, 0.2557302713394165, 0.43774741888046265, 0.36664271354675293, 0.38658323884010315, 0.39342930912971497, 0.25078845024108887, 0.332029789686203, 0.3891676962375641, 0.350697785615921, 0.5877716541290283, 0.3053388297557831, 0.35364529490470886, 0.35642775893211365, 0.5797093510627747, 0.308402419090271, 0.33300063014030457, 0.20527392625808716, 0.37823060154914856, 0.23164953291416168, 0.3390084207057953, 0.48220548033714294, 0.19653253257274628, 0.30830979347229004, 0.27321138978004456, 0.25642216205596924, 0.4782291650772095, 0.4833580255508423, 0.24064145982265472, 0.2778085470199585, 0.14250364899635315, 0.21408909559249878, 0.3077957332134247, 0.24375246465206146, 0.2403443455696106, 0.318545937538147, 0.27636250853538513, 0.20684844255447388, 0.39635372161865234, 0.2426823079586029, 0.3302248418331146, 0.21984857320785522, 0.3096882700920105, 0.16199436783790588, 0.2731266915798187, 0.16509762406349182, 0.19987358152866364, 0.21301832795143127, 0.16873307526111603, 0.4657672643661499, 0.19174867868423462, 0.23006097972393036, 0.3353741765022278, 0.2514898180961609, 0.4382299482822418, 0.17479459941387177, 0.1549443155527115, 0.36182865500450134, 0.1583036184310913, 0.19739067554473877, 0.3335547149181366, 
0.17704877257347107, 0.22076626121997833, 0.26111721992492676, 0.1475856900215149, 0.39272114634513855, 0.06898980587720871, 0.20126555860042572, 0.1861010193824768, 0.2317998707294464, 0.21326132118701935, 0.2857711911201477, 0.09368398040533066, 0.1964348554611206, 0.18394115567207336, 0.13550300896167755, 0.14457449316978455, 0.15295009315013885, 0.16381102800369263, 0.2795810401439667, 0.17563505470752716, 0.23439742624759674, 0.27755534648895264, 0.19725731015205383, 0.24526743590831757, 0.3030700087547302, 0.22834354639053345, 0.13675029575824738, 0.08429285883903503, 0.1311313807964325, 0.18073631823062897, 0.07007424533367157, 0.3054443299770355, 0.10602294653654099, 0.16532698273658752, 0.15915970504283905, 0.09758727997541428, 0.35041344165802, 0.09038594365119934, 0.16694681346416473, 0.1270684152841568, 0.08292841911315918, 0.26534590125083923, 0.19535087049007416, 0.31837645173072815, 0.19049564003944397, 0.27681252360343933, 0.18552172183990479, 0.03605049103498459, 0.39923956990242004, 0.23028255999088287, 0.12411822378635406, 0.08776320517063141, 0.3358216881752014, 0.17280055582523346, 0.07582242041826248, 0.1299680471420288, 0.07957464456558228, 0.2518991231918335, 0.22218579053878784, 0.2953566014766693, 0.23980411887168884, 0.2516424059867859, 0.1552591174840927, 0.1568763107061386, 0.11077530682086945, 0.06224577873945236, 0.14881819486618042, 0.3115622401237488, 0.26296448707580566, 0.1568690836429596, 0.13243553042411804, 0.16159479320049286, 0.11857082694768906, 0.4102708399295807, 0.12056007981300354, 0.08759808540344238, 0.1313447803258896, 0.2028246819972992, 0.45793774724006653, 0.06600210815668106, 0.08839228749275208, 0.2017313838005066, 0.04523666948080063, 0.4295879304409027, 0.3630306124687195, 0.49682503938674927, 0.4873455762863159, 0.1069202795624733, 0.06406697630882263, 0.18771663308143616, 0.14819641411304474, 0.2584706246852875, 0.1014157086610794, 0.2010267823934555, 0.10214871168136597, 0.1414615660905838, 
0.06353311240673065, 0.0687236413359642, 0.16954435408115387, 0.10046547651290894, 0.0897723138332367, 0.17460182309150696, 0.23726686835289001, 0.3276420831680298],\n [0.9350482821464539, 0.639961302280426, 0.7555859088897705, 0.7224767208099365, 0.7389212250709534, 0.676978349685669, 0.6593168377876282, 0.646786630153656, 0.6515583992004395, 0.636940598487854, 0.6395831108093262, 0.6301793456077576, 0.6259217262268066, 0.6181485652923584, 0.6324792504310608, 0.5961924195289612, 0.5731832981109619, 0.6180858016014099, 0.5561828017234802, 0.6062854528427124, 0.547522246837616, 0.5905726552009583, 0.5706572532653809, 0.5045456886291504, 0.4908227026462555, 0.5260190963745117, 0.505631685256958, 0.5336142778396606, 0.4794852137565613, 0.4568559527397156, 0.4610242545604706, 0.47866371273994446, 0.4220275580883026, 0.44966137409210205, 0.4123769998550415, 0.4183032214641571, 0.4053117334842682, 0.4284544885158539, 0.5045552253723145, 0.40772953629493713, 0.4673251211643219, 0.39634278416633606, 0.4473681151866913, 0.4173499047756195, 0.38735973834991455, 0.4023597240447998, 0.5248291492462158, 0.31107622385025024, 0.4076528251171112, 0.3591823875904083, 0.3081345856189728, 0.38340938091278076, 0.31913086771965027, 0.33579307794570923, 0.31607791781425476, 0.29121556878089905, 0.520678699016571, 0.2985853850841522, 0.3086974620819092, 0.35889241099357605, 0.2724573016166687, 0.29105129837989807, 0.3301805257797241, 0.6037798523902893, 0.3424549996852875, 0.25621676445007324, 0.2874658703804016, 0.2584307789802551, 0.28378936648368835, 0.3390101492404938, 0.17771074175834656, 0.23422080278396606, 0.31955909729003906, 0.23263046145439148, 0.23975732922554016, 0.3923954367637634, 0.26562702655792236, 0.22838926315307617, 0.18914592266082764, 0.4090936779975891, 0.28220275044441223, 0.19077445566654205, 0.25337910652160645, 0.4468682110309601, 0.37117430567741394, 0.3122158646583557, 0.24080294370651245, 0.19426283240318298, 0.4130544364452362, 0.19886745512485504, 
0.27875611186027527, 0.22300408780574799, 0.23063276708126068, 0.16398067772388458, 0.27974000573158264, 0.23209893703460693, 0.4597893953323364, 0.38361090421676636, 0.17572754621505737, 0.28320246934890747, 0.18293626606464386, 0.2860223054885864, 0.2095450460910797, 0.13976281881332397, 0.23745809495449066, 0.31846678256988525, 0.7520305514335632, 0.31812193989753723, 0.21941393613815308, 0.32086190581321716, 0.3249974846839905, 0.14442965388298035, 0.3172173500061035, 0.43222537636756897, 0.4701368808746338, 0.15118785202503204, 0.25920790433883667, 0.1163109615445137, 0.3548303246498108, 0.0873265191912651, 0.3374319076538086, 0.12830398976802826, 0.16691169142723083, 0.2129189372062683, 0.1718982458114624, 0.3765963613986969, 0.21149608492851257, 0.20916692912578583, 0.12097320705652237, 0.23900476098060608, 0.25003695487976074, 0.2995748817920685, 0.17927783727645874, 0.23055724799633026, 0.23876048624515533, 0.2073303461074829, 0.3278714716434479, 0.19668284058570862, 0.08779395371675491, 0.24356700479984283, 0.3905560374259949, 0.2815341055393219, 0.1486540585756302, 0.6480227708816528, 0.19404202699661255, 0.10964435338973999, 0.34205418825149536, 0.19317121803760529, 0.38578614592552185, 0.2648036777973175, 0.08570266515016556, 0.3086698353290558, 0.13098302483558655, 0.633030891418457, 0.1652841567993164, 0.1283990442752838, 0.1980380117893219, 0.12593874335289001, 0.33324798941612244, 0.3312690854072571, 0.3746316432952881, 0.12610004842281342, 0.07700693607330322, 0.9917551279067993, 0.2386142760515213, 0.15721078217029572, 0.5718650817871094, 0.2692643702030182, 0.2409469485282898, 0.33453309535980225, 0.11659141629934311, 0.12238684296607971, 0.17374467849731445, 0.2866450846195221, 0.3402177095413208, 0.44128116965293884, 0.10629720985889435, 0.1673750877380371, 0.1420934796333313, 0.15705960988998413, 0.09668610244989395, 0.09889023005962372, 0.18064723908901215, 0.11849229782819748, 0.14881527423858643, 0.20469854772090912, 0.07622649520635605, 
0.20733581483364105, 0.15758825838565826, 0.26919764280319214, 0.41618555784225464, 0.13635633885860443, 0.16175438463687897, 0.23629629611968994, 0.21702241897583008, 0.15335619449615479, 0.32633477449417114, 0.15847563743591309, 0.4844672679901123, 0.10750871896743774]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_pca16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_pca16_FASHION = np.array([[0.6731927394866943, 0.7146048545837402, 0.7230467200279236, 0.6912106275558472, 0.6896969676017761, 0.6904683709144592, 0.7217395901679993, 0.6933130621910095, 0.674804151058197, 0.6758207678794861, 0.7124243378639221, 0.6624137759208679, 0.6685926914215088, 0.6979824304580688, 0.6782591938972473, 0.7127623558044434, 0.6770104765892029, 0.6767700910568237, 0.6640782356262207, 0.6772103309631348, 0.6772346496582031, 0.6630069017410278, 0.6676750183105469, 0.6695066690444946, 0.6444168090820312, 0.650453507900238, 0.6427541375160217, 0.6385244131088257, 0.630479633808136, 0.6545811295509338, 0.6039774417877197, 0.6195574402809143, 0.6535524129867554, 0.662842869758606, 0.6519677042961121, 0.641146183013916, 0.6377530694007874, 0.6153199672698975, 0.6133739948272705, 0.6175796389579773, 0.6322136521339417, 0.6327928900718689, 0.6279188394546509, 0.6057998538017273, 0.5918271541595459, 0.5787946581840515, 0.5489441156387329, 0.5286718010902405, 0.553946852684021, 0.5940749645233154, 0.5976575613021851, 0.5534763932228088, 0.5435949563980103, 0.5652422904968262, 0.5287343859672546, 0.5400424599647522, 0.5558791160583496, 0.46408379077911377, 0.5044425129890442, 0.501401960849762, 0.4914953112602234, 0.48421379923820496, 0.503724992275238, 0.5585326552391052, 0.5284748077392578, 
0.4420931339263916, 0.38656747341156006, 0.4064899682998657, 0.4788673520088196, 0.44518935680389404, 0.44881802797317505, 0.4787812829017639, 0.3793487250804901, 0.38220471143722534, 0.5208271145820618, 0.41611140966415405, 0.3540223240852356, 0.4896760880947113, 0.5182095170021057, 0.37803736329078674, 0.41520553827285767, 0.48707449436187744, 0.3895787000656128, 0.3696741461753845, 0.4299297332763672, 0.31142738461494446, 0.40215638279914856, 0.3027937412261963, 0.3300667107105255, 0.4679208993911743, 0.3062335252761841, 0.30544963479042053, 0.3699619174003601, 0.3423232138156891, 0.4148320257663727, 0.26448342204093933, 0.25111880898475647, 0.370278000831604, 0.3359261453151703, 0.3404152989387512, 0.22789347171783447, 0.27344048023223877, 0.16363538801670074, 0.2594521641731262, 0.3091864287853241, 0.29142042994499207, 0.3417450189590454, 0.23091702163219452, 0.2521711587905884, 0.2761750817298889, 0.3453601598739624, 0.25372835993766785, 0.3214697539806366, 0.2513537108898163, 0.4535459876060486, 0.21347159147262573, 0.3437364101409912, 0.4174922704696655, 0.3632654845714569, 0.2421703189611435, 0.3371511399745941, 0.30026817321777344, 0.4300374984741211, 0.27588149905204773, 0.28645434975624084, 0.40643027424812317, 0.36599281430244446, 0.35138460993766785, 0.2540344297885895, 0.34348198771476746, 0.18901720643043518, 0.2661953866481781, 0.26320555806159973, 0.2392750382423401, 0.43855077028274536, 0.10793644189834595, 0.34170421957969666, 0.42567962408065796, 0.34721696376800537, 0.41165363788604736, 0.20691877603530884, 0.30683374404907227, 0.2551877796649933, 0.28764501214027405, 0.39302679896354675, 0.27500835061073303, 0.28974899649620056, 0.2715095281600952, 0.24993008375167847, 0.1962193101644516, 0.27541664242744446, 0.19081838428974152, 0.12181317061185837, 0.31095078587532043, 0.2776842415332794, 0.324067085981369, 0.3013809323310852, 0.274642676115036, 0.13004633784294128, 0.17618465423583984, 0.47217032313346863, 0.47584113478660583, 
0.3582492470741272, 0.257831871509552, 0.2891808748245239, 0.5542411208152771, 0.3348042964935303, 0.2161121815443039, 0.17372289299964905, 0.1992451250553131, 0.2041819989681244, 0.20778600871562958, 0.294563889503479, 0.23810671269893646, 0.13483597338199615, 0.2081194519996643, 0.2094554752111435, 0.3220142722129822, 0.21733644604682922, 0.2521625757217407, 0.19243156909942627, 0.2082565724849701, 0.34712842106819153, 0.4203895330429077, 0.25077930092811584, 0.22349044680595398, 0.15413586795330048, 0.3142079710960388, 0.272480845451355, 0.22095875442028046, 0.16435635089874268, 0.20021413266658783, 0.276922345161438, 0.2321307510137558, 0.2074410766363144, 0.1829778254032135, 0.30715394020080566, 0.2518363296985626, 0.34063780307769775, 0.17795924842357635],\n [0.7776962518692017, 0.7024533748626709, 0.6616161465644836, 0.7086943984031677, 0.7002975940704346, 0.6972193121910095, 0.6912907361984253, 0.6819018721580505, 0.681541383266449, 0.663880467414856, 0.69146728515625, 0.664833664894104, 0.6442254781723022, 0.7529103755950928, 0.6681588888168335, 0.6561442613601685, 0.6413994431495667, 0.7046892046928406, 0.6271281242370605, 0.6647970676422119, 0.653058648109436, 0.6430742144584656, 0.6455214619636536, 0.609468400478363, 0.6612895727157593, 0.6007395386695862, 0.6073524355888367, 0.6056865453720093, 0.5805904865264893, 0.6196667551994324, 0.5274646282196045, 0.6620069146156311, 0.5743842124938965, 0.5139610767364502, 0.6728946566581726, 0.6222364902496338, 0.5798086524009705, 0.5696758031845093, 0.6324554681777954, 0.585455596446991, 0.5590006709098816, 0.585722029209137, 0.567783772945404, 0.5522187352180481, 0.5296674966812134, 0.5424594879150391, 0.49772632122039795, 0.4818328619003296, 0.5226886868476868, 0.5111159682273865, 0.5239538550376892, 0.4668481945991516, 0.5357046723365784, 0.5097754001617432, 0.47646191716194153, 0.5100411176681519, 0.45254385471343994, 0.45006805658340454, 0.44648903608322144, 0.4015660881996155, 0.36558303236961365, 
0.37721091508865356, 0.4359946548938751, 0.4040745496749878, 0.38954976201057434, 0.3225783407688141, 0.3661939203739166, 0.3853372633457184, 0.40720880031585693, 0.32728949189186096, 0.5035262703895569, 0.2781963646411896, 0.2824750244617462, 0.5335702896118164, 0.4222668707370758, 0.32674676179885864, 0.4378054141998291, 0.20145408809185028, 0.3182588219642639, 0.23528333008289337, 0.2364351600408554, 0.39959025382995605, 0.2738944888114929, 0.3101866543292999, 0.36514800786972046, 0.3208860754966736, 0.22734232246875763, 0.2748006582260132, 0.25937265157699585, 0.3914828896522522, 0.21276125311851501, 0.22113056480884552, 0.23010629415512085, 0.17088782787322998, 0.23419725894927979, 0.25591832399368286, 0.435272753238678, 0.4288102090358734, 0.21290430426597595, 0.27550458908081055, 0.23382903635501862, 0.30018436908721924, 0.22520501911640167, 0.16911767423152924, 0.23771461844444275, 0.34546372294425964, 0.2548006474971771, 0.19480571150779724, 0.16613878309726715, 0.35566484928131104, 0.1943793147802353, 0.18300104141235352, 0.1289244145154953, 0.10467776656150818, 0.1429228037595749, 0.2912957966327667, 0.415142297744751, 0.1284041404724121, 0.24282966554164886, 0.2508395314216614, 0.38141047954559326, 0.16505132615566254, 0.16033269464969635, 0.11552358418703079, 0.281181663274765, 0.21095173060894012, 0.3551029562950134, 0.1406410187482834, 0.15599660575389862, 0.3708155155181885, 0.10208328068256378, 0.4262029230594635, 0.15235154330730438, 0.25080421566963196, 0.2236374318599701, 0.24949046969413757, 0.22718992829322815, 0.13715048134326935, 0.24889034032821655, 0.3105410635471344, 0.40713176131248474, 0.13212108612060547, 0.20100905001163483, 0.1726756989955902, 0.15574191510677338, 0.27156731486320496, 0.14885951578617096, 0.12997134029865265, 0.1387035995721817, 0.4399465322494507, 0.08663991093635559, 0.20867086946964264, 0.3240082859992981, 0.3183644413948059, 0.17814907431602478, 0.21885176002979279, 0.1466669738292694, 0.2804082930088043, 
0.20367920398712158, 0.1655610054731369, 0.1448660045862198, 0.20291000604629517, 0.11567503958940506, 0.1730584353208542, 0.5053353309631348, 0.23758955299854279, 0.3345824182033539, 0.17121362686157227, 0.18723468482494354, 0.2904627025127411, 0.34380507469177246, 0.17175103724002838, 0.06253214180469513, 0.07159151881933212, 0.3055977523326874, 0.17898565530776978, 0.1098790168762207, 0.35575467348098755, 0.12187526375055313, 0.076075479388237, 0.21234320104122162, 0.0713818371295929, 0.18627002835273743, 0.13102112710475922, 0.2600930333137512, 0.24102959036827087, 0.17136278748512268, 0.14351695775985718, 0.23299483954906464, 0.19734010100364685, 0.19969944655895233, 0.1171194389462471, 0.19051872193813324, 0.1307632029056549, 0.3404226005077362, 0.10315635055303574, 0.21490246057510376, 0.09562312811613083, 0.07051696628332138, 0.027527909725904465],\n [0.5596721768379211, 0.6825861930847168, 0.6937751173973083, 0.727416455745697, 0.6812612414360046, 0.6762853264808655, 0.654773473739624, 0.6948181986808777, 0.6656332612037659, 0.6898285746574402, 0.6832861304283142, 0.6590405106544495, 0.6602960824966431, 0.6606273055076599, 0.667948305606842, 0.6505627632141113, 0.6484672427177429, 0.6460197567939758, 0.6338105201721191, 0.6177550554275513, 0.6051806807518005, 0.6250348091125488, 0.6204485893249512, 0.6320631504058838, 0.6180384159088135, 0.6242111325263977, 0.5959724187850952, 0.5940643548965454, 0.571523129940033, 0.5618717074394226, 0.5667926669120789, 0.6059849262237549, 0.5934391617774963, 0.5307382941246033, 0.5641266107559204, 0.5314565896987915, 0.5524114370346069, 0.5425953269004822, 0.6042528748512268, 0.47421795129776, 0.5280907154083252, 0.5248279571533203, 0.5352306365966797, 0.43632954359054565, 0.45401448011398315, 0.4029659330844879, 0.4238537549972534, 0.4721142649650574, 0.37654152512550354, 0.38760706782341003, 0.4089690148830414, 0.4502021372318268, 0.39007604122161865, 0.3963930606842041, 0.4672841727733612, 0.4095068871974945, 
0.39259079098701477, 0.3385017514228821, 0.3756580650806427, 0.3292919397354126, 0.38361695408821106, 0.37147045135498047, 0.3623533248901367, 0.4357340931892395, 0.33929795026779175, 0.33469679951667786, 0.35791027545928955, 0.31688252091407776, 0.27637192606925964, 0.2742409110069275, 0.2912769019603729, 0.2215559184551239, 0.30191826820373535, 0.29059991240501404, 0.19704224169254303, 0.2902638912200928, 0.34331822395324707, 0.2614462673664093, 0.3460378348827362, 0.32718124985694885, 0.19910001754760742, 0.21092285215854645, 0.34823039174079895, 0.2628342807292938, 0.21826288104057312, 0.290345162153244, 0.21115823090076447, 0.27557122707366943, 0.17980043590068817, 0.2155681550502777, 0.14875055849552155, 0.41619113087654114, 0.27073824405670166, 0.20098990201950073, 0.220502570271492, 0.24909473955631256, 0.3425520956516266, 0.22847171127796173, 0.2391130030155182, 0.25364312529563904, 0.2764328420162201, 0.37475040555000305, 0.15679128468036652, 0.3856890797615051, 0.15024790167808533, 0.19684170186519623, 0.22423887252807617, 0.2029688060283661, 0.2481517791748047, 0.2430395931005478, 0.16846199333667755, 0.2874332368373871, 0.147287517786026, 0.2672891914844513, 0.1690724790096283, 0.2421690970659256, 0.13037465512752533, 0.18803200125694275, 0.3254629373550415, 0.07855711877346039, 0.17467620968818665, 0.09351401031017303, 0.1837845742702484, 0.18689562380313873, 0.22990648448467255, 0.19773021340370178, 0.152849018573761, 0.39969849586486816, 0.25362545251846313, 0.21083036065101624, 0.13875387609004974, 0.1095811277627945, 0.2786869704723358, 0.07803401350975037, 0.4641839861869812, 0.24665574729442596, 0.1509629786014557, 0.48766863346099854, 0.28171080350875854, 0.188472181558609, 0.12815599143505096, 0.3165074288845062, 0.08023007214069366, 0.3565349578857422, 0.11062094569206238, 0.1665385216474533, 0.22961339354515076, 0.22054892778396606, 0.09526553004980087, 0.0523594431579113, 0.3053101599216461, 0.40128013491630554, 0.3229648172855377, 
0.19846411049365997, 0.2845112979412079, 0.1426791548728943, 0.15526601672172546, 0.3185018301010132, 0.3760956823825836, 0.1614726483821869, 0.22962817549705505, 0.08864698559045792, 0.269163578748703, 0.052639998495578766, 0.43359073996543884, 0.13091707229614258, 0.24067094922065735, 0.32142922282218933, 0.06477908790111542, 0.41858792304992676, 0.12447334080934525, 0.06844282895326614, 0.1843792349100113, 0.3155791163444519, 0.1977083534002304, 0.1190323457121849, 0.2916918098926544, 0.11469092220067978, 0.14936953783035278, 0.1795898824930191, 0.07825055718421936, 0.05803743377327919, 0.01583971455693245, 0.20558494329452515, 0.1375061273574829, 0.27224886417388916, 0.10133395344018936, 0.14099961519241333, 0.3119117021560669, 0.17952686548233032, 0.19760002195835114, 0.21311526000499725, 0.12002325057983398, 0.40670186281204224, 0.201133131980896, 0.0947975441813469, 0.27615290880203247, 0.12309908121824265, 0.073157399892807, 0.45130202174186707],\n [0.6444740295410156, 0.6480551958084106, 0.7680001258850098, 0.6938083171844482, 0.6945194005966187, 0.6434619426727295, 0.6478080153465271, 0.6552700996398926, 0.6474529504776001, 0.6316699385643005, 0.6500007510185242, 0.5896865725517273, 0.7054746150970459, 0.5965741872787476, 0.5899113416671753, 0.5865727066993713, 0.5499936938285828, 0.5815310478210449, 0.5552288293838501, 0.5780386924743652, 0.600023090839386, 0.4392690360546112, 0.5045123100280762, 0.5101278424263, 0.48246777057647705, 0.4923814833164215, 0.443683385848999, 0.3944355249404907, 0.4880301356315613, 0.35789451003074646, 0.36372992396354675, 0.47596442699432373, 0.4648134112358093, 0.31122469902038574, 0.30873405933380127, 0.36700308322906494, 0.2557302713394165, 0.43774741888046265, 0.36664271354675293, 0.38658323884010315, 0.39342930912971497, 0.25078845024108887, 0.332029789686203, 0.3891676962375641, 0.350697785615921, 0.5877716541290283, 0.3053388297557831, 0.35364529490470886, 0.35642775893211365, 0.5797093510627747, 0.308402419090271, 
0.33300063014030457, 0.20527392625808716, 0.37823060154914856, 0.23164953291416168, 0.3390084207057953, 0.48220548033714294, 0.19653253257274628, 0.30830979347229004, 0.27321138978004456, 0.25642216205596924, 0.4782291650772095, 0.4833580255508423, 0.24064145982265472, 0.2778085470199585, 0.14250364899635315, 0.21408909559249878, 0.3077957332134247, 0.24375246465206146, 0.2403443455696106, 0.318545937538147, 0.27636250853538513, 0.20684844255447388, 0.39635372161865234, 0.2426823079586029, 0.3302248418331146, 0.21984857320785522, 0.3096882700920105, 0.16199436783790588, 0.2731266915798187, 0.16509762406349182, 0.19987358152866364, 0.21301832795143127, 0.16873307526111603, 0.4657672643661499, 0.19174867868423462, 0.23006097972393036, 0.3353741765022278, 0.2514898180961609, 0.4382299482822418, 0.17479459941387177, 0.1549443155527115, 0.36182865500450134, 0.1583036184310913, 0.19739067554473877, 0.3335547149181366, 0.17704877257347107, 0.22076626121997833, 0.26111721992492676, 0.1475856900215149, 0.39272114634513855, 0.06898980587720871, 0.20126555860042572, 0.1861010193824768, 0.2317998707294464, 0.21326132118701935, 0.2857711911201477, 0.09368398040533066, 0.1964348554611206, 0.18394115567207336, 0.13550300896167755, 0.14457449316978455, 0.15295009315013885, 0.16381102800369263, 0.2795810401439667, 0.17563505470752716, 0.23439742624759674, 0.27755534648895264, 0.19725731015205383, 0.24526743590831757, 0.3030700087547302, 0.22834354639053345, 0.13675029575824738, 0.08429285883903503, 0.1311313807964325, 0.18073631823062897, 0.07007424533367157, 0.3054443299770355, 0.10602294653654099, 0.16532698273658752, 0.15915970504283905, 0.09758727997541428, 0.35041344165802, 0.09038594365119934, 0.16694681346416473, 0.1270684152841568, 0.08292841911315918, 0.26534590125083923, 0.19535087049007416, 0.31837645173072815, 0.19049564003944397, 0.27681252360343933, 0.18552172183990479, 0.03605049103498459, 0.39923956990242004, 0.23028255999088287, 0.12411822378635406, 
0.08776320517063141, 0.3358216881752014, 0.17280055582523346, 0.07582242041826248, 0.1299680471420288, 0.07957464456558228, 0.2518991231918335, 0.22218579053878784, 0.2953566014766693, 0.23980411887168884, 0.2516424059867859, 0.1552591174840927, 0.1568763107061386, 0.11077530682086945, 0.06224577873945236, 0.14881819486618042, 0.3115622401237488, 0.26296448707580566, 0.1568690836429596, 0.13243553042411804, 0.16159479320049286, 0.11857082694768906, 0.4102708399295807, 0.12056007981300354, 0.08759808540344238, 0.1313447803258896, 0.2028246819972992, 0.45793774724006653, 0.06600210815668106, 0.08839228749275208, 0.2017313838005066, 0.04523666948080063, 0.4295879304409027, 0.3630306124687195, 0.49682503938674927, 0.4873455762863159, 0.1069202795624733, 0.06406697630882263, 0.18771663308143616, 0.14819641411304474, 0.2584706246852875, 0.1014157086610794, 0.2010267823934555, 0.10214871168136597, 0.1414615660905838, 0.06353311240673065, 0.0687236413359642, 0.16954435408115387, 0.10046547651290894, 0.0897723138332367, 0.17460182309150696, 0.23726686835289001, 0.3276420831680298],\n [0.9350482821464539, 0.639961302280426, 0.7555859088897705, 0.7224767208099365, 0.7389212250709534, 0.676978349685669, 0.6593168377876282, 0.646786630153656, 0.6515583992004395, 0.636940598487854, 0.6395831108093262, 0.6301793456077576, 0.6259217262268066, 0.6181485652923584, 0.6324792504310608, 0.5961924195289612, 0.5731832981109619, 0.6180858016014099, 0.5561828017234802, 0.6062854528427124, 0.547522246837616, 0.5905726552009583, 0.5706572532653809, 0.5045456886291504, 0.4908227026462555, 0.5260190963745117, 0.505631685256958, 0.5336142778396606, 0.4794852137565613, 0.4568559527397156, 0.4610242545604706, 0.47866371273994446, 0.4220275580883026, 0.44966137409210205, 0.4123769998550415, 0.4183032214641571, 0.4053117334842682, 0.4284544885158539, 0.5045552253723145, 0.40772953629493713, 0.4673251211643219, 0.39634278416633606, 0.4473681151866913, 0.4173499047756195, 0.38735973834991455, 
0.4023597240447998, 0.5248291492462158, 0.31107622385025024, 0.4076528251171112, 0.3591823875904083, 0.3081345856189728, 0.38340938091278076, 0.31913086771965027, 0.33579307794570923, 0.31607791781425476, 0.29121556878089905, 0.520678699016571, 0.2985853850841522, 0.3086974620819092, 0.35889241099357605, 0.2724573016166687, 0.29105129837989807, 0.3301805257797241, 0.6037798523902893, 0.3424549996852875, 0.25621676445007324, 0.2874658703804016, 0.2584307789802551, 0.28378936648368835, 0.3390101492404938, 0.17771074175834656, 0.23422080278396606, 0.31955909729003906, 0.23263046145439148, 0.23975732922554016, 0.3923954367637634, 0.26562702655792236, 0.22838926315307617, 0.18914592266082764, 0.4090936779975891, 0.28220275044441223, 0.19077445566654205, 0.25337910652160645, 0.4468682110309601, 0.37117430567741394, 0.3122158646583557, 0.24080294370651245, 0.19426283240318298, 0.4130544364452362, 0.19886745512485504, 0.27875611186027527, 0.22300408780574799, 0.23063276708126068, 0.16398067772388458, 0.27974000573158264, 0.23209893703460693, 0.4597893953323364, 0.38361090421676636, 0.17572754621505737, 0.28320246934890747, 0.18293626606464386, 0.2860223054885864, 0.2095450460910797, 0.13976281881332397, 0.23745809495449066, 0.31846678256988525, 0.7520305514335632, 0.31812193989753723, 0.21941393613815308, 0.32086190581321716, 0.3249974846839905, 0.14442965388298035, 0.3172173500061035, 0.43222537636756897, 0.4701368808746338, 0.15118785202503204, 0.25920790433883667, 0.1163109615445137, 0.3548303246498108, 0.0873265191912651, 0.3374319076538086, 0.12830398976802826, 0.16691169142723083, 0.2129189372062683, 0.1718982458114624, 0.3765963613986969, 0.21149608492851257, 0.20916692912578583, 0.12097320705652237, 0.23900476098060608, 0.25003695487976074, 0.2995748817920685, 0.17927783727645874, 0.23055724799633026, 0.23876048624515533, 0.2073303461074829, 0.3278714716434479, 0.19668284058570862, 0.08779395371675491, 0.24356700479984283, 0.3905560374259949, 0.2815341055393219, 
0.1486540585756302, 0.6480227708816528, 0.19404202699661255, 0.10964435338973999, 0.34205418825149536, 0.19317121803760529, 0.38578614592552185, 0.2648036777973175, 0.08570266515016556, 0.3086698353290558, 0.13098302483558655, 0.633030891418457, 0.1652841567993164, 0.1283990442752838, 0.1980380117893219, 0.12593874335289001, 0.33324798941612244, 0.3312690854072571, 0.3746316432952881, 0.12610004842281342, 0.07700693607330322, 0.9917551279067993, 0.2386142760515213, 0.15721078217029572, 0.5718650817871094, 0.2692643702030182, 0.2409469485282898, 0.33453309535980225, 0.11659141629934311, 0.12238684296607971, 0.17374467849731445, 0.2866450846195221, 0.3402177095413208, 0.44128116965293884, 0.10629720985889435, 0.1673750877380371, 0.1420934796333313, 0.15705960988998413, 0.09668610244989395, 0.09889023005962372, 0.18064723908901215, 0.11849229782819748, 0.14881527423858643, 0.20469854772090912, 0.07622649520635605, 0.20733581483364105, 0.15758825838565826, 0.26919764280319214, 0.41618555784225464, 0.13635633885860443, 0.16175438463687897, 0.23629629611968994, 0.21702241897583008, 0.15335619449615479, 0.32633477449417114, 0.15847563743591309, 0.4844672679901123, 0.10750871896743774]])\nCNN_ae16_FASHION = np.array([[0.7238587141036987, 0.8279216289520264, 0.764658510684967, 0.7090054154396057, 0.7613484263420105, 0.6281448602676392, 0.7058420777320862, 0.7101895213127136, 0.6628857254981995, 0.6342789530754089, 0.6941124200820923, 0.7128175497055054, 0.6556928157806396, 0.6770442128181458, 0.6904453039169312, 0.666200578212738, 0.6626161932945251, 0.6629075407981873, 0.6371564865112305, 0.6721763014793396, 0.6730408668518066, 0.6492516398429871, 0.6525641679763794, 0.6698288917541504, 0.641843855381012, 0.6438435316085815, 0.6683945655822754, 0.6380563974380493, 0.6330687999725342, 0.617286741733551, 0.6625140309333801, 0.6244713664054871, 0.643572211265564, 0.6084137558937073, 0.6034361124038696, 0.6094484925270081, 0.6016889214515686, 0.6035388708114624, 
0.5681512355804443, 0.629159688949585, 0.5722391605377197, 0.6337224245071411, 0.6382900476455688, 0.5765202045440674, 0.5834353566169739, 0.5758876204490662, 0.5723884105682373, 0.5367166996002197, 0.5580402612686157, 0.5566512942314148, 0.5059840679168701, 0.6380807757377625, 0.49563857913017273, 0.6370260715484619, 0.5208002924919128, 0.580780565738678, 0.5187138915061951, 0.4674728512763977, 0.5914150476455688, 0.5225784182548523, 0.5509077906608582, 0.5437390208244324, 0.5876714587211609, 0.5113809108734131, 0.6130290031433105, 0.5831108093261719, 0.5406965017318726, 0.5444303154945374, 0.5713781118392944, 0.5580171942710876, 0.5936187505722046, 0.5174297094345093, 0.45946064591407776, 0.5292142629623413, 0.49923190474510193, 0.4601333737373352, 0.6448274850845337, 0.47476837038993835, 0.4592137038707733, 0.5359666347503662, 0.43115174770355225, 0.6273414492607117, 0.4708901643753052, 0.4780946373939514, 0.5153037905693054, 0.44760462641716003, 0.4834255576133728, 0.4863620102405548, 0.4590986967086792, 0.6878242492675781, 0.4567679464817047, 0.5359635949134827, 0.5214946269989014, 0.5550475716590881, 0.48009032011032104, 0.6128838062286377, 0.4626908600330353, 0.4212714731693268, 0.48331859707832336, 0.4326198101043701, 0.3995482623577118, 0.5725248456001282, 0.4096750319004059, 0.472186803817749, 0.4253288209438324, 0.4543733596801758, 0.5348018407821655, 0.4374711215496063, 0.547636091709137, 0.4559398889541626, 0.4583567678928375, 0.3975050747394562, 0.506945013999939, 0.44825392961502075, 0.3379732072353363, 0.4212629795074463, 0.43303024768829346, 0.47395405173301697, 0.3946947455406189, 0.3969179391860962, 0.4617078900337219, 0.3339572250843048, 0.4773615598678589, 0.48727986216545105, 0.5114045739173889, 0.4653664827346802, 0.3795110285282135, 0.40783172845840454, 0.5204921364784241, 0.3001338243484497, 0.5036787986755371, 0.552573025226593, 0.3830263614654541, 0.35748371481895447, 0.2902784049510956, 0.33729976415634155, 0.45117583870887756, 
0.4340777099132538, 0.32751381397247314, 0.39419934153556824, 0.28892821073532104, 0.5429171919822693, 0.3512280285358429, 0.29519110918045044, 0.32420623302459717, 0.2919383645057678, 0.32059067487716675, 0.418031245470047, 0.38729846477508545, 0.40427932143211365, 0.3720347285270691, 0.39285415410995483, 0.2848249673843384, 0.3801725506782532, 0.379395455121994, 0.3267326354980469, 0.48088333010673523, 0.39165112376213074, 0.3900928795337677, 0.31752026081085205, 0.35108864307403564, 0.3074707090854645, 0.420603483915329, 0.4286137819290161, 0.2575635612010956, 0.4275769889354706, 0.29068946838378906, 0.2700044810771942, 0.45788291096687317, 0.3962259590625763, 0.26208552718162537, 0.3116024136543274, 0.21491478383541107, 0.30482232570648193, 0.4397960305213928, 0.2689315378665924, 0.2315136343240738, 0.23813609778881073, 0.39839962124824524, 0.20211194455623627, 0.18539109826087952, 0.24621745944023132, 0.2110489010810852, 0.2671937346458435, 0.298324316740036, 0.3919094204902649, 0.4767822325229645, 0.3671795129776001, 0.264624685049057, 0.279402494430542, 0.2577192783355713, 0.2227160632610321, 0.4114092290401459, 0.2173977643251419, 0.20937776565551758, 0.23352211713790894, 0.2918354272842407, 0.20685932040214539, 0.2638728618621826, 0.2975238561630249],\n [0.651547908782959, 0.6305937767028809, 0.714191198348999, 0.7366882562637329, 0.7081306576728821, 0.7052466869354248, 0.6959658265113831, 0.6876019239425659, 0.6796861290931702, 0.6682548522949219, 0.6783831119537354, 0.6809177398681641, 0.6594600081443787, 0.6597816944122314, 0.6607940196990967, 0.6554914712905884, 0.6487285494804382, 0.6310643553733826, 0.646901547908783, 0.6084935069084167, 0.646790087223053, 0.620003342628479, 0.6197651028633118, 0.6110859513282776, 0.6023704409599304, 0.5829523205757141, 0.5887001156806946, 0.5428747534751892, 0.5947333574295044, 0.520041823387146, 0.530125081539154, 0.5711768269538879, 0.517208993434906, 0.5556505918502808, 0.5052176117897034, 0.5029464960098267, 
0.503192663192749, 0.4674018919467926, 0.4327305555343628, 0.4827580749988556, 0.44052502512931824, 0.45015448331832886, 0.374491810798645, 0.41120240092277527, 0.33931177854537964, 0.28159549832344055, 0.29745686054229736, 0.33262911438941956, 0.2546529471874237, 0.24919618666172028, 0.30038347840309143, 0.28520357608795166, 0.28131112456321716, 0.24188008904457092, 0.19906455278396606, 0.2039707601070404, 0.283230721950531, 0.2393074482679367, 0.16120368242263794, 0.209192156791687, 0.19151107966899872, 0.24003323912620544, 0.2383124679327011, 0.13323870301246643, 0.20210768282413483, 0.21252617239952087, 0.16489844024181366, 0.15838722884655, 0.15414556860923767, 0.08920346945524216, 0.18110282719135284, 0.16188500821590424, 0.14822573959827423, 0.14462649822235107, 0.07673535495996475, 0.18902957439422607, 0.08951728790998459, 0.08679455518722534, 0.1354941427707672, 0.15025372803211212, 0.2071131318807602, 0.23014762997627258, 0.19371269643306732, 0.1132298931479454, 0.05065694823861122, 0.09418382495641708, 0.07476998120546341, 0.11219014972448349, 0.03167591616511345, 0.03173637390136719, 0.1261214315891266, 0.06699670106172562, 0.051265086978673935, 0.1227421760559082, 0.06480707228183746, 0.09140351414680481, 0.019068550318479538, 0.06808232516050339, 0.07977265864610672, 0.1261254847049713, 0.10272825509309769, 0.23310545086860657, 0.08881580084562302, 0.19029644131660461, 0.07674399018287659, 0.03163032978773117, 0.058682236820459366, 0.0618613138794899, 0.0789889469742775, 0.033192627131938934, 0.10236170887947083, 0.05678369104862213, 0.26384907960891724, 0.11628346145153046, 0.07242055237293243, 0.044395193457603455, 0.08786195516586304, 0.032504141330718994, 0.031131453812122345, 0.06867041438817978, 0.07551262527704239, 0.0563284270465374, 0.12378427386283875, 0.10616520047187805, 0.05368490517139435, 0.08634105324745178, 0.017265010625123978, 0.13792777061462402, 0.036241959780454636, 0.019719859585165977, 0.06076066941022873, 0.13962452113628387, 
0.12119129300117493, 0.030299704521894455, 0.11001968383789062, 0.050810620188713074, 0.26568764448165894, 0.009919365867972374, 0.05394136533141136, 0.06656327843666077, 0.021763263270258904, 0.015879830345511436, 0.08032137900590897, 0.08495171368122101, 0.02712983824312687, 0.1609228253364563, 0.03401617705821991, 0.011437006294727325, 0.07945907860994339, 0.018461011350154877, 0.07861661165952682, 0.01987522467970848, 0.021168570965528488, 0.0673016831278801, 0.12502692639827728, 0.11859587579965591, 0.15622280538082123, 0.016832754015922546, 0.07315339893102646, 0.04298650100827217, 0.27725979685783386, 0.10535978525876999, 0.07640652358531952, 0.016620127484202385, 0.02031087689101696, 0.050513237714767456, 0.20297472178936005, 0.01747487112879753, 0.03910563141107559, 0.13426096737384796, 0.08585914969444275, 0.028291454538702965, 0.11463844031095505, 0.29609841108322144, 0.015064324252307415, 0.05555401369929314, 0.19431497156620026, 0.020027797669172287, 0.004898903891444206, 0.10011210292577744, 0.07396364212036133, 0.09759460389614105, 0.003400891786441207, 0.06437862664461136, 0.11968780308961868, 0.16718178987503052, 0.0749969407916069, 0.05589193478226662, 0.1745350956916809, 0.018143052235245705, 0.033501315861940384, 0.08575007319450378, 0.017686162143945694, 0.03692617267370224, 0.08320647478103638, 0.023435505107045174, 0.06533226370811462, 0.06523420661687851, 0.11267257481813431, 0.16568031907081604],\n [0.706034243106842, 0.7023885250091553, 0.6945178508758545, 0.6984283328056335, 0.6916305422782898, 0.7008994221687317, 0.6867948770523071, 0.7029416561126709, 0.6939983367919922, 0.6870196461677551, 0.6832935214042664, 0.675546407699585, 0.6829232573509216, 0.6700230240821838, 0.6716575026512146, 0.6608497500419617, 0.6490447521209717, 0.6329697966575623, 0.645972728729248, 0.6719668507575989, 0.6204118132591248, 0.6395977139472961, 0.6233582496643066, 0.6565548181533813, 0.6079490184783936, 0.5930318236351013, 0.5667318105697632, 
0.5938836932182312, 0.5426163077354431, 0.5463024377822876, 0.5398862361907959, 0.5175414085388184, 0.476382851600647, 0.5222128629684448, 0.4666450619697571, 0.49085137248039246, 0.41316428780555725, 0.5047715902328491, 0.5985593795776367, 0.48063212633132935, 0.5779052376747131, 0.42677444219589233, 0.3830590546131134, 0.4846416413784027, 0.34301134943962097, 0.2974638044834137, 0.42954280972480774, 0.41722941398620605, 0.37005841732025146, 0.28383979201316833, 0.2831897437572479, 0.3911595046520233, 0.2856868505477905, 0.19628195464611053, 0.2705436646938324, 0.2996447682380676, 0.41134440898895264, 0.22908614575862885, 0.31802675127983093, 0.2407981902360916, 0.19022208452224731, 0.2423754185438156, 0.23106136918067932, 0.15951240062713623, 0.20519429445266724, 0.41269081830978394, 0.28166133165359497, 0.274077832698822, 0.3273050785064697, 0.3500634729862213, 0.16229261457920074, 0.16337497532367706, 0.2637341618537903, 0.10512451082468033, 0.18759353458881378, 0.16274847090244293, 0.1708659678697586, 0.3051771819591522, 0.2276899218559265, 0.10876061767339706, 0.15362103283405304, 0.1506582498550415, 0.18169306218624115, 0.23115521669387817, 0.16524721682071686, 0.2573551535606384, 0.10685427486896515, 0.19559642672538757, 0.25614291429519653, 0.05238534137606621, 0.20131099224090576, 0.3331041634082794, 0.2294325977563858, 0.14123289287090302, 0.26524078845977783, 0.14016057550907135, 0.1461646407842636, 0.10248541831970215, 0.09971999377012253, 0.2335144579410553, 0.21990928053855896, 0.2505458891391754, 0.23943883180618286, 0.10026700794696808, 0.13914231956005096, 0.05601302161812782, 0.29905134439468384, 0.0900081992149353, 0.24555091559886932, 0.0797080546617508, 0.2336335927248001, 0.23410049080848694, 0.16025646030902863, 0.27008286118507385, 0.17621804773807526, 0.1512896567583084, 0.07767941057682037, 0.11097455024719238, 0.1543291211128235, 0.062401141971349716, 0.16105739772319794, 0.13390633463859558, 0.12077003717422485, 0.1098223477602005, 
0.15606869757175446, 0.10658898204565048, 0.28911322355270386, 0.24353764951229095, 0.11939273774623871, 0.11072501540184021, 0.19733040034770966, 0.09018509835004807, 0.01866026595234871, 0.13554751873016357, 0.043486423790454865, 0.18385861814022064, 0.12455446273088455, 0.0533093586564064, 0.20611728727817535, 0.18059559166431427, 0.11164116114377975, 0.14029771089553833, 0.1608089655637741, 0.11119112372398376, 0.38275688886642456, 0.16082149744033813, 0.12045828998088837, 0.12935557961463928, 0.4323200285434723, 0.0645999014377594, 0.2109391838312149, 0.059101831167936325, 0.09139616042375565, 0.03292220085859299, 0.05889835208654404, 0.013900360092520714, 0.07583865523338318, 0.1060330793261528, 0.28177642822265625, 0.17605659365653992, 0.03004135936498642, 0.08828425407409668, 0.04051889106631279, 0.1085067167878151, 0.052544135600328445, 0.040457673370838165, 0.007216128055006266, 0.10929879546165466, 0.031988564878702164, 0.029872184619307518, 0.1507331281900406, 0.40568310022354126, 0.19595348834991455, 0.020923934876918793, 0.05222935602068901, 0.04090195149183273, 0.014779500663280487, 0.09993978589773178, 0.022669436410069466, 0.13611064851284027, 0.5441415905952454, 0.17891910672187805, 0.16622483730316162, 0.10202664136886597, 0.21352910995483398, 0.04297564551234245, 0.02968382090330124, 0.06734589487314224, 0.03990267589688301, 0.02192804031074047, 0.01873648539185524, 0.15513800084590912, 0.011869080364704132, 0.22385506331920624, 0.07410099357366562, 0.005417147185653448, 0.02449643611907959, 0.06751690059900284, 0.12114875018596649, 0.01762096956372261],\n [0.6976809501647949, 0.6918372511863708, 0.6944101452827454, 0.6784621477127075, 0.6891022324562073, 0.6993556022644043, 0.6967403292655945, 0.6892054677009583, 0.7053206562995911, 0.6855345368385315, 0.6887295246124268, 0.6843759417533875, 0.6856648325920105, 0.6731378436088562, 0.6912403106689453, 0.6865348815917969, 0.6935524940490723, 0.6893035173416138, 0.6828927397727966, 
0.6677338480949402, 0.6705579161643982, 0.6780441999435425, 0.6704750061035156, 0.6678839921951294, 0.6634907722473145, 0.6704196333885193, 0.6715743541717529, 0.6741645336151123, 0.6715762615203857, 0.6668155789375305, 0.6641136407852173, 0.6740790009498596, 0.650947630405426, 0.6427556872367859, 0.671837329864502, 0.6483162045478821, 0.648629367351532, 0.6638175249099731, 0.6340888738632202, 0.6611185669898987, 0.6443658471107483, 0.6918662190437317, 0.6425057053565979, 0.6319636702537537, 0.6655551195144653, 0.613470196723938, 0.6427372694015503, 0.6550756096839905, 0.6403217315673828, 0.6477619409561157, 0.6515638828277588, 0.6681152582168579, 0.6249656081199646, 0.643928587436676, 0.6385620832443237, 0.5995563268661499, 0.6349542140960693, 0.6332414150238037, 0.650719404220581, 0.6037539839744568, 0.6256316900253296, 0.6339825391769409, 0.5858232975006104, 0.6147709488868713, 0.5702931880950928, 0.5909301042556763, 0.6115955114364624, 0.5839518904685974, 0.6018372178077698, 0.5521049499511719, 0.5718849897384644, 0.6396619081497192, 0.5694629549980164, 0.5541239380836487, 0.5786706209182739, 0.5568535923957825, 0.5290969610214233, 0.5551702976226807, 0.5641834735870361, 0.47209280729293823, 0.6538602709770203, 0.5703932046890259, 0.4503854513168335, 0.5301358103752136, 0.4720926582813263, 0.461772084236145, 0.504241406917572, 0.491007000207901, 0.454494833946228, 0.6464589834213257, 0.5042950510978699, 0.5337916612625122, 0.5596120357513428, 0.5136921405792236, 0.557017982006073, 0.6038598418235779, 0.49134594202041626, 0.5265131592750549, 0.5190436840057373, 0.35088345408439636, 0.5242742896080017, 0.34855785965919495, 0.4799119830131531, 0.46053019165992737, 0.5739902257919312, 0.38366150856018066, 0.4532812833786011, 0.5109953284263611, 0.49517467617988586, 0.4389829635620117, 0.4707258641719818, 0.36239537596702576, 0.5373377203941345, 0.3929244577884674, 0.4642297327518463, 0.6287671327590942, 0.46479833126068115, 0.48089760541915894, 0.4028816223144531, 
0.5303710699081421, 0.352039098739624, 0.4458397924900055, 0.41422390937805176, 0.4443412721157074, 0.47730496525764465, 0.43217960000038147, 0.4060031771659851, 0.5594614148139954, 0.40046948194503784, 0.36002591252326965, 0.4003417193889618, 0.4156685173511505, 0.3149724304676056, 0.37000900506973267, 0.46881285309791565, 0.3087789714336395, 0.4601677656173706, 0.40573638677597046, 0.3618398606777191, 0.35218578577041626, 0.5509096384048462, 0.196451336145401, 0.49816447496414185, 0.3386375308036804, 0.3612266778945923, 0.4264324903488159, 0.3007291257381439, 0.3952895402908325, 0.33076733350753784, 0.3139330744743347, 0.4563199579715729, 0.4509067237377167, 0.42625170946121216, 0.2793722450733185, 0.3796156346797943, 0.2407342493534088, 0.3889766335487366, 0.2998242676258087, 0.33720552921295166, 0.2252284586429596, 0.4014185965061188, 0.40779221057891846, 0.233357235789299, 0.4863913655281067, 0.35811224579811096, 0.3861272931098938, 0.31492823362350464, 0.3384092450141907, 0.3966335952281952, 0.3258292078971863, 0.40306007862091064, 0.20832879841327667, 0.2906647324562073, 0.3105532228946686, 0.29801419377326965, 0.2927956283092499, 0.27081945538520813, 0.3337678015232086, 0.3676404654979706, 0.367865651845932, 0.24273915588855743, 0.3624434769153595, 0.4430124759674072, 0.3397931754589081, 0.3268018662929535, 0.40703853964805603, 0.2561666965484619, 0.4037596881389618, 0.3530987501144409, 0.3021649420261383, 0.3296608328819275, 0.38417771458625793, 0.2589728832244873, 0.25850096344947815, 0.5683563947677612, 0.1926306188106537, 0.27865445613861084, 0.23031607270240784, 0.3364471197128296, 0.17635715007781982],\n [0.6886885166168213, 0.6776697039604187, 0.6904548406600952, 0.6998702883720398, 0.6970816850662231, 0.6790908575057983, 0.6787399053573608, 0.6807782053947449, 0.6618412137031555, 0.6697934865951538, 0.6754627823829651, 0.647290050983429, 0.6626802086830139, 0.6380178332328796, 0.632561206817627, 0.6070359945297241, 0.6051546335220337, 
0.5983079671859741, 0.6444401741027832, 0.5914012789726257, 0.581438422203064, 0.4906451106071472, 0.5101944208145142, 0.526423454284668, 0.5375664830207825, 0.4729424715042114, 0.4785477817058563, 0.45641207695007324, 0.38636210560798645, 0.4737681448459625, 0.3795914947986603, 0.37375408411026, 0.3479461967945099, 0.3930940330028534, 0.3798999786376953, 0.3651489317417145, 0.28502172231674194, 0.26104697585105896, 0.28884491324424744, 0.3448728621006012, 0.22588969767093658, 0.1979808658361435, 0.21846146881580353, 0.22887122631072998, 0.3629720211029053, 0.20735721290111542, 0.2340666800737381, 0.1652492731809616, 0.23462919890880585, 0.19048458337783813, 0.21182133257389069, 0.2414625585079193, 0.17685168981552124, 0.32536545395851135, 0.21093428134918213, 0.16451725363731384, 0.2611170709133148, 0.16923967003822327, 0.11705833673477173, 0.13171321153640747, 0.27177694439888, 0.37192022800445557, 0.2155231237411499, 0.26494765281677246, 0.1686808466911316, 0.10900211334228516, 0.20641911029815674, 0.11570975184440613, 0.16283340752124786, 0.38603663444519043, 0.19186797738075256, 0.19040365517139435, 0.22233262658119202, 0.08178834617137909, 0.2965002655982971, 0.2810497581958771, 0.290170282125473, 0.08941994607448578, 0.11322925239801407, 0.1577697992324829, 0.39716675877571106, 0.19042739272117615, 0.14327339828014374, 0.14022691547870636, 0.2884586751461029, 0.24402953684329987, 0.18681274354457855, 0.21399076282978058, 0.05335967242717743, 0.044971197843551636, 0.4531792402267456, 0.14266860485076904, 0.07034697383642197, 0.0700504332780838, 0.117056705057621, 0.12407850474119186, 0.42452922463417053, 0.18916593492031097, 0.33069097995758057, 0.17776155471801758, 0.1513708233833313, 0.18831342458724976, 0.15991368889808655, 0.06776192039251328, 0.23874765634536743, 0.2674497365951538, 0.1142200380563736, 0.24854296445846558, 0.28676655888557434, 0.23382310569286346, 0.32367974519729614, 0.2732442617416382, 0.1385471671819687, 0.1968492865562439, 
0.1663576066493988, 0.38907256722450256, 0.20941582322120667, 0.387044757604599, 0.3501419723033905, 0.3024692237377167, 0.1819605678319931, 0.16455315053462982, 0.2352357655763626, 0.214076429605484, 0.27742111682891846, 0.4033387005329132, 0.26650121808052063, 0.26298201084136963, 0.19030150771141052, 0.4723106622695923, 0.18227426707744598, 0.17867566645145416, 0.08999756723642349, 0.14662764966487885, 0.11761727929115295, 0.16963399946689606, 0.11576689034700394, 0.2133447229862213, 0.14540347456932068, 0.051651645451784134, 0.10879826545715332, 0.3283849060535431, 0.1731925755739212, 0.2590200901031494, 0.39293307065963745, 0.15731245279312134, 0.1544555127620697, 0.25640669465065, 0.30081918835639954, 0.11014952510595322, 0.19849641621112823, 0.15967364609241486, 0.3191027045249939, 0.17158564925193787, 0.020418288186192513, 0.13784024119377136, 0.09616998583078384, 0.0735974982380867, 0.05135686695575714, 0.27819177508354187, 0.06942494958639145, 0.06014550104737282, 0.16261352598667145, 0.10980962961912155, 0.15377016365528107, 0.033965352922677994, 0.3265672028064728, 0.2402167171239853, 0.15089596807956696, 0.1143554151058197, 0.33324378728866577, 0.21906107664108276, 0.23852664232254028, 0.08399449288845062, 0.10740803927183151, 0.06621161103248596, 0.10577378422021866, 0.12487940490245819, 0.5018206238746643, 0.20529313385486603, 0.17339147627353668, 0.07018384337425232, 0.14908267557621002, 0.1859356313943863, 0.22324036061763763, 0.12102147191762924, 0.2357919067144394, 0.2977396249771118, 0.10307928919792175, 0.05937011539936066, 0.10326980799436569, 0.07437019795179367, 0.3063502311706543, 0.06665070354938507, 0.07034043967723846, 0.20620198547840118, 0.08380551636219025, 0.15251868963241577, 0.14770105481147766, 0.1754714548587799]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "CNN_ae16_FASHION", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "CNN_ae16_FASHION = np.array([[0.7238587141036987, 0.8279216289520264, 0.764658510684967, 0.7090054154396057, 0.7613484263420105, 0.6281448602676392, 0.7058420777320862, 0.7101895213127136, 0.6628857254981995, 0.6342789530754089, 0.6941124200820923, 0.7128175497055054, 0.6556928157806396, 0.6770442128181458, 0.6904453039169312, 0.666200578212738, 0.6626161932945251, 0.6629075407981873, 0.6371564865112305, 0.6721763014793396, 0.6730408668518066, 0.6492516398429871, 0.6525641679763794, 0.6698288917541504, 0.641843855381012, 0.6438435316085815, 0.6683945655822754, 0.6380563974380493, 0.6330687999725342, 0.617286741733551, 0.6625140309333801, 0.6244713664054871, 0.643572211265564, 0.6084137558937073, 0.6034361124038696, 0.6094484925270081, 0.6016889214515686, 0.6035388708114624, 0.5681512355804443, 0.629159688949585, 0.5722391605377197, 0.6337224245071411, 0.6382900476455688, 0.5765202045440674, 0.5834353566169739, 0.5758876204490662, 0.5723884105682373, 0.5367166996002197, 0.5580402612686157, 0.5566512942314148, 0.5059840679168701, 0.6380807757377625, 0.49563857913017273, 0.6370260715484619, 0.5208002924919128, 0.580780565738678, 0.5187138915061951, 0.4674728512763977, 0.5914150476455688, 0.5225784182548523, 0.5509077906608582, 0.5437390208244324, 0.5876714587211609, 0.5113809108734131, 0.6130290031433105, 0.5831108093261719, 0.5406965017318726, 0.5444303154945374, 0.5713781118392944, 0.5580171942710876, 0.5936187505722046, 0.5174297094345093, 0.45946064591407776, 0.5292142629623413, 0.49923190474510193, 0.4601333737373352, 0.6448274850845337, 0.47476837038993835, 0.4592137038707733, 0.5359666347503662, 0.43115174770355225, 0.6273414492607117, 0.4708901643753052, 0.4780946373939514, 0.5153037905693054, 0.44760462641716003, 0.4834255576133728, 0.4863620102405548, 
0.4590986967086792, 0.6878242492675781, 0.4567679464817047, 0.5359635949134827, 0.5214946269989014, 0.5550475716590881, 0.48009032011032104, 0.6128838062286377, 0.4626908600330353, 0.4212714731693268, 0.48331859707832336, 0.4326198101043701, 0.3995482623577118, 0.5725248456001282, 0.4096750319004059, 0.472186803817749, 0.4253288209438324, 0.4543733596801758, 0.5348018407821655, 0.4374711215496063, 0.547636091709137, 0.4559398889541626, 0.4583567678928375, 0.3975050747394562, 0.506945013999939, 0.44825392961502075, 0.3379732072353363, 0.4212629795074463, 0.43303024768829346, 0.47395405173301697, 0.3946947455406189, 0.3969179391860962, 0.4617078900337219, 0.3339572250843048, 0.4773615598678589, 0.48727986216545105, 0.5114045739173889, 0.4653664827346802, 0.3795110285282135, 0.40783172845840454, 0.5204921364784241, 0.3001338243484497, 0.5036787986755371, 0.552573025226593, 0.3830263614654541, 0.35748371481895447, 0.2902784049510956, 0.33729976415634155, 0.45117583870887756, 0.4340777099132538, 0.32751381397247314, 0.39419934153556824, 0.28892821073532104, 0.5429171919822693, 0.3512280285358429, 0.29519110918045044, 0.32420623302459717, 0.2919383645057678, 0.32059067487716675, 0.418031245470047, 0.38729846477508545, 0.40427932143211365, 0.3720347285270691, 0.39285415410995483, 0.2848249673843384, 0.3801725506782532, 0.379395455121994, 0.3267326354980469, 0.48088333010673523, 0.39165112376213074, 0.3900928795337677, 0.31752026081085205, 0.35108864307403564, 0.3074707090854645, 0.420603483915329, 0.4286137819290161, 0.2575635612010956, 0.4275769889354706, 0.29068946838378906, 0.2700044810771942, 0.45788291096687317, 0.3962259590625763, 0.26208552718162537, 0.3116024136543274, 0.21491478383541107, 0.30482232570648193, 0.4397960305213928, 0.2689315378665924, 0.2315136343240738, 0.23813609778881073, 0.39839962124824524, 0.20211194455623627, 0.18539109826087952, 0.24621745944023132, 0.2110489010810852, 0.2671937346458435, 0.298324316740036, 0.3919094204902649, 
0.4767822325229645, 0.3671795129776001, 0.264624685049057, 0.279402494430542, 0.2577192783355713, 0.2227160632610321, 0.4114092290401459, 0.2173977643251419, 0.20937776565551758, 0.23352211713790894, 0.2918354272842407, 0.20685932040214539, 0.2638728618621826, 0.2975238561630249],\n [0.651547908782959, 0.6305937767028809, 0.714191198348999, 0.7366882562637329, 0.7081306576728821, 0.7052466869354248, 0.6959658265113831, 0.6876019239425659, 0.6796861290931702, 0.6682548522949219, 0.6783831119537354, 0.6809177398681641, 0.6594600081443787, 0.6597816944122314, 0.6607940196990967, 0.6554914712905884, 0.6487285494804382, 0.6310643553733826, 0.646901547908783, 0.6084935069084167, 0.646790087223053, 0.620003342628479, 0.6197651028633118, 0.6110859513282776, 0.6023704409599304, 0.5829523205757141, 0.5887001156806946, 0.5428747534751892, 0.5947333574295044, 0.520041823387146, 0.530125081539154, 0.5711768269538879, 0.517208993434906, 0.5556505918502808, 0.5052176117897034, 0.5029464960098267, 0.503192663192749, 0.4674018919467926, 0.4327305555343628, 0.4827580749988556, 0.44052502512931824, 0.45015448331832886, 0.374491810798645, 0.41120240092277527, 0.33931177854537964, 0.28159549832344055, 0.29745686054229736, 0.33262911438941956, 0.2546529471874237, 0.24919618666172028, 0.30038347840309143, 0.28520357608795166, 0.28131112456321716, 0.24188008904457092, 0.19906455278396606, 0.2039707601070404, 0.283230721950531, 0.2393074482679367, 0.16120368242263794, 0.209192156791687, 0.19151107966899872, 0.24003323912620544, 0.2383124679327011, 0.13323870301246643, 0.20210768282413483, 0.21252617239952087, 0.16489844024181366, 0.15838722884655, 0.15414556860923767, 0.08920346945524216, 0.18110282719135284, 0.16188500821590424, 0.14822573959827423, 0.14462649822235107, 0.07673535495996475, 0.18902957439422607, 0.08951728790998459, 0.08679455518722534, 0.1354941427707672, 0.15025372803211212, 0.2071131318807602, 0.23014762997627258, 0.19371269643306732, 0.1132298931479454, 
0.05065694823861122, 0.09418382495641708, 0.07476998120546341, 0.11219014972448349, 0.03167591616511345, 0.03173637390136719, 0.1261214315891266, 0.06699670106172562, 0.051265086978673935, 0.1227421760559082, 0.06480707228183746, 0.09140351414680481, 0.019068550318479538, 0.06808232516050339, 0.07977265864610672, 0.1261254847049713, 0.10272825509309769, 0.23310545086860657, 0.08881580084562302, 0.19029644131660461, 0.07674399018287659, 0.03163032978773117, 0.058682236820459366, 0.0618613138794899, 0.0789889469742775, 0.033192627131938934, 0.10236170887947083, 0.05678369104862213, 0.26384907960891724, 0.11628346145153046, 0.07242055237293243, 0.044395193457603455, 0.08786195516586304, 0.032504141330718994, 0.031131453812122345, 0.06867041438817978, 0.07551262527704239, 0.0563284270465374, 0.12378427386283875, 0.10616520047187805, 0.05368490517139435, 0.08634105324745178, 0.017265010625123978, 0.13792777061462402, 0.036241959780454636, 0.019719859585165977, 0.06076066941022873, 0.13962452113628387, 0.12119129300117493, 0.030299704521894455, 0.11001968383789062, 0.050810620188713074, 0.26568764448165894, 0.009919365867972374, 0.05394136533141136, 0.06656327843666077, 0.021763263270258904, 0.015879830345511436, 0.08032137900590897, 0.08495171368122101, 0.02712983824312687, 0.1609228253364563, 0.03401617705821991, 0.011437006294727325, 0.07945907860994339, 0.018461011350154877, 0.07861661165952682, 0.01987522467970848, 0.021168570965528488, 0.0673016831278801, 0.12502692639827728, 0.11859587579965591, 0.15622280538082123, 0.016832754015922546, 0.07315339893102646, 0.04298650100827217, 0.27725979685783386, 0.10535978525876999, 0.07640652358531952, 0.016620127484202385, 0.02031087689101696, 0.050513237714767456, 0.20297472178936005, 0.01747487112879753, 0.03910563141107559, 0.13426096737384796, 0.08585914969444275, 0.028291454538702965, 0.11463844031095505, 0.29609841108322144, 0.015064324252307415, 0.05555401369929314, 0.19431497156620026, 0.020027797669172287, 
0.004898903891444206, 0.10011210292577744, 0.07396364212036133, 0.09759460389614105, 0.003400891786441207, 0.06437862664461136, 0.11968780308961868, 0.16718178987503052, 0.0749969407916069, 0.05589193478226662, 0.1745350956916809, 0.018143052235245705, 0.033501315861940384, 0.08575007319450378, 0.017686162143945694, 0.03692617267370224, 0.08320647478103638, 0.023435505107045174, 0.06533226370811462, 0.06523420661687851, 0.11267257481813431, 0.16568031907081604],\n [0.706034243106842, 0.7023885250091553, 0.6945178508758545, 0.6984283328056335, 0.6916305422782898, 0.7008994221687317, 0.6867948770523071, 0.7029416561126709, 0.6939983367919922, 0.6870196461677551, 0.6832935214042664, 0.675546407699585, 0.6829232573509216, 0.6700230240821838, 0.6716575026512146, 0.6608497500419617, 0.6490447521209717, 0.6329697966575623, 0.645972728729248, 0.6719668507575989, 0.6204118132591248, 0.6395977139472961, 0.6233582496643066, 0.6565548181533813, 0.6079490184783936, 0.5930318236351013, 0.5667318105697632, 0.5938836932182312, 0.5426163077354431, 0.5463024377822876, 0.5398862361907959, 0.5175414085388184, 0.476382851600647, 0.5222128629684448, 0.4666450619697571, 0.49085137248039246, 0.41316428780555725, 0.5047715902328491, 0.5985593795776367, 0.48063212633132935, 0.5779052376747131, 0.42677444219589233, 0.3830590546131134, 0.4846416413784027, 0.34301134943962097, 0.2974638044834137, 0.42954280972480774, 0.41722941398620605, 0.37005841732025146, 0.28383979201316833, 0.2831897437572479, 0.3911595046520233, 0.2856868505477905, 0.19628195464611053, 0.2705436646938324, 0.2996447682380676, 0.41134440898895264, 0.22908614575862885, 0.31802675127983093, 0.2407981902360916, 0.19022208452224731, 0.2423754185438156, 0.23106136918067932, 0.15951240062713623, 0.20519429445266724, 0.41269081830978394, 0.28166133165359497, 0.274077832698822, 0.3273050785064697, 0.3500634729862213, 0.16229261457920074, 0.16337497532367706, 0.2637341618537903, 0.10512451082468033, 0.18759353458881378, 
0.16274847090244293, 0.1708659678697586, 0.3051771819591522, 0.2276899218559265, 0.10876061767339706, 0.15362103283405304, 0.1506582498550415, 0.18169306218624115, 0.23115521669387817, 0.16524721682071686, 0.2573551535606384, 0.10685427486896515, 0.19559642672538757, 0.25614291429519653, 0.05238534137606621, 0.20131099224090576, 0.3331041634082794, 0.2294325977563858, 0.14123289287090302, 0.26524078845977783, 0.14016057550907135, 0.1461646407842636, 0.10248541831970215, 0.09971999377012253, 0.2335144579410553, 0.21990928053855896, 0.2505458891391754, 0.23943883180618286, 0.10026700794696808, 0.13914231956005096, 0.05601302161812782, 0.29905134439468384, 0.0900081992149353, 0.24555091559886932, 0.0797080546617508, 0.2336335927248001, 0.23410049080848694, 0.16025646030902863, 0.27008286118507385, 0.17621804773807526, 0.1512896567583084, 0.07767941057682037, 0.11097455024719238, 0.1543291211128235, 0.062401141971349716, 0.16105739772319794, 0.13390633463859558, 0.12077003717422485, 0.1098223477602005, 0.15606869757175446, 0.10658898204565048, 0.28911322355270386, 0.24353764951229095, 0.11939273774623871, 0.11072501540184021, 0.19733040034770966, 0.09018509835004807, 0.01866026595234871, 0.13554751873016357, 0.043486423790454865, 0.18385861814022064, 0.12455446273088455, 0.0533093586564064, 0.20611728727817535, 0.18059559166431427, 0.11164116114377975, 0.14029771089553833, 0.1608089655637741, 0.11119112372398376, 0.38275688886642456, 0.16082149744033813, 0.12045828998088837, 0.12935557961463928, 0.4323200285434723, 0.0645999014377594, 0.2109391838312149, 0.059101831167936325, 0.09139616042375565, 0.03292220085859299, 0.05889835208654404, 0.013900360092520714, 0.07583865523338318, 0.1060330793261528, 0.28177642822265625, 0.17605659365653992, 0.03004135936498642, 0.08828425407409668, 0.04051889106631279, 0.1085067167878151, 0.052544135600328445, 0.040457673370838165, 0.007216128055006266, 0.10929879546165466, 0.031988564878702164, 0.029872184619307518, 
0.1507331281900406, 0.40568310022354126, 0.19595348834991455, 0.020923934876918793, 0.05222935602068901, 0.04090195149183273, 0.014779500663280487, 0.09993978589773178, 0.022669436410069466, 0.13611064851284027, 0.5441415905952454, 0.17891910672187805, 0.16622483730316162, 0.10202664136886597, 0.21352910995483398, 0.04297564551234245, 0.02968382090330124, 0.06734589487314224, 0.03990267589688301, 0.02192804031074047, 0.01873648539185524, 0.15513800084590912, 0.011869080364704132, 0.22385506331920624, 0.07410099357366562, 0.005417147185653448, 0.02449643611907959, 0.06751690059900284, 0.12114875018596649, 0.01762096956372261],\n [0.6976809501647949, 0.6918372511863708, 0.6944101452827454, 0.6784621477127075, 0.6891022324562073, 0.6993556022644043, 0.6967403292655945, 0.6892054677009583, 0.7053206562995911, 0.6855345368385315, 0.6887295246124268, 0.6843759417533875, 0.6856648325920105, 0.6731378436088562, 0.6912403106689453, 0.6865348815917969, 0.6935524940490723, 0.6893035173416138, 0.6828927397727966, 0.6677338480949402, 0.6705579161643982, 0.6780441999435425, 0.6704750061035156, 0.6678839921951294, 0.6634907722473145, 0.6704196333885193, 0.6715743541717529, 0.6741645336151123, 0.6715762615203857, 0.6668155789375305, 0.6641136407852173, 0.6740790009498596, 0.650947630405426, 0.6427556872367859, 0.671837329864502, 0.6483162045478821, 0.648629367351532, 0.6638175249099731, 0.6340888738632202, 0.6611185669898987, 0.6443658471107483, 0.6918662190437317, 0.6425057053565979, 0.6319636702537537, 0.6655551195144653, 0.613470196723938, 0.6427372694015503, 0.6550756096839905, 0.6403217315673828, 0.6477619409561157, 0.6515638828277588, 0.6681152582168579, 0.6249656081199646, 0.643928587436676, 0.6385620832443237, 0.5995563268661499, 0.6349542140960693, 0.6332414150238037, 0.650719404220581, 0.6037539839744568, 0.6256316900253296, 0.6339825391769409, 0.5858232975006104, 0.6147709488868713, 0.5702931880950928, 0.5909301042556763, 0.6115955114364624, 0.5839518904685974, 
0.6018372178077698, 0.5521049499511719, 0.5718849897384644, 0.6396619081497192, 0.5694629549980164, 0.5541239380836487, 0.5786706209182739, 0.5568535923957825, 0.5290969610214233, 0.5551702976226807, 0.5641834735870361, 0.47209280729293823, 0.6538602709770203, 0.5703932046890259, 0.4503854513168335, 0.5301358103752136, 0.4720926582813263, 0.461772084236145, 0.504241406917572, 0.491007000207901, 0.454494833946228, 0.6464589834213257, 0.5042950510978699, 0.5337916612625122, 0.5596120357513428, 0.5136921405792236, 0.557017982006073, 0.6038598418235779, 0.49134594202041626, 0.5265131592750549, 0.5190436840057373, 0.35088345408439636, 0.5242742896080017, 0.34855785965919495, 0.4799119830131531, 0.46053019165992737, 0.5739902257919312, 0.38366150856018066, 0.4532812833786011, 0.5109953284263611, 0.49517467617988586, 0.4389829635620117, 0.4707258641719818, 0.36239537596702576, 0.5373377203941345, 0.3929244577884674, 0.4642297327518463, 0.6287671327590942, 0.46479833126068115, 0.48089760541915894, 0.4028816223144531, 0.5303710699081421, 0.352039098739624, 0.4458397924900055, 0.41422390937805176, 0.4443412721157074, 0.47730496525764465, 0.43217960000038147, 0.4060031771659851, 0.5594614148139954, 0.40046948194503784, 0.36002591252326965, 0.4003417193889618, 0.4156685173511505, 0.3149724304676056, 0.37000900506973267, 0.46881285309791565, 0.3087789714336395, 0.4601677656173706, 0.40573638677597046, 0.3618398606777191, 0.35218578577041626, 0.5509096384048462, 0.196451336145401, 0.49816447496414185, 0.3386375308036804, 0.3612266778945923, 0.4264324903488159, 0.3007291257381439, 0.3952895402908325, 0.33076733350753784, 0.3139330744743347, 0.4563199579715729, 0.4509067237377167, 0.42625170946121216, 0.2793722450733185, 0.3796156346797943, 0.2407342493534088, 0.3889766335487366, 0.2998242676258087, 0.33720552921295166, 0.2252284586429596, 0.4014185965061188, 0.40779221057891846, 0.233357235789299, 0.4863913655281067, 0.35811224579811096, 0.3861272931098938, 0.31492823362350464, 
0.3384092450141907, 0.3966335952281952, 0.3258292078971863, 0.40306007862091064, 0.20832879841327667, 0.2906647324562073, 0.3105532228946686, 0.29801419377326965, 0.2927956283092499, 0.27081945538520813, 0.3337678015232086, 0.3676404654979706, 0.367865651845932, 0.24273915588855743, 0.3624434769153595, 0.4430124759674072, 0.3397931754589081, 0.3268018662929535, 0.40703853964805603, 0.2561666965484619, 0.4037596881389618, 0.3530987501144409, 0.3021649420261383, 0.3296608328819275, 0.38417771458625793, 0.2589728832244873, 0.25850096344947815, 0.5683563947677612, 0.1926306188106537, 0.27865445613861084, 0.23031607270240784, 0.3364471197128296, 0.17635715007781982],\n [0.6886885166168213, 0.6776697039604187, 0.6904548406600952, 0.6998702883720398, 0.6970816850662231, 0.6790908575057983, 0.6787399053573608, 0.6807782053947449, 0.6618412137031555, 0.6697934865951538, 0.6754627823829651, 0.647290050983429, 0.6626802086830139, 0.6380178332328796, 0.632561206817627, 0.6070359945297241, 0.6051546335220337, 0.5983079671859741, 0.6444401741027832, 0.5914012789726257, 0.581438422203064, 0.4906451106071472, 0.5101944208145142, 0.526423454284668, 0.5375664830207825, 0.4729424715042114, 0.4785477817058563, 0.45641207695007324, 0.38636210560798645, 0.4737681448459625, 0.3795914947986603, 0.37375408411026, 0.3479461967945099, 0.3930940330028534, 0.3798999786376953, 0.3651489317417145, 0.28502172231674194, 0.26104697585105896, 0.28884491324424744, 0.3448728621006012, 0.22588969767093658, 0.1979808658361435, 0.21846146881580353, 0.22887122631072998, 0.3629720211029053, 0.20735721290111542, 0.2340666800737381, 0.1652492731809616, 0.23462919890880585, 0.19048458337783813, 0.21182133257389069, 0.2414625585079193, 0.17685168981552124, 0.32536545395851135, 0.21093428134918213, 0.16451725363731384, 0.2611170709133148, 0.16923967003822327, 0.11705833673477173, 0.13171321153640747, 0.27177694439888, 0.37192022800445557, 0.2155231237411499, 0.26494765281677246, 0.1686808466911316, 
0.10900211334228516, 0.20641911029815674, 0.11570975184440613, 0.16283340752124786, 0.38603663444519043, 0.19186797738075256, 0.19040365517139435, 0.22233262658119202, 0.08178834617137909, 0.2965002655982971, 0.2810497581958771, 0.290170282125473, 0.08941994607448578, 0.11322925239801407, 0.1577697992324829, 0.39716675877571106, 0.19042739272117615, 0.14327339828014374, 0.14022691547870636, 0.2884586751461029, 0.24402953684329987, 0.18681274354457855, 0.21399076282978058, 0.05335967242717743, 0.044971197843551636, 0.4531792402267456, 0.14266860485076904, 0.07034697383642197, 0.0700504332780838, 0.117056705057621, 0.12407850474119186, 0.42452922463417053, 0.18916593492031097, 0.33069097995758057, 0.17776155471801758, 0.1513708233833313, 0.18831342458724976, 0.15991368889808655, 0.06776192039251328, 0.23874765634536743, 0.2674497365951538, 0.1142200380563736, 0.24854296445846558, 0.28676655888557434, 0.23382310569286346, 0.32367974519729614, 0.2732442617416382, 0.1385471671819687, 0.1968492865562439, 0.1663576066493988, 0.38907256722450256, 0.20941582322120667, 0.387044757604599, 0.3501419723033905, 0.3024692237377167, 0.1819605678319931, 0.16455315053462982, 0.2352357655763626, 0.214076429605484, 0.27742111682891846, 0.4033387005329132, 0.26650121808052063, 0.26298201084136963, 0.19030150771141052, 0.4723106622695923, 0.18227426707744598, 0.17867566645145416, 0.08999756723642349, 0.14662764966487885, 0.11761727929115295, 0.16963399946689606, 0.11576689034700394, 0.2133447229862213, 0.14540347456932068, 0.051651645451784134, 0.10879826545715332, 0.3283849060535431, 0.1731925755739212, 0.2590200901031494, 0.39293307065963745, 0.15731245279312134, 0.1544555127620697, 0.25640669465065, 0.30081918835639954, 0.11014952510595322, 0.19849641621112823, 0.15967364609241486, 0.3191027045249939, 0.17158564925193787, 0.020418288186192513, 0.13784024119377136, 0.09616998583078384, 0.0735974982380867, 0.05135686695575714, 0.27819177508354187, 0.06942494958639145, 
0.06014550104737282, 0.16261352598667145, 0.10980962961912155, 0.15377016365528107, 0.033965352922677994, 0.3265672028064728, 0.2402167171239853, 0.15089596807956696, 0.1143554151058197, 0.33324378728866577, 0.21906107664108276, 0.23852664232254028, 0.08399449288845062, 0.10740803927183151, 0.06621161103248596, 0.10577378422021866, 0.12487940490245819, 0.5018206238746643, 0.20529313385486603, 0.17339147627353668, 0.07018384337425232, 0.14908267557621002, 0.1859356313943863, 0.22324036061763763, 0.12102147191762924, 0.2357919067144394, 0.2977396249771118, 0.10307928919792175, 0.05937011539936066, 0.10326980799436569, 0.07437019795179367, 0.3063502311706543, 0.06665070354938507, 0.07034043967723846, 0.20620198547840118, 0.08380551636219025, 0.15251868963241577, 0.14770105481147766, 0.1754714548587799]])\nQCNN_pca8_MNIST = np.array([[11.07566780693027, 12.088299760426041, 11.551506604335266, 11.253319359143546, 14.953191630913897, 14.68769292643844, 8.916090725772742, 8.899832950777347, 11.936347288575341, 29.54298973473981, 26.442258040984782, 12.076464451496463, 8.833678525936742, 7.223465353030981, 6.112461716994839, 4.30612928081279, 3.3226676467661993, 5.012194866048839, 5.727182881499713, 3.561962252193606, 4.1752301529612454, 4.375301012629789, 5.355548727438603, 11.660886052400837, 9.822445456234759, 7.484351904953757, 4.522296576815185, 4.718819263196408, 5.067770892390593, 3.2966599407174844, 3.7977423184966694, 4.538326547760809, 6.881627432245426, 7.6059361845564375, 4.536508766497228, 5.0109617444212, 3.9642771830973453, 2.6435308235915045, 6.659696939338312, 6.512886391925427, 5.01178971029818, 4.351728881010261, 3.3793594808076293, 4.900374254131473, 3.9695078955747274, 5.253025586311835, 3.5323489604975227, 2.801419424306982, 7.063143112226623, 6.021297033313683, 3.902614328499857, 2.6873557403049686, 3.7889909801518376, 3.530646538662217, 6.7263663962678715, 4.123122118541927, 3.3362521033557866, 3.5743220427681757, 5.888039930151708, 
3.45806091712591, 3.446893136377404, 3.9053661476327424, 3.916839216438724, 5.577295352757063, 2.6071367264315732, 4.609025632421608, 3.13139081304701, 5.134299726263132, 3.973101926022414, 4.560387945439484, 5.0352782252756425, 5.20482777845255, 7.800177896655665, 3.046478533952911, 7.477669010941324, 5.871679421772111, 3.6440834987268174, 2.43663480252514, 4.827962548134281, 5.496591999746828, 4.422199447892381, 4.714409994264919, 5.837586964236823, 4.592798615191176, 5.24395598223208, 6.304675694206337, 4.439474801920172, 3.9513923335546113, 4.3025654860605, 5.436446886580143, 7.159451455421293, 4.155648032949791, 4.071285546424471, 3.13956804285088, 3.9090429015265795, 3.0730432115798645, 6.323041746850747, 3.891368890197287, 3.3209048723064636, 5.555356879726153, 4.830933994979224, 3.0625950910246056, 3.3762268372342126, 5.003842102554069, 4.140149787327214, 2.740201115471464, 4.348055362855259, 3.6724430716053016, 4.452024406524155, 3.4351112354869495, 4.384272772920163, 4.153020044506476, 3.0064860493215604, 4.044472188055539, 3.2035666331610204, 2.8092281843081564, 4.953470186179663, 7.4101323440079305, 5.245970903582007, 3.216295053092832, 4.112101491730218, 3.524649949601306, 3.3050482384718616, 3.3238331005194053, 4.230108877637831, 3.836020143069676, 3.8514042158055646, 4.028186151740516, 3.8612493486473256, 6.090817923897665, 2.5840782116978267, 3.5286935223697955, 8.136499802968219, 4.418548855011245, 4.464234140849569, 3.7049716448085306, 3.948000942623004, 2.9664718159975885, 3.204344969976858, 4.0628069827790485, 4.273796215115928, 3.341657323036869, 4.827564329510189, 3.8259596512655247, 3.8911501530346384, 4.905052220443538, 4.774451730605712, 3.9270653844667534, 3.8736244693507302, 5.24138972464335, 3.041517312588207, 3.2401471643181345, 4.564608111875984, 7.935075508120698, 4.1663171831630885, 3.4256874205131043, 5.554624173800296, 3.9256261326490813, 3.0962908420652386, 3.712284144546821, 4.17996452435959, 5.371219214465313, 4.809803917165166, 
4.99669991915913, 2.461585545359129, 4.269901024384005, 5.880048318847954, 4.837468480425993, 4.630896886408107, 5.918722144633159, 5.279186154449831, 3.6013191705213146, 4.1455729094880525, 5.029047791495902, 4.649827060499221, 5.221743671123371, 5.630533855387478, 3.208694054028586, 4.033024381459228, 3.6699144984737653, 4.2655574281910384, 3.646795700808205, 3.485688651486584, 5.5167925965655895, 4.7123755453772835, 3.571815384325478, 4.3106698637842795, 3.958535044105963, 4.13061907937434, 4.169038917461716, 4.0689814958981305, 2.817159284091822, 4.259087420912562, 3.6632157611869465, 2.9982378366564197, 3.266188641210523, 4.004407980729556, 2.941014372059401, 4.274535941803231, 3.924313804492608],\n [13.609274750567263, 11.790145166431767, 8.324845412069271, 9.31809148900475, 17.094556881919093, 10.185389064101527, 7.950737495904608, 15.268428227252532, 28.003531040324237, 19.375776871578957, 7.028067430910302, 6.238199787752007, 6.377942019569903, 7.627646898412078, 10.991046252390797, 8.192361852002204, 4.4101563053194415, 6.5818500880112625, 10.682682138807836, 8.949854557369807, 9.711357118896169, 9.65539464752591, 5.378832685269372, 4.740269523267993, 5.923083167503895, 5.733148155775042, 7.85054933004119, 5.729334012950613, 3.850012218784744, 7.743868007830221, 9.650249371792054, 15.161464529807489, 7.634023087942338, 5.8366929845941575, 4.250035001614327, 4.72045456663158, 4.857041294623427, 4.8172955996489115, 4.275564261653209, 3.095721014580351, 7.068176171358058, 6.293000403937157, 5.601974130939472, 6.648229414561065, 7.167112524032975, 5.241565709490395, 3.8301705765705063, 9.656281756050516, 10.777071393895056, 6.164831996986917, 3.3466920428932707, 7.218482075805271, 5.915877283286026, 4.713081104495018, 4.746163486548834, 5.146033721194816, 5.646064283216532, 4.820878310367751, 3.4755023178648434, 3.745399034898267, 3.758726274965405, 3.955329417014729, 4.745844522885886, 12.343331764156298, 19.375249903450886, 15.472110945964834, 
7.936390031444082, 9.909858076411238, 5.704284701418682, 6.251152553271576, 5.833857177186193, 4.712510062108329, 6.415744601583355, 4.358466746137903, 4.464241276383932, 4.584960883818194, 7.990358239690424, 7.76985838489289, 15.281957754742386, 25.381952668080167, 10.438745029358827, 8.19113696810032, 5.947680113327387, 6.067464881696726, 11.659415789345847, 27.95301557375649, 25.013727430707704, 9.599071749214644, 10.442541141298245, 11.97889329141669, 10.659641385857693, 6.327870292015357, 6.154301274699113, 4.975435168302825, 5.090347158741551, 6.745005309476746, 8.886008525133105, 12.712667155675518, 11.33142481041818, 18.793900494631426, 27.0753335296596, 9.89713594584322, 10.421896314729489, 13.117267027473291, 17.198849762508253, 17.112210677407774, 15.821240059327298, 16.60844539733127, 16.383389106724266, 15.569478958904849, 14.86219487240423, 14.301596359200754, 14.355739887730904, 11.456002147888773, 9.110319210402043, 7.002929035323144, 6.645586449187431, 13.27617596418319, 7.176407791070932, 13.200791385342626, 16.58707977953236, 14.333484251429665, 12.876177352456619, 12.928789854746103, 6.751268521655012, 4.3308942556110965, 5.969206923664707, 6.3108879787923025, 11.61885546567767, 26.543308383249418, 29.781149702659775, 13.846859732841459, 9.098766218618605, 8.69550450967204, 6.02851421296757, 7.653140947440827, 4.595324242747394, 3.993993697771646, 3.392743116280023, 5.123561728638506, 15.480387829048658, 20.827455102351237, 20.335798011049096, 11.176729579135683, 10.226309773029051, 10.550365038298738, 11.675486520085073, 7.895737897121682, 7.643888759774583, 7.164376914387503, 7.780438269597392, 9.706812051294962, 14.554890826706902, 10.922174907824482, 5.419938913217733, 5.75041255787905, 7.384123680751176, 7.908776974934407, 17.851018435034455, 33.04541946736583, 14.561103152901858, 14.67867614213417, 12.89809310880798, 11.344672268597172, 10.396597774349695, 8.10005010513771, 6.708095654846824, 5.520673941769321, 7.156582814799177, 
7.942571611139103, 5.542424494291016, 6.70571809826743, 11.53617320546384, 27.15210782326675, 42.42885297420549, 18.27071725476836, 22.896373801032993, 18.152396431679087, 21.164552026464214, 17.440088678400812, 15.005763477758713, 17.077964105182666, 18.59845571058319, 19.359928119546083, 20.04227534752496, 13.631147739759, 18.073450422260702, 9.407714973556521, 7.796769526323449, 9.19506286100661, 7.871321214264781, 7.5194114614533305, 6.007747030941974, 6.788526360310606, 7.644075232490532, 5.571986497810234, 4.603353293038771, 6.754279299846348, 5.624505717816605, 4.8099379907757],\n [18.096884384268673, 15.852268884202472, 14.820231708231944, 13.404793188706666, 11.863210199777754, 7.694661740087927, 5.931148808535034, 5.228938306261172, 6.7454492540219295, 5.945019303172127, 5.87775329941431, 3.517399481381538, 3.756199634770436, 4.499328876122449, 4.011496404307967, 4.733335210173935, 4.54994524326696, 5.809436618238216, 6.367698263361558, 3.750443025910619, 4.204071366399012, 6.344404981260076, 6.60474038543796, 5.990079220058454, 11.314065196699868, 4.691830037992517, 5.840400961444706, 5.029154419599599, 5.0142081451196585, 6.91408684914281, 4.167978121065421, 4.517906981341582, 5.973459294711706, 3.5767218858993632, 3.967102173200744, 4.778601034646926, 4.583211873302772, 5.269344468529714, 4.763216941914363, 3.8157368572129027, 3.9834775359732384, 3.87197191498559, 7.2680622855409975, 10.385395273433573, 6.640497285959017, 4.518232734969506, 3.8589535789533196, 4.325578369207487, 7.476293157782777, 4.744509570916178, 4.231706827784663, 3.805742473206669, 4.93515728991834, 3.143645750614709, 5.1586056490990275, 5.816658579740211, 5.636231415423712, 4.210122346009886, 3.6918138452968536, 4.456972249664517, 3.9845661148244864, 4.745778835791205, 2.886223295868351, 3.289661344676216, 3.524332369880167, 3.5270284788846, 3.46832758438505, 6.676530871068425, 5.127497899232571, 4.062830931095155, 3.255554766106602, 3.5634763638333475, 3.368980753449615, 
3.1962636086166807, 4.974701862043523, 4.630058242256361, 4.1394448537211535, 2.7101767666485106, 3.944898271475589, 4.537663971098052, 3.0836990911623023, 5.4277431164687595, 3.832278725643912, 3.4894392502579095, 5.854040962477892, 3.281432132887723, 3.760011271667837, 3.5346529057723535, 2.7446648978663575, 5.5587590538969245, 6.228620976915677, 3.7590550686126, 4.565238628091609, 4.827229122739711, 4.746256031881321, 3.3507757501489026, 4.7756136655163965, 3.7630347471780476, 3.7757587618634725, 3.7823281612988375, 3.138600373583696, 3.710407989128023, 2.6935199661096316, 4.2459809708379055, 3.576222728379776, 3.408045488139582, 3.722069369628857, 3.8507836878618766, 3.2563343202234742, 3.7950058653028726, 6.5206441320189255, 4.990085263693601, 7.179241103386444, 4.080714906778325, 3.342218955621151, 3.2982538263143377, 3.2746407571872704, 4.963113795650163, 6.066857114116258, 4.535759844166912, 5.956300792997058, 3.133430129056542, 3.4510364762266144, 3.659110840439187, 5.662885755091636, 5.148103826806315, 3.011488721588434, 3.1586650433224706, 6.002260623384988, 4.652588094203826, 4.091063219813916, 4.964399240491144, 4.2827443135831045, 8.797798457672924, 3.069534148746489, 2.243098436799878, 3.110246183729729, 5.626840556042223, 4.036477703385252, 5.137496597801894, 3.4024575009743043, 4.2733181758801795, 3.597729338633107, 3.7060836733708737, 3.409974963702637, 4.199078678190699, 2.938648268623713, 3.270651121507956, 3.2454527858342903, 4.074156204042057, 3.548980922122498, 6.034481332708098, 6.134321025597675, 4.599942503638246, 3.5738856572781206, 3.643603119093392, 4.1736814222328915, 5.167053717585787, 5.822494981440665, 6.197206437960844, 4.607502181341255, 3.9134403549463825, 3.746264591925334, 3.492837833212506, 3.70664979867618, 3.4194570691394124, 3.942159235481587, 4.940252458793397, 4.3247083943776765, 4.370568976403512, 3.9128773218921817, 3.672192310567561, 3.1970679933515362, 3.4105158579103256, 2.9134274370757183, 3.735440975643731, 
4.5333545741192784, 4.145376777698384, 3.0931338586410067, 4.0900640996299416, 4.259241640986883, 3.0887927637320343, 4.301803414880171, 4.7655404979933635, 6.650531787522625, 4.146085785966283, 4.394987284571805, 3.919450849083283, 2.9180494733836664, 2.9041471906853364, 4.597096484065194, 3.402921097202326, 4.203173835461593, 2.739368203919339, 5.701386363179136, 4.541050052964571, 4.417940629263727, 3.5008618313911923, 6.369336738182466, 6.162898973518352],\n [17.2356698238484, 10.971693669469245, 9.6049377789082, 10.780235784007253, 8.002016555780154, 9.380321099867881, 7.602570483204206, 4.7963729276923335, 6.354193621239068, 2.9089819757985977, 4.176354019592642, 6.112580307700947, 5.9649669631631905, 4.168438280087776, 3.9502765869227847, 6.838152544991016, 10.810437891709498, 6.94316394312289, 5.01649837618578, 3.585355548685841, 2.865384194744195, 5.381160754470928, 4.082351637525652, 4.773297792395346, 3.344957199780674, 5.903650130498551, 5.195763103037783, 4.6924063512270475, 4.533484113574728, 6.189188944519186, 2.6245545532926977, 3.788949680729777, 5.434133603671723, 6.677422845789866, 6.4322724344443065, 4.917760112066586, 3.8337946821012467, 4.7192746362983655, 4.103036782483408, 4.300315692558614, 3.634412560011968, 3.519336056932341, 3.3556746267163753, 4.201161022726155, 4.948717203324431, 3.625367379860422, 4.5400103795517674, 2.9781947880078388, 3.4403807643458593, 3.9369573517019747, 3.9802601576271117, 3.5523173096831804, 3.9067241520019813, 2.452714449403413, 3.815629863482705, 4.489633632834001, 6.525330610619972, 2.4558721676505244, 3.5605417793209257, 5.042611137059414, 4.606043694290479, 5.45010540967198, 2.969778587691943, 4.086419594773755, 6.028636516489212, 4.308818607723051, 3.852494201755873, 4.10697368815786, 7.4574245360559415, 4.120130670132272, 3.974933412831884, 4.644607874221422, 3.016526926501615, 3.6446178926084833, 2.704470531928113, 4.14949447794495, 2.434381546737894, 5.61068856844978, 9.196493149981414, 
6.38538194634459, 3.8278902987109795, 8.607089024917675, 5.2437623990578635, 6.428774633896829, 4.248338676788096, 5.153048647388701, 5.05277581917047, 3.41965780610391, 3.8994416840405925, 4.261081880259884, 3.793473219716548, 3.1686425760080557, 4.092857736159728, 3.4903290666623654, 5.202650169618569, 4.533683519155149, 3.4688529166178705, 4.346754631722296, 4.595676321752801, 5.295831337364197, 4.263051694243957, 3.993526457539066, 2.930264808334415, 3.8374493993743495, 3.480038197119573, 3.597996230901235, 5.2725707601304475, 7.039012943949839, 3.3712144359488905, 3.6410601261910367, 7.576518644314536, 3.1993085525107814, 4.870307432228216, 2.7213133388286854, 5.52767451960594, 4.117122444006798, 3.919677079432237, 5.504311447087285, 6.9082589511273, 7.540570713425053, 7.061386268124986, 5.799680383181183, 4.555211445202609, 4.292194035552475, 4.606548985909631, 2.9180877153016502, 3.968143943164642, 4.188616171483531, 5.3557214431785685, 6.196053155032347, 4.306781693126012, 4.343186527430035, 2.6914318030415028, 3.9870470813682424, 6.595813378216199, 7.724954455442298, 2.752123185339803, 3.405114091220051, 4.157256276387149, 3.4065194286913347, 7.488695793710785, 4.489703064292184, 4.190743641781118, 6.4379271381944605, 6.268783514498618, 5.395075866965959, 4.185983828167981, 5.905791506160912, 6.038963006236642, 7.318006815592684, 6.547362736545181, 6.540147597888429, 2.276379542632732, 3.226636754020138, 3.8051598429937243, 5.035579159322249, 3.5670589941316275, 3.7894387039535737, 3.2394153304302367, 3.7131157979316813, 2.969285849587599, 8.31486213616027, 6.38404529816691, 4.546834335183441, 5.276601269779959, 2.067079524683459, 4.024292139744206, 4.247082520590729, 5.069612401721668, 3.562362688248546, 3.640089930522303, 5.290198597989332, 4.38752798132655, 4.592693235495391, 3.4653514955931364, 8.680030611729984, 7.983689652655414, 5.324285890482596, 3.4071200795055265, 5.360623297048809, 5.68047271372421, 5.078371483122285, 3.1134273541052364, 
4.519048569218754, 4.149902188972754, 3.186788637991477, 3.303689546015046, 4.879545833282899, 4.342066666198046, 4.994572440367204, 7.435690661949689, 14.86210140091668, 7.776897930814217, 7.562241705243096, 5.007010442918624, 3.118690012028835, 5.517503315042275, 4.687528825021606, 2.3334290546491263, 3.577891042612885],\n [19.010290958942527, 18.730038127485884, 17.616268544376375, 16.83897376497371, 17.36103545751292, 15.568042453265454, 15.253384074114525, 13.625400838802955, 12.685374400834284, 12.797703119584295, 11.640579515672068, 12.804955246330437, 11.508459650337068, 12.20864530049625, 10.528520035583075, 11.02647224394073, 11.851039890806412, 11.472365423501323, 11.065694472754236, 10.735429351415226, 10.275485770806911, 11.272919358964739, 9.926137276090248, 10.974216152587998, 9.955776041071225, 11.188695093573552, 11.017249804540038, 10.32701435021692, 9.405406897752538, 8.849720507106774, 9.643207269316228, 10.415227035077873, 10.322094934620802, 10.001781851619727, 8.841724420626816, 9.367026936515538, 8.75563456578973, 7.864967842640772, 8.393092042611352, 7.678278749995313, 5.201931367824358, 5.794735579768396, 6.2876916661327495, 4.4774942592271945, 4.738521802693986, 4.437412805135243, 5.3070166575360895, 4.68109946187927, 3.977704426685706, 4.027818343108029, 5.1447338679222385, 4.485600130301475, 4.7990732433790555, 5.183090269651613, 4.397823826841257, 7.446191426417248, 3.8171634615327066, 8.369155085096086, 4.0025853903210855, 3.9885300077123964, 7.96300631495318, 7.937333170176433, 4.676707803326354, 5.532431376792298, 5.860372287622956, 4.150256490784685, 3.412193172599803, 6.795559923156602, 3.1807322361063135, 4.5306619402025365, 5.7030353116258485, 5.706714374945495, 3.87499210889695, 4.751600939840117, 3.6244369029042987, 5.010002090621885, 4.369094250216802, 4.300693271324623, 3.598790922607551, 3.919025553277423, 5.305480513535443, 4.115307583537069, 4.879886874326567, 5.844761644773662, 6.5418197333359025, 6.0336368397182225, 
7.214327567584377, 3.5460799342628393, 4.814589770098439, 3.8668663531570897, 3.730560143760115, 4.9975102964039095, 4.509883905729241, 6.031463019182764, 5.881775251321356, 5.280724467807737, 4.049300435115646, 6.554044135938854, 4.176924372625198, 3.5800627759011876, 6.495806544273655, 5.945591066898653, 4.940332918780744, 6.22922350748202, 5.409785933694561, 6.149656900597772, 6.216415740061427, 4.427064342874765, 5.149458874427763, 5.743145249693337, 6.420826110319528, 5.064093458506964, 5.152544006627681, 4.004655454214687, 7.8123199151524, 3.6056895314227946, 4.95000314513724, 4.633469289845107, 5.889134300264316, 4.179650782566888, 5.951590180006326, 5.33356603236385, 4.9012754133525185, 3.6853215867948466, 4.887111141914829, 4.793375031982881, 3.354235772559694, 5.198507663636922, 9.205911653992818, 5.182682629313467, 6.244276516050074, 4.517695399463262, 4.856290586248337, 5.533877886712027, 4.562017381777088, 4.329110233802339, 3.6870228557908926, 5.4936383448476445, 4.870822311233726, 7.011047221748219, 4.426752863388371, 4.143057403162561, 4.223270912401913, 5.463364534396971, 5.234072162676923, 4.613308154463963, 4.930090903202137, 4.280930337642543, 5.107603688450969, 5.207244944300474, 4.690731166435635, 7.141182358882547, 3.9764135013537625, 3.043585685101406, 3.845272782126034, 4.484627880871906, 5.73747959538469, 7.481699771137542, 5.202520210148081, 3.964043443314169, 3.2811593098562284, 4.726361627278504, 3.488942321106898, 4.530234432312607, 7.052913870751452, 5.773942710862269, 4.215828262589515, 4.293836774736912, 7.604106905694769, 8.353470954231236, 3.9312201846622243, 3.5272559379803363, 7.9316234356421615, 4.134375250290148, 5.456331076620021, 6.36598403568583, 6.835728829439136, 6.138223587722574, 4.240010511195559, 4.076448088234501, 6.555305340900528, 6.486112230660281, 3.570344290089983, 7.1324660945205505, 9.907399818713007, 6.1499693327365454, 6.370195223291997, 4.103136417750757, 4.356727387171187, 3.67681354833498, 
4.361396377968585, 4.42022654549385, 4.415107271239581, 2.8034684028420247, 4.831994677751856, 7.220881426782241, 4.534041806976291, 4.360407153607694, 4.375782716785823, 5.148299283324542]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_pca8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_pca8_MNIST = np.array([[11.07566780693027, 12.088299760426041, 11.551506604335266, 11.253319359143546, 14.953191630913897, 14.68769292643844, 8.916090725772742, 8.899832950777347, 11.936347288575341, 29.54298973473981, 26.442258040984782, 12.076464451496463, 8.833678525936742, 7.223465353030981, 6.112461716994839, 4.30612928081279, 3.3226676467661993, 5.012194866048839, 5.727182881499713, 3.561962252193606, 4.1752301529612454, 4.375301012629789, 5.355548727438603, 11.660886052400837, 9.822445456234759, 7.484351904953757, 4.522296576815185, 4.718819263196408, 5.067770892390593, 3.2966599407174844, 3.7977423184966694, 4.538326547760809, 6.881627432245426, 7.6059361845564375, 4.536508766497228, 5.0109617444212, 3.9642771830973453, 2.6435308235915045, 6.659696939338312, 6.512886391925427, 5.01178971029818, 4.351728881010261, 3.3793594808076293, 4.900374254131473, 3.9695078955747274, 5.253025586311835, 3.5323489604975227, 2.801419424306982, 7.063143112226623, 6.021297033313683, 3.902614328499857, 2.6873557403049686, 3.7889909801518376, 3.530646538662217, 6.7263663962678715, 4.123122118541927, 3.3362521033557866, 3.5743220427681757, 5.888039930151708, 3.45806091712591, 3.446893136377404, 3.9053661476327424, 3.916839216438724, 5.577295352757063, 2.6071367264315732, 4.609025632421608, 3.13139081304701, 5.134299726263132, 3.973101926022414, 4.560387945439484, 5.0352782252756425, 
5.20482777845255, 7.800177896655665, 3.046478533952911, 7.477669010941324, 5.871679421772111, 3.6440834987268174, 2.43663480252514, 4.827962548134281, 5.496591999746828, 4.422199447892381, 4.714409994264919, 5.837586964236823, 4.592798615191176, 5.24395598223208, 6.304675694206337, 4.439474801920172, 3.9513923335546113, 4.3025654860605, 5.436446886580143, 7.159451455421293, 4.155648032949791, 4.071285546424471, 3.13956804285088, 3.9090429015265795, 3.0730432115798645, 6.323041746850747, 3.891368890197287, 3.3209048723064636, 5.555356879726153, 4.830933994979224, 3.0625950910246056, 3.3762268372342126, 5.003842102554069, 4.140149787327214, 2.740201115471464, 4.348055362855259, 3.6724430716053016, 4.452024406524155, 3.4351112354869495, 4.384272772920163, 4.153020044506476, 3.0064860493215604, 4.044472188055539, 3.2035666331610204, 2.8092281843081564, 4.953470186179663, 7.4101323440079305, 5.245970903582007, 3.216295053092832, 4.112101491730218, 3.524649949601306, 3.3050482384718616, 3.3238331005194053, 4.230108877637831, 3.836020143069676, 3.8514042158055646, 4.028186151740516, 3.8612493486473256, 6.090817923897665, 2.5840782116978267, 3.5286935223697955, 8.136499802968219, 4.418548855011245, 4.464234140849569, 3.7049716448085306, 3.948000942623004, 2.9664718159975885, 3.204344969976858, 4.0628069827790485, 4.273796215115928, 3.341657323036869, 4.827564329510189, 3.8259596512655247, 3.8911501530346384, 4.905052220443538, 4.774451730605712, 3.9270653844667534, 3.8736244693507302, 5.24138972464335, 3.041517312588207, 3.2401471643181345, 4.564608111875984, 7.935075508120698, 4.1663171831630885, 3.4256874205131043, 5.554624173800296, 3.9256261326490813, 3.0962908420652386, 3.712284144546821, 4.17996452435959, 5.371219214465313, 4.809803917165166, 4.99669991915913, 2.461585545359129, 4.269901024384005, 5.880048318847954, 4.837468480425993, 4.630896886408107, 5.918722144633159, 5.279186154449831, 3.6013191705213146, 4.1455729094880525, 5.029047791495902, 4.649827060499221, 
5.221743671123371, 5.630533855387478, 3.208694054028586, 4.033024381459228, 3.6699144984737653, 4.2655574281910384, 3.646795700808205, 3.485688651486584, 5.5167925965655895, 4.7123755453772835, 3.571815384325478, 4.3106698637842795, 3.958535044105963, 4.13061907937434, 4.169038917461716, 4.0689814958981305, 2.817159284091822, 4.259087420912562, 3.6632157611869465, 2.9982378366564197, 3.266188641210523, 4.004407980729556, 2.941014372059401, 4.274535941803231, 3.924313804492608],\n [13.609274750567263, 11.790145166431767, 8.324845412069271, 9.31809148900475, 17.094556881919093, 10.185389064101527, 7.950737495904608, 15.268428227252532, 28.003531040324237, 19.375776871578957, 7.028067430910302, 6.238199787752007, 6.377942019569903, 7.627646898412078, 10.991046252390797, 8.192361852002204, 4.4101563053194415, 6.5818500880112625, 10.682682138807836, 8.949854557369807, 9.711357118896169, 9.65539464752591, 5.378832685269372, 4.740269523267993, 5.923083167503895, 5.733148155775042, 7.85054933004119, 5.729334012950613, 3.850012218784744, 7.743868007830221, 9.650249371792054, 15.161464529807489, 7.634023087942338, 5.8366929845941575, 4.250035001614327, 4.72045456663158, 4.857041294623427, 4.8172955996489115, 4.275564261653209, 3.095721014580351, 7.068176171358058, 6.293000403937157, 5.601974130939472, 6.648229414561065, 7.167112524032975, 5.241565709490395, 3.8301705765705063, 9.656281756050516, 10.777071393895056, 6.164831996986917, 3.3466920428932707, 7.218482075805271, 5.915877283286026, 4.713081104495018, 4.746163486548834, 5.146033721194816, 5.646064283216532, 4.820878310367751, 3.4755023178648434, 3.745399034898267, 3.758726274965405, 3.955329417014729, 4.745844522885886, 12.343331764156298, 19.375249903450886, 15.472110945964834, 7.936390031444082, 9.909858076411238, 5.704284701418682, 6.251152553271576, 5.833857177186193, 4.712510062108329, 6.415744601583355, 4.358466746137903, 4.464241276383932, 4.584960883818194, 7.990358239690424, 7.76985838489289, 
15.281957754742386, 25.381952668080167, 10.438745029358827, 8.19113696810032, 5.947680113327387, 6.067464881696726, 11.659415789345847, 27.95301557375649, 25.013727430707704, 9.599071749214644, 10.442541141298245, 11.97889329141669, 10.659641385857693, 6.327870292015357, 6.154301274699113, 4.975435168302825, 5.090347158741551, 6.745005309476746, 8.886008525133105, 12.712667155675518, 11.33142481041818, 18.793900494631426, 27.0753335296596, 9.89713594584322, 10.421896314729489, 13.117267027473291, 17.198849762508253, 17.112210677407774, 15.821240059327298, 16.60844539733127, 16.383389106724266, 15.569478958904849, 14.86219487240423, 14.301596359200754, 14.355739887730904, 11.456002147888773, 9.110319210402043, 7.002929035323144, 6.645586449187431, 13.27617596418319, 7.176407791070932, 13.200791385342626, 16.58707977953236, 14.333484251429665, 12.876177352456619, 12.928789854746103, 6.751268521655012, 4.3308942556110965, 5.969206923664707, 6.3108879787923025, 11.61885546567767, 26.543308383249418, 29.781149702659775, 13.846859732841459, 9.098766218618605, 8.69550450967204, 6.02851421296757, 7.653140947440827, 4.595324242747394, 3.993993697771646, 3.392743116280023, 5.123561728638506, 15.480387829048658, 20.827455102351237, 20.335798011049096, 11.176729579135683, 10.226309773029051, 10.550365038298738, 11.675486520085073, 7.895737897121682, 7.643888759774583, 7.164376914387503, 7.780438269597392, 9.706812051294962, 14.554890826706902, 10.922174907824482, 5.419938913217733, 5.75041255787905, 7.384123680751176, 7.908776974934407, 17.851018435034455, 33.04541946736583, 14.561103152901858, 14.67867614213417, 12.89809310880798, 11.344672268597172, 10.396597774349695, 8.10005010513771, 6.708095654846824, 5.520673941769321, 7.156582814799177, 7.942571611139103, 5.542424494291016, 6.70571809826743, 11.53617320546384, 27.15210782326675, 42.42885297420549, 18.27071725476836, 22.896373801032993, 18.152396431679087, 21.164552026464214, 17.440088678400812, 15.005763477758713, 
17.077964105182666, 18.59845571058319, 19.359928119546083, 20.04227534752496, 13.631147739759, 18.073450422260702, 9.407714973556521, 7.796769526323449, 9.19506286100661, 7.871321214264781, 7.5194114614533305, 6.007747030941974, 6.788526360310606, 7.644075232490532, 5.571986497810234, 4.603353293038771, 6.754279299846348, 5.624505717816605, 4.8099379907757],\n [18.096884384268673, 15.852268884202472, 14.820231708231944, 13.404793188706666, 11.863210199777754, 7.694661740087927, 5.931148808535034, 5.228938306261172, 6.7454492540219295, 5.945019303172127, 5.87775329941431, 3.517399481381538, 3.756199634770436, 4.499328876122449, 4.011496404307967, 4.733335210173935, 4.54994524326696, 5.809436618238216, 6.367698263361558, 3.750443025910619, 4.204071366399012, 6.344404981260076, 6.60474038543796, 5.990079220058454, 11.314065196699868, 4.691830037992517, 5.840400961444706, 5.029154419599599, 5.0142081451196585, 6.91408684914281, 4.167978121065421, 4.517906981341582, 5.973459294711706, 3.5767218858993632, 3.967102173200744, 4.778601034646926, 4.583211873302772, 5.269344468529714, 4.763216941914363, 3.8157368572129027, 3.9834775359732384, 3.87197191498559, 7.2680622855409975, 10.385395273433573, 6.640497285959017, 4.518232734969506, 3.8589535789533196, 4.325578369207487, 7.476293157782777, 4.744509570916178, 4.231706827784663, 3.805742473206669, 4.93515728991834, 3.143645750614709, 5.1586056490990275, 5.816658579740211, 5.636231415423712, 4.210122346009886, 3.6918138452968536, 4.456972249664517, 3.9845661148244864, 4.745778835791205, 2.886223295868351, 3.289661344676216, 3.524332369880167, 3.5270284788846, 3.46832758438505, 6.676530871068425, 5.127497899232571, 4.062830931095155, 3.255554766106602, 3.5634763638333475, 3.368980753449615, 3.1962636086166807, 4.974701862043523, 4.630058242256361, 4.1394448537211535, 2.7101767666485106, 3.944898271475589, 4.537663971098052, 3.0836990911623023, 5.4277431164687595, 3.832278725643912, 3.4894392502579095, 5.854040962477892, 
3.281432132887723, 3.760011271667837, 3.5346529057723535, 2.7446648978663575, 5.5587590538969245, 6.228620976915677, 3.7590550686126, 4.565238628091609, 4.827229122739711, 4.746256031881321, 3.3507757501489026, 4.7756136655163965, 3.7630347471780476, 3.7757587618634725, 3.7823281612988375, 3.138600373583696, 3.710407989128023, 2.6935199661096316, 4.2459809708379055, 3.576222728379776, 3.408045488139582, 3.722069369628857, 3.8507836878618766, 3.2563343202234742, 3.7950058653028726, 6.5206441320189255, 4.990085263693601, 7.179241103386444, 4.080714906778325, 3.342218955621151, 3.2982538263143377, 3.2746407571872704, 4.963113795650163, 6.066857114116258, 4.535759844166912, 5.956300792997058, 3.133430129056542, 3.4510364762266144, 3.659110840439187, 5.662885755091636, 5.148103826806315, 3.011488721588434, 3.1586650433224706, 6.002260623384988, 4.652588094203826, 4.091063219813916, 4.964399240491144, 4.2827443135831045, 8.797798457672924, 3.069534148746489, 2.243098436799878, 3.110246183729729, 5.626840556042223, 4.036477703385252, 5.137496597801894, 3.4024575009743043, 4.2733181758801795, 3.597729338633107, 3.7060836733708737, 3.409974963702637, 4.199078678190699, 2.938648268623713, 3.270651121507956, 3.2454527858342903, 4.074156204042057, 3.548980922122498, 6.034481332708098, 6.134321025597675, 4.599942503638246, 3.5738856572781206, 3.643603119093392, 4.1736814222328915, 5.167053717585787, 5.822494981440665, 6.197206437960844, 4.607502181341255, 3.9134403549463825, 3.746264591925334, 3.492837833212506, 3.70664979867618, 3.4194570691394124, 3.942159235481587, 4.940252458793397, 4.3247083943776765, 4.370568976403512, 3.9128773218921817, 3.672192310567561, 3.1970679933515362, 3.4105158579103256, 2.9134274370757183, 3.735440975643731, 4.5333545741192784, 4.145376777698384, 3.0931338586410067, 4.0900640996299416, 4.259241640986883, 3.0887927637320343, 4.301803414880171, 4.7655404979933635, 6.650531787522625, 4.146085785966283, 4.394987284571805, 3.919450849083283, 
2.9180494733836664, 2.9041471906853364, 4.597096484065194, 3.402921097202326, 4.203173835461593, 2.739368203919339, 5.701386363179136, 4.541050052964571, 4.417940629263727, 3.5008618313911923, 6.369336738182466, 6.162898973518352],\n [17.2356698238484, 10.971693669469245, 9.6049377789082, 10.780235784007253, 8.002016555780154, 9.380321099867881, 7.602570483204206, 4.7963729276923335, 6.354193621239068, 2.9089819757985977, 4.176354019592642, 6.112580307700947, 5.9649669631631905, 4.168438280087776, 3.9502765869227847, 6.838152544991016, 10.810437891709498, 6.94316394312289, 5.01649837618578, 3.585355548685841, 2.865384194744195, 5.381160754470928, 4.082351637525652, 4.773297792395346, 3.344957199780674, 5.903650130498551, 5.195763103037783, 4.6924063512270475, 4.533484113574728, 6.189188944519186, 2.6245545532926977, 3.788949680729777, 5.434133603671723, 6.677422845789866, 6.4322724344443065, 4.917760112066586, 3.8337946821012467, 4.7192746362983655, 4.103036782483408, 4.300315692558614, 3.634412560011968, 3.519336056932341, 3.3556746267163753, 4.201161022726155, 4.948717203324431, 3.625367379860422, 4.5400103795517674, 2.9781947880078388, 3.4403807643458593, 3.9369573517019747, 3.9802601576271117, 3.5523173096831804, 3.9067241520019813, 2.452714449403413, 3.815629863482705, 4.489633632834001, 6.525330610619972, 2.4558721676505244, 3.5605417793209257, 5.042611137059414, 4.606043694290479, 5.45010540967198, 2.969778587691943, 4.086419594773755, 6.028636516489212, 4.308818607723051, 3.852494201755873, 4.10697368815786, 7.4574245360559415, 4.120130670132272, 3.974933412831884, 4.644607874221422, 3.016526926501615, 3.6446178926084833, 2.704470531928113, 4.14949447794495, 2.434381546737894, 5.61068856844978, 9.196493149981414, 6.38538194634459, 3.8278902987109795, 8.607089024917675, 5.2437623990578635, 6.428774633896829, 4.248338676788096, 5.153048647388701, 5.05277581917047, 3.41965780610391, 3.8994416840405925, 4.261081880259884, 3.793473219716548, 3.1686425760080557, 
4.092857736159728, 3.4903290666623654, 5.202650169618569, 4.533683519155149, 3.4688529166178705, 4.346754631722296, 4.595676321752801, 5.295831337364197, 4.263051694243957, 3.993526457539066, 2.930264808334415, 3.8374493993743495, 3.480038197119573, 3.597996230901235, 5.2725707601304475, 7.039012943949839, 3.3712144359488905, 3.6410601261910367, 7.576518644314536, 3.1993085525107814, 4.870307432228216, 2.7213133388286854, 5.52767451960594, 4.117122444006798, 3.919677079432237, 5.504311447087285, 6.9082589511273, 7.540570713425053, 7.061386268124986, 5.799680383181183, 4.555211445202609, 4.292194035552475, 4.606548985909631, 2.9180877153016502, 3.968143943164642, 4.188616171483531, 5.3557214431785685, 6.196053155032347, 4.306781693126012, 4.343186527430035, 2.6914318030415028, 3.9870470813682424, 6.595813378216199, 7.724954455442298, 2.752123185339803, 3.405114091220051, 4.157256276387149, 3.4065194286913347, 7.488695793710785, 4.489703064292184, 4.190743641781118, 6.4379271381944605, 6.268783514498618, 5.395075866965959, 4.185983828167981, 5.905791506160912, 6.038963006236642, 7.318006815592684, 6.547362736545181, 6.540147597888429, 2.276379542632732, 3.226636754020138, 3.8051598429937243, 5.035579159322249, 3.5670589941316275, 3.7894387039535737, 3.2394153304302367, 3.7131157979316813, 2.969285849587599, 8.31486213616027, 6.38404529816691, 4.546834335183441, 5.276601269779959, 2.067079524683459, 4.024292139744206, 4.247082520590729, 5.069612401721668, 3.562362688248546, 3.640089930522303, 5.290198597989332, 4.38752798132655, 4.592693235495391, 3.4653514955931364, 8.680030611729984, 7.983689652655414, 5.324285890482596, 3.4071200795055265, 5.360623297048809, 5.68047271372421, 5.078371483122285, 3.1134273541052364, 4.519048569218754, 4.149902188972754, 3.186788637991477, 3.303689546015046, 4.879545833282899, 4.342066666198046, 4.994572440367204, 7.435690661949689, 14.86210140091668, 7.776897930814217, 7.562241705243096, 5.007010442918624, 3.118690012028835, 
5.517503315042275, 4.687528825021606, 2.3334290546491263, 3.577891042612885],\n [19.010290958942527, 18.730038127485884, 17.616268544376375, 16.83897376497371, 17.36103545751292, 15.568042453265454, 15.253384074114525, 13.625400838802955, 12.685374400834284, 12.797703119584295, 11.640579515672068, 12.804955246330437, 11.508459650337068, 12.20864530049625, 10.528520035583075, 11.02647224394073, 11.851039890806412, 11.472365423501323, 11.065694472754236, 10.735429351415226, 10.275485770806911, 11.272919358964739, 9.926137276090248, 10.974216152587998, 9.955776041071225, 11.188695093573552, 11.017249804540038, 10.32701435021692, 9.405406897752538, 8.849720507106774, 9.643207269316228, 10.415227035077873, 10.322094934620802, 10.001781851619727, 8.841724420626816, 9.367026936515538, 8.75563456578973, 7.864967842640772, 8.393092042611352, 7.678278749995313, 5.201931367824358, 5.794735579768396, 6.2876916661327495, 4.4774942592271945, 4.738521802693986, 4.437412805135243, 5.3070166575360895, 4.68109946187927, 3.977704426685706, 4.027818343108029, 5.1447338679222385, 4.485600130301475, 4.7990732433790555, 5.183090269651613, 4.397823826841257, 7.446191426417248, 3.8171634615327066, 8.369155085096086, 4.0025853903210855, 3.9885300077123964, 7.96300631495318, 7.937333170176433, 4.676707803326354, 5.532431376792298, 5.860372287622956, 4.150256490784685, 3.412193172599803, 6.795559923156602, 3.1807322361063135, 4.5306619402025365, 5.7030353116258485, 5.706714374945495, 3.87499210889695, 4.751600939840117, 3.6244369029042987, 5.010002090621885, 4.369094250216802, 4.300693271324623, 3.598790922607551, 3.919025553277423, 5.305480513535443, 4.115307583537069, 4.879886874326567, 5.844761644773662, 6.5418197333359025, 6.0336368397182225, 7.214327567584377, 3.5460799342628393, 4.814589770098439, 3.8668663531570897, 3.730560143760115, 4.9975102964039095, 4.509883905729241, 6.031463019182764, 5.881775251321356, 5.280724467807737, 4.049300435115646, 6.554044135938854, 4.176924372625198, 
3.5800627759011876, 6.495806544273655, 5.945591066898653, 4.940332918780744, 6.22922350748202, 5.409785933694561, 6.149656900597772, 6.216415740061427, 4.427064342874765, 5.149458874427763, 5.743145249693337, 6.420826110319528, 5.064093458506964, 5.152544006627681, 4.004655454214687, 7.8123199151524, 3.6056895314227946, 4.95000314513724, 4.633469289845107, 5.889134300264316, 4.179650782566888, 5.951590180006326, 5.33356603236385, 4.9012754133525185, 3.6853215867948466, 4.887111141914829, 4.793375031982881, 3.354235772559694, 5.198507663636922, 9.205911653992818, 5.182682629313467, 6.244276516050074, 4.517695399463262, 4.856290586248337, 5.533877886712027, 4.562017381777088, 4.329110233802339, 3.6870228557908926, 5.4936383448476445, 4.870822311233726, 7.011047221748219, 4.426752863388371, 4.143057403162561, 4.223270912401913, 5.463364534396971, 5.234072162676923, 4.613308154463963, 4.930090903202137, 4.280930337642543, 5.107603688450969, 5.207244944300474, 4.690731166435635, 7.141182358882547, 3.9764135013537625, 3.043585685101406, 3.845272782126034, 4.484627880871906, 5.73747959538469, 7.481699771137542, 5.202520210148081, 3.964043443314169, 3.2811593098562284, 4.726361627278504, 3.488942321106898, 4.530234432312607, 7.052913870751452, 5.773942710862269, 4.215828262589515, 4.293836774736912, 7.604106905694769, 8.353470954231236, 3.9312201846622243, 3.5272559379803363, 7.9316234356421615, 4.134375250290148, 5.456331076620021, 6.36598403568583, 6.835728829439136, 6.138223587722574, 4.240010511195559, 4.076448088234501, 6.555305340900528, 6.486112230660281, 3.570344290089983, 7.1324660945205505, 9.907399818713007, 6.1499693327365454, 6.370195223291997, 4.103136417750757, 4.356727387171187, 3.67681354833498, 4.361396377968585, 4.42022654549385, 4.415107271239581, 2.8034684028420247, 4.831994677751856, 7.220881426782241, 4.534041806976291, 4.360407153607694, 4.375782716785823, 5.148299283324542]])\nQCNN_ae8_MNIST = np.array([[23.18267600665, 20.392476422551617, 
21.28175967731102, 20.203083882159287, 17.612929675550017, 13.742868656222521, 12.508178039516189, 13.326838229641536, 22.261666436091375, 15.034213615189383, 14.498692999598235, 11.221002750223137, 12.940080538778712, 10.0257072737871, 12.207890103676961, 13.215579259672262, 13.613664844250005, 21.496753350135812, 10.568699024284227, 16.287852567384615, 14.430524948027251, 13.441428029446563, 12.81380575004739, 8.851780454904826, 11.675331805598754, 11.064888740045241, 9.670298569802773, 9.157037231950081, 9.39463600510954, 9.456214163069436, 9.12549429055207, 7.894543069536124, 9.992310930002166, 7.622405506118381, 6.655850914433363, 15.046534716041355, 28.182802757069798, 18.535295355081768, 15.745861861646798, 16.64898298798962, 12.505835340014537, 9.44405351054995, 8.189249490884379, 10.210137131791557, 11.489942954088951, 11.214841666099442, 11.07707786604872, 10.93398675389301, 11.220873597843983, 10.05012344222707, 9.573020404637688, 8.646289136119984, 9.625967092210992, 9.371913029482577, 9.333795346886603, 8.272496374801493, 8.285405488313504, 10.47815497312443, 8.300864710865664, 9.442700929752492, 7.048406225289616, 8.640886587483582, 9.797012343720604, 8.683336297648628, 9.58087070236722, 7.227598625422271, 6.48574392350553, 6.2623616810897, 8.07660630754952, 8.86277513597693, 7.854938763717949, 9.236053356973963, 6.365053515369569, 6.904043657779864, 7.931437786170751, 8.196170669130543, 7.37993279027155, 7.096851524021393, 10.343885907602717, 8.643446836268843, 8.147146986278031, 7.672548417528906, 8.608842801204018, 6.302634360681958, 7.687957789709564, 10.408871321544861, 7.9587426565730475, 8.020656330065494, 7.526301881197238, 10.7933645543224, 9.486666857399284, 8.747691846571891, 9.231388610454514, 8.617609653022157, 8.503247560822441, 9.295280067366104, 10.03551113905038, 8.764433886860443, 7.523769634548136, 9.345172764597287, 8.094791599098912, 8.971654481341357, 8.169950227859287, 8.45120602973494, 8.612867741046921, 9.572334662730496, 
6.7556740200213605, 7.5734552664238555, 8.461789037230393, 8.09642007231893, 7.354000080104082, 7.642148572617742, 12.45198437794315, 6.736406252812647, 7.808795179145102, 8.822792007508845, 7.9585381853900214, 9.482189976334459, 7.09245432477452, 7.958172475331851, 9.357514581854373, 8.390545674394616, 10.116557985721348, 9.899538745200926, 9.545681152756472, 8.098815711918633, 8.267092687339852, 8.433743592949924, 12.498734609021616, 7.259533646275991, 7.020865261445277, 8.140033481585851, 7.4134501898186365, 9.154790275593747, 14.259737010639833, 7.71279748659041, 8.232476285079118, 7.320109636149128, 7.108669234938818, 9.605824139430041, 6.342952011232884, 9.968858052095875, 13.262722168930074, 9.227395915465717, 9.9888181255349, 9.486721384140075, 17.5591933872638, 17.08354773460404, 15.060475476117178, 14.083609428242925, 14.54361655680102, 13.711633978707614, 13.484642802154543, 13.035876988388214, 14.45118530285129, 13.749874734140548, 13.161678127310534, 14.643781273319693, 13.15678894588389, 13.930132824898536, 13.905099084673168, 12.174636554097049, 11.682524340576865, 12.678514469536353, 11.938135627395269, 13.147709123507825, 11.078376988423463, 13.05327581884246, 11.702080851470903, 10.822219692321745, 9.916050079748224, 12.426483562401625, 11.342238653320106, 10.979073153109677, 10.852834827474, 12.155845036398748, 9.762967240416835, 8.688959912386386, 11.602923349100934, 9.453525957149225, 9.811616163888912, 9.750145354743548, 10.231583883405792, 8.539225487315635, 8.909800047973485, 8.389112608329619, 9.347554244815665, 9.748394955719187, 9.028179219193701, 9.459303508996754, 7.113110337681364, 10.213431673550348, 9.528799241996179, 9.759130458497971, 12.422518216337027, 8.794294385709339, 8.957399532083912, 8.385904340109, 8.31251234552102, 7.17557650792306],\n [15.938880514830075, 14.991401084813964, 13.81017628503558, 12.763812647959726, 11.438810531400252, 16.876529614705017, 20.025942461879527, 17.825328259997224, 13.712847540452136, 
13.480270647636909, 12.934724989801046, 16.638995910605228, 15.251844900283656, 10.87221887477806, 12.773526679441996, 11.528679610911821, 10.713872934849988, 12.042270813348347, 7.163360715275822, 7.395814418184559, 6.828783909808503, 9.360517163932384, 8.22468467798909, 9.821752282972438, 6.307299669040121, 5.907386959949522, 9.782921676236052, 8.649200235348378, 7.960522462144845, 8.933905557913754, 6.4645652845751655, 6.212239619123929, 6.294193033887122, 6.625772779396308, 8.077614081198993, 7.491261414250014, 6.8055295246733865, 5.987775502570335, 8.580140195864733, 6.599648893213433, 5.791924559742866, 6.5589596287443115, 6.852812126886809, 5.899934211261467, 6.62086490737928, 7.245021211551905, 7.415957377661234, 6.154518756988503, 6.230092963303432, 6.873285222979966, 7.660487383766843, 6.723669157662963, 6.547120484668986, 6.582461630033955, 6.93437114563484, 5.797043844490763, 6.825242456633041, 6.800685940932284, 6.026213058150696, 6.137488828274454, 6.955606699941617, 6.07055752369624, 5.165162605035756, 7.636780400717115, 7.362527465419372, 6.344978895093726, 5.384533831390672, 6.89917280598161, 7.054030718341928, 7.7225462519468, 6.099856715397306, 7.113773233606544, 6.456803383288426, 5.870653698755338, 8.946996605868144, 5.916706427245916, 4.826051657034723, 7.720566842644956, 6.728676867466559, 5.929573887125175, 5.440426134534419, 7.136290839629472, 5.297770324568082, 6.146789860803708, 4.431220211036687, 6.383274975916391, 6.609376050072126, 6.240921622932172, 5.928381167326789, 5.811952346713676, 6.915259185306188, 5.361487394656576, 6.944239668414154, 8.70777543998188, 6.899175520081172, 8.119380923751573, 6.100982909475291, 6.730771903219656, 5.140537629795169, 6.865564474008672, 6.903682878401687, 6.196093814087787, 7.015476115029013, 6.283822412627386, 6.054101786469657, 6.360401863351702, 7.033834726205582, 5.820465740789678, 7.934772984169908, 6.382017317913232, 5.395133353915485, 5.633830745335206, 6.628086304646717, 9.261098938106665, 
6.338252163833986, 7.048322777225821, 8.352710543811556, 5.809536971010632, 5.709047704926095, 6.801709938792366, 6.256424712866433, 7.513007182202549, 7.849223725191124, 6.605825443034447, 7.22381416284202, 6.8170147855197145, 7.89437932310966, 5.116792488867332, 4.885793215197109, 7.292719336611, 6.629679846610535, 6.6899334427744535, 6.708438498287829, 7.227872609408697, 6.399261478998315, 7.0427799412950245, 4.971704135624181, 6.997206522470702, 7.15690248640464, 7.344881934976968, 6.765697035103642, 6.713332694818636, 6.550206557776439, 7.084112443685265, 6.980229141202967, 6.306132218105991, 6.497451202546453, 6.252581725289929, 6.732075015558131, 7.1801492723150355, 5.942420936831842, 6.153250638301356, 8.145662569233757, 7.028135809495407, 5.794286209228037, 7.261030028892823, 6.391711828868476, 5.634405714375209, 7.3552468987830375, 5.115482313738568, 7.204823694521418, 6.092445313441537, 5.891311220130354, 6.605475043945204, 6.463997692543425, 5.4644948856351085, 7.198055810422648, 7.1919527511575705, 5.327064183532006, 7.215092153917559, 5.343311422669971, 7.35802944735559, 4.78249637612121, 6.049968243659916, 6.488266073627855, 7.434803488764515, 8.385162525581686, 7.533671853870269, 6.357886937134184, 8.418212690482545, 6.2841156660981765, 6.093717373210086, 8.09257340515589, 6.628607365383413, 7.654644770015338, 6.545007117182029, 5.298292211989288, 7.397927049130434, 7.805698859377954, 6.468726187232868, 6.866509422307112, 5.904789941360449, 7.327015039844593, 7.421684979058245, 6.262596326964659, 6.747209736228736, 6.248418027901544, 6.2439232243300875, 8.32399560109846, 6.492478177430712],\n [18.75414610096924, 19.068292518474937, 15.66139508849935, 16.230548901468882, 15.742110711856133, 15.3540746601382, 14.80516767976884, 15.050056980002378, 15.470645023112743, 14.174265206709267, 13.82701665246163, 13.63352034766849, 12.987001686986442, 11.949566886041767, 15.163882062364264, 17.4630828276138, 13.175315792381374, 14.315617822763329, 
11.569348055484502, 11.056304047453786, 9.531272038988572, 9.925092455143254, 11.554191359918484, 8.639932393156611, 15.263236800546698, 9.493197345211566, 9.717069133608998, 10.617720088432643, 11.494790052586072, 9.813047953872701, 10.338021687214741, 8.225974662862301, 9.768913158105516, 9.622203260635928, 10.76694463281415, 10.475100664696537, 10.11941509948434, 13.087550824323529, 12.608080238967712, 10.663603078964398, 10.719824480192795, 11.477706052539055, 9.885978028125933, 12.300786960880687, 13.79612334822176, 8.768715849248501, 10.73495943493744, 9.252386851673982, 10.459081188781484, 10.051240025309879, 12.246775505321116, 11.008616626210657, 12.34870458787469, 10.726777762970228, 11.308175519130113, 10.068190806703585, 9.267031381345703, 9.619692917870365, 10.09773072485591, 8.333402915211904, 8.878155781816282, 10.89233893234685, 9.769739016433649, 8.341360911887852, 9.596213450522834, 9.216832252360046, 9.204649394704306, 9.283076403274444, 15.257248085204974, 14.101265208953672, 12.04634079222114, 11.040318590639549, 10.433279520725923, 10.586733373394564, 9.44697481397019, 9.82207797387809, 9.456558715066006, 10.569379289963543, 9.199038455058867, 9.479612056824285, 8.975102484475459, 9.150916159674889, 14.638061973045371, 9.421761790871665, 9.067946833846937, 9.65817219525311, 8.692538713318719, 9.602579818565172, 8.900924687576945, 11.171404573057716, 8.234053487690328, 11.1226640760377, 15.812704389378847, 9.51639443574069, 9.741830707633387, 9.85994941967016, 9.858595473978841, 8.948270953717458, 8.474526474111178, 8.359536910610153, 14.52255524548113, 11.868239630695136, 9.439759689812002, 9.427225626836261, 10.406295352953471, 13.200437824936115, 9.206802159054687, 10.195391402682333, 9.501698837345561, 7.989645467023102, 10.310726310871525, 13.28235638249207, 9.096290047843102, 9.639615834703879, 8.580822732568224, 9.612931651704754, 10.987080473380097, 8.967282647060046, 8.224929487479809, 9.263587032069754, 8.834742249445243, 
8.306201921799289, 9.08238285389518, 8.43791576163126, 12.44714071480579, 10.1753701759404, 10.642671425309677, 9.89858560752212, 9.373911976171534, 8.73912986103552, 7.920378003071168, 10.360968694709053, 9.588534988294647, 9.0521977539869, 8.789562450468456, 8.21880502068823, 9.170718871530822, 10.243478751370676, 14.239523861578489, 8.975940299315402, 10.196452908693333, 8.466861795520789, 8.84532353088555, 9.342324928390692, 10.818231394459083, 8.675326182868146, 13.264409633111178, 9.520486924687615, 12.859307638841932, 12.493925512296135, 8.892695572851453, 9.394721165869498, 9.240247333665359, 9.515454261688046, 8.684812639064232, 8.084541845911176, 7.216256185971286, 12.987809164909525, 19.725432315065287, 11.867851073805513, 10.72240323327543, 10.560391831561866, 9.625074521998522, 11.907651627921092, 9.385136795152908, 10.015623408980812, 9.861728215874685, 9.473225996430019, 8.8467016530699, 8.213458336115561, 10.239645944590215, 7.922022240600872, 8.977304229814102, 9.064169247599827, 13.283380716740655, 17.35531149589267, 10.673990374066747, 11.022398881916406, 10.602478430177463, 9.744184512202777, 8.38398006386225, 8.765973040115314, 11.475785426675277, 9.783942916998916, 8.598632534753682, 8.384182035430497, 8.552012387573338, 8.139269581192917, 8.896696339576236, 9.70584896962792, 9.886079519150591, 10.414159282398465, 10.300816090265073, 10.744983652440876, 8.667060664723746, 10.305844448543343, 14.808439550493523, 8.370259865268832, 7.20300739674415, 9.43881488302783],\n [17.067327314938755, 15.223491697486025, 14.027622629290454, 13.37936661134171, 12.399379281475223, 13.81315320694874, 11.853353575638845, 10.665689095204236, 9.141813600298871, 9.11036844550804, 7.699363608648492, 10.673920655296008, 9.16228964485178, 9.183189106526623, 12.918478599403295, 8.105571011402109, 8.244369202650233, 8.846027306887548, 8.767389889900352, 8.344860487514312, 4.615518857773165, 5.862962477431306, 5.71486143648991, 5.4438544384837275, 5.962333607510951, 
6.328849942274706, 6.775024086024321, 6.872809432634791, 5.464599979951314, 7.362918014409544, 7.244046360780604, 5.290751902348532, 5.675842484641401, 5.596050115858611, 6.938541693383213, 6.769210229246913, 6.627088073892648, 6.637864159619183, 7.0177629661168615, 5.548932649865592, 5.331669257790846, 7.7403841239962095, 6.4426985225349425, 4.766462975206636, 6.66907351452243, 6.39404987450754, 6.527854578718254, 6.986365586940338, 7.206546835687935, 6.2543478829347965, 6.355601852892827, 5.439964391274556, 6.197049840860625, 8.057313645854915, 8.743547913595368, 8.279092824207666, 9.163281616590641, 8.212492312849022, 8.206892115882203, 5.924074580542415, 7.499429427183627, 6.217589640440942, 5.921350564231541, 6.484928417118682, 6.3581316269949655, 8.056686420788356, 6.72972847094589, 6.176468555896607, 5.306528837724751, 5.6174840415402585, 4.678756880453675, 6.428272448819264, 5.884081608073028, 5.344642000110709, 6.620914758450338, 5.440127818130964, 6.272307070340623, 6.440962025183107, 5.781951934263114, 6.864694396009275, 5.145147242727912, 6.170320165860639, 7.917489551227105, 8.472180233229459, 8.381006549405507, 11.446400945516478, 10.128965122098943, 6.65290718803895, 7.385699512069056, 5.7177805727295485, 5.710048723820802, 4.660576569713477, 6.433284088197848, 6.667661562406891, 6.77816216249128, 6.213187952962437, 7.59722957685512, 5.825892399041465, 6.213922371787718, 5.816461098802296, 16.56945401344998, 18.331532859976345, 7.092297677618126, 7.231535391776812, 7.360122925707431, 7.048071860722907, 6.934432772120863, 5.952118407972448, 8.771149572615936, 6.00920323698242, 7.0604178440449195, 5.817053229630411, 6.159251481037876, 5.630876169730633, 6.213017976427891, 6.7788237125626205, 4.848927616434685, 5.688126857142057, 5.36035345483516, 5.679599782571896, 7.095246897321908, 8.998877959348667, 7.48487411127996, 5.426944680416116, 5.817344573691418, 6.7734972178962485, 8.534615618101778, 7.53924035883349, 5.130224861161187, 6.452360835679216, 
6.107101740324019, 6.587025074588629, 7.15360627978338, 6.305604796773872, 5.787682041579271, 6.513001023394777, 5.615687953553938, 5.535773071189084, 6.083786364393193, 6.2650034661046385, 7.348110325695597, 7.9684480604356835, 4.9937231721208475, 6.281168351550957, 6.698566977919708, 5.927635368059436, 6.272405890305827, 6.8195048159650575, 6.790358560416836, 6.98861952199057, 8.546097430532004, 9.654370028539013, 4.776777828097568, 10.00658408232908, 8.85600513487725, 6.343836673397636, 6.276985619213351, 5.600867426416172, 5.653509583750474, 6.100543082614651, 5.551456923827717, 8.067651539494765, 7.570494939397729, 6.955446307996702, 7.938609091853545, 6.849092192747216, 5.173809601191373, 5.821916922181909, 8.25858513318918, 7.941894814048852, 7.872372393503557, 7.179556990007868, 6.724631026716992, 7.761468670285675, 9.023706255867275, 6.002735322143455, 5.679595147130187, 5.825080400076846, 5.617605159211467, 7.44784976423344, 5.3625041340471125, 6.791366562866276, 11.522871762243012, 7.72192087631105, 6.141478446628951, 8.985783310522756, 5.448731995331568, 5.8756396741226355, 6.614361989408393, 7.386841544529035, 6.418912902734918, 6.135156951384522, 6.85347569515587, 6.5407021867201856, 7.3611516061631646, 5.2873265137870815, 5.980428935875487, 7.615621264155301, 4.433082113329641, 7.666404040326761],\n [15.190545015724192, 14.351877240180198, 13.077091798472159, 14.241597535619052, 12.604898670362294, 11.441594817661452, 11.240808819518207, 10.252463802868364, 10.656910584567504, 11.280247289981057, 8.876988151620397, 10.095604410303778, 9.527978645379049, 11.306547664935918, 9.484964603456817, 10.692076317849764, 8.674833884982625, 8.427234735968176, 6.94557483161775, 8.520523534946374, 9.6144076328958, 10.091015817489957, 7.626160333529242, 8.45000236426561, 10.104899976590248, 9.485442757711573, 6.8599222983820916, 8.343071533381696, 8.151234921584866, 8.006398692911821, 7.966852974656033, 9.36002760899374, 10.081728269421236, 7.917602046990228, 
6.722622423736359, 13.482895813436022, 9.687362914809931, 10.014294449664625, 7.978618293858056, 7.5694280329825165, 7.085746584348393, 10.610414263785469, 7.116191086509762, 7.262224190574126, 7.348281974384062, 6.090615005927018, 7.838324401916773, 7.73983219194028, 8.777953270861394, 7.712314765669135, 8.099757939641263, 8.554208563314454, 6.466983600965645, 9.938254540761237, 6.842950773584522, 6.275024500692803, 7.888876970256389, 7.174541959311943, 6.912848588787202, 7.099472589996629, 7.141910680041766, 7.913962403151884, 8.700695999629847, 5.903237753882748, 8.02875550234571, 5.535011218790221, 9.16482600279353, 7.264300857885595, 7.487443555751145, 6.310703381348508, 7.30339729608686, 6.903574401706479, 6.462165774413731, 5.783511820480559, 7.438724749677562, 6.901880371253028, 8.17919838264866, 7.395852305668009, 8.09230937776132, 6.821887489839783, 7.933163482129629, 7.198130792052883, 7.674930992341313, 6.787919314742968, 7.407699083040836, 5.2375691268635505, 8.366390559269117, 5.567895611210296, 6.136257447226075, 5.0165375945337045, 7.259113080864518, 5.758549930994977, 4.703955269885598, 7.512929325206505, 6.508384188285521, 7.2556124804562945, 6.494409519188076, 6.689492846855044, 7.039298514100954, 7.341484557124106, 5.267436326726732, 4.991274192290107, 7.372120237921833, 6.17017824324608, 5.300353466988351, 5.6231784428904135, 4.8534150442892114, 6.336494230834849, 6.15810673487119, 6.863707883942724, 7.1487327588207705, 5.814925835479382, 5.555383712705327, 6.472789506842621, 6.1550738048818285, 6.73493090998487, 5.525116578984747, 8.67327707262502, 8.823021288494472, 7.257686021206064, 6.491265499121888, 5.907722702033924, 6.788603120897539, 5.9885736027717265, 6.866380575378689, 6.728582515345494, 9.03574314234789, 7.437229828428383, 10.594735036735738, 8.992672140001565, 6.99576596941868, 5.812603289316051, 6.424770889945808, 8.381746185447687, 9.314846468380056, 6.36339808180491, 7.871451089991449, 7.111323225823814, 6.513187363582582, 
7.566733224604883, 5.587682860316956, 6.947521195632962, 7.946004964322261, 6.3056326414590504, 6.702659146806399, 6.981853203648017, 5.770555281645235, 4.534998715534817, 12.073358599786319, 9.451362994851603, 7.397932269228771, 7.124411895172527, 6.570603012774465, 5.565787910703532, 9.76961038959421, 8.858312962068597, 6.004321137116473, 5.346415010506244, 7.322542401124342, 6.392520252270517, 5.910753071883756, 5.403670381484486, 4.647042946689148, 7.383617936328697, 6.837125091789173, 7.035074367220533, 5.909687089577799, 5.313308436805087, 4.316413414442388, 4.421717890091605, 5.408573568370816, 4.886905592487587, 6.209888910249045, 5.231839619869862, 7.636243859108162, 6.817364270234863, 5.698509032643113, 6.402456782479963, 6.988983083255046, 5.585640913260615, 6.97020214095025, 5.622943369091275, 6.844486251650798, 6.49396141949346, 6.603516229234012, 5.307587135304216, 5.433855007121976, 5.893232341799515, 5.775371927476902, 5.441041721520848, 6.521837379999517, 6.325523246233136, 5.656928265634981, 5.690695351675709, 7.569158486987114, 5.7615020909617884, 7.667611197062259, 7.823864440349442, 7.396286182820629, 6.090021272434316]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_ae8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_ae8_MNIST = np.array([[23.18267600665, 20.392476422551617, 21.28175967731102, 20.203083882159287, 17.612929675550017, 13.742868656222521, 12.508178039516189, 13.326838229641536, 22.261666436091375, 15.034213615189383, 14.498692999598235, 11.221002750223137, 12.940080538778712, 10.0257072737871, 12.207890103676961, 13.215579259672262, 13.613664844250005, 21.496753350135812, 10.568699024284227, 16.287852567384615, 14.430524948027251, 
13.441428029446563, 12.81380575004739, 8.851780454904826, 11.675331805598754, 11.064888740045241, 9.670298569802773, 9.157037231950081, 9.39463600510954, 9.456214163069436, 9.12549429055207, 7.894543069536124, 9.992310930002166, 7.622405506118381, 6.655850914433363, 15.046534716041355, 28.182802757069798, 18.535295355081768, 15.745861861646798, 16.64898298798962, 12.505835340014537, 9.44405351054995, 8.189249490884379, 10.210137131791557, 11.489942954088951, 11.214841666099442, 11.07707786604872, 10.93398675389301, 11.220873597843983, 10.05012344222707, 9.573020404637688, 8.646289136119984, 9.625967092210992, 9.371913029482577, 9.333795346886603, 8.272496374801493, 8.285405488313504, 10.47815497312443, 8.300864710865664, 9.442700929752492, 7.048406225289616, 8.640886587483582, 9.797012343720604, 8.683336297648628, 9.58087070236722, 7.227598625422271, 6.48574392350553, 6.2623616810897, 8.07660630754952, 8.86277513597693, 7.854938763717949, 9.236053356973963, 6.365053515369569, 6.904043657779864, 7.931437786170751, 8.196170669130543, 7.37993279027155, 7.096851524021393, 10.343885907602717, 8.643446836268843, 8.147146986278031, 7.672548417528906, 8.608842801204018, 6.302634360681958, 7.687957789709564, 10.408871321544861, 7.9587426565730475, 8.020656330065494, 7.526301881197238, 10.7933645543224, 9.486666857399284, 8.747691846571891, 9.231388610454514, 8.617609653022157, 8.503247560822441, 9.295280067366104, 10.03551113905038, 8.764433886860443, 7.523769634548136, 9.345172764597287, 8.094791599098912, 8.971654481341357, 8.169950227859287, 8.45120602973494, 8.612867741046921, 9.572334662730496, 6.7556740200213605, 7.5734552664238555, 8.461789037230393, 8.09642007231893, 7.354000080104082, 7.642148572617742, 12.45198437794315, 6.736406252812647, 7.808795179145102, 8.822792007508845, 7.9585381853900214, 9.482189976334459, 7.09245432477452, 7.958172475331851, 9.357514581854373, 8.390545674394616, 10.116557985721348, 9.899538745200926, 9.545681152756472, 8.098815711918633, 
8.267092687339852, 8.433743592949924, 12.498734609021616, 7.259533646275991, 7.020865261445277, 8.140033481585851, 7.4134501898186365, 9.154790275593747, 14.259737010639833, 7.71279748659041, 8.232476285079118, 7.320109636149128, 7.108669234938818, 9.605824139430041, 6.342952011232884, 9.968858052095875, 13.262722168930074, 9.227395915465717, 9.9888181255349, 9.486721384140075, 17.5591933872638, 17.08354773460404, 15.060475476117178, 14.083609428242925, 14.54361655680102, 13.711633978707614, 13.484642802154543, 13.035876988388214, 14.45118530285129, 13.749874734140548, 13.161678127310534, 14.643781273319693, 13.15678894588389, 13.930132824898536, 13.905099084673168, 12.174636554097049, 11.682524340576865, 12.678514469536353, 11.938135627395269, 13.147709123507825, 11.078376988423463, 13.05327581884246, 11.702080851470903, 10.822219692321745, 9.916050079748224, 12.426483562401625, 11.342238653320106, 10.979073153109677, 10.852834827474, 12.155845036398748, 9.762967240416835, 8.688959912386386, 11.602923349100934, 9.453525957149225, 9.811616163888912, 9.750145354743548, 10.231583883405792, 8.539225487315635, 8.909800047973485, 8.389112608329619, 9.347554244815665, 9.748394955719187, 9.028179219193701, 9.459303508996754, 7.113110337681364, 10.213431673550348, 9.528799241996179, 9.759130458497971, 12.422518216337027, 8.794294385709339, 8.957399532083912, 8.385904340109, 8.31251234552102, 7.17557650792306],\n [15.938880514830075, 14.991401084813964, 13.81017628503558, 12.763812647959726, 11.438810531400252, 16.876529614705017, 20.025942461879527, 17.825328259997224, 13.712847540452136, 13.480270647636909, 12.934724989801046, 16.638995910605228, 15.251844900283656, 10.87221887477806, 12.773526679441996, 11.528679610911821, 10.713872934849988, 12.042270813348347, 7.163360715275822, 7.395814418184559, 6.828783909808503, 9.360517163932384, 8.22468467798909, 9.821752282972438, 6.307299669040121, 5.907386959949522, 9.782921676236052, 8.649200235348378, 7.960522462144845, 
8.933905557913754, 6.4645652845751655, 6.212239619123929, 6.294193033887122, 6.625772779396308, 8.077614081198993, 7.491261414250014, 6.8055295246733865, 5.987775502570335, 8.580140195864733, 6.599648893213433, 5.791924559742866, 6.5589596287443115, 6.852812126886809, 5.899934211261467, 6.62086490737928, 7.245021211551905, 7.415957377661234, 6.154518756988503, 6.230092963303432, 6.873285222979966, 7.660487383766843, 6.723669157662963, 6.547120484668986, 6.582461630033955, 6.93437114563484, 5.797043844490763, 6.825242456633041, 6.800685940932284, 6.026213058150696, 6.137488828274454, 6.955606699941617, 6.07055752369624, 5.165162605035756, 7.636780400717115, 7.362527465419372, 6.344978895093726, 5.384533831390672, 6.89917280598161, 7.054030718341928, 7.7225462519468, 6.099856715397306, 7.113773233606544, 6.456803383288426, 5.870653698755338, 8.946996605868144, 5.916706427245916, 4.826051657034723, 7.720566842644956, 6.728676867466559, 5.929573887125175, 5.440426134534419, 7.136290839629472, 5.297770324568082, 6.146789860803708, 4.431220211036687, 6.383274975916391, 6.609376050072126, 6.240921622932172, 5.928381167326789, 5.811952346713676, 6.915259185306188, 5.361487394656576, 6.944239668414154, 8.70777543998188, 6.899175520081172, 8.119380923751573, 6.100982909475291, 6.730771903219656, 5.140537629795169, 6.865564474008672, 6.903682878401687, 6.196093814087787, 7.015476115029013, 6.283822412627386, 6.054101786469657, 6.360401863351702, 7.033834726205582, 5.820465740789678, 7.934772984169908, 6.382017317913232, 5.395133353915485, 5.633830745335206, 6.628086304646717, 9.261098938106665, 6.338252163833986, 7.048322777225821, 8.352710543811556, 5.809536971010632, 5.709047704926095, 6.801709938792366, 6.256424712866433, 7.513007182202549, 7.849223725191124, 6.605825443034447, 7.22381416284202, 6.8170147855197145, 7.89437932310966, 5.116792488867332, 4.885793215197109, 7.292719336611, 6.629679846610535, 6.6899334427744535, 6.708438498287829, 7.227872609408697, 
6.399261478998315, 7.0427799412950245, 4.971704135624181, 6.997206522470702, 7.15690248640464, 7.344881934976968, 6.765697035103642, 6.713332694818636, 6.550206557776439, 7.084112443685265, 6.980229141202967, 6.306132218105991, 6.497451202546453, 6.252581725289929, 6.732075015558131, 7.1801492723150355, 5.942420936831842, 6.153250638301356, 8.145662569233757, 7.028135809495407, 5.794286209228037, 7.261030028892823, 6.391711828868476, 5.634405714375209, 7.3552468987830375, 5.115482313738568, 7.204823694521418, 6.092445313441537, 5.891311220130354, 6.605475043945204, 6.463997692543425, 5.4644948856351085, 7.198055810422648, 7.1919527511575705, 5.327064183532006, 7.215092153917559, 5.343311422669971, 7.35802944735559, 4.78249637612121, 6.049968243659916, 6.488266073627855, 7.434803488764515, 8.385162525581686, 7.533671853870269, 6.357886937134184, 8.418212690482545, 6.2841156660981765, 6.093717373210086, 8.09257340515589, 6.628607365383413, 7.654644770015338, 6.545007117182029, 5.298292211989288, 7.397927049130434, 7.805698859377954, 6.468726187232868, 6.866509422307112, 5.904789941360449, 7.327015039844593, 7.421684979058245, 6.262596326964659, 6.747209736228736, 6.248418027901544, 6.2439232243300875, 8.32399560109846, 6.492478177430712],\n [18.75414610096924, 19.068292518474937, 15.66139508849935, 16.230548901468882, 15.742110711856133, 15.3540746601382, 14.80516767976884, 15.050056980002378, 15.470645023112743, 14.174265206709267, 13.82701665246163, 13.63352034766849, 12.987001686986442, 11.949566886041767, 15.163882062364264, 17.4630828276138, 13.175315792381374, 14.315617822763329, 11.569348055484502, 11.056304047453786, 9.531272038988572, 9.925092455143254, 11.554191359918484, 8.639932393156611, 15.263236800546698, 9.493197345211566, 9.717069133608998, 10.617720088432643, 11.494790052586072, 9.813047953872701, 10.338021687214741, 8.225974662862301, 9.768913158105516, 9.622203260635928, 10.76694463281415, 10.475100664696537, 10.11941509948434, 13.087550824323529, 
12.608080238967712, 10.663603078964398, 10.719824480192795, 11.477706052539055, 9.885978028125933, 12.300786960880687, 13.79612334822176, 8.768715849248501, 10.73495943493744, 9.252386851673982, 10.459081188781484, 10.051240025309879, 12.246775505321116, 11.008616626210657, 12.34870458787469, 10.726777762970228, 11.308175519130113, 10.068190806703585, 9.267031381345703, 9.619692917870365, 10.09773072485591, 8.333402915211904, 8.878155781816282, 10.89233893234685, 9.769739016433649, 8.341360911887852, 9.596213450522834, 9.216832252360046, 9.204649394704306, 9.283076403274444, 15.257248085204974, 14.101265208953672, 12.04634079222114, 11.040318590639549, 10.433279520725923, 10.586733373394564, 9.44697481397019, 9.82207797387809, 9.456558715066006, 10.569379289963543, 9.199038455058867, 9.479612056824285, 8.975102484475459, 9.150916159674889, 14.638061973045371, 9.421761790871665, 9.067946833846937, 9.65817219525311, 8.692538713318719, 9.602579818565172, 8.900924687576945, 11.171404573057716, 8.234053487690328, 11.1226640760377, 15.812704389378847, 9.51639443574069, 9.741830707633387, 9.85994941967016, 9.858595473978841, 8.948270953717458, 8.474526474111178, 8.359536910610153, 14.52255524548113, 11.868239630695136, 9.439759689812002, 9.427225626836261, 10.406295352953471, 13.200437824936115, 9.206802159054687, 10.195391402682333, 9.501698837345561, 7.989645467023102, 10.310726310871525, 13.28235638249207, 9.096290047843102, 9.639615834703879, 8.580822732568224, 9.612931651704754, 10.987080473380097, 8.967282647060046, 8.224929487479809, 9.263587032069754, 8.834742249445243, 8.306201921799289, 9.08238285389518, 8.43791576163126, 12.44714071480579, 10.1753701759404, 10.642671425309677, 9.89858560752212, 9.373911976171534, 8.73912986103552, 7.920378003071168, 10.360968694709053, 9.588534988294647, 9.0521977539869, 8.789562450468456, 8.21880502068823, 9.170718871530822, 10.243478751370676, 14.239523861578489, 8.975940299315402, 10.196452908693333, 8.466861795520789, 
8.84532353088555, 9.342324928390692, 10.818231394459083, 8.675326182868146, 13.264409633111178, 9.520486924687615, 12.859307638841932, 12.493925512296135, 8.892695572851453, 9.394721165869498, 9.240247333665359, 9.515454261688046, 8.684812639064232, 8.084541845911176, 7.216256185971286, 12.987809164909525, 19.725432315065287, 11.867851073805513, 10.72240323327543, 10.560391831561866, 9.625074521998522, 11.907651627921092, 9.385136795152908, 10.015623408980812, 9.861728215874685, 9.473225996430019, 8.8467016530699, 8.213458336115561, 10.239645944590215, 7.922022240600872, 8.977304229814102, 9.064169247599827, 13.283380716740655, 17.35531149589267, 10.673990374066747, 11.022398881916406, 10.602478430177463, 9.744184512202777, 8.38398006386225, 8.765973040115314, 11.475785426675277, 9.783942916998916, 8.598632534753682, 8.384182035430497, 8.552012387573338, 8.139269581192917, 8.896696339576236, 9.70584896962792, 9.886079519150591, 10.414159282398465, 10.300816090265073, 10.744983652440876, 8.667060664723746, 10.305844448543343, 14.808439550493523, 8.370259865268832, 7.20300739674415, 9.43881488302783],\n [17.067327314938755, 15.223491697486025, 14.027622629290454, 13.37936661134171, 12.399379281475223, 13.81315320694874, 11.853353575638845, 10.665689095204236, 9.141813600298871, 9.11036844550804, 7.699363608648492, 10.673920655296008, 9.16228964485178, 9.183189106526623, 12.918478599403295, 8.105571011402109, 8.244369202650233, 8.846027306887548, 8.767389889900352, 8.344860487514312, 4.615518857773165, 5.862962477431306, 5.71486143648991, 5.4438544384837275, 5.962333607510951, 6.328849942274706, 6.775024086024321, 6.872809432634791, 5.464599979951314, 7.362918014409544, 7.244046360780604, 5.290751902348532, 5.675842484641401, 5.596050115858611, 6.938541693383213, 6.769210229246913, 6.627088073892648, 6.637864159619183, 7.0177629661168615, 5.548932649865592, 5.331669257790846, 7.7403841239962095, 6.4426985225349425, 4.766462975206636, 6.66907351452243, 
6.39404987450754, 6.527854578718254, 6.986365586940338, 7.206546835687935, 6.2543478829347965, 6.355601852892827, 5.439964391274556, 6.197049840860625, 8.057313645854915, 8.743547913595368, 8.279092824207666, 9.163281616590641, 8.212492312849022, 8.206892115882203, 5.924074580542415, 7.499429427183627, 6.217589640440942, 5.921350564231541, 6.484928417118682, 6.3581316269949655, 8.056686420788356, 6.72972847094589, 6.176468555896607, 5.306528837724751, 5.6174840415402585, 4.678756880453675, 6.428272448819264, 5.884081608073028, 5.344642000110709, 6.620914758450338, 5.440127818130964, 6.272307070340623, 6.440962025183107, 5.781951934263114, 6.864694396009275, 5.145147242727912, 6.170320165860639, 7.917489551227105, 8.472180233229459, 8.381006549405507, 11.446400945516478, 10.128965122098943, 6.65290718803895, 7.385699512069056, 5.7177805727295485, 5.710048723820802, 4.660576569713477, 6.433284088197848, 6.667661562406891, 6.77816216249128, 6.213187952962437, 7.59722957685512, 5.825892399041465, 6.213922371787718, 5.816461098802296, 16.56945401344998, 18.331532859976345, 7.092297677618126, 7.231535391776812, 7.360122925707431, 7.048071860722907, 6.934432772120863, 5.952118407972448, 8.771149572615936, 6.00920323698242, 7.0604178440449195, 5.817053229630411, 6.159251481037876, 5.630876169730633, 6.213017976427891, 6.7788237125626205, 4.848927616434685, 5.688126857142057, 5.36035345483516, 5.679599782571896, 7.095246897321908, 8.998877959348667, 7.48487411127996, 5.426944680416116, 5.817344573691418, 6.7734972178962485, 8.534615618101778, 7.53924035883349, 5.130224861161187, 6.452360835679216, 6.107101740324019, 6.587025074588629, 7.15360627978338, 6.305604796773872, 5.787682041579271, 6.513001023394777, 5.615687953553938, 5.535773071189084, 6.083786364393193, 6.2650034661046385, 7.348110325695597, 7.9684480604356835, 4.9937231721208475, 6.281168351550957, 6.698566977919708, 5.927635368059436, 6.272405890305827, 6.8195048159650575, 6.790358560416836, 6.98861952199057, 
8.546097430532004, 9.654370028539013, 4.776777828097568, 10.00658408232908, 8.85600513487725, 6.343836673397636, 6.276985619213351, 5.600867426416172, 5.653509583750474, 6.100543082614651, 5.551456923827717, 8.067651539494765, 7.570494939397729, 6.955446307996702, 7.938609091853545, 6.849092192747216, 5.173809601191373, 5.821916922181909, 8.25858513318918, 7.941894814048852, 7.872372393503557, 7.179556990007868, 6.724631026716992, 7.761468670285675, 9.023706255867275, 6.002735322143455, 5.679595147130187, 5.825080400076846, 5.617605159211467, 7.44784976423344, 5.3625041340471125, 6.791366562866276, 11.522871762243012, 7.72192087631105, 6.141478446628951, 8.985783310522756, 5.448731995331568, 5.8756396741226355, 6.614361989408393, 7.386841544529035, 6.418912902734918, 6.135156951384522, 6.85347569515587, 6.5407021867201856, 7.3611516061631646, 5.2873265137870815, 5.980428935875487, 7.615621264155301, 4.433082113329641, 7.666404040326761],\n [15.190545015724192, 14.351877240180198, 13.077091798472159, 14.241597535619052, 12.604898670362294, 11.441594817661452, 11.240808819518207, 10.252463802868364, 10.656910584567504, 11.280247289981057, 8.876988151620397, 10.095604410303778, 9.527978645379049, 11.306547664935918, 9.484964603456817, 10.692076317849764, 8.674833884982625, 8.427234735968176, 6.94557483161775, 8.520523534946374, 9.6144076328958, 10.091015817489957, 7.626160333529242, 8.45000236426561, 10.104899976590248, 9.485442757711573, 6.8599222983820916, 8.343071533381696, 8.151234921584866, 8.006398692911821, 7.966852974656033, 9.36002760899374, 10.081728269421236, 7.917602046990228, 6.722622423736359, 13.482895813436022, 9.687362914809931, 10.014294449664625, 7.978618293858056, 7.5694280329825165, 7.085746584348393, 10.610414263785469, 7.116191086509762, 7.262224190574126, 7.348281974384062, 6.090615005927018, 7.838324401916773, 7.73983219194028, 8.777953270861394, 7.712314765669135, 8.099757939641263, 8.554208563314454, 6.466983600965645, 9.938254540761237, 
6.842950773584522, 6.275024500692803, 7.888876970256389, 7.174541959311943, 6.912848588787202, 7.099472589996629, 7.141910680041766, 7.913962403151884, 8.700695999629847, 5.903237753882748, 8.02875550234571, 5.535011218790221, 9.16482600279353, 7.264300857885595, 7.487443555751145, 6.310703381348508, 7.30339729608686, 6.903574401706479, 6.462165774413731, 5.783511820480559, 7.438724749677562, 6.901880371253028, 8.17919838264866, 7.395852305668009, 8.09230937776132, 6.821887489839783, 7.933163482129629, 7.198130792052883, 7.674930992341313, 6.787919314742968, 7.407699083040836, 5.2375691268635505, 8.366390559269117, 5.567895611210296, 6.136257447226075, 5.0165375945337045, 7.259113080864518, 5.758549930994977, 4.703955269885598, 7.512929325206505, 6.508384188285521, 7.2556124804562945, 6.494409519188076, 6.689492846855044, 7.039298514100954, 7.341484557124106, 5.267436326726732, 4.991274192290107, 7.372120237921833, 6.17017824324608, 5.300353466988351, 5.6231784428904135, 4.8534150442892114, 6.336494230834849, 6.15810673487119, 6.863707883942724, 7.1487327588207705, 5.814925835479382, 5.555383712705327, 6.472789506842621, 6.1550738048818285, 6.73493090998487, 5.525116578984747, 8.67327707262502, 8.823021288494472, 7.257686021206064, 6.491265499121888, 5.907722702033924, 6.788603120897539, 5.9885736027717265, 6.866380575378689, 6.728582515345494, 9.03574314234789, 7.437229828428383, 10.594735036735738, 8.992672140001565, 6.99576596941868, 5.812603289316051, 6.424770889945808, 8.381746185447687, 9.314846468380056, 6.36339808180491, 7.871451089991449, 7.111323225823814, 6.513187363582582, 7.566733224604883, 5.587682860316956, 6.947521195632962, 7.946004964322261, 6.3056326414590504, 6.702659146806399, 6.981853203648017, 5.770555281645235, 4.534998715534817, 12.073358599786319, 9.451362994851603, 7.397932269228771, 7.124411895172527, 6.570603012774465, 5.565787910703532, 9.76961038959421, 8.858312962068597, 6.004321137116473, 5.346415010506244, 7.322542401124342, 
6.392520252270517, 5.910753071883756, 5.403670381484486, 4.647042946689148, 7.383617936328697, 6.837125091789173, 7.035074367220533, 5.909687089577799, 5.313308436805087, 4.316413414442388, 4.421717890091605, 5.408573568370816, 4.886905592487587, 6.209888910249045, 5.231839619869862, 7.636243859108162, 6.817364270234863, 5.698509032643113, 6.402456782479963, 6.988983083255046, 5.585640913260615, 6.97020214095025, 5.622943369091275, 6.844486251650798, 6.49396141949346, 6.603516229234012, 5.307587135304216, 5.433855007121976, 5.893232341799515, 5.775371927476902, 5.441041721520848, 6.521837379999517, 6.325523246233136, 5.656928265634981, 5.690695351675709, 7.569158486987114, 5.7615020909617884, 7.667611197062259, 7.823864440349442, 7.396286182820629, 6.090021272434316]])\nQCNN_pca16_MNIST = np.array([[15.0403534902811, 11.679744539511557, 9.086342105907185, 6.664554929588416, 6.566171879053282, 5.910776543533218, 6.931977168932667, 4.79529806898609, 3.755134914745378, 3.2357970556554827, 3.6323854680801455, 4.5115729864610135, 3.617077685829789, 5.718633611604171, 5.157951558065299, 7.654176746786203, 4.9864096284034725, 2.982486299976871, 6.806045486607213, 4.087469195545893, 5.261078435223882, 5.874669411484631, 4.240703479610863, 4.673604699857989, 4.902012155668855, 5.787842389530163, 4.1822601256875185, 5.688282096750136, 3.6007480715760867, 3.946650389033329, 6.608606140363109, 2.811459108676007, 4.065815440640769, 4.044175336351451, 3.424415886988854, 3.679671728084075, 4.28610495574031, 3.3178635185993546, 2.044339290228743, 3.701298930305038, 2.8294347859875297, 4.658087545833273, 4.757296945161872, 4.025100016210282, 2.694485984354044, 3.4031388227439088, 2.8009814881206214, 3.1403066797921735, 6.6082002359203695, 3.578493957595615, 3.1988250886006875, 2.0832025658992146, 2.576907923773283, 3.2454733353438154, 5.153927537635788, 3.0395325521216154, 2.693714572203974, 3.860309307400375, 3.3093790997964656, 4.190464547289133, 4.726344745820113, 
3.5446970135729154, 3.3609735845122253, 4.724594306192631, 3.5009182750556924, 2.2518520534054725, 6.274600583930375, 6.032593729403149, 5.0163263890954015, 3.950512092278221, 5.6808451631975965, 4.951416036625423, 4.275887444355786, 4.239359540924417, 8.295463063013294, 4.520937009893485, 4.182897662336456, 4.6709477369006684, 4.377355626583974, 3.731679162608877, 3.2653239107628194, 3.5893690612208613, 2.881273250653501, 4.228633007918818, 3.1812669874249013, 2.584631593321535, 4.1709692830307965, 4.006896806533891, 3.255961016185142, 3.6764351274944174, 2.3374250754423334, 3.346986771695794, 3.146580849036632, 4.558694440296939, 3.7873357951752027, 3.2647251488485938, 4.241168825634643, 3.5049526099919697, 3.135584867198829, 6.533798767943627, 3.8629206195329173, 8.151142053229588, 4.358137347234198, 3.6120080960372625, 2.4290147304015828, 5.361213301970355, 2.8697284429940737, 5.343097293398063, 3.796832394991832, 2.8498626767446456, 3.8864412047466717, 3.998725918661383, 2.732569829592892, 3.875316637915336, 3.415049169698115, 3.4771214212428965, 6.59724517933933, 3.4765740425308893, 3.038537332190798, 3.8530939255830754, 4.232983042164876, 3.6467398100449526, 5.366788150755652, 8.542280234092173, 2.7129951350873447, 2.401398547838671, 5.094339839941592, 5.61152629294007, 5.617547529421769, 2.9535889381556166, 2.9029491572495814, 5.518334828304726, 3.6127066439889277, 3.55883358092592, 2.7853089590889706, 5.33855286610076, 2.772713399137329, 7.545409633277615, 3.494276765056114, 3.7228137273551445, 8.322577054039863, 4.867055220427471, 4.244515929291943, 5.153044147333067, 3.2184437554038707, 2.2959571546485775, 3.6807102800753664, 4.098443130678253, 5.697381595613272, 4.667864659718746, 2.83537259976806, 6.039467184009572, 3.4389183927498643, 4.235829415537658, 3.358353501314416, 3.7005134590000397, 4.370450561413212, 3.9438699934597237, 4.065485609744926, 7.704248456862109, 3.405315091093644, 2.8225742568728336, 2.806043057887296, 3.6811053667498936, 
2.513099037746944, 4.959583979704834, 3.9175498762518717, 6.2440999882378865, 3.62892979946047, 6.086162170064439, 2.9430705674595576, 2.7536095321974488, 4.202546107923751, 2.563813548358968, 5.011818143970597, 3.6439148015353355, 2.4644533634303394, 3.1234634891570234, 2.976797021717022, 3.7434740881157906, 5.815925915081504, 5.398725466082786, 3.056503592579129, 3.462076939358277, 3.2463948967490905, 2.709449824611762, 5.070049973839421, 4.55848886287876, 4.350189826473156, 3.3917950455751775, 5.257869708511391, 3.1720499678936256, 3.772319772138259, 2.6266180957254286, 7.366119215574833, 3.4648798273131463, 3.6818819186869822, 2.584411989320695, 3.861584527015048, 3.3617978854788104],\n [18.76080656394133, 17.492976512736995, 16.635139528434046, 15.44746459777639, 15.783670181902039, 14.733725951880121, 15.638408661362076, 15.334636345951882, 14.267649957173633, 14.021523845337782, 13.049834257719604, 13.75845779333952, 13.394296555878375, 13.861871754332196, 13.206869903447362, 13.254393726268543, 12.927297010667099, 12.93279827500289, 13.258932821704136, 13.690258345912175, 13.376295289155484, 12.292146820182865, 13.485115668207802, 11.797938755021, 11.40246159785775, 12.39560690669116, 11.747065008901751, 10.493827875114143, 12.161344189944922, 10.556212390462914, 8.832134924020771, 8.911887656047469, 8.305537738417593, 9.524198975479626, 7.8033161776813245, 8.713756360712695, 8.019880034661544, 7.102132101753902, 7.149171413907029, 7.399891151360409, 7.825646717768322, 6.574009428750634, 6.400368704983532, 6.021186488198534, 5.511301283838171, 4.591820860709852, 4.746702684803182, 5.2490156531890975, 7.722562877893842, 5.41969821625493, 3.93768905688015, 4.3791987261691006, 5.293936451645284, 5.308717615635361, 5.237090885765296, 4.829617439920008, 4.784585015935567, 4.842871690750853, 5.898385365676442, 4.354653671866435, 4.48829206860057, 5.944499305636778, 5.892270253820464, 5.203559644880021, 5.406297090203602, 5.8397567261451195, 4.906910788337977, 
4.268157188017312, 4.768427739461705, 5.1999162550576035, 4.435033625326092, 4.348167855288795, 5.845931198737771, 4.746227962753383, 6.0208012410445635, 6.526762140357391, 7.116993889305273, 3.966693071047269, 4.7374620795062885, 4.801072440589765, 4.381354571823823, 5.838868389149641, 7.679708796762356, 4.382859070175897, 5.338581323838219, 4.381890415609466, 5.5538615166904926, 4.108887773857907, 3.8252565368040417, 3.808400769181966, 9.381040573533152, 5.2833927547548125, 7.4481998171904165, 3.595337547395573, 5.236305358949649, 6.396990094937701, 4.777510671023052, 4.351068667611331, 3.526714432497868, 3.5925124520741036, 6.044070796069493, 3.56502515462814, 4.276369909099347, 5.237153578589139, 4.467521480116951, 4.335342191401022, 3.660654695088974, 4.948475766398555, 4.787393263537703, 6.716408408324919, 4.444744869576575, 4.92784659069165, 3.888845091595223, 7.373973562787823, 4.375850837417648, 5.046693782279762, 2.850011587117684, 5.0155836665346625, 5.450957610322875, 4.0074494304542325, 4.588030170445772, 4.069999199423611, 5.536736973332822, 5.124052778565661, 4.887667234434682, 3.9724766894280346, 5.166295976495825, 4.344819381042703, 4.281066334108093, 3.4964663996412115, 5.26457988780978, 5.292901242072277, 3.8458493950344343, 3.990183554908133, 6.955706687312927, 5.751137737081019, 4.354553862821236, 4.729191829346073, 5.127544880990675, 4.35675042443368, 5.582550113247322, 5.563374899808365, 3.5899746801899424, 3.8396877144754775, 5.369767752970923, 6.8751976034905145, 5.7895355218621685, 5.292074926786702, 4.593063989932437, 4.553139346355265, 5.764127618445672, 4.223295126479774, 4.559414164606637, 5.147920214870456, 6.2870599653570185, 4.5154480192511155, 4.547816796334177, 4.780650300536115, 5.051103081956228, 4.818706532147595, 5.393742890566702, 5.268267825327518, 5.258323431332538, 5.2248646748386465, 5.42367887474358, 3.371478268659371, 5.012639991219033, 6.790271580248607, 3.8826450218583624, 5.669447503225864, 5.124385246573664, 
4.731147035678929, 3.4867957701938757, 3.0903444666169713, 4.046142923737805, 5.416045918730007, 4.897128067442872, 4.98561425849013, 4.975049679982171, 4.287976776280167, 5.7410414568263555, 2.9993151173721535, 3.455975577154801, 6.060038728761162, 4.9325016521989165, 3.302612279411134, 5.2245015108303665, 4.320990627417368, 4.939427480827248, 5.1140498399606376, 4.732007215162183, 4.726982692651536, 3.4448629916828355, 3.998079771000488, 5.90939266849587, 6.687380998390341, 5.837426153759145, 3.5204984419898486, 5.02949558448002, 4.606040535803322],\n [16.72466198817075, 18.23203329299041, 17.04552114254593, 16.4747118973941, 16.328368336836206, 17.410723969308012, 15.872351517857615, 16.215526775923344, 16.250770458635948, 15.421635213362391, 15.230984642161024, 15.018798248783625, 16.157149113441704, 14.789987234441348, 13.898052078334942, 14.0204769448794, 12.371991122224061, 13.274651942911516, 13.185081880468065, 12.928237282894656, 12.434309250742041, 13.16541206087937, 11.367706390837439, 12.03777569404668, 11.497996276912717, 12.08458981365239, 11.616195997386804, 11.602473746216086, 11.668860284998402, 11.786146329392016, 13.368877951746114, 11.692426496750727, 11.910056761190265, 11.611013417828119, 11.4310550401673, 11.376875545592455, 11.88774291379042, 12.376161028666086, 11.934140584358342, 10.026147610600807, 10.663171857655023, 13.650890236932659, 10.82406026848386, 11.031026853427328, 11.510767785371765, 10.922915835987178, 10.171785259013228, 11.398845899955809, 11.45659186648161, 11.737148780504409, 11.567727151522053, 12.261539469373407, 12.165775087751083, 13.194302359278781, 11.883508729711172, 10.532688693481353, 11.131899903073919, 12.98090667466216, 11.644397652836412, 11.983477048238532, 11.088459970053604, 11.6014573520075, 10.65654637706379, 12.008667995591953, 10.625702735146497, 10.491289716194457, 11.196741692347643, 10.645907283632212, 13.993127243396385, 11.764646216436743, 10.449129120957945, 11.289812701166634, 
11.14683909495442, 11.441362772681225, 11.804777773924965, 10.915199167506186, 10.812261505687099, 10.770968989746452, 10.148279068883163, 11.164810966528657, 11.154609603132217, 10.739589872807322, 12.065233075312587, 10.748658071452887, 11.643396957522432, 11.65989319099757, 12.644226062109036, 12.31832666108812, 11.430796337768562, 9.452409561869795, 10.68309787886897, 11.019388498990073, 10.97355464752894, 11.794277412768956, 10.527571957587782, 10.874232727064458, 10.051723428130131, 11.566079128878629, 11.507904391424416, 10.581708870152974, 11.536102666557959, 11.36044464652444, 11.107818267331849, 10.58827218635688, 12.166477575739101, 9.610721190466606, 9.604957110831789, 11.029291297824823, 11.247649447681821, 10.022191991196, 11.179766973179584, 9.844371784034161, 10.622396779200804, 10.188924443438914, 10.563616378506534, 11.342539469399837, 10.697064763800302, 11.63323049249691, 11.568503392139084, 11.330153355857203, 10.176695322285678, 9.896981510222165, 10.546272094136452, 10.645240307118689, 10.511362309082866, 9.902753672322367, 11.114413974005378, 11.954119329922214, 10.562144592029032, 11.783041342379423, 11.133204087505373, 10.137365958120316, 9.98984531392172, 10.77515588860395, 11.996316487654436, 9.880049530800221, 11.46066218747711, 11.06172456210555, 10.77932521428858, 11.770728695658844, 10.613787810896037, 11.778084369057982, 11.679744807602185, 10.554823370199497, 12.379442983986813, 12.118382862701306, 11.7740471020511, 10.749028212780495, 11.744029462294014, 11.225384807331835, 10.962532678567039, 11.26496714218706, 10.701225248000839, 12.010796959046942, 9.780953472817137, 11.153623566958638, 12.552979433281646, 12.18425305868127, 12.650468163954661, 11.361124614293812, 11.079120485463662, 10.718531511405144, 10.671799751849246, 11.50268334054833, 10.15104462272526, 10.125958854212838, 11.341251547737246, 11.105819749762828, 10.66764286198175, 9.990179209004388, 11.374618403956616, 10.467546368221788, 9.65552804661144, 
10.515769274231314, 12.240703622922783, 12.076914222090787, 9.997014177455704, 11.976465199806409, 10.367779404321428, 10.692188091892083, 10.759828259178633, 9.885958728868792, 12.827913413017592, 11.378955560656012, 11.291682205749431, 10.81430258677906, 10.580445363306765, 11.204824938788489, 10.445691531598467, 10.977326763393487, 11.383872353372373, 11.099609968205849, 11.34669640583076, 10.801388038961784, 10.463248778249849, 10.802351684791397, 13.027318676243521, 10.911984962071466, 10.695783877952623, 9.800399823473954],\n [16.654554746808593, 16.6100360932051, 15.651527889680048, 13.190161390699306, 13.046826224571552, 13.346343897611325, 11.012656596102225, 11.870159888538701, 11.860977530007654, 11.029796647428274, 11.684184153223226, 11.113650632363562, 10.535953683712757, 9.984511831745172, 10.209728703962492, 9.199113699116312, 8.45809617026864, 8.336063756548425, 9.216973710024778, 7.548209098459137, 7.838871825156712, 7.944388254342479, 6.80270161823673, 7.410331884757784, 6.92308638666363, 6.536010951646624, 7.259495380272714, 6.808335340922749, 5.7467818247246045, 6.237096616431515, 5.369738865844254, 5.514398005041065, 6.392489438106319, 7.7860364976576, 7.306878360302303, 4.16336507094618, 3.812389080756265, 5.528159483208023, 7.293477836233743, 3.354901672370745, 4.786255365227397, 6.651966992133143, 3.364309431155788, 3.9156834775949863, 3.693928246405127, 6.040041419725765, 3.7591679565504785, 3.9207317079854787, 2.6662914187382425, 4.680450769220285, 4.022393124320226, 5.53659903273645, 3.1601066155915336, 3.2028965722391907, 6.045505082587928, 5.80168146589388, 7.728525887983637, 4.265192381837392, 4.0243909725273594, 6.648543685512981, 4.066346534983528, 4.282628649558249, 3.8078388857899355, 4.7987244045527655, 2.5663709863515716, 6.338667688927847, 5.339332808328143, 4.471767041758405, 4.17986040392646, 3.9208724749792614, 3.1009098876894985, 3.0250690154908195, 4.973414218279776, 2.7710411412572995, 4.0195517047968305, 
4.530441661935704, 5.944107699151673, 4.326116089812627, 3.9220820737184363, 6.098059409089991, 5.962357320915546, 5.200249184354639, 3.1246798130684375, 4.179626275552883, 4.912507111190487, 2.8082236070797433, 4.923055077120667, 4.908974934189588, 5.496515320949629, 3.507979482013591, 3.997618802475292, 2.947730820045905, 4.457207243752405, 3.8746956251831115, 10.10270922853466, 4.768860937993621, 3.083145227078136, 2.978517569487158, 5.019907534825907, 6.52809584805239, 3.7037841464049674, 2.8957956584461364, 7.221240938650608, 3.9310739846103795, 4.852399900337307, 3.811644993962823, 3.3501218518177094, 5.323062325308605, 4.972753649610172, 3.0300921007233446, 3.666218922114425, 3.3867005212619024, 3.8263266623380634, 3.0693850920964683, 4.330816115744943, 3.6856609093132664, 3.7214656401226716, 5.004058279654546, 5.555457471080942, 4.718716349387128, 5.571483776326795, 2.3053405681882713, 4.178672683800191, 3.558205347593681, 2.826775983611389, 3.976951122410716, 3.0175778031098948, 3.28289670899062, 3.375030846389794, 2.684348363405877, 2.9060174059407835, 5.226238368421219, 3.895046126268329, 2.701200969235792, 3.3065432226508915, 3.733034104399719, 3.7050620728057253, 5.184438827410636, 5.171820237054935, 4.815215617130147, 4.650844341108002, 7.644395199816502, 4.2683320330199805, 3.009122873318518, 5.0971014888933635, 5.686473115862671, 3.84774781671446, 3.3688436836457676, 4.65487387191854, 3.9542085181488993, 2.721109607223862, 5.063100164576097, 3.8057651228251186, 3.39826859173088, 3.041759107569799, 4.332644843667052, 4.471074789672922, 4.0406870645900455, 4.5430652892891406, 3.2468999297963896, 3.8791611321272224, 5.08785949493707, 3.597267709284287, 2.5785270423518876, 5.019103128738466, 3.2711675004068685, 3.509656646115432, 3.4274757764467294, 3.8813621139849674, 3.356462953403315, 4.610170507069836, 3.9056574316748165, 5.131889633056958, 4.056704486048151, 4.808978619012737, 4.468313580074134, 4.469487335675175, 4.705405334848642, 
4.975128307953824, 4.294917852769603, 2.951400325380167, 4.175567601852011, 5.015210043704079, 2.9955747599189353, 4.19071230969187, 4.690784936133687, 4.307336301019452, 4.1119251984621155, 4.663914665778903, 2.185103411529659, 3.2952427635746058, 2.4661199854561593, 4.346698316555047, 4.9237687512439186, 3.2072659131384684, 4.641070784901484, 5.349371912550463, 2.499725761766173, 4.0643624300068, 3.381167536440524],\n [15.943602136100829, 15.137646080752564, 14.031259268709801, 12.516262877100344, 11.938267615636658, 12.013598024825768, 11.890491602846351, 12.005565607921413, 12.051762520808424, 11.365131573536177, 8.494513469058973, 8.336343605446677, 8.249927064040365, 5.3549066552266655, 4.306404486787607, 5.506298533938379, 5.097928425574162, 6.002901258289079, 5.42811469348216, 4.192305620800479, 3.660004083624098, 6.868210590525829, 4.655403962750128, 4.4958008680782955, 8.036102386216092, 2.523547019662436, 4.369694939757404, 3.2389703207886336, 7.471806917543407, 3.735291531415017, 4.996440815497821, 5.493391949403715, 3.129812403746124, 5.2531827317152615, 5.493367388386861, 3.492634654233634, 3.4731525639462193, 3.4117393718090434, 4.388110363442796, 3.7277053251185928, 4.885871858478117, 4.414484843486188, 4.011569949784626, 4.447775414402923, 4.147636433378345, 5.450636987146961, 3.925817309706838, 4.783212070044803, 7.157707914791524, 3.7593833433218906, 2.7786392406122893, 4.843963925680338, 3.4274692995111455, 6.3155877258367115, 4.67850118102224, 3.6800495361193635, 2.691061061845679, 3.2595487923868514, 3.487210400320921, 3.382448412991661, 4.826782672783072, 3.1030235688688035, 3.5110279762842795, 4.539751412819247, 3.590548954592675, 3.5134592109202614, 5.040586922400771, 3.323567435537077, 2.906129320514898, 4.354708779527065, 4.117687448217158, 4.17870255825666, 4.833298553823613, 4.314988171462817, 3.5264262472366643, 3.206133078180603, 2.420734654175999, 4.413145350601405, 3.615487837742501, 2.963713344146665, 5.3904210457687824, 
4.533907940233031, 4.211995614999913, 5.776857156681145, 4.512037156539176, 3.086455412273055, 4.286547582533429, 4.182043436299541, 4.351935671497788, 3.3992296100234753, 4.731050373400163, 5.274665266973615, 3.4968382998029, 5.286873829557912, 3.767126577967115, 4.839245049768803, 3.382132378107613, 4.803926325481148, 4.048561502425425, 5.660934310728218, 4.47093232395587, 2.711124061486214, 4.4029274597640615, 4.874086202812757, 3.0804043989883114, 3.8271133748660637, 5.474285943767656, 4.625414251669414, 3.2812152703679334, 3.9420159396485372, 3.722665147809936, 5.070065916296792, 3.2837907496437624, 4.68124162140156, 4.898520907829548, 2.7349512943749232, 4.655365210218104, 5.437348843244219, 3.172039237125762, 3.589310841557146, 3.1059936654116473, 4.657474703323577, 6.0206372162976916, 3.8087962241300284, 3.5734728616437796, 3.7751255219468787, 4.89566556762077, 3.470025272375947, 2.191189134410386, 4.34942151595168, 3.3498976470943616, 4.157973865778392, 4.075664341676403, 3.2400603741803304, 3.1335642909255412, 4.202973181380301, 4.479065518781197, 4.2273077248948905, 3.870930890458533, 2.9482154011456236, 3.495800293483603, 4.3743491037733575, 3.0623225780133536, 3.570440371548199, 1.744032469622766, 8.620566394898146, 3.714479673812845, 3.0306325886171934, 3.6633743112023205, 3.6271346133272737, 3.7605183152632913, 4.505750126478412, 3.9312954641117273, 3.8667722189786873, 4.7545912218554545, 3.6971158838227938, 3.0466985598118983, 2.7914826974405376, 4.643874068869419, 4.620978547504527, 4.4285320017964755, 3.8158259441314186, 3.0079408191864263, 5.618316433105118, 4.040517828052233, 4.705266634772513, 4.500119135595927, 3.60242787302192, 4.378907621872655, 5.492408374593512, 3.1682050103818113, 7.060684513255566, 3.5466290961083997, 5.100805896399661, 5.199437177875289, 2.837902969461837, 3.3567194571279195, 4.74074042710274, 3.7415086827366393, 2.9549445580860607, 3.0145347622691756, 3.43710742575117, 4.057084576172335, 3.6711782223441385, 
3.8530789569423796, 6.513123133524296, 3.1124611343479254, 8.385277528154614, 3.758633174403318, 4.171841513548456, 3.9197958798173884, 3.849476642696282, 2.520489041038024, 4.090458582022855, 5.172206699106703, 3.473499135506127, 5.346517717781067, 4.700934776557932, 3.6225964949312752, 3.142052782764175]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_pca16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_pca16_MNIST = np.array([[15.0403534902811, 11.679744539511557, 9.086342105907185, 6.664554929588416, 6.566171879053282, 5.910776543533218, 6.931977168932667, 4.79529806898609, 3.755134914745378, 3.2357970556554827, 3.6323854680801455, 4.5115729864610135, 3.617077685829789, 5.718633611604171, 5.157951558065299, 7.654176746786203, 4.9864096284034725, 2.982486299976871, 6.806045486607213, 4.087469195545893, 5.261078435223882, 5.874669411484631, 4.240703479610863, 4.673604699857989, 4.902012155668855, 5.787842389530163, 4.1822601256875185, 5.688282096750136, 3.6007480715760867, 3.946650389033329, 6.608606140363109, 2.811459108676007, 4.065815440640769, 4.044175336351451, 3.424415886988854, 3.679671728084075, 4.28610495574031, 3.3178635185993546, 2.044339290228743, 3.701298930305038, 2.8294347859875297, 4.658087545833273, 4.757296945161872, 4.025100016210282, 2.694485984354044, 3.4031388227439088, 2.8009814881206214, 3.1403066797921735, 6.6082002359203695, 3.578493957595615, 3.1988250886006875, 2.0832025658992146, 2.576907923773283, 3.2454733353438154, 5.153927537635788, 3.0395325521216154, 2.693714572203974, 3.860309307400375, 3.3093790997964656, 4.190464547289133, 4.726344745820113, 3.5446970135729154, 3.3609735845122253, 4.724594306192631, 3.5009182750556924, 
2.2518520534054725, 6.274600583930375, 6.032593729403149, 5.0163263890954015, 3.950512092278221, 5.6808451631975965, 4.951416036625423, 4.275887444355786, 4.239359540924417, 8.295463063013294, 4.520937009893485, 4.182897662336456, 4.6709477369006684, 4.377355626583974, 3.731679162608877, 3.2653239107628194, 3.5893690612208613, 2.881273250653501, 4.228633007918818, 3.1812669874249013, 2.584631593321535, 4.1709692830307965, 4.006896806533891, 3.255961016185142, 3.6764351274944174, 2.3374250754423334, 3.346986771695794, 3.146580849036632, 4.558694440296939, 3.7873357951752027, 3.2647251488485938, 4.241168825634643, 3.5049526099919697, 3.135584867198829, 6.533798767943627, 3.8629206195329173, 8.151142053229588, 4.358137347234198, 3.6120080960372625, 2.4290147304015828, 5.361213301970355, 2.8697284429940737, 5.343097293398063, 3.796832394991832, 2.8498626767446456, 3.8864412047466717, 3.998725918661383, 2.732569829592892, 3.875316637915336, 3.415049169698115, 3.4771214212428965, 6.59724517933933, 3.4765740425308893, 3.038537332190798, 3.8530939255830754, 4.232983042164876, 3.6467398100449526, 5.366788150755652, 8.542280234092173, 2.7129951350873447, 2.401398547838671, 5.094339839941592, 5.61152629294007, 5.617547529421769, 2.9535889381556166, 2.9029491572495814, 5.518334828304726, 3.6127066439889277, 3.55883358092592, 2.7853089590889706, 5.33855286610076, 2.772713399137329, 7.545409633277615, 3.494276765056114, 3.7228137273551445, 8.322577054039863, 4.867055220427471, 4.244515929291943, 5.153044147333067, 3.2184437554038707, 2.2959571546485775, 3.6807102800753664, 4.098443130678253, 5.697381595613272, 4.667864659718746, 2.83537259976806, 6.039467184009572, 3.4389183927498643, 4.235829415537658, 3.358353501314416, 3.7005134590000397, 4.370450561413212, 3.9438699934597237, 4.065485609744926, 7.704248456862109, 3.405315091093644, 2.8225742568728336, 2.806043057887296, 3.6811053667498936, 2.513099037746944, 4.959583979704834, 3.9175498762518717, 6.2440999882378865, 
3.62892979946047, 6.086162170064439, 2.9430705674595576, 2.7536095321974488, 4.202546107923751, 2.563813548358968, 5.011818143970597, 3.6439148015353355, 2.4644533634303394, 3.1234634891570234, 2.976797021717022, 3.7434740881157906, 5.815925915081504, 5.398725466082786, 3.056503592579129, 3.462076939358277, 3.2463948967490905, 2.709449824611762, 5.070049973839421, 4.55848886287876, 4.350189826473156, 3.3917950455751775, 5.257869708511391, 3.1720499678936256, 3.772319772138259, 2.6266180957254286, 7.366119215574833, 3.4648798273131463, 3.6818819186869822, 2.584411989320695, 3.861584527015048, 3.3617978854788104],\n [18.76080656394133, 17.492976512736995, 16.635139528434046, 15.44746459777639, 15.783670181902039, 14.733725951880121, 15.638408661362076, 15.334636345951882, 14.267649957173633, 14.021523845337782, 13.049834257719604, 13.75845779333952, 13.394296555878375, 13.861871754332196, 13.206869903447362, 13.254393726268543, 12.927297010667099, 12.93279827500289, 13.258932821704136, 13.690258345912175, 13.376295289155484, 12.292146820182865, 13.485115668207802, 11.797938755021, 11.40246159785775, 12.39560690669116, 11.747065008901751, 10.493827875114143, 12.161344189944922, 10.556212390462914, 8.832134924020771, 8.911887656047469, 8.305537738417593, 9.524198975479626, 7.8033161776813245, 8.713756360712695, 8.019880034661544, 7.102132101753902, 7.149171413907029, 7.399891151360409, 7.825646717768322, 6.574009428750634, 6.400368704983532, 6.021186488198534, 5.511301283838171, 4.591820860709852, 4.746702684803182, 5.2490156531890975, 7.722562877893842, 5.41969821625493, 3.93768905688015, 4.3791987261691006, 5.293936451645284, 5.308717615635361, 5.237090885765296, 4.829617439920008, 4.784585015935567, 4.842871690750853, 5.898385365676442, 4.354653671866435, 4.48829206860057, 5.944499305636778, 5.892270253820464, 5.203559644880021, 5.406297090203602, 5.8397567261451195, 4.906910788337977, 4.268157188017312, 4.768427739461705, 5.1999162550576035, 4.435033625326092, 
4.348167855288795, 5.845931198737771, 4.746227962753383, 6.0208012410445635, 6.526762140357391, 7.116993889305273, 3.966693071047269, 4.7374620795062885, 4.801072440589765, 4.381354571823823, 5.838868389149641, 7.679708796762356, 4.382859070175897, 5.338581323838219, 4.381890415609466, 5.5538615166904926, 4.108887773857907, 3.8252565368040417, 3.808400769181966, 9.381040573533152, 5.2833927547548125, 7.4481998171904165, 3.595337547395573, 5.236305358949649, 6.396990094937701, 4.777510671023052, 4.351068667611331, 3.526714432497868, 3.5925124520741036, 6.044070796069493, 3.56502515462814, 4.276369909099347, 5.237153578589139, 4.467521480116951, 4.335342191401022, 3.660654695088974, 4.948475766398555, 4.787393263537703, 6.716408408324919, 4.444744869576575, 4.92784659069165, 3.888845091595223, 7.373973562787823, 4.375850837417648, 5.046693782279762, 2.850011587117684, 5.0155836665346625, 5.450957610322875, 4.0074494304542325, 4.588030170445772, 4.069999199423611, 5.536736973332822, 5.124052778565661, 4.887667234434682, 3.9724766894280346, 5.166295976495825, 4.344819381042703, 4.281066334108093, 3.4964663996412115, 5.26457988780978, 5.292901242072277, 3.8458493950344343, 3.990183554908133, 6.955706687312927, 5.751137737081019, 4.354553862821236, 4.729191829346073, 5.127544880990675, 4.35675042443368, 5.582550113247322, 5.563374899808365, 3.5899746801899424, 3.8396877144754775, 5.369767752970923, 6.8751976034905145, 5.7895355218621685, 5.292074926786702, 4.593063989932437, 4.553139346355265, 5.764127618445672, 4.223295126479774, 4.559414164606637, 5.147920214870456, 6.2870599653570185, 4.5154480192511155, 4.547816796334177, 4.780650300536115, 5.051103081956228, 4.818706532147595, 5.393742890566702, 5.268267825327518, 5.258323431332538, 5.2248646748386465, 5.42367887474358, 3.371478268659371, 5.012639991219033, 6.790271580248607, 3.8826450218583624, 5.669447503225864, 5.124385246573664, 4.731147035678929, 3.4867957701938757, 3.0903444666169713, 4.046142923737805, 
5.416045918730007, 4.897128067442872, 4.98561425849013, 4.975049679982171, 4.287976776280167, 5.7410414568263555, 2.9993151173721535, 3.455975577154801, 6.060038728761162, 4.9325016521989165, 3.302612279411134, 5.2245015108303665, 4.320990627417368, 4.939427480827248, 5.1140498399606376, 4.732007215162183, 4.726982692651536, 3.4448629916828355, 3.998079771000488, 5.90939266849587, 6.687380998390341, 5.837426153759145, 3.5204984419898486, 5.02949558448002, 4.606040535803322],\n [16.72466198817075, 18.23203329299041, 17.04552114254593, 16.4747118973941, 16.328368336836206, 17.410723969308012, 15.872351517857615, 16.215526775923344, 16.250770458635948, 15.421635213362391, 15.230984642161024, 15.018798248783625, 16.157149113441704, 14.789987234441348, 13.898052078334942, 14.0204769448794, 12.371991122224061, 13.274651942911516, 13.185081880468065, 12.928237282894656, 12.434309250742041, 13.16541206087937, 11.367706390837439, 12.03777569404668, 11.497996276912717, 12.08458981365239, 11.616195997386804, 11.602473746216086, 11.668860284998402, 11.786146329392016, 13.368877951746114, 11.692426496750727, 11.910056761190265, 11.611013417828119, 11.4310550401673, 11.376875545592455, 11.88774291379042, 12.376161028666086, 11.934140584358342, 10.026147610600807, 10.663171857655023, 13.650890236932659, 10.82406026848386, 11.031026853427328, 11.510767785371765, 10.922915835987178, 10.171785259013228, 11.398845899955809, 11.45659186648161, 11.737148780504409, 11.567727151522053, 12.261539469373407, 12.165775087751083, 13.194302359278781, 11.883508729711172, 10.532688693481353, 11.131899903073919, 12.98090667466216, 11.644397652836412, 11.983477048238532, 11.088459970053604, 11.6014573520075, 10.65654637706379, 12.008667995591953, 10.625702735146497, 10.491289716194457, 11.196741692347643, 10.645907283632212, 13.993127243396385, 11.764646216436743, 10.449129120957945, 11.289812701166634, 11.14683909495442, 11.441362772681225, 11.804777773924965, 10.915199167506186, 
10.812261505687099, 10.770968989746452, 10.148279068883163, 11.164810966528657, 11.154609603132217, 10.739589872807322, 12.065233075312587, 10.748658071452887, 11.643396957522432, 11.65989319099757, 12.644226062109036, 12.31832666108812, 11.430796337768562, 9.452409561869795, 10.68309787886897, 11.019388498990073, 10.97355464752894, 11.794277412768956, 10.527571957587782, 10.874232727064458, 10.051723428130131, 11.566079128878629, 11.507904391424416, 10.581708870152974, 11.536102666557959, 11.36044464652444, 11.107818267331849, 10.58827218635688, 12.166477575739101, 9.610721190466606, 9.604957110831789, 11.029291297824823, 11.247649447681821, 10.022191991196, 11.179766973179584, 9.844371784034161, 10.622396779200804, 10.188924443438914, 10.563616378506534, 11.342539469399837, 10.697064763800302, 11.63323049249691, 11.568503392139084, 11.330153355857203, 10.176695322285678, 9.896981510222165, 10.546272094136452, 10.645240307118689, 10.511362309082866, 9.902753672322367, 11.114413974005378, 11.954119329922214, 10.562144592029032, 11.783041342379423, 11.133204087505373, 10.137365958120316, 9.98984531392172, 10.77515588860395, 11.996316487654436, 9.880049530800221, 11.46066218747711, 11.06172456210555, 10.77932521428858, 11.770728695658844, 10.613787810896037, 11.778084369057982, 11.679744807602185, 10.554823370199497, 12.379442983986813, 12.118382862701306, 11.7740471020511, 10.749028212780495, 11.744029462294014, 11.225384807331835, 10.962532678567039, 11.26496714218706, 10.701225248000839, 12.010796959046942, 9.780953472817137, 11.153623566958638, 12.552979433281646, 12.18425305868127, 12.650468163954661, 11.361124614293812, 11.079120485463662, 10.718531511405144, 10.671799751849246, 11.50268334054833, 10.15104462272526, 10.125958854212838, 11.341251547737246, 11.105819749762828, 10.66764286198175, 9.990179209004388, 11.374618403956616, 10.467546368221788, 9.65552804661144, 10.515769274231314, 12.240703622922783, 12.076914222090787, 9.997014177455704, 
11.976465199806409, 10.367779404321428, 10.692188091892083, 10.759828259178633, 9.885958728868792, 12.827913413017592, 11.378955560656012, 11.291682205749431, 10.81430258677906, 10.580445363306765, 11.204824938788489, 10.445691531598467, 10.977326763393487, 11.383872353372373, 11.099609968205849, 11.34669640583076, 10.801388038961784, 10.463248778249849, 10.802351684791397, 13.027318676243521, 10.911984962071466, 10.695783877952623, 9.800399823473954],\n [16.654554746808593, 16.6100360932051, 15.651527889680048, 13.190161390699306, 13.046826224571552, 13.346343897611325, 11.012656596102225, 11.870159888538701, 11.860977530007654, 11.029796647428274, 11.684184153223226, 11.113650632363562, 10.535953683712757, 9.984511831745172, 10.209728703962492, 9.199113699116312, 8.45809617026864, 8.336063756548425, 9.216973710024778, 7.548209098459137, 7.838871825156712, 7.944388254342479, 6.80270161823673, 7.410331884757784, 6.92308638666363, 6.536010951646624, 7.259495380272714, 6.808335340922749, 5.7467818247246045, 6.237096616431515, 5.369738865844254, 5.514398005041065, 6.392489438106319, 7.7860364976576, 7.306878360302303, 4.16336507094618, 3.812389080756265, 5.528159483208023, 7.293477836233743, 3.354901672370745, 4.786255365227397, 6.651966992133143, 3.364309431155788, 3.9156834775949863, 3.693928246405127, 6.040041419725765, 3.7591679565504785, 3.9207317079854787, 2.6662914187382425, 4.680450769220285, 4.022393124320226, 5.53659903273645, 3.1601066155915336, 3.2028965722391907, 6.045505082587928, 5.80168146589388, 7.728525887983637, 4.265192381837392, 4.0243909725273594, 6.648543685512981, 4.066346534983528, 4.282628649558249, 3.8078388857899355, 4.7987244045527655, 2.5663709863515716, 6.338667688927847, 5.339332808328143, 4.471767041758405, 4.17986040392646, 3.9208724749792614, 3.1009098876894985, 3.0250690154908195, 4.973414218279776, 2.7710411412572995, 4.0195517047968305, 4.530441661935704, 5.944107699151673, 4.326116089812627, 3.9220820737184363, 6.098059409089991, 
5.962357320915546, 5.200249184354639, 3.1246798130684375, 4.179626275552883, 4.912507111190487, 2.8082236070797433, 4.923055077120667, 4.908974934189588, 5.496515320949629, 3.507979482013591, 3.997618802475292, 2.947730820045905, 4.457207243752405, 3.8746956251831115, 10.10270922853466, 4.768860937993621, 3.083145227078136, 2.978517569487158, 5.019907534825907, 6.52809584805239, 3.7037841464049674, 2.8957956584461364, 7.221240938650608, 3.9310739846103795, 4.852399900337307, 3.811644993962823, 3.3501218518177094, 5.323062325308605, 4.972753649610172, 3.0300921007233446, 3.666218922114425, 3.3867005212619024, 3.8263266623380634, 3.0693850920964683, 4.330816115744943, 3.6856609093132664, 3.7214656401226716, 5.004058279654546, 5.555457471080942, 4.718716349387128, 5.571483776326795, 2.3053405681882713, 4.178672683800191, 3.558205347593681, 2.826775983611389, 3.976951122410716, 3.0175778031098948, 3.28289670899062, 3.375030846389794, 2.684348363405877, 2.9060174059407835, 5.226238368421219, 3.895046126268329, 2.701200969235792, 3.3065432226508915, 3.733034104399719, 3.7050620728057253, 5.184438827410636, 5.171820237054935, 4.815215617130147, 4.650844341108002, 7.644395199816502, 4.2683320330199805, 3.009122873318518, 5.0971014888933635, 5.686473115862671, 3.84774781671446, 3.3688436836457676, 4.65487387191854, 3.9542085181488993, 2.721109607223862, 5.063100164576097, 3.8057651228251186, 3.39826859173088, 3.041759107569799, 4.332644843667052, 4.471074789672922, 4.0406870645900455, 4.5430652892891406, 3.2468999297963896, 3.8791611321272224, 5.08785949493707, 3.597267709284287, 2.5785270423518876, 5.019103128738466, 3.2711675004068685, 3.509656646115432, 3.4274757764467294, 3.8813621139849674, 3.356462953403315, 4.610170507069836, 3.9056574316748165, 5.131889633056958, 4.056704486048151, 4.808978619012737, 4.468313580074134, 4.469487335675175, 4.705405334848642, 4.975128307953824, 4.294917852769603, 2.951400325380167, 4.175567601852011, 5.015210043704079, 
2.9955747599189353, 4.19071230969187, 4.690784936133687, 4.307336301019452, 4.1119251984621155, 4.663914665778903, 2.185103411529659, 3.2952427635746058, 2.4661199854561593, 4.346698316555047, 4.9237687512439186, 3.2072659131384684, 4.641070784901484, 5.349371912550463, 2.499725761766173, 4.0643624300068, 3.381167536440524],\n [15.943602136100829, 15.137646080752564, 14.031259268709801, 12.516262877100344, 11.938267615636658, 12.013598024825768, 11.890491602846351, 12.005565607921413, 12.051762520808424, 11.365131573536177, 8.494513469058973, 8.336343605446677, 8.249927064040365, 5.3549066552266655, 4.306404486787607, 5.506298533938379, 5.097928425574162, 6.002901258289079, 5.42811469348216, 4.192305620800479, 3.660004083624098, 6.868210590525829, 4.655403962750128, 4.4958008680782955, 8.036102386216092, 2.523547019662436, 4.369694939757404, 3.2389703207886336, 7.471806917543407, 3.735291531415017, 4.996440815497821, 5.493391949403715, 3.129812403746124, 5.2531827317152615, 5.493367388386861, 3.492634654233634, 3.4731525639462193, 3.4117393718090434, 4.388110363442796, 3.7277053251185928, 4.885871858478117, 4.414484843486188, 4.011569949784626, 4.447775414402923, 4.147636433378345, 5.450636987146961, 3.925817309706838, 4.783212070044803, 7.157707914791524, 3.7593833433218906, 2.7786392406122893, 4.843963925680338, 3.4274692995111455, 6.3155877258367115, 4.67850118102224, 3.6800495361193635, 2.691061061845679, 3.2595487923868514, 3.487210400320921, 3.382448412991661, 4.826782672783072, 3.1030235688688035, 3.5110279762842795, 4.539751412819247, 3.590548954592675, 3.5134592109202614, 5.040586922400771, 3.323567435537077, 2.906129320514898, 4.354708779527065, 4.117687448217158, 4.17870255825666, 4.833298553823613, 4.314988171462817, 3.5264262472366643, 3.206133078180603, 2.420734654175999, 4.413145350601405, 3.615487837742501, 2.963713344146665, 5.3904210457687824, 4.533907940233031, 4.211995614999913, 5.776857156681145, 4.512037156539176, 3.086455412273055, 
4.286547582533429, 4.182043436299541, 4.351935671497788, 3.3992296100234753, 4.731050373400163, 5.274665266973615, 3.4968382998029, 5.286873829557912, 3.767126577967115, 4.839245049768803, 3.382132378107613, 4.803926325481148, 4.048561502425425, 5.660934310728218, 4.47093232395587, 2.711124061486214, 4.4029274597640615, 4.874086202812757, 3.0804043989883114, 3.8271133748660637, 5.474285943767656, 4.625414251669414, 3.2812152703679334, 3.9420159396485372, 3.722665147809936, 5.070065916296792, 3.2837907496437624, 4.68124162140156, 4.898520907829548, 2.7349512943749232, 4.655365210218104, 5.437348843244219, 3.172039237125762, 3.589310841557146, 3.1059936654116473, 4.657474703323577, 6.0206372162976916, 3.8087962241300284, 3.5734728616437796, 3.7751255219468787, 4.89566556762077, 3.470025272375947, 2.191189134410386, 4.34942151595168, 3.3498976470943616, 4.157973865778392, 4.075664341676403, 3.2400603741803304, 3.1335642909255412, 4.202973181380301, 4.479065518781197, 4.2273077248948905, 3.870930890458533, 2.9482154011456236, 3.495800293483603, 4.3743491037733575, 3.0623225780133536, 3.570440371548199, 1.744032469622766, 8.620566394898146, 3.714479673812845, 3.0306325886171934, 3.6633743112023205, 3.6271346133272737, 3.7605183152632913, 4.505750126478412, 3.9312954641117273, 3.8667722189786873, 4.7545912218554545, 3.6971158838227938, 3.0466985598118983, 2.7914826974405376, 4.643874068869419, 4.620978547504527, 4.4285320017964755, 3.8158259441314186, 3.0079408191864263, 5.618316433105118, 4.040517828052233, 4.705266634772513, 4.500119135595927, 3.60242787302192, 4.378907621872655, 5.492408374593512, 3.1682050103818113, 7.060684513255566, 3.5466290961083997, 5.100805896399661, 5.199437177875289, 2.837902969461837, 3.3567194571279195, 4.74074042710274, 3.7415086827366393, 2.9549445580860607, 3.0145347622691756, 3.43710742575117, 4.057084576172335, 3.6711782223441385, 3.8530789569423796, 6.513123133524296, 3.1124611343479254, 8.385277528154614, 3.758633174403318, 
4.171841513548456, 3.9197958798173884, 3.849476642696282, 2.520489041038024, 4.090458582022855, 5.172206699106703, 3.473499135506127, 5.346517717781067, 4.700934776557932, 3.6225964949312752, 3.142052782764175]])\nQCNN_ae16_MNIST = np.array([[17.354467327578657, 17.042268172506326, 17.40694835400147, 16.883255592176255, 16.65502466143769, 16.039633594472303, 15.18380569942337, 13.838095701256382, 13.6411878490585, 13.639226773262536, 11.898493011329156, 12.868276282517744, 11.407206036726924, 11.525453804634996, 11.740707201526101, 10.790203193315676, 11.34395192774905, 10.64029217956473, 11.59677744893592, 12.072397377675616, 10.298866412414995, 10.426261574135319, 12.040760960978899, 11.767660857584607, 11.385993895641256, 10.524566396615075, 11.084164348605952, 11.456355935707302, 12.254864080801898, 9.937288284144701, 11.780007450447172, 11.454643971894495, 10.531734053639495, 11.237845518480935, 9.992580562933407, 12.247149847710448, 11.372243331506699, 10.141139777730633, 10.282702105660285, 10.1585390643646, 9.8266346422288, 10.989571262070092, 9.994801039554929, 10.367891795683724, 10.879959049539266, 12.389619513224606, 12.617326879876774, 10.875798383789132, 11.309012593678991, 10.240772191556614, 9.982847744814206, 10.257602867143342, 11.438397981507801, 11.539668753352329, 10.406244308898016, 10.766912612387005, 10.38566659871728, 9.653635040918758, 9.3673220756666, 11.192703256025991, 11.356454319917198, 9.723734896084848, 9.217143461580724, 9.406698068178564, 9.13013078083301, 10.058229659466777, 9.143025760232842, 10.738484714733932, 8.731833920704114, 9.051934350067908, 9.391574380896122, 10.354198328305952, 11.41911351546528, 10.729578061770784, 9.966301829689867, 10.192854884524136, 10.140809499076251, 11.251415539219261, 9.097875565690716, 10.752420465409381, 8.734407413108007, 10.994675216320935, 9.559354946139822, 8.308752189570685, 11.003693164489391, 9.934765567860396, 8.703704842719095, 9.543374745782106, 10.585317045336256, 
9.98236237314253, 9.805887132116636, 10.76611504505452, 8.84994342743999, 9.265365070717655, 8.879399016770595, 10.307291316366607, 10.662582822718035, 9.064125800739076, 9.376222504925838, 10.12848951536187, 10.975483228243919, 8.775552584439094, 11.009009122225285, 9.272603626790717, 8.704715649969021, 9.61252267368881, 9.100483391276812, 9.066614665519667, 9.989281734423669, 8.602033761933018, 9.408094899733662, 9.538269748027892, 9.008654652749843, 9.194242814602033, 8.725458922014681, 10.107479185599322, 9.442521811361718, 9.087366436118176, 9.266930805024437, 8.57586972182249, 10.0152277200648, 10.751355944699405, 9.55254169451107, 8.743040638243095, 9.315575278943692, 9.38152752224921, 9.966720452811298, 9.584847112133565, 8.089099393795474, 8.790246565148813, 8.53288745553979, 8.687112249342727, 9.729099965690347, 10.40569309666267, 9.207169547160788, 9.97315350078105, 9.582893792426372, 9.680639111697669, 9.77342413284356, 8.659016223480213, 10.175697195282925, 8.871082110951905, 8.489727966356865, 9.394969114776119, 8.212504227339023, 9.263262635173055, 8.399163280010825, 8.549578055665766, 9.014644858839622, 8.62399123382903, 8.634238480977714, 10.073618077956638, 9.391854860991515, 8.708608613451116, 9.539285285485128, 8.595083802441742, 8.731033895537273, 9.11547781143287, 10.080899796224951, 9.623257833992781, 9.11933726084373, 9.893937725659548, 9.246247681217216, 9.272467473319889, 9.398740623244864, 7.349628028032209, 8.159752447878983, 8.82574749049257, 8.525154771329886, 7.501851214751565, 12.126209028830493, 8.50769370866193, 8.277991636705003, 8.370087073623838, 8.416008477989118, 8.727618583799856, 8.645743640407767, 8.706397055235657, 8.502417955135407, 9.929431266833008, 10.533268816691018, 9.252277797684265, 10.319031755352105, 9.417109560106503, 8.67049853269791, 8.684513151003806, 8.937949449570723, 7.581054226831386, 8.282546087261737, 9.611700861294086, 9.202296390821195, 9.023864493541607, 8.609547020418514, 10.157374222079035, 
9.46558300882575, 8.83136674819522, 8.785424265569475, 9.515502055170906, 8.205222935121133, 9.592483049592703],\n [18.2215099764598, 17.07539436825304, 14.553998643640266, 12.305622699706753, 13.523106217403347, 15.540845041796361, 13.726983344730938, 12.87642847297933, 12.757050662258266, 13.224431572234197, 11.557096471061302, 12.508446454997912, 12.163224381820696, 10.885399615077194, 11.010440641705598, 10.98934075246616, 9.339366214002977, 10.122126700518685, 8.96022672823414, 10.439527660375374, 10.37532364560213, 9.316992407451487, 8.036769364825696, 10.348423881784315, 9.822858732764791, 8.45144515308012, 11.712504901113189, 9.584078780248909, 11.155556226259952, 9.365974053546093, 8.319401011227004, 8.394466227450419, 8.164833821447338, 7.182439308748092, 11.295787575319011, 8.249681020631975, 8.49767091757703, 9.818493784038095, 7.9360208009033055, 7.455463392122169, 7.220454534767381, 8.47844698123061, 8.815357317225246, 8.228186366741918, 7.050260045964252, 8.863998034098914, 9.124506781105257, 8.57828842174249, 7.920929683236485, 8.116343489356497, 8.552411258798665, 7.799270308658052, 10.761275688353976, 9.451183946668186, 9.015572056083506, 7.216947331351472, 8.260400770464452, 7.998577075598228, 5.7839056284983, 7.077246328164254, 8.224734911291913, 7.8332926719482465, 8.251644833257165, 7.404415709127036, 9.111295807109403, 8.999854823497218, 8.160545863970956, 8.077756606928162, 8.776159105207023, 6.07442335360838, 9.30434872202591, 8.472446362218296, 8.707441227619633, 6.983812971030087, 7.562518684077058, 8.598975620441122, 7.527805009747777, 8.685506919774113, 8.629044157544582, 6.692967232225073, 6.332447784713338, 6.189439674753778, 8.684894606603326, 11.507768155931908, 8.127466953908705, 7.2912015341831005, 6.44757780302433, 10.014136977730027, 7.798721195214707, 6.70585627357323, 6.726794221438968, 6.6251131630776605, 6.214302756914122, 9.41099442532437, 7.778811692599605, 7.04674825837227, 7.953893730878024, 8.282992095189973, 
7.322622847836619, 6.9002954004836425, 6.457646601888995, 7.650049952083803, 9.23679031556494, 9.026472916310905, 6.972239835760271, 9.541328599447557, 9.935647331789118, 7.160364395019715, 7.236850475011799, 8.007586568638347, 8.856344142522232, 9.925293608808806, 7.652568683274073, 7.4021148123673095, 8.462086042507433, 8.877335026538491, 8.184719719755941, 7.139807830123084, 8.519918807128533, 8.18065922614548, 7.230255317627048, 7.97357730611355, 6.558266240652757, 9.264231158413324, 5.418541089516621, 7.1475533557119935, 9.594623909763314, 9.077746463301063, 8.555244159469627, 7.184311205504569, 7.7624644422293105, 8.449084016414847, 8.750650478965229, 6.151543205978295, 6.210654890706382, 6.74939399765403, 6.716508482252397, 8.195207428840387, 7.809651634785921, 8.569613414497832, 8.891514968851228, 6.140467651592561, 11.51075469760621, 9.26843312597191, 7.7364603591448375, 7.326442933135175, 8.67741495796755, 6.884613400884021, 7.104576907083857, 8.721752987964718, 7.576399278895593, 7.350036553046979, 8.254354467042848, 9.22010594839898, 8.130987757301606, 7.683749817309081, 7.619310539874576, 7.178535721831786, 8.381513118201532, 6.837275296691584, 8.412977177860506, 7.64078834165367, 6.3443929593613575, 8.210279757855718, 7.093708372830625, 8.729344770520639, 9.185421884335529, 8.359860905981014, 8.544886693622976, 6.809522418103523, 5.885047212918689, 7.520591837008473, 7.669458325126419, 8.608681043921205, 7.877053316374514, 7.734122025501142, 7.08172167052958, 8.136091907638777, 7.373986694367545, 7.413408947233296, 6.566053199008984, 7.984536649806719, 10.847839696213473, 6.370106991304356, 7.859728488439158, 7.618426138341909, 6.901893961316959, 7.093826376598234, 8.67774767583435, 8.3158232663605, 6.050292643215006, 9.707825411404423, 7.447459650937453, 7.875684388982652, 8.303053986270747, 8.03365175716246, 8.927465759384772, 7.083330915738764, 9.195283941681629, 8.245188272864015],\n [17.859050158373645, 16.313695810035085, 14.448466601241563, 
13.298451842741393, 13.491787528112447, 14.80840830967372, 12.163531003161914, 11.687319638854435, 12.123588616359106, 11.854624396167036, 11.06148059356733, 10.938291181658748, 11.278758618953223, 10.410521450836306, 12.430072360429184, 9.917387355057944, 11.321123110738327, 10.418219044771432, 12.031334525256193, 11.258976070013333, 12.285041307445642, 11.199686077640505, 10.838027905875343, 11.418544508403814, 15.655325716846605, 11.978018745720796, 10.979672858302955, 12.58789357207339, 10.969606557057393, 11.280954220620748, 10.512266254953753, 11.101926066088465, 12.386771579634173, 12.235307912383291, 11.135618037966902, 10.191321674752695, 11.877786413636635, 9.116946820103259, 9.973295117056507, 10.628478454290399, 9.678671552660608, 11.660284559094471, 11.691341415872852, 10.050401987857258, 11.002659670361995, 10.714240547230144, 9.177845655535934, 13.11485586641678, 8.246413372370272, 11.435267419548031, 10.683096535145852, 10.298505749106756, 11.687407383951347, 10.514975121380333, 11.9239013790073, 10.359344553361185, 10.684178742208776, 10.35847533943985, 9.469217308574832, 10.998556422430367, 9.690050798466812, 13.021944552278814, 9.9119132310249, 11.81123213949436, 9.312262773562473, 10.574985561569672, 10.018313955345654, 9.294797505097483, 11.095682423183332, 9.501323204141674, 10.183212200182998, 11.225747134684152, 8.87044780709674, 10.503370676067885, 11.40581932351349, 11.393100559859052, 11.084974164736895, 10.049685608383475, 12.2621399346572, 8.420835852631669, 8.18949681265711, 9.833535797095207, 9.090712210358713, 12.889297570868864, 11.465101084572087, 9.140922633405529, 9.77932983324417, 9.167935657591226, 8.519008084695756, 10.252453869043114, 9.748865512429358, 10.627413977896433, 11.164177749275854, 10.684587683027031, 10.674255602675633, 10.87810858904507, 8.81704656758819, 10.691827871139198, 11.342579986540558, 8.71113738098564, 11.140924300858622, 9.380062420878998, 8.70089527704871, 9.788317959403123, 11.694617414328972, 
11.393908427851166, 8.399403417151385, 10.477783226823819, 10.668630706023562, 11.008527534167934, 10.765047209337633, 10.552420736211314, 11.827052891794592, 9.73776159199256, 11.668890232175784, 10.202766904022173, 10.187029663706323, 9.832953283932858, 10.285455792770605, 10.667333334509307, 10.148587383780217, 11.529168239351206, 11.444778179240638, 10.332370144881224, 10.058991462592457, 10.725627480475291, 10.183575863607688, 9.554214360138646, 10.889674215358525, 8.959967122844583, 9.266195207982625, 10.379764858130558, 10.97474334628634, 11.50805174077576, 10.923537040454812, 9.945822848683752, 9.487627506238923, 12.03986487531359, 10.007161077529545, 9.141234313979306, 10.194083569944318, 8.462339646461523, 9.362464481424334, 9.698537189642442, 9.748440258400132, 8.66247795290833, 9.562234002024102, 10.24949191143583, 10.036538959781431, 10.047934431772587, 10.199605551639104, 9.112074277115203, 10.107061434018085, 11.686927835706276, 10.570028741200995, 10.912388661535239, 9.463554761670952, 9.827990360709151, 11.379832907835928, 10.975400289085458, 10.785464413332814, 9.991163771531426, 11.309277403712306, 9.715805377718446, 13.383678148225819, 10.859535700012366, 9.664759899639904, 9.580890965424356, 7.689422227052174, 9.549024183552255, 11.079024821989975, 10.961896076949891, 10.461286935370593, 8.707231377706488, 11.150941956561661, 9.415401252327701, 8.90822181399265, 10.587146771789737, 10.624019702432161, 10.267134609695313, 8.902810056384137, 11.062777168404548, 10.550669910675298, 10.01999275050117, 9.336690757617424, 10.369614882392995, 10.125705353155126, 10.437345014316625, 9.753698451902949, 10.519018704977002, 9.394976692238666, 9.564124237149546, 10.441074967170142, 10.657903377457727, 9.352383605782327, 9.73639230490847, 10.469626935277605, 10.66894147019617, 11.489626953704297, 10.729107673244838],\n [17.477869226417724, 16.923409892593707, 14.680425178997394, 12.8810271881688, 13.624623113433353, 15.498729144466333, 14.410996532036476, 
12.439385653405218, 11.824807748151184, 14.549762483555229, 12.52025094702346, 9.878390082320776, 13.353970191726475, 14.201123102594767, 10.485695152725818, 12.469841896410358, 11.785218989325013, 10.885649961244868, 11.862127107410245, 8.958517619776602, 10.21308258417946, 11.772420780101367, 11.350674914150671, 9.229652936854443, 9.350645878872323, 9.73417234976586, 10.917058280366632, 9.352920439903674, 9.116767310551115, 17.1341466183265, 12.744972996168254, 10.905428641350687, 9.868705688427436, 11.230994130057073, 10.40909538114941, 8.927450449159995, 8.64809934246907, 10.509855101545554, 9.018853645416415, 10.899434365702795, 8.993824888288769, 11.09355091355732, 9.78221138322432, 9.068457386867905, 7.9532532524764985, 13.63136104779954, 10.547030902645668, 8.55627211437192, 9.315263943948516, 8.267963958519648, 9.655133830489673, 10.610902524839648, 11.938930684277052, 10.442496188110441, 9.086091557317266, 10.523320402876738, 9.451870341084566, 9.363207873357467, 8.605438897818873, 9.259341268891594, 8.837066570124096, 9.520997546593952, 8.25459369051566, 8.352634575191372, 10.973012193124614, 8.516514267420478, 12.055314915587385, 7.799081514691986, 9.848780567429356, 9.773811432857535, 7.810066515512631, 11.298129062909965, 9.19187529815522, 8.821294019247189, 9.373848863130172, 9.669269844772323, 9.30451155893822, 10.846884854361333, 11.473225282482497, 9.7785113133521, 10.407485722119814, 8.466089651866756, 8.60866071209688, 11.375138543297105, 8.705820527310673, 9.978659292160323, 11.564462067479617, 9.911976058440146, 9.118941687650787, 7.722303035111853, 11.549820763881613, 9.817984587364052, 11.525743034479476, 8.66874519949241, 8.802439919851917, 10.081202063003442, 9.465766179009902, 8.944418735139623, 9.596773223228345, 10.399297834488916, 10.255009700581997, 11.933882356940535, 10.274235492887636, 8.942806196291254, 8.961595442394964, 9.809489700141466, 8.227384444324922, 7.258418352460358, 11.29610103411897, 10.619335991457246, 
9.803534013106253, 12.298193757933678, 9.993401276289536, 8.701355117710138, 9.551607828075836, 9.362683183637984, 10.904533024362392, 8.81368207664315, 9.43819992775753, 7.873818608426682, 9.46862326830045, 9.71567817279831, 13.984939786881863, 9.79790449779715, 8.813848982286368, 11.048520086471337, 9.188515583848583, 10.715871094508985, 8.978913662157714, 10.06537395383198, 9.695788683252461, 9.556267128671744, 8.748611432264772, 10.677485344181258, 11.470763605819359, 12.072838301227586, 10.379419141567304, 9.497432368680737, 8.874028268762219, 7.679188260676408, 9.398232926939107, 10.024369151173415, 10.674012057102708, 10.177287445670794, 10.460777362059792, 7.863296722541688, 11.109118111927568, 10.23554757993483, 11.819215586920068, 10.023765273884992, 9.520225855535234, 9.227950026454442, 8.902931208993143, 10.396389228983242, 10.609817418964726, 10.108312136877915, 9.613548945391354, 10.401432036507597, 9.574928407577804, 8.497230471033916, 10.584540408982733, 9.769260727069263, 9.361016009423052, 8.572894819086242, 9.946522365694737, 9.04798158019822, 8.051284312196598, 9.548014913556012, 9.313116082496107, 9.710222808078198, 8.170177762083872, 13.319108650979388, 10.204574979219975, 10.60152370136771, 10.542469079595556, 11.326339366997628, 9.59020690117027, 9.261926826272623, 10.308410911920863, 8.101140481718296, 10.569101777748422, 8.52297072840074, 10.423688872818895, 9.315426102482768, 8.016388794085461, 10.199176020724277, 9.448372821932574, 10.784011263128235, 10.354861981990755, 10.454927775255891, 10.166802839371554, 10.666712282779185, 11.193582247915906, 11.633631947505178, 9.528969714771767, 9.995404610119527, 9.216892378839669, 10.362141171764815, 7.6446199095801575, 8.708732177794332],\n [17.784778572764985, 17.229550169768114, 17.11865277828455, 15.007253312589256, 14.93838530448627, 12.915197413343982, 16.289980215404082, 12.721903102294194, 12.46040375894754, 11.62198654969676, 13.185002249811923, 11.989804798289935, 11.766333756587969, 
8.971219400248145, 20.13128200094444, 24.367381057992016, 16.868881581577096, 17.052825136151718, 15.994015894968923, 15.008922960604076, 12.144659001511501, 12.329678258901028, 12.666319623054486, 14.084776543472003, 13.154308151028845, 12.082938505322875, 9.664317135655581, 10.97517147558041, 12.99143678126779, 11.758765147066198, 12.062317839550769, 11.706288884139223, 11.203331352435114, 11.617228518123785, 9.75497128748522, 12.900930124812547, 11.55924114005623, 11.0816017480015, 11.740497140474714, 10.695073120038757, 9.703147377730485, 11.768050126611975, 10.178217027748692, 10.730378132207456, 9.791243966134035, 12.541255036100448, 10.656300764112098, 11.188765046089152, 10.423073312666965, 11.09791246968183, 10.641442930737332, 10.51309665058049, 10.28776146741658, 11.028004288408498, 10.454851980977235, 9.173405998967866, 9.96754637620247, 9.675652090524832, 11.33103827250655, 8.63308442436086, 10.920668927654592, 10.851557844547376, 10.035946213469797, 11.233826343414387, 11.850900088838877, 10.342916514184328, 9.751538452423258, 11.147611596602589, 11.265723219262341, 10.418040930870788, 9.946590076448544, 8.975660094399249, 9.113837797818968, 9.860212940722345, 11.965640262268836, 10.323453250780132, 9.580681494012781, 7.91154058149559, 9.838636058627019, 8.852412524085112, 9.925600069997344, 10.71576986296795, 11.307372947322873, 9.447219558443498, 10.123267568820076, 11.758289890811328, 8.921095936446779, 8.64678138473268, 10.061015135443945, 9.747543448330001, 7.715939755332897, 11.075105492088275, 8.89020687702349, 10.465771514144809, 9.355622900085748, 9.28133082710223, 11.300144587564407, 10.766516060699363, 8.516814949377375, 10.4557621831674, 10.072768857189988, 10.754976946016432, 9.711365261847847, 9.198095540006662, 8.889094438507572, 12.604305266574077, 7.30063974001526, 13.002908028679304, 9.994416621022099, 10.612834807828616, 8.94412565386079, 10.79345732513219, 14.05312185050333, 8.695822318852562, 9.932363728968925, 11.492471172738048, 
10.345564118261775, 9.724956496709261, 10.397993643297507, 9.392729553641722, 10.973528230744563, 9.143156940032158, 9.257446996986603, 9.447442405267939, 10.145665563182348, 8.534545306075106, 10.915970351064942, 12.291800475143358, 9.53309835082488, 11.33761829076702, 9.635122881544977, 9.425665199019702, 9.240620472662078, 9.235256016637988, 9.708373220698622, 11.006706849474014, 8.98379185601443, 10.285423814474871, 9.107385324297423, 9.646792368633731, 9.971053565197701, 9.361748988832598, 10.400038880531321, 10.30211747620635, 10.818984692818917, 9.130675930681631, 9.238040582830559, 11.001465576699234, 10.151884304015596, 9.386057377507003, 10.493448537154245, 11.567260591373445, 11.230021751348001, 11.186427220363276, 9.815930369149777, 10.59198270578867, 9.220519544523595, 9.965002066698506, 10.47797362577038, 10.786817667151551, 9.690154166078592, 9.439519347888917, 8.848350931306346, 9.3781728933593, 8.268700488420869, 12.115439022134163, 10.796885195623494, 10.014380397675817, 10.73574971170541, 8.560166769341174, 10.794393046590002, 9.134062671122507, 10.418063311994729, 11.012128394570807, 9.42853787160366, 11.356927683680299, 9.900444094277631, 9.22579412385074, 9.386794383707267, 10.957568634868915, 9.694645674051442, 9.158532163752914, 10.12713743528785, 11.362052051005852, 10.005250953602864, 8.346854337527914, 9.996776616030921, 10.329430659315037, 8.372308345358952, 10.885382150718447, 9.058760530517857, 8.628993529470744, 10.711718938616118, 6.543832981652261, 11.581086230427672, 8.476882553653294, 10.741428140655021, 9.81202566227701, 9.288789377794929, 11.401902127389057]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_ae16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_ae16_MNIST = np.array([[17.354467327578657, 17.042268172506326, 17.40694835400147, 16.883255592176255, 16.65502466143769, 16.039633594472303, 15.18380569942337, 13.838095701256382, 13.6411878490585, 13.639226773262536, 11.898493011329156, 12.868276282517744, 11.407206036726924, 11.525453804634996, 11.740707201526101, 10.790203193315676, 11.34395192774905, 10.64029217956473, 11.59677744893592, 12.072397377675616, 10.298866412414995, 10.426261574135319, 12.040760960978899, 11.767660857584607, 11.385993895641256, 10.524566396615075, 11.084164348605952, 11.456355935707302, 12.254864080801898, 9.937288284144701, 11.780007450447172, 11.454643971894495, 10.531734053639495, 11.237845518480935, 9.992580562933407, 12.247149847710448, 11.372243331506699, 10.141139777730633, 10.282702105660285, 10.1585390643646, 9.8266346422288, 10.989571262070092, 9.994801039554929, 10.367891795683724, 10.879959049539266, 12.389619513224606, 12.617326879876774, 10.875798383789132, 11.309012593678991, 10.240772191556614, 9.982847744814206, 10.257602867143342, 11.438397981507801, 11.539668753352329, 10.406244308898016, 10.766912612387005, 10.38566659871728, 9.653635040918758, 9.3673220756666, 11.192703256025991, 11.356454319917198, 9.723734896084848, 9.217143461580724, 9.406698068178564, 9.13013078083301, 10.058229659466777, 9.143025760232842, 10.738484714733932, 8.731833920704114, 9.051934350067908, 9.391574380896122, 10.354198328305952, 11.41911351546528, 10.729578061770784, 9.966301829689867, 10.192854884524136, 10.140809499076251, 11.251415539219261, 9.097875565690716, 10.752420465409381, 8.734407413108007, 10.994675216320935, 9.559354946139822, 8.308752189570685, 11.003693164489391, 9.934765567860396, 8.703704842719095, 9.543374745782106, 10.585317045336256, 9.98236237314253, 9.805887132116636, 10.76611504505452, 8.84994342743999, 9.265365070717655, 8.879399016770595, 
10.307291316366607, 10.662582822718035, 9.064125800739076, 9.376222504925838, 10.12848951536187, 10.975483228243919, 8.775552584439094, 11.009009122225285, 9.272603626790717, 8.704715649969021, 9.61252267368881, 9.100483391276812, 9.066614665519667, 9.989281734423669, 8.602033761933018, 9.408094899733662, 9.538269748027892, 9.008654652749843, 9.194242814602033, 8.725458922014681, 10.107479185599322, 9.442521811361718, 9.087366436118176, 9.266930805024437, 8.57586972182249, 10.0152277200648, 10.751355944699405, 9.55254169451107, 8.743040638243095, 9.315575278943692, 9.38152752224921, 9.966720452811298, 9.584847112133565, 8.089099393795474, 8.790246565148813, 8.53288745553979, 8.687112249342727, 9.729099965690347, 10.40569309666267, 9.207169547160788, 9.97315350078105, 9.582893792426372, 9.680639111697669, 9.77342413284356, 8.659016223480213, 10.175697195282925, 8.871082110951905, 8.489727966356865, 9.394969114776119, 8.212504227339023, 9.263262635173055, 8.399163280010825, 8.549578055665766, 9.014644858839622, 8.62399123382903, 8.634238480977714, 10.073618077956638, 9.391854860991515, 8.708608613451116, 9.539285285485128, 8.595083802441742, 8.731033895537273, 9.11547781143287, 10.080899796224951, 9.623257833992781, 9.11933726084373, 9.893937725659548, 9.246247681217216, 9.272467473319889, 9.398740623244864, 7.349628028032209, 8.159752447878983, 8.82574749049257, 8.525154771329886, 7.501851214751565, 12.126209028830493, 8.50769370866193, 8.277991636705003, 8.370087073623838, 8.416008477989118, 8.727618583799856, 8.645743640407767, 8.706397055235657, 8.502417955135407, 9.929431266833008, 10.533268816691018, 9.252277797684265, 10.319031755352105, 9.417109560106503, 8.67049853269791, 8.684513151003806, 8.937949449570723, 7.581054226831386, 8.282546087261737, 9.611700861294086, 9.202296390821195, 9.023864493541607, 8.609547020418514, 10.157374222079035, 9.46558300882575, 8.83136674819522, 8.785424265569475, 9.515502055170906, 8.205222935121133, 9.592483049592703],\n 
[18.2215099764598, 17.07539436825304, 14.553998643640266, 12.305622699706753, 13.523106217403347, 15.540845041796361, 13.726983344730938, 12.87642847297933, 12.757050662258266, 13.224431572234197, 11.557096471061302, 12.508446454997912, 12.163224381820696, 10.885399615077194, 11.010440641705598, 10.98934075246616, 9.339366214002977, 10.122126700518685, 8.96022672823414, 10.439527660375374, 10.37532364560213, 9.316992407451487, 8.036769364825696, 10.348423881784315, 9.822858732764791, 8.45144515308012, 11.712504901113189, 9.584078780248909, 11.155556226259952, 9.365974053546093, 8.319401011227004, 8.394466227450419, 8.164833821447338, 7.182439308748092, 11.295787575319011, 8.249681020631975, 8.49767091757703, 9.818493784038095, 7.9360208009033055, 7.455463392122169, 7.220454534767381, 8.47844698123061, 8.815357317225246, 8.228186366741918, 7.050260045964252, 8.863998034098914, 9.124506781105257, 8.57828842174249, 7.920929683236485, 8.116343489356497, 8.552411258798665, 7.799270308658052, 10.761275688353976, 9.451183946668186, 9.015572056083506, 7.216947331351472, 8.260400770464452, 7.998577075598228, 5.7839056284983, 7.077246328164254, 8.224734911291913, 7.8332926719482465, 8.251644833257165, 7.404415709127036, 9.111295807109403, 8.999854823497218, 8.160545863970956, 8.077756606928162, 8.776159105207023, 6.07442335360838, 9.30434872202591, 8.472446362218296, 8.707441227619633, 6.983812971030087, 7.562518684077058, 8.598975620441122, 7.527805009747777, 8.685506919774113, 8.629044157544582, 6.692967232225073, 6.332447784713338, 6.189439674753778, 8.684894606603326, 11.507768155931908, 8.127466953908705, 7.2912015341831005, 6.44757780302433, 10.014136977730027, 7.798721195214707, 6.70585627357323, 6.726794221438968, 6.6251131630776605, 6.214302756914122, 9.41099442532437, 7.778811692599605, 7.04674825837227, 7.953893730878024, 8.282992095189973, 7.322622847836619, 6.9002954004836425, 6.457646601888995, 7.650049952083803, 9.23679031556494, 9.026472916310905, 
6.972239835760271, 9.541328599447557, 9.935647331789118, 7.160364395019715, 7.236850475011799, 8.007586568638347, 8.856344142522232, 9.925293608808806, 7.652568683274073, 7.4021148123673095, 8.462086042507433, 8.877335026538491, 8.184719719755941, 7.139807830123084, 8.519918807128533, 8.18065922614548, 7.230255317627048, 7.97357730611355, 6.558266240652757, 9.264231158413324, 5.418541089516621, 7.1475533557119935, 9.594623909763314, 9.077746463301063, 8.555244159469627, 7.184311205504569, 7.7624644422293105, 8.449084016414847, 8.750650478965229, 6.151543205978295, 6.210654890706382, 6.74939399765403, 6.716508482252397, 8.195207428840387, 7.809651634785921, 8.569613414497832, 8.891514968851228, 6.140467651592561, 11.51075469760621, 9.26843312597191, 7.7364603591448375, 7.326442933135175, 8.67741495796755, 6.884613400884021, 7.104576907083857, 8.721752987964718, 7.576399278895593, 7.350036553046979, 8.254354467042848, 9.22010594839898, 8.130987757301606, 7.683749817309081, 7.619310539874576, 7.178535721831786, 8.381513118201532, 6.837275296691584, 8.412977177860506, 7.64078834165367, 6.3443929593613575, 8.210279757855718, 7.093708372830625, 8.729344770520639, 9.185421884335529, 8.359860905981014, 8.544886693622976, 6.809522418103523, 5.885047212918689, 7.520591837008473, 7.669458325126419, 8.608681043921205, 7.877053316374514, 7.734122025501142, 7.08172167052958, 8.136091907638777, 7.373986694367545, 7.413408947233296, 6.566053199008984, 7.984536649806719, 10.847839696213473, 6.370106991304356, 7.859728488439158, 7.618426138341909, 6.901893961316959, 7.093826376598234, 8.67774767583435, 8.3158232663605, 6.050292643215006, 9.707825411404423, 7.447459650937453, 7.875684388982652, 8.303053986270747, 8.03365175716246, 8.927465759384772, 7.083330915738764, 9.195283941681629, 8.245188272864015],\n [17.859050158373645, 16.313695810035085, 14.448466601241563, 13.298451842741393, 13.491787528112447, 14.80840830967372, 12.163531003161914, 11.687319638854435, 
12.123588616359106, 11.854624396167036, 11.06148059356733, 10.938291181658748, 11.278758618953223, 10.410521450836306, 12.430072360429184, 9.917387355057944, 11.321123110738327, 10.418219044771432, 12.031334525256193, 11.258976070013333, 12.285041307445642, 11.199686077640505, 10.838027905875343, 11.418544508403814, 15.655325716846605, 11.978018745720796, 10.979672858302955, 12.58789357207339, 10.969606557057393, 11.280954220620748, 10.512266254953753, 11.101926066088465, 12.386771579634173, 12.235307912383291, 11.135618037966902, 10.191321674752695, 11.877786413636635, 9.116946820103259, 9.973295117056507, 10.628478454290399, 9.678671552660608, 11.660284559094471, 11.691341415872852, 10.050401987857258, 11.002659670361995, 10.714240547230144, 9.177845655535934, 13.11485586641678, 8.246413372370272, 11.435267419548031, 10.683096535145852, 10.298505749106756, 11.687407383951347, 10.514975121380333, 11.9239013790073, 10.359344553361185, 10.684178742208776, 10.35847533943985, 9.469217308574832, 10.998556422430367, 9.690050798466812, 13.021944552278814, 9.9119132310249, 11.81123213949436, 9.312262773562473, 10.574985561569672, 10.018313955345654, 9.294797505097483, 11.095682423183332, 9.501323204141674, 10.183212200182998, 11.225747134684152, 8.87044780709674, 10.503370676067885, 11.40581932351349, 11.393100559859052, 11.084974164736895, 10.049685608383475, 12.2621399346572, 8.420835852631669, 8.18949681265711, 9.833535797095207, 9.090712210358713, 12.889297570868864, 11.465101084572087, 9.140922633405529, 9.77932983324417, 9.167935657591226, 8.519008084695756, 10.252453869043114, 9.748865512429358, 10.627413977896433, 11.164177749275854, 10.684587683027031, 10.674255602675633, 10.87810858904507, 8.81704656758819, 10.691827871139198, 11.342579986540558, 8.71113738098564, 11.140924300858622, 9.380062420878998, 8.70089527704871, 9.788317959403123, 11.694617414328972, 11.393908427851166, 8.399403417151385, 10.477783226823819, 10.668630706023562, 11.008527534167934, 
10.765047209337633, 10.552420736211314, 11.827052891794592, 9.73776159199256, 11.668890232175784, 10.202766904022173, 10.187029663706323, 9.832953283932858, 10.285455792770605, 10.667333334509307, 10.148587383780217, 11.529168239351206, 11.444778179240638, 10.332370144881224, 10.058991462592457, 10.725627480475291, 10.183575863607688, 9.554214360138646, 10.889674215358525, 8.959967122844583, 9.266195207982625, 10.379764858130558, 10.97474334628634, 11.50805174077576, 10.923537040454812, 9.945822848683752, 9.487627506238923, 12.03986487531359, 10.007161077529545, 9.141234313979306, 10.194083569944318, 8.462339646461523, 9.362464481424334, 9.698537189642442, 9.748440258400132, 8.66247795290833, 9.562234002024102, 10.24949191143583, 10.036538959781431, 10.047934431772587, 10.199605551639104, 9.112074277115203, 10.107061434018085, 11.686927835706276, 10.570028741200995, 10.912388661535239, 9.463554761670952, 9.827990360709151, 11.379832907835928, 10.975400289085458, 10.785464413332814, 9.991163771531426, 11.309277403712306, 9.715805377718446, 13.383678148225819, 10.859535700012366, 9.664759899639904, 9.580890965424356, 7.689422227052174, 9.549024183552255, 11.079024821989975, 10.961896076949891, 10.461286935370593, 8.707231377706488, 11.150941956561661, 9.415401252327701, 8.90822181399265, 10.587146771789737, 10.624019702432161, 10.267134609695313, 8.902810056384137, 11.062777168404548, 10.550669910675298, 10.01999275050117, 9.336690757617424, 10.369614882392995, 10.125705353155126, 10.437345014316625, 9.753698451902949, 10.519018704977002, 9.394976692238666, 9.564124237149546, 10.441074967170142, 10.657903377457727, 9.352383605782327, 9.73639230490847, 10.469626935277605, 10.66894147019617, 11.489626953704297, 10.729107673244838],\n [17.477869226417724, 16.923409892593707, 14.680425178997394, 12.8810271881688, 13.624623113433353, 15.498729144466333, 14.410996532036476, 12.439385653405218, 11.824807748151184, 14.549762483555229, 12.52025094702346, 9.878390082320776, 
13.353970191726475, 14.201123102594767, 10.485695152725818, 12.469841896410358, 11.785218989325013, 10.885649961244868, 11.862127107410245, 8.958517619776602, 10.21308258417946, 11.772420780101367, 11.350674914150671, 9.229652936854443, 9.350645878872323, 9.73417234976586, 10.917058280366632, 9.352920439903674, 9.116767310551115, 17.1341466183265, 12.744972996168254, 10.905428641350687, 9.868705688427436, 11.230994130057073, 10.40909538114941, 8.927450449159995, 8.64809934246907, 10.509855101545554, 9.018853645416415, 10.899434365702795, 8.993824888288769, 11.09355091355732, 9.78221138322432, 9.068457386867905, 7.9532532524764985, 13.63136104779954, 10.547030902645668, 8.55627211437192, 9.315263943948516, 8.267963958519648, 9.655133830489673, 10.610902524839648, 11.938930684277052, 10.442496188110441, 9.086091557317266, 10.523320402876738, 9.451870341084566, 9.363207873357467, 8.605438897818873, 9.259341268891594, 8.837066570124096, 9.520997546593952, 8.25459369051566, 8.352634575191372, 10.973012193124614, 8.516514267420478, 12.055314915587385, 7.799081514691986, 9.848780567429356, 9.773811432857535, 7.810066515512631, 11.298129062909965, 9.19187529815522, 8.821294019247189, 9.373848863130172, 9.669269844772323, 9.30451155893822, 10.846884854361333, 11.473225282482497, 9.7785113133521, 10.407485722119814, 8.466089651866756, 8.60866071209688, 11.375138543297105, 8.705820527310673, 9.978659292160323, 11.564462067479617, 9.911976058440146, 9.118941687650787, 7.722303035111853, 11.549820763881613, 9.817984587364052, 11.525743034479476, 8.66874519949241, 8.802439919851917, 10.081202063003442, 9.465766179009902, 8.944418735139623, 9.596773223228345, 10.399297834488916, 10.255009700581997, 11.933882356940535, 10.274235492887636, 8.942806196291254, 8.961595442394964, 9.809489700141466, 8.227384444324922, 7.258418352460358, 11.29610103411897, 10.619335991457246, 9.803534013106253, 12.298193757933678, 9.993401276289536, 8.701355117710138, 9.551607828075836, 
9.362683183637984, 10.904533024362392, 8.81368207664315, 9.43819992775753, 7.873818608426682, 9.46862326830045, 9.71567817279831, 13.984939786881863, 9.79790449779715, 8.813848982286368, 11.048520086471337, 9.188515583848583, 10.715871094508985, 8.978913662157714, 10.06537395383198, 9.695788683252461, 9.556267128671744, 8.748611432264772, 10.677485344181258, 11.470763605819359, 12.072838301227586, 10.379419141567304, 9.497432368680737, 8.874028268762219, 7.679188260676408, 9.398232926939107, 10.024369151173415, 10.674012057102708, 10.177287445670794, 10.460777362059792, 7.863296722541688, 11.109118111927568, 10.23554757993483, 11.819215586920068, 10.023765273884992, 9.520225855535234, 9.227950026454442, 8.902931208993143, 10.396389228983242, 10.609817418964726, 10.108312136877915, 9.613548945391354, 10.401432036507597, 9.574928407577804, 8.497230471033916, 10.584540408982733, 9.769260727069263, 9.361016009423052, 8.572894819086242, 9.946522365694737, 9.04798158019822, 8.051284312196598, 9.548014913556012, 9.313116082496107, 9.710222808078198, 8.170177762083872, 13.319108650979388, 10.204574979219975, 10.60152370136771, 10.542469079595556, 11.326339366997628, 9.59020690117027, 9.261926826272623, 10.308410911920863, 8.101140481718296, 10.569101777748422, 8.52297072840074, 10.423688872818895, 9.315426102482768, 8.016388794085461, 10.199176020724277, 9.448372821932574, 10.784011263128235, 10.354861981990755, 10.454927775255891, 10.166802839371554, 10.666712282779185, 11.193582247915906, 11.633631947505178, 9.528969714771767, 9.995404610119527, 9.216892378839669, 10.362141171764815, 7.6446199095801575, 8.708732177794332],\n [17.784778572764985, 17.229550169768114, 17.11865277828455, 15.007253312589256, 14.93838530448627, 12.915197413343982, 16.289980215404082, 12.721903102294194, 12.46040375894754, 11.62198654969676, 13.185002249811923, 11.989804798289935, 11.766333756587969, 8.971219400248145, 20.13128200094444, 24.367381057992016, 16.868881581577096, 
17.052825136151718, 15.994015894968923, 15.008922960604076, 12.144659001511501, 12.329678258901028, 12.666319623054486, 14.084776543472003, 13.154308151028845, 12.082938505322875, 9.664317135655581, 10.97517147558041, 12.99143678126779, 11.758765147066198, 12.062317839550769, 11.706288884139223, 11.203331352435114, 11.617228518123785, 9.75497128748522, 12.900930124812547, 11.55924114005623, 11.0816017480015, 11.740497140474714, 10.695073120038757, 9.703147377730485, 11.768050126611975, 10.178217027748692, 10.730378132207456, 9.791243966134035, 12.541255036100448, 10.656300764112098, 11.188765046089152, 10.423073312666965, 11.09791246968183, 10.641442930737332, 10.51309665058049, 10.28776146741658, 11.028004288408498, 10.454851980977235, 9.173405998967866, 9.96754637620247, 9.675652090524832, 11.33103827250655, 8.63308442436086, 10.920668927654592, 10.851557844547376, 10.035946213469797, 11.233826343414387, 11.850900088838877, 10.342916514184328, 9.751538452423258, 11.147611596602589, 11.265723219262341, 10.418040930870788, 9.946590076448544, 8.975660094399249, 9.113837797818968, 9.860212940722345, 11.965640262268836, 10.323453250780132, 9.580681494012781, 7.91154058149559, 9.838636058627019, 8.852412524085112, 9.925600069997344, 10.71576986296795, 11.307372947322873, 9.447219558443498, 10.123267568820076, 11.758289890811328, 8.921095936446779, 8.64678138473268, 10.061015135443945, 9.747543448330001, 7.715939755332897, 11.075105492088275, 8.89020687702349, 10.465771514144809, 9.355622900085748, 9.28133082710223, 11.300144587564407, 10.766516060699363, 8.516814949377375, 10.4557621831674, 10.072768857189988, 10.754976946016432, 9.711365261847847, 9.198095540006662, 8.889094438507572, 12.604305266574077, 7.30063974001526, 13.002908028679304, 9.994416621022099, 10.612834807828616, 8.94412565386079, 10.79345732513219, 14.05312185050333, 8.695822318852562, 9.932363728968925, 11.492471172738048, 10.345564118261775, 9.724956496709261, 10.397993643297507, 9.392729553641722, 
10.973528230744563, 9.143156940032158, 9.257446996986603, 9.447442405267939, 10.145665563182348, 8.534545306075106, 10.915970351064942, 12.291800475143358, 9.53309835082488, 11.33761829076702, 9.635122881544977, 9.425665199019702, 9.240620472662078, 9.235256016637988, 9.708373220698622, 11.006706849474014, 8.98379185601443, 10.285423814474871, 9.107385324297423, 9.646792368633731, 9.971053565197701, 9.361748988832598, 10.400038880531321, 10.30211747620635, 10.818984692818917, 9.130675930681631, 9.238040582830559, 11.001465576699234, 10.151884304015596, 9.386057377507003, 10.493448537154245, 11.567260591373445, 11.230021751348001, 11.186427220363276, 9.815930369149777, 10.59198270578867, 9.220519544523595, 9.965002066698506, 10.47797362577038, 10.786817667151551, 9.690154166078592, 9.439519347888917, 8.848350931306346, 9.3781728933593, 8.268700488420869, 12.115439022134163, 10.796885195623494, 10.014380397675817, 10.73574971170541, 8.560166769341174, 10.794393046590002, 9.134062671122507, 10.418063311994729, 11.012128394570807, 9.42853787160366, 11.356927683680299, 9.900444094277631, 9.22579412385074, 9.386794383707267, 10.957568634868915, 9.694645674051442, 9.158532163752914, 10.12713743528785, 11.362052051005852, 10.005250953602864, 8.346854337527914, 9.996776616030921, 10.329430659315037, 8.372308345358952, 10.885382150718447, 9.058760530517857, 8.628993529470744, 10.711718938616118, 6.543832981652261, 11.581086230427672, 8.476882553653294, 10.741428140655021, 9.81202566227701, 9.288789377794929, 11.401902127389057]])\nQCNN_pca8_FASHION = np.array([[22.525038753546095, 13.216204078372108, 14.242333284763157, 13.731808981059574, 10.256464160984425, 11.597222883858867, 10.629497090660422, 8.354255159471522, 11.015406221067154, 9.150290989104242, 7.055057703200601, 7.979878556617112, 10.464926326253137, 11.487814061568718, 10.545401217390626, 8.639468643092243, 9.534301271716393, 8.623771545898558, 7.499577254864702, 7.579495572112449, 7.718862052724189, 
9.601245720679604, 8.322075933633224, 6.831914887647821, 11.26468868413764, 9.223422826113929, 6.687336887835581, 6.05250629922452, 5.996097181167931, 9.347602834042991, 8.366108917028392, 9.876665794631448, 11.776498751279387, 8.066237030699073, 8.16987741244548, 8.340278473820014, 4.975928220958971, 11.364554972409007, 14.66370299168704, 16.83247231471571, 9.88090943039278, 9.769800872699852, 8.68432608767399, 8.322927041352667, 7.672598343063408, 7.921586829208608, 9.32086894700747, 12.023192892636807, 7.089971604470009, 7.29897894559643, 9.844104627635048, 5.64420431720263, 10.240854773914988, 7.2609435675747624, 7.728901901782359, 6.567222640988218, 9.66234901610107, 8.86525744030375, 8.056122389029694, 8.502949943834864, 7.005259070022256, 6.928620503689856, 7.9952200829980615, 9.026569861556471, 6.746994660916904, 9.126169673794001, 7.970434161976563, 9.264898190989665, 6.81506118635366, 11.22259732381275, 6.989436300699322, 7.82434849624806, 7.231512873108427, 7.297795742310438, 7.524643644353264, 7.716996179577135, 9.267636517028762, 6.417265269116725, 11.184065873170896, 7.201238792924387, 7.355536747579121, 8.115072872290968, 9.089261381909838, 6.406261701867933, 8.155291144492328, 6.393594623637436, 7.5465214948725645, 7.397922692401812, 8.206104136960638, 9.077282946768998, 7.071744514667186, 10.620950430862155, 8.166894349463814, 5.750109941169301, 8.358542774329255, 8.48699098579089, 9.055645909823243, 7.703032625073696, 7.618089715127769, 6.272625062025867, 6.890965985900987, 6.021174122678981, 8.952571749630682, 7.057460370732991, 6.005074303405572, 5.746845159316903, 6.7415072987593145, 8.12271773034108, 6.771807420668886, 8.371000724795552, 6.257207711977463, 9.975283506289264, 7.7329906809713185, 6.734969968129433, 8.250923759687932, 7.172987054031528, 8.42833132136366, 7.634754990345493, 6.217396001593545, 9.23048098887742, 6.994689247739925, 8.34463019828312, 7.860640520545623, 5.708505765559732, 8.457072795244176, 6.523477604635987, 
8.665507466384671, 6.549298186515777, 7.5354052650190635, 9.817371870022777, 8.045434445375076, 9.679307050429115, 6.945799566349912, 9.40951091780707, 7.072212843634541, 7.7084286652384595, 8.93316800866518, 7.294761127228796, 8.305350131391318, 6.933892493070283, 7.050261917081921, 7.939274603349584, 7.577748162807357, 6.954548024673507, 8.571440211914746, 10.099027659126538, 7.720647161287194, 8.398373259262227, 8.431715246092898, 6.348976676554399, 10.066936465504831, 8.368682689492163, 10.101003359934246, 10.65069536852026, 8.476020233676149, 7.653580350535236, 9.642386733890197, 7.080016556115286, 9.816825771957152, 5.976440078178672, 7.554128656654008, 13.50858736722108, 8.626391836378414, 10.416045100984734, 9.06544022460719, 7.140654280417423, 8.88419932444197, 8.52919360858265, 7.543413941746766, 5.696479896825367, 6.794938326455062, 5.958633880531308, 7.177971890207004, 5.202091922449483, 7.4354675052351835, 11.484722034863132, 7.9227968918894645, 8.865919328851648, 6.448736287097655, 10.247949863181844, 6.8979073963101225, 5.533684741208312, 8.515998004916838, 10.938981439721669, 6.664134355574527, 10.089937381194728, 9.058234482882341, 5.557306058620261, 9.117458694651365, 7.194827997380303, 9.710673117614753, 7.130021934886105, 5.90622656077336, 8.533669546093398, 5.384961665426538, 6.625505824606782, 7.581425096097377, 8.33684118616696, 8.18732737247452, 6.719476175769455],\n [17.7390694041736, 16.69302736531633, 16.015962155096005, 13.915731645988588, 19.108460382811426, 12.558377283129245, 15.925960357155105, 17.634155343225352, 12.508967374665424, 8.742771667788107, 13.889164504533712, 8.29130392999017, 12.732433489330438, 15.345015087528573, 8.405457319201473, 11.384495513243442, 8.306644155466373, 5.183727358720002, 7.254653378729781, 9.29606810930633, 10.16507039270581, 8.568016593424066, 9.256334781282167, 8.351706138038418, 10.585911610396506, 9.23513158472319, 8.865759458048984, 10.620036612262123, 11.811534594927743, 10.315762413710752, 
10.395430303943652, 9.4565779250801, 9.069417403356804, 7.323652597068435, 9.30645909950589, 8.62443939331282, 6.697873107126633, 9.025334289487134, 9.033396607882102, 8.154409299576363, 10.052705504033804, 8.970836209169558, 6.343586825241775, 10.300083991530437, 11.243630495652884, 10.138964709153353, 10.222569468688938, 7.957440255750639, 9.741410783443582, 11.794396389372874, 16.188971046834972, 22.051250589369424, 13.555668909099044, 6.9996963067667615, 8.825769595525601, 5.778717160176895, 10.072676182508351, 8.741183841900742, 6.927391636328303, 7.602979980153887, 6.400469919301807, 5.625963956404877, 11.175069358016986, 10.96439428635679, 8.999251593034455, 10.526797395772745, 11.26861826923098, 8.133956324903444, 6.404852509093303, 9.72150350480961, 8.736223841195953, 8.76909224677128, 7.980710390195934, 7.068740595083661, 8.549782159380722, 13.651624505662621, 14.622485641584069, 12.18889566292926, 11.019893374444724, 12.324125496979695, 9.076866292662165, 8.086971080444213, 8.378722825218773, 7.012954671001092, 8.747416874771236, 8.448107012970137, 11.746593862648439, 10.300690522746669, 6.291353478279032, 7.290528356057782, 8.238654985361286, 11.689286921305046, 7.968468882117411, 7.499784824408974, 6.5310953530802625, 8.054832766573403, 8.120487718585334, 8.846042313816977, 7.351303717882085, 6.869250406432281, 9.442239089654244, 8.005456123175835, 9.324808624143254, 7.785465848504737, 9.946305435284028, 6.198823855420584, 6.950138368886841, 5.7535455529946, 7.850656719506649, 7.912823027283426, 7.882509383989615, 8.101284579470002, 13.733151129665092, 13.173823043804271, 14.16122446433704, 9.183726004663148, 8.462717995225486, 7.255351865133861, 6.949024275108245, 7.769081531931598, 7.3176686021574895, 6.593229961119606, 7.091641990473536, 8.793007379799281, 9.261834101550146, 8.514363160603418, 6.59046397448622, 7.100299291623158, 8.283562980164568, 10.212712032786667, 6.994694528861405, 6.86930501657636, 6.920288548033619, 10.930251909615015, 
8.296885334381749, 7.486751947526672, 9.371064009048943, 11.008555804860544, 7.139800307634327, 8.282669475844814, 8.059421132069483, 8.35370587206283, 8.317274014263797, 7.921367172602265, 7.1931181036524965, 10.348278306004437, 11.087039685610435, 6.994390376503345, 9.812991073429249, 10.12939638747413, 7.8958049976729745, 7.890181478067051, 9.136196551450473, 11.054502153348668, 7.585372503000604, 8.462399788895482, 9.334662867039425, 7.607353579590679, 10.228316509327541, 7.615392265319403, 6.919186256853584, 13.351308645034674, 11.782285097274341, 7.126007394687756, 7.597884876128463, 8.002507932554183, 9.830042259582296, 8.49253993520008, 7.712157163932788, 7.8145992642416475, 11.109034511332085, 9.284533991380302, 10.063228965398249, 10.188867855678469, 8.766476890574666, 7.287524267459958, 8.588189299622748, 9.092493191908213, 8.733564387329482, 8.183887575467459, 10.136963777705297, 6.062920615819733, 9.104806299636849, 9.237583176939097, 6.387732704472513, 7.908693314052336, 7.78250515316508, 8.591938225399545, 9.4545404448386, 8.929508634445481, 9.321019259513987, 9.770552494739624, 8.303756224114164, 6.062965941067229, 8.169982561715305, 10.782967244519545, 10.40550257727203, 12.818679304157627, 8.77097260887961, 8.428078293774403],\n [18.367461046843662, 16.814690289224092, 15.981392435271342, 15.590717418550675, 13.78459106284139, 12.790219028115448, 13.533709609197576, 12.504910318004887, 11.519180104795977, 11.623250056122005, 11.08706185381791, 12.575454349271059, 11.748010604564296, 12.30557746726062, 12.363188272842512, 10.538404994438869, 10.399288793348198, 10.047875001219923, 9.175282555491393, 9.808922294258727, 10.651495437180124, 9.987506431828303, 8.733061665250075, 8.66082214452, 11.737708178690502, 13.156088903639994, 10.10252711605426, 8.9898848223011, 8.042067295906783, 10.201684126775787, 7.276274549459411, 9.776851348696908, 7.308253446411992, 6.268359584680627, 9.786758829752637, 9.822812528580553, 21.102485233803723, 
10.182822692922318, 9.005250227022946, 10.813137758612603, 10.067646039170684, 9.007038385693976, 8.729118831565776, 6.50828673393515, 9.81940489949436, 7.55788070563999, 7.516731011441329, 9.69428100832395, 8.162043803598182, 15.12310101432996, 25.602664629203403, 11.57846323702489, 8.50588898860467, 9.416045225759573, 8.137142940309374, 8.391584702670242, 7.68877970547477, 9.713949650331767, 6.776448117354238, 7.751623011355878, 11.012291743428953, 7.934612706704716, 7.251927647118261, 8.577227442757136, 7.666932556526185, 8.109073484598923, 8.77739169372259, 7.2504477738834145, 11.040863694389396, 11.837115775839912, 8.63649387578345, 7.379581468513879, 11.698761323742417, 12.494795227184659, 10.962816044455694, 7.9377480215508776, 8.223347656154361, 8.542184616730811, 7.072095482371086, 9.1566728806392, 9.754211605178359, 8.684789008171013, 7.072379867355158, 6.922184291224992, 7.459099245118162, 7.857495260675934, 8.569840574856958, 6.804125010541176, 7.414956426765643, 9.16491644362183, 8.440616025408652, 7.733879392370894, 7.513613803899214, 6.669831741721616, 7.875771853212181, 8.985310389506006, 6.540505926110707, 9.074357092223089, 8.808181481420078, 9.570178078146155, 9.127602817298392, 9.735530947025763, 14.431686406393263, 7.575022250482934, 7.670016909058494, 7.520561076510924, 9.910063717979744, 6.203583740647947, 7.109093835850977, 8.521636967911903, 10.705489443653804, 8.726526712504633, 8.201225860205067, 10.053192712200353, 6.836634292616073, 8.293300713207689, 7.101735556004384, 8.41049294800333, 8.320763186608552, 8.257854422900646, 7.395978239022934, 7.281442277222249, 9.219152699225866, 8.728349811968233, 7.52865945880653, 8.95554753995012, 7.720504504276203, 8.660612180613377, 10.719747916795155, 11.4779635648861, 10.056847413559757, 9.10494674635443, 5.578961914707252, 10.363806330584556, 8.844941792603889, 10.002434671671333, 8.337607227541238, 8.746246374831523, 7.98565803466582, 6.91621585863724, 7.681669437432087, 8.766871420623545, 
9.86461167123844, 8.318437708774942, 8.464584528609025, 8.572372849049604, 6.779383972650864, 8.551859162771402, 8.055829547756252, 8.059820508098154, 7.819569938615952, 8.578090907130207, 7.289051562500602, 8.869052396011371, 9.467678240312658, 7.840343157373925, 5.90387811979723, 6.693065782370631, 8.575151867096867, 7.639307707368244, 12.523584908375957, 8.895329648204063, 6.32132197892887, 7.979330476993164, 9.29407257685631, 7.012176486492262, 10.298613437757856, 8.5665291970282, 6.528734111201251, 8.44106342820268, 8.504399606165265, 8.482924732127376, 5.435306925857987, 6.189955844680422, 8.601281799667708, 5.985383659384594, 5.6531895118814655, 7.196515786106366, 8.10792270615638, 6.486626226419606, 9.42449182798801, 8.967841658571222, 7.336021583409538, 8.377165668978757, 11.085709046494918, 7.219729845568762, 10.187155238171679, 9.924843025818602, 11.310424918196789, 11.066330330957877, 8.17928717118768, 6.240171463928257, 10.51245796400129, 9.447930369525764, 5.548840519130706, 8.346987007460342, 9.63205571542674, 5.08939032785377, 8.38190783372624, 7.181618319034054],\n [20.093471578188247, 15.240470002676348, 14.970623666089363, 12.460865166263773, 11.402402512465786, 11.625559679963047, 12.228907209627307, 11.318697860594215, 15.668919990519937, 11.22676614676285, 9.398424135648463, 10.359603993439656, 8.952609754405902, 7.710901607786571, 8.465206831502085, 12.251398154055844, 12.706337253014308, 10.002994688877, 9.7145856741547, 9.653625811111327, 11.662995254791763, 13.585316934694015, 11.690757518173372, 11.64803540603652, 8.92595441291016, 11.605328771824052, 8.520059399790982, 8.983101008253845, 8.605895552485649, 7.669932812518325, 6.682036989639228, 8.207969790721224, 8.350366090861892, 8.849437732221492, 7.437045884941238, 10.661703550696556, 7.83567157655003, 11.015211554852268, 4.951625652298421, 8.263465755564768, 8.310540747173057, 7.993691336039781, 11.067847480255251, 7.353736182726404, 6.433828587733538, 8.664227596231523, 
7.182233937098731, 9.29964986639889, 10.299767272629694, 8.082208002699081, 7.33695561050209, 8.700829573931493, 8.588516433580843, 5.494863317399205, 8.22862159909139, 9.328733522626356, 7.312425151226673, 6.1965848735213305, 7.393234461590516, 8.491240036879667, 8.147709150161932, 7.7820174056932805, 9.455173615999149, 7.066963901542219, 6.327041779240348, 10.45611656434066, 6.810050597841734, 6.89811173304331, 7.433257791766566, 6.241208907127381, 10.350180755011433, 9.563424028869548, 6.856215839029643, 9.989364583683669, 8.381648854533712, 8.851544007331041, 6.437612677476114, 5.992635135024479, 8.062439709878685, 7.229721589241544, 8.222586495508642, 7.342789503301448, 7.149462830201674, 8.662792284389258, 6.653727711474329, 9.767111624222236, 9.85886918589017, 5.37134830919524, 8.958539041873353, 7.875220830394933, 6.553369864422299, 7.629990949523373, 7.871841873645017, 5.049263185300207, 6.326912044835468, 4.663349146887186, 7.54766275795534, 8.151973132859666, 10.042591436525045, 6.614324991452069, 8.26423542737398, 7.753493878072397, 7.140082602370002, 6.8957585259184775, 8.019552156781454, 6.759570706777052, 8.596990061093345, 6.682186627269535, 8.469781477233338, 6.3892643707534456, 9.380928877543921, 7.5703786505548685, 8.538442509644339, 8.3489002910439, 10.426835204967038, 7.806652078113572, 8.087960441160208, 7.095134465250964, 10.430570641677104, 6.503026399717661, 6.962295357490432, 5.09420339843243, 8.91603756530321, 9.221960185554856, 9.656195326708692, 8.567037704619551, 10.601443046821723, 6.4270593896404815, 7.4639907379601995, 9.488497159230707, 7.314692583348785, 8.255042076548941, 9.254447430387977, 6.898069716289567, 8.269692002040271, 7.530596065929002, 10.007947783373663, 7.563517529428557, 8.740385928547031, 7.942807255997364, 8.456138249643518, 6.8977962387131955, 7.38527841125858, 7.807825756178889, 7.7760241913466, 7.819173787908539, 6.951426877420532, 5.713321617844715, 7.9034723396171325, 9.905599555223858, 8.822528999664158, 
8.176893679756958, 6.532140354106864, 9.827145254090114, 10.717437175985106, 8.705208318712303, 7.0163608695084285, 8.17408228521044, 5.549877373285826, 7.125388466950287, 7.518790392587565, 8.043784032203314, 7.576030790566522, 8.931919582667708, 10.391746880104884, 8.555499693805128, 6.906117900900218, 5.307627431786376, 8.895677769055874, 7.971891098774674, 7.851258846451551, 7.062932830479956, 8.175518443040652, 10.710954676865372, 12.627568417483477, 6.988203367911942, 7.580563478079185, 10.032906036712472, 8.878493878792403, 8.534450948131887, 7.423603066137307, 5.597998136906411, 6.8880688214721095, 7.110450336649771, 9.155278495119608, 10.994658772992281, 8.308284552711857, 8.19908410344054, 7.594652860873574, 8.365859048782031, 8.450098249930555, 7.562631713666295, 8.379220485739266, 8.409706136076677, 8.20846632211791, 12.743653517630978, 8.503179957531588, 8.988792382374275, 9.427528339614407, 5.611620393638202],\n [15.99844421332487, 12.667360153096515, 14.650280572397145, 27.91091248829037, 18.45657752107614, 16.50176189875578, 15.367774930514772, 18.715170566885117, 15.368910388020334, 15.0940459003475, 13.561311961671016, 12.627819749944042, 7.069785858073152, 12.756675470120433, 24.72996801435937, 14.685659433924377, 13.808240157717174, 9.307699271225761, 6.418619770777121, 11.592419795168352, 10.865675592684639, 7.840543425963905, 8.212471950824648, 11.466680456497453, 9.08228827816499, 9.448736282108374, 9.529999322728214, 14.512065877668265, 10.575824517906415, 9.63801520027799, 8.250970859954954, 9.520417143677042, 8.3880351390895, 9.372922036085365, 6.257278679162988, 5.882673523841979, 7.511435878698112, 8.45264982843566, 8.907779099417397, 5.665874434056404, 5.482918209615173, 8.078769099224154, 7.343789848075083, 9.365652680019831, 8.239245793842764, 4.605717605344383, 6.797613952478779, 6.793132642990523, 9.80491796590601, 8.582030535604208, 6.294097665208823, 7.364681420924376, 7.406233745336385, 7.793954967857749, 8.52658003806355, 
8.23113431080118, 5.460016309198461, 8.58561193544833, 5.213431838045249, 7.536560070117995, 6.916196690002716, 7.7029980507562295, 6.87405906959778, 6.223273064784113, 7.2509913361491645, 8.700053412744857, 8.929041207072977, 7.690969824508415, 9.626657393735643, 8.50760479797336, 8.46282833453814, 8.641117863542863, 8.67183377690492, 7.062811099284048, 7.507891403074677, 6.095616171052702, 7.377366857646905, 6.915264122726458, 7.500192060614502, 5.8123830924749464, 9.420112732030095, 8.395284945014676, 6.777311068474881, 9.151535543675958, 7.310999692218685, 9.756565224162538, 8.918393742781577, 9.388115988654082, 6.3845041907734466, 7.1665740307830035, 8.365378747201166, 6.821946311622847, 7.23647092353758, 10.745497259385735, 8.574445464830248, 5.223885354330731, 8.779773036561195, 10.126248313227205, 8.468587432212006, 6.955970498558412, 9.482568830701497, 8.792871652481686, 8.56242982144813, 7.885731476374264, 6.467257717095384, 8.571307961991808, 6.19491122062597, 5.745901765931287, 10.022014902340047, 6.550525934030427, 7.126731474199514, 8.95218009121349, 8.214242037686004, 8.04614210839355, 8.760727777009127, 10.18789680380936, 7.3890750225671304, 7.543749718817351, 9.371278699100243, 10.857580009353113, 8.390490080655963, 7.605047182918841, 8.63981174996564, 6.145216913049062, 7.314319768511249, 8.32560445978138, 6.118805029519846, 10.511607543619892, 8.321669503647568, 6.658167651389458, 6.880549194675298, 9.138068239415487, 8.53392035065545, 8.910645757375073, 7.412488338415838, 7.16834364984102, 6.13443365507289, 6.268930695997683, 5.592307538523584, 6.358025840761492, 5.748854188713233, 9.954916677076465, 7.6665301078302415, 8.648772680793956, 10.628411273855844, 7.1139499781300275, 7.299358091168056, 9.750148484992668, 4.766687237131658, 8.627381191592773, 5.89059709066853, 8.470432290868324, 10.921838592619107, 6.052844846708801, 8.604871685058631, 8.989237189885491, 7.121322987037773, 9.046093383564312, 7.827381427826947, 8.01500847479938, 
8.437063744522305, 6.721055838168828, 7.690978297565653, 6.044905911565294, 9.033002163304815, 9.761717956561986, 6.778111406425475, 11.733249942465521, 8.423433046531942, 7.821584434801706, 8.16091659399263, 5.118021061064587, 9.18093918737943, 10.649667043913631, 8.139365019567581, 8.806120157260205, 7.855023086973385, 10.298041368109626, 7.407850252902208, 5.836931345496306, 9.407608978204072, 5.325721414024552, 7.428444024347508, 9.204052068805469, 6.547964812463914, 9.793049514109743, 6.808459590608135, 7.465577955076744, 9.39662379085216, 9.663180477615683, 7.588697782982331, 8.357209101026747, 7.913356420131625, 11.87804247926184, 7.869507040729936, 7.818227322334145, 8.15491136058747, 5.9684860356460625, 8.316127392894865, 7.240518632079241]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_pca8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_pca8_FASHION = np.array([[22.525038753546095, 13.216204078372108, 14.242333284763157, 13.731808981059574, 10.256464160984425, 11.597222883858867, 10.629497090660422, 8.354255159471522, 11.015406221067154, 9.150290989104242, 7.055057703200601, 7.979878556617112, 10.464926326253137, 11.487814061568718, 10.545401217390626, 8.639468643092243, 9.534301271716393, 8.623771545898558, 7.499577254864702, 7.579495572112449, 7.718862052724189, 9.601245720679604, 8.322075933633224, 6.831914887647821, 11.26468868413764, 9.223422826113929, 6.687336887835581, 6.05250629922452, 5.996097181167931, 9.347602834042991, 8.366108917028392, 9.876665794631448, 11.776498751279387, 8.066237030699073, 8.16987741244548, 8.340278473820014, 4.975928220958971, 11.364554972409007, 14.66370299168704, 16.83247231471571, 9.88090943039278, 
9.769800872699852, 8.68432608767399, 8.322927041352667, 7.672598343063408, 7.921586829208608, 9.32086894700747, 12.023192892636807, 7.089971604470009, 7.29897894559643, 9.844104627635048, 5.64420431720263, 10.240854773914988, 7.2609435675747624, 7.728901901782359, 6.567222640988218, 9.66234901610107, 8.86525744030375, 8.056122389029694, 8.502949943834864, 7.005259070022256, 6.928620503689856, 7.9952200829980615, 9.026569861556471, 6.746994660916904, 9.126169673794001, 7.970434161976563, 9.264898190989665, 6.81506118635366, 11.22259732381275, 6.989436300699322, 7.82434849624806, 7.231512873108427, 7.297795742310438, 7.524643644353264, 7.716996179577135, 9.267636517028762, 6.417265269116725, 11.184065873170896, 7.201238792924387, 7.355536747579121, 8.115072872290968, 9.089261381909838, 6.406261701867933, 8.155291144492328, 6.393594623637436, 7.5465214948725645, 7.397922692401812, 8.206104136960638, 9.077282946768998, 7.071744514667186, 10.620950430862155, 8.166894349463814, 5.750109941169301, 8.358542774329255, 8.48699098579089, 9.055645909823243, 7.703032625073696, 7.618089715127769, 6.272625062025867, 6.890965985900987, 6.021174122678981, 8.952571749630682, 7.057460370732991, 6.005074303405572, 5.746845159316903, 6.7415072987593145, 8.12271773034108, 6.771807420668886, 8.371000724795552, 6.257207711977463, 9.975283506289264, 7.7329906809713185, 6.734969968129433, 8.250923759687932, 7.172987054031528, 8.42833132136366, 7.634754990345493, 6.217396001593545, 9.23048098887742, 6.994689247739925, 8.34463019828312, 7.860640520545623, 5.708505765559732, 8.457072795244176, 6.523477604635987, 8.665507466384671, 6.549298186515777, 7.5354052650190635, 9.817371870022777, 8.045434445375076, 9.679307050429115, 6.945799566349912, 9.40951091780707, 7.072212843634541, 7.7084286652384595, 8.93316800866518, 7.294761127228796, 8.305350131391318, 6.933892493070283, 7.050261917081921, 7.939274603349584, 7.577748162807357, 6.954548024673507, 8.571440211914746, 10.099027659126538, 
7.720647161287194, 8.398373259262227, 8.431715246092898, 6.348976676554399, 10.066936465504831, 8.368682689492163, 10.101003359934246, 10.65069536852026, 8.476020233676149, 7.653580350535236, 9.642386733890197, 7.080016556115286, 9.816825771957152, 5.976440078178672, 7.554128656654008, 13.50858736722108, 8.626391836378414, 10.416045100984734, 9.06544022460719, 7.140654280417423, 8.88419932444197, 8.52919360858265, 7.543413941746766, 5.696479896825367, 6.794938326455062, 5.958633880531308, 7.177971890207004, 5.202091922449483, 7.4354675052351835, 11.484722034863132, 7.9227968918894645, 8.865919328851648, 6.448736287097655, 10.247949863181844, 6.8979073963101225, 5.533684741208312, 8.515998004916838, 10.938981439721669, 6.664134355574527, 10.089937381194728, 9.058234482882341, 5.557306058620261, 9.117458694651365, 7.194827997380303, 9.710673117614753, 7.130021934886105, 5.90622656077336, 8.533669546093398, 5.384961665426538, 6.625505824606782, 7.581425096097377, 8.33684118616696, 8.18732737247452, 6.719476175769455],\n [17.7390694041736, 16.69302736531633, 16.015962155096005, 13.915731645988588, 19.108460382811426, 12.558377283129245, 15.925960357155105, 17.634155343225352, 12.508967374665424, 8.742771667788107, 13.889164504533712, 8.29130392999017, 12.732433489330438, 15.345015087528573, 8.405457319201473, 11.384495513243442, 8.306644155466373, 5.183727358720002, 7.254653378729781, 9.29606810930633, 10.16507039270581, 8.568016593424066, 9.256334781282167, 8.351706138038418, 10.585911610396506, 9.23513158472319, 8.865759458048984, 10.620036612262123, 11.811534594927743, 10.315762413710752, 10.395430303943652, 9.4565779250801, 9.069417403356804, 7.323652597068435, 9.30645909950589, 8.62443939331282, 6.697873107126633, 9.025334289487134, 9.033396607882102, 8.154409299576363, 10.052705504033804, 8.970836209169558, 6.343586825241775, 10.300083991530437, 11.243630495652884, 10.138964709153353, 10.222569468688938, 7.957440255750639, 9.741410783443582, 11.794396389372874, 
16.188971046834972, 22.051250589369424, 13.555668909099044, 6.9996963067667615, 8.825769595525601, 5.778717160176895, 10.072676182508351, 8.741183841900742, 6.927391636328303, 7.602979980153887, 6.400469919301807, 5.625963956404877, 11.175069358016986, 10.96439428635679, 8.999251593034455, 10.526797395772745, 11.26861826923098, 8.133956324903444, 6.404852509093303, 9.72150350480961, 8.736223841195953, 8.76909224677128, 7.980710390195934, 7.068740595083661, 8.549782159380722, 13.651624505662621, 14.622485641584069, 12.18889566292926, 11.019893374444724, 12.324125496979695, 9.076866292662165, 8.086971080444213, 8.378722825218773, 7.012954671001092, 8.747416874771236, 8.448107012970137, 11.746593862648439, 10.300690522746669, 6.291353478279032, 7.290528356057782, 8.238654985361286, 11.689286921305046, 7.968468882117411, 7.499784824408974, 6.5310953530802625, 8.054832766573403, 8.120487718585334, 8.846042313816977, 7.351303717882085, 6.869250406432281, 9.442239089654244, 8.005456123175835, 9.324808624143254, 7.785465848504737, 9.946305435284028, 6.198823855420584, 6.950138368886841, 5.7535455529946, 7.850656719506649, 7.912823027283426, 7.882509383989615, 8.101284579470002, 13.733151129665092, 13.173823043804271, 14.16122446433704, 9.183726004663148, 8.462717995225486, 7.255351865133861, 6.949024275108245, 7.769081531931598, 7.3176686021574895, 6.593229961119606, 7.091641990473536, 8.793007379799281, 9.261834101550146, 8.514363160603418, 6.59046397448622, 7.100299291623158, 8.283562980164568, 10.212712032786667, 6.994694528861405, 6.86930501657636, 6.920288548033619, 10.930251909615015, 8.296885334381749, 7.486751947526672, 9.371064009048943, 11.008555804860544, 7.139800307634327, 8.282669475844814, 8.059421132069483, 8.35370587206283, 8.317274014263797, 7.921367172602265, 7.1931181036524965, 10.348278306004437, 11.087039685610435, 6.994390376503345, 9.812991073429249, 10.12939638747413, 7.8958049976729745, 7.890181478067051, 9.136196551450473, 11.054502153348668, 
7.585372503000604, 8.462399788895482, 9.334662867039425, 7.607353579590679, 10.228316509327541, 7.615392265319403, 6.919186256853584, 13.351308645034674, 11.782285097274341, 7.126007394687756, 7.597884876128463, 8.002507932554183, 9.830042259582296, 8.49253993520008, 7.712157163932788, 7.8145992642416475, 11.109034511332085, 9.284533991380302, 10.063228965398249, 10.188867855678469, 8.766476890574666, 7.287524267459958, 8.588189299622748, 9.092493191908213, 8.733564387329482, 8.183887575467459, 10.136963777705297, 6.062920615819733, 9.104806299636849, 9.237583176939097, 6.387732704472513, 7.908693314052336, 7.78250515316508, 8.591938225399545, 9.4545404448386, 8.929508634445481, 9.321019259513987, 9.770552494739624, 8.303756224114164, 6.062965941067229, 8.169982561715305, 10.782967244519545, 10.40550257727203, 12.818679304157627, 8.77097260887961, 8.428078293774403],\n [18.367461046843662, 16.814690289224092, 15.981392435271342, 15.590717418550675, 13.78459106284139, 12.790219028115448, 13.533709609197576, 12.504910318004887, 11.519180104795977, 11.623250056122005, 11.08706185381791, 12.575454349271059, 11.748010604564296, 12.30557746726062, 12.363188272842512, 10.538404994438869, 10.399288793348198, 10.047875001219923, 9.175282555491393, 9.808922294258727, 10.651495437180124, 9.987506431828303, 8.733061665250075, 8.66082214452, 11.737708178690502, 13.156088903639994, 10.10252711605426, 8.9898848223011, 8.042067295906783, 10.201684126775787, 7.276274549459411, 9.776851348696908, 7.308253446411992, 6.268359584680627, 9.786758829752637, 9.822812528580553, 21.102485233803723, 10.182822692922318, 9.005250227022946, 10.813137758612603, 10.067646039170684, 9.007038385693976, 8.729118831565776, 6.50828673393515, 9.81940489949436, 7.55788070563999, 7.516731011441329, 9.69428100832395, 8.162043803598182, 15.12310101432996, 25.602664629203403, 11.57846323702489, 8.50588898860467, 9.416045225759573, 8.137142940309374, 8.391584702670242, 7.68877970547477, 9.713949650331767, 
6.776448117354238, 7.751623011355878, 11.012291743428953, 7.934612706704716, 7.251927647118261, 8.577227442757136, 7.666932556526185, 8.109073484598923, 8.77739169372259, 7.2504477738834145, 11.040863694389396, 11.837115775839912, 8.63649387578345, 7.379581468513879, 11.698761323742417, 12.494795227184659, 10.962816044455694, 7.9377480215508776, 8.223347656154361, 8.542184616730811, 7.072095482371086, 9.1566728806392, 9.754211605178359, 8.684789008171013, 7.072379867355158, 6.922184291224992, 7.459099245118162, 7.857495260675934, 8.569840574856958, 6.804125010541176, 7.414956426765643, 9.16491644362183, 8.440616025408652, 7.733879392370894, 7.513613803899214, 6.669831741721616, 7.875771853212181, 8.985310389506006, 6.540505926110707, 9.074357092223089, 8.808181481420078, 9.570178078146155, 9.127602817298392, 9.735530947025763, 14.431686406393263, 7.575022250482934, 7.670016909058494, 7.520561076510924, 9.910063717979744, 6.203583740647947, 7.109093835850977, 8.521636967911903, 10.705489443653804, 8.726526712504633, 8.201225860205067, 10.053192712200353, 6.836634292616073, 8.293300713207689, 7.101735556004384, 8.41049294800333, 8.320763186608552, 8.257854422900646, 7.395978239022934, 7.281442277222249, 9.219152699225866, 8.728349811968233, 7.52865945880653, 8.95554753995012, 7.720504504276203, 8.660612180613377, 10.719747916795155, 11.4779635648861, 10.056847413559757, 9.10494674635443, 5.578961914707252, 10.363806330584556, 8.844941792603889, 10.002434671671333, 8.337607227541238, 8.746246374831523, 7.98565803466582, 6.91621585863724, 7.681669437432087, 8.766871420623545, 9.86461167123844, 8.318437708774942, 8.464584528609025, 8.572372849049604, 6.779383972650864, 8.551859162771402, 8.055829547756252, 8.059820508098154, 7.819569938615952, 8.578090907130207, 7.289051562500602, 8.869052396011371, 9.467678240312658, 7.840343157373925, 5.90387811979723, 6.693065782370631, 8.575151867096867, 7.639307707368244, 12.523584908375957, 8.895329648204063, 6.32132197892887, 
7.979330476993164, 9.29407257685631, 7.012176486492262, 10.298613437757856, 8.5665291970282, 6.528734111201251, 8.44106342820268, 8.504399606165265, 8.482924732127376, 5.435306925857987, 6.189955844680422, 8.601281799667708, 5.985383659384594, 5.6531895118814655, 7.196515786106366, 8.10792270615638, 6.486626226419606, 9.42449182798801, 8.967841658571222, 7.336021583409538, 8.377165668978757, 11.085709046494918, 7.219729845568762, 10.187155238171679, 9.924843025818602, 11.310424918196789, 11.066330330957877, 8.17928717118768, 6.240171463928257, 10.51245796400129, 9.447930369525764, 5.548840519130706, 8.346987007460342, 9.63205571542674, 5.08939032785377, 8.38190783372624, 7.181618319034054],\n [20.093471578188247, 15.240470002676348, 14.970623666089363, 12.460865166263773, 11.402402512465786, 11.625559679963047, 12.228907209627307, 11.318697860594215, 15.668919990519937, 11.22676614676285, 9.398424135648463, 10.359603993439656, 8.952609754405902, 7.710901607786571, 8.465206831502085, 12.251398154055844, 12.706337253014308, 10.002994688877, 9.7145856741547, 9.653625811111327, 11.662995254791763, 13.585316934694015, 11.690757518173372, 11.64803540603652, 8.92595441291016, 11.605328771824052, 8.520059399790982, 8.983101008253845, 8.605895552485649, 7.669932812518325, 6.682036989639228, 8.207969790721224, 8.350366090861892, 8.849437732221492, 7.437045884941238, 10.661703550696556, 7.83567157655003, 11.015211554852268, 4.951625652298421, 8.263465755564768, 8.310540747173057, 7.993691336039781, 11.067847480255251, 7.353736182726404, 6.433828587733538, 8.664227596231523, 7.182233937098731, 9.29964986639889, 10.299767272629694, 8.082208002699081, 7.33695561050209, 8.700829573931493, 8.588516433580843, 5.494863317399205, 8.22862159909139, 9.328733522626356, 7.312425151226673, 6.1965848735213305, 7.393234461590516, 8.491240036879667, 8.147709150161932, 7.7820174056932805, 9.455173615999149, 7.066963901542219, 6.327041779240348, 10.45611656434066, 6.810050597841734, 
6.89811173304331, 7.433257791766566, 6.241208907127381, 10.350180755011433, 9.563424028869548, 6.856215839029643, 9.989364583683669, 8.381648854533712, 8.851544007331041, 6.437612677476114, 5.992635135024479, 8.062439709878685, 7.229721589241544, 8.222586495508642, 7.342789503301448, 7.149462830201674, 8.662792284389258, 6.653727711474329, 9.767111624222236, 9.85886918589017, 5.37134830919524, 8.958539041873353, 7.875220830394933, 6.553369864422299, 7.629990949523373, 7.871841873645017, 5.049263185300207, 6.326912044835468, 4.663349146887186, 7.54766275795534, 8.151973132859666, 10.042591436525045, 6.614324991452069, 8.26423542737398, 7.753493878072397, 7.140082602370002, 6.8957585259184775, 8.019552156781454, 6.759570706777052, 8.596990061093345, 6.682186627269535, 8.469781477233338, 6.3892643707534456, 9.380928877543921, 7.5703786505548685, 8.538442509644339, 8.3489002910439, 10.426835204967038, 7.806652078113572, 8.087960441160208, 7.095134465250964, 10.430570641677104, 6.503026399717661, 6.962295357490432, 5.09420339843243, 8.91603756530321, 9.221960185554856, 9.656195326708692, 8.567037704619551, 10.601443046821723, 6.4270593896404815, 7.4639907379601995, 9.488497159230707, 7.314692583348785, 8.255042076548941, 9.254447430387977, 6.898069716289567, 8.269692002040271, 7.530596065929002, 10.007947783373663, 7.563517529428557, 8.740385928547031, 7.942807255997364, 8.456138249643518, 6.8977962387131955, 7.38527841125858, 7.807825756178889, 7.7760241913466, 7.819173787908539, 6.951426877420532, 5.713321617844715, 7.9034723396171325, 9.905599555223858, 8.822528999664158, 8.176893679756958, 6.532140354106864, 9.827145254090114, 10.717437175985106, 8.705208318712303, 7.0163608695084285, 8.17408228521044, 5.549877373285826, 7.125388466950287, 7.518790392587565, 8.043784032203314, 7.576030790566522, 8.931919582667708, 10.391746880104884, 8.555499693805128, 6.906117900900218, 5.307627431786376, 8.895677769055874, 7.971891098774674, 7.851258846451551, 7.062932830479956, 
8.175518443040652, 10.710954676865372, 12.627568417483477, 6.988203367911942, 7.580563478079185, 10.032906036712472, 8.878493878792403, 8.534450948131887, 7.423603066137307, 5.597998136906411, 6.8880688214721095, 7.110450336649771, 9.155278495119608, 10.994658772992281, 8.308284552711857, 8.19908410344054, 7.594652860873574, 8.365859048782031, 8.450098249930555, 7.562631713666295, 8.379220485739266, 8.409706136076677, 8.20846632211791, 12.743653517630978, 8.503179957531588, 8.988792382374275, 9.427528339614407, 5.611620393638202],\n [15.99844421332487, 12.667360153096515, 14.650280572397145, 27.91091248829037, 18.45657752107614, 16.50176189875578, 15.367774930514772, 18.715170566885117, 15.368910388020334, 15.0940459003475, 13.561311961671016, 12.627819749944042, 7.069785858073152, 12.756675470120433, 24.72996801435937, 14.685659433924377, 13.808240157717174, 9.307699271225761, 6.418619770777121, 11.592419795168352, 10.865675592684639, 7.840543425963905, 8.212471950824648, 11.466680456497453, 9.08228827816499, 9.448736282108374, 9.529999322728214, 14.512065877668265, 10.575824517906415, 9.63801520027799, 8.250970859954954, 9.520417143677042, 8.3880351390895, 9.372922036085365, 6.257278679162988, 5.882673523841979, 7.511435878698112, 8.45264982843566, 8.907779099417397, 5.665874434056404, 5.482918209615173, 8.078769099224154, 7.343789848075083, 9.365652680019831, 8.239245793842764, 4.605717605344383, 6.797613952478779, 6.793132642990523, 9.80491796590601, 8.582030535604208, 6.294097665208823, 7.364681420924376, 7.406233745336385, 7.793954967857749, 8.52658003806355, 8.23113431080118, 5.460016309198461, 8.58561193544833, 5.213431838045249, 7.536560070117995, 6.916196690002716, 7.7029980507562295, 6.87405906959778, 6.223273064784113, 7.2509913361491645, 8.700053412744857, 8.929041207072977, 7.690969824508415, 9.626657393735643, 8.50760479797336, 8.46282833453814, 8.641117863542863, 8.67183377690492, 7.062811099284048, 7.507891403074677, 6.095616171052702, 
7.377366857646905, 6.915264122726458, 7.500192060614502, 5.8123830924749464, 9.420112732030095, 8.395284945014676, 6.777311068474881, 9.151535543675958, 7.310999692218685, 9.756565224162538, 8.918393742781577, 9.388115988654082, 6.3845041907734466, 7.1665740307830035, 8.365378747201166, 6.821946311622847, 7.23647092353758, 10.745497259385735, 8.574445464830248, 5.223885354330731, 8.779773036561195, 10.126248313227205, 8.468587432212006, 6.955970498558412, 9.482568830701497, 8.792871652481686, 8.56242982144813, 7.885731476374264, 6.467257717095384, 8.571307961991808, 6.19491122062597, 5.745901765931287, 10.022014902340047, 6.550525934030427, 7.126731474199514, 8.95218009121349, 8.214242037686004, 8.04614210839355, 8.760727777009127, 10.18789680380936, 7.3890750225671304, 7.543749718817351, 9.371278699100243, 10.857580009353113, 8.390490080655963, 7.605047182918841, 8.63981174996564, 6.145216913049062, 7.314319768511249, 8.32560445978138, 6.118805029519846, 10.511607543619892, 8.321669503647568, 6.658167651389458, 6.880549194675298, 9.138068239415487, 8.53392035065545, 8.910645757375073, 7.412488338415838, 7.16834364984102, 6.13443365507289, 6.268930695997683, 5.592307538523584, 6.358025840761492, 5.748854188713233, 9.954916677076465, 7.6665301078302415, 8.648772680793956, 10.628411273855844, 7.1139499781300275, 7.299358091168056, 9.750148484992668, 4.766687237131658, 8.627381191592773, 5.89059709066853, 8.470432290868324, 10.921838592619107, 6.052844846708801, 8.604871685058631, 8.989237189885491, 7.121322987037773, 9.046093383564312, 7.827381427826947, 8.01500847479938, 8.437063744522305, 6.721055838168828, 7.690978297565653, 6.044905911565294, 9.033002163304815, 9.761717956561986, 6.778111406425475, 11.733249942465521, 8.423433046531942, 7.821584434801706, 8.16091659399263, 5.118021061064587, 9.18093918737943, 10.649667043913631, 8.139365019567581, 8.806120157260205, 7.855023086973385, 10.298041368109626, 7.407850252902208, 5.836931345496306, 9.407608978204072, 
5.325721414024552, 7.428444024347508, 9.204052068805469, 6.547964812463914, 9.793049514109743, 6.808459590608135, 7.465577955076744, 9.39662379085216, 9.663180477615683, 7.588697782982331, 8.357209101026747, 7.913356420131625, 11.87804247926184, 7.869507040729936, 7.818227322334145, 8.15491136058747, 5.9684860356460625, 8.316127392894865, 7.240518632079241]])\nQCNN_ae8_FASHION = np.array([[18.712848126625353, 17.026705806977112, 14.493331278658019, 15.384846467130794, 14.296125408407736, 14.26728987455375, 13.235956375213691, 13.642490514403134, 12.628129097433778, 12.580980307322015, 12.486039937182948, 13.173723711838242, 11.115425723805727, 9.904431023032178, 11.671377947215737, 10.930665890927383, 10.405467480140032, 10.264690053851728, 11.096566057993675, 9.919179866081857, 9.588209260088233, 10.92943022768969, 9.777015659707292, 12.170628403906653, 9.216602820219594, 10.346935660218959, 13.898694459839772, 11.55599408093733, 9.78475173702373, 11.034433765016363, 8.573462957480102, 10.632923338732795, 9.998647973931837, 11.227806124520043, 11.810286169936315, 10.650705446270106, 10.64505434091775, 10.935839016808107, 9.339274297114681, 10.06929458287948, 10.427739321350575, 8.788614505465636, 9.973650618311728, 10.374305364535068, 8.993463554081355, 9.650100384966708, 7.874339202952801, 11.179674855564567, 11.154285176277385, 9.691137890738242, 12.422566019543739, 10.551329708522074, 9.459284822639946, 10.529926191330016, 8.511942244902933, 10.408492692391786, 9.600036766620036, 10.638732604791974, 9.472980003468372, 10.727403609642199, 9.767448001461673, 9.668911579291809, 11.440596230981008, 10.052538118614656, 8.976556185267341, 10.397969717644338, 10.04337771655426, 9.42798140123906, 9.838054196332088, 8.804127914983113, 8.646572944665172, 9.285110662263332, 9.750791375567708, 8.918642459597883, 11.07924059288286, 11.606655740668053, 9.442105886795671, 7.852390660671382, 9.410406280698393, 7.903005365526704, 10.450954974797218, 9.69885469734169, 
9.987648480943617, 8.766986690246718, 9.202970464136422, 8.034507349043121, 10.222707561848988, 9.894183192589004, 9.365979852461498, 8.45207586133659, 9.26822822672579, 9.734032625124131, 8.712048475653425, 9.442530915185635, 8.149079311896713, 8.53117023258566, 8.308872976550896, 8.557615618858812, 9.334598577271704, 11.331440843432157, 8.562942494897207, 7.90026789359747, 9.997297245842987, 8.911267731783784, 8.427353667076936, 9.209326522338568, 11.60971413924858, 11.915255010846362, 12.038280370532954, 8.432232279978424, 8.424570692618504, 9.42193533861766, 9.311017435776742, 8.272797283779441, 8.394662755625502, 11.837514575820137, 11.051401051863028, 9.679453465384153, 8.607595410052317, 12.95650456251789, 8.96422172705352, 8.711173645501345, 9.421036815444774, 10.029615271846323, 10.918203703855882, 9.23530361770149, 8.93403443975954, 9.401228609879995, 7.683417019928715, 8.014505268842539, 8.818538945936595, 9.659802581799115, 8.231549001737624, 10.444057395918627, 10.276555077634915, 9.403493739534039, 8.032365918068665, 11.33747400810304, 9.197955323436302, 10.981673861431329, 9.453145527028813, 7.741260238009232, 8.530640753449843, 9.7029046152892, 9.091250741381918, 9.93513270613483, 9.332711625064984, 10.828087179003504, 8.823229772360088, 7.785429706166966, 8.618998320097035, 8.619313103950242, 8.576235060719235, 7.159591782912968, 9.333821859881834, 6.9736199923040605, 9.165265289894089, 8.150026867902012, 8.286188397988429, 11.05757562301829, 10.97509985663988, 8.494433280185376, 10.557318392601369, 10.156171548049324, 8.093195459172644, 8.665540189392559, 9.358289442847104, 9.521221792170806, 9.284083453267607, 8.61300450878218, 7.830002225139488, 8.502006883983626, 7.823442956300858, 9.713492970583143, 8.243281501024526, 7.1182464771261, 7.317494878592048, 8.92399096842727, 8.34782654605435, 9.744047041549553, 8.753975645426449, 9.912844366862691, 7.8830338218735045, 7.496679427633059, 10.29405209660884, 8.19655105245017, 8.77491705692589, 
9.56310303047758, 8.95216828150052, 9.360699818045822, 7.958748739932429, 7.3794577622806745, 8.73766949546878, 12.505167841009284, 7.224289266862777, 7.160061111541247, 9.849302528166927, 8.461600847024421, 11.02001444328986, 8.204937229489273],\n [17.724969493705974, 16.985321609279367, 13.392438740242687, 13.564257290188259, 13.222004792231791, 13.800715638623068, 14.263207568909433, 13.532110504066344, 12.138030001158716, 12.21461063476483, 11.34829601113098, 13.132145365501252, 10.613878003819227, 9.470936716464223, 10.123757112991772, 10.09239045464374, 10.308041286798865, 10.078326511878345, 8.021032049876634, 10.625488871474145, 9.473769342359292, 11.436981463784491, 10.362919844286884, 8.986481426520276, 10.45515190524284, 8.67400700707833, 10.238661986080123, 10.114664468218377, 12.083806762971559, 10.489738553101917, 8.563467474739872, 10.862515512417456, 9.681733090396149, 10.078009855519891, 11.531668521869483, 11.28860693706361, 12.693756213225425, 9.409139652619109, 11.046992476804764, 10.03807813971731, 9.078128046591322, 9.873187133060522, 10.166749727368595, 10.001658140848086, 9.66968779126388, 9.295609120198012, 9.795449434998282, 10.136556597964749, 11.881866124372682, 9.764040289593533, 10.04661892828681, 9.955879825113133, 11.04227130755816, 10.912306112213448, 10.585900071960392, 11.899309983758876, 12.01635137095045, 11.57111754620914, 9.362224756146297, 8.513982863267216, 8.944029584711348, 9.578053290544796, 11.517908759957947, 12.573331933436496, 9.42891619986523, 12.052347630059534, 9.874895159416516, 8.054566944204137, 9.508572084059486, 8.672714435844977, 10.82688476465491, 10.838726121590994, 8.949386243137582, 10.396830813977878, 10.26649658713702, 10.304681945254549, 9.814345683989115, 7.017067604462374, 8.86338626013271, 8.917905716700938, 11.08977394635223, 8.80169674675162, 12.322944173431637, 10.231818986960084, 10.341563460819152, 12.42950081732582, 11.174671663489482, 11.440379917075518, 11.123435851452435, 11.53657637417139, 
12.103199913874928, 10.753969581042744, 10.533579277749904, 11.118593298179924, 13.029586219183377, 8.970946232158354, 10.39408348605849, 9.01202135153644, 11.814140337811121, 10.468594327889168, 10.09229837265605, 10.72123017519997, 9.786154962736086, 11.280651046568527, 8.763589363699392, 11.795843973774124, 12.350980083392521, 9.36697614310966, 9.3654150163396, 11.368070186345216, 9.714042035665056, 7.89805449387597, 10.701833613295438, 9.797942296016021, 10.214916778019479, 10.960430620186132, 8.055356113803638, 8.382256715276968, 9.51196865260292, 9.247102981657857, 10.389871449471697, 9.441858433486036, 9.334453540205061, 10.342629181837951, 9.937443451125475, 7.981245145246529, 7.580792686542374, 10.258417832560697, 10.120090492413194, 9.027770791661279, 9.135664443037866, 9.298832343138823, 9.925349742595548, 7.792714768266973, 9.048115931245947, 10.182160458255312, 13.871433313577539, 8.76603769853222, 8.800176260285307, 9.140590595418772, 8.523446387649066, 10.268253785486175, 11.06647027665753, 8.253600107767529, 8.926886915558601, 9.675302703739298, 8.448095257364335, 8.225971580463636, 8.366788984502524, 10.155319180178003, 13.848494156123913, 9.904055350752012, 11.990208000885058, 11.75994178975892, 10.111496943063448, 9.148428512400015, 11.47459288717779, 10.025561164623397, 8.984303252101506, 8.903071037980423, 11.42451651226219, 9.429827687829391, 9.850557808236871, 11.464714491099045, 11.5440769861382, 9.128967995229814, 9.651729305352388, 10.94298735151194, 9.63001790940849, 11.547309396955596, 7.919442803713278, 10.500564806911825, 9.14141238071253, 8.523114230805874, 11.052144586543875, 11.422262338367009, 8.877243965090843, 9.445237640478592, 10.37428096363661, 8.375390424113167, 8.611343806072805, 10.316941181931693, 10.61887164860755, 7.382933587444922, 8.069172618849178, 8.784733843679572, 8.725760362619928, 8.36850912615156, 10.449249068605226, 9.626636031123569, 11.631516776373171, 10.07975235609016, 8.420706613794852, 9.848247146752307, 
9.549419119152644, 9.318807142870538, 12.520942016608585, 8.433621417070286, 11.209178717892792, 8.985184200511135],\n [18.168971213705127, 18.09948798448156, 20.356476414423568, 18.06441332431552, 17.433959664331457, 17.316568842268182, 17.20544006748239, 17.204380194563853, 17.41315606766121, 17.09672293957003, 16.885882741339667, 16.47039635087509, 15.446551357793064, 14.598054558917354, 14.187669758890106, 12.243291233487943, 12.148629802665017, 12.01464207699033, 13.387321522877306, 11.983924416435668, 11.9708256098371, 12.217342495035991, 11.318116975930675, 12.61740802059251, 11.490766938893163, 10.221522512993173, 10.130144752058511, 10.60446666208203, 9.48898946077257, 8.576695821159669, 11.408827550761083, 10.132648896282712, 11.714523302452937, 10.303288211942666, 9.949521237082545, 11.289459530234836, 11.840400836257002, 10.780235270187308, 9.643443517446299, 9.137861705698183, 8.56922684990849, 8.547990642638062, 8.627133843092555, 9.617239715710843, 8.344269202505762, 7.861309571736697, 9.618654760779219, 10.098397573964943, 9.799242300459248, 8.504567494158124, 8.984759968537539, 10.75409765331483, 7.777162932806218, 7.531454612334114, 7.486395593439431, 10.485101164322918, 7.664861773945254, 7.7802009384724835, 8.313461627928938, 7.4864221482038165, 7.021961736158062, 8.272104289315019, 7.292786069126491, 6.662093626093378, 8.949340781975687, 10.188138400715184, 9.620029512524633, 7.87617349674253, 9.046259666483742, 10.051492176356152, 9.326473118887444, 7.401651051079849, 8.992446316041772, 9.987851532594028, 8.257822228733502, 8.75049608154607, 7.842148118774588, 7.484392833047435, 7.8629025521316676, 7.679279184739714, 12.829370090937456, 8.437284935116018, 7.5709183014882555, 7.387329282414164, 8.433496877550013, 9.027676329084725, 8.568178440509758, 7.388605621205098, 9.474135268791201, 8.332935417250061, 6.019995627357835, 8.927135202232805, 9.352696040070555, 8.613651781599517, 8.702240746224566, 6.09329798541564, 8.46619599589795, 
7.205960020907471, 7.670244238062496, 9.538394506253727, 9.265786471533367, 7.994167495529957, 9.862553637696603, 9.44713031167138, 8.495765314665855, 7.908799346976214, 9.38648420239414, 6.623996696631651, 7.5528484807130045, 8.447652012255231, 8.499073196551626, 6.294311163900188, 8.336424120908685, 5.86098248922095, 6.930701114658679, 7.054226025924941, 6.819043885103816, 9.358721905245893, 6.495838355797971, 8.027054499807878, 9.489459717239372, 6.052992573184167, 11.62899813898797, 6.898877563342017, 7.919918612230186, 6.571944966122923, 8.547183665594849, 7.473995162875985, 8.879902736795254, 7.957722258151804, 9.292010353074367, 9.989534966411613, 7.737363495851887, 6.845568836119702, 8.981189548477822, 7.612392534974124, 8.72142136521589, 5.395089376824843, 7.364219161525932, 6.727400209240946, 6.123065782359484, 8.00469342288095, 9.217753995104315, 7.482754327344144, 5.982746921992949, 7.565787731966314, 7.62423409700392, 7.953105077375218, 9.729174484509429, 7.173076087132787, 8.092955316186691, 7.685622098792884, 6.677182441966579, 5.910984482008669, 9.135849938540927, 8.043489456944014, 7.7626862812564, 5.474339235270639, 6.966963360673835, 6.861549758839532, 6.742956895868598, 7.664568307114971, 7.710724496034348, 9.435872121244195, 5.936069354600976, 7.776332933398961, 6.5268498880727215, 6.036250955999785, 6.078944244051717, 6.709997709270294, 6.1584608938677174, 5.423060175386657, 8.190868214972877, 6.519586486769419, 7.079455374824206, 8.279982791988205, 9.483779303281262, 6.657745531027846, 6.537360427442257, 8.201429782266016, 8.681957167558688, 8.424265808454738, 7.162886013996535, 9.532281127773695, 6.726542148547179, 9.346198450389041, 10.61059261678538, 7.523942486195809, 9.74094793439675, 8.758048432695627, 7.565901899282078, 7.606596839387631, 6.342056774953504, 6.662866632358967, 9.571574907847253, 6.419377220878184, 6.0948494427310544, 7.801866246737841, 8.498661111158714, 8.637575188381325],\n [15.47237529007874, 15.121920558377452, 
15.734468656753677, 15.902123677655107, 16.14164915023064, 12.984558881159014, 12.156157481906165, 12.230702560681928, 14.126056699083271, 12.890215032269717, 11.400863736589566, 10.942808490696466, 13.02780033729369, 11.428769940722177, 10.93785078187039, 10.670345343649261, 8.811132493201097, 8.913502382017644, 11.11064772838931, 9.087542163495932, 8.76604478113914, 10.41579057791804, 9.775088696223651, 8.921462555617842, 8.821401703152448, 9.13229418127733, 9.110627990719586, 7.84784612485433, 9.3089240448395, 8.965189265476615, 8.540753795369884, 8.10537213523649, 8.780767058671406, 9.062713081061041, 7.821457145502602, 8.150268040108793, 8.300125020769297, 8.8605264174679, 8.949460405068477, 7.335061085982462, 8.107558925250586, 8.082393369472355, 9.421629979617803, 10.46901593550262, 9.757299614512625, 8.083929750601031, 8.23299662293339, 8.227109925951304, 9.511416262144254, 8.61334586525843, 7.523771817565371, 7.907270331565664, 8.562934589884971, 8.78644863441816, 7.7988906316225615, 6.741492434806218, 8.351690426302623, 7.639616581069282, 9.595872003444136, 8.272931183667156, 9.29933451723061, 7.949202666851371, 8.611141298820021, 9.899842853248867, 9.424413229064463, 9.194373232065193, 9.170300594355348, 8.458104380320453, 10.032953595224555, 10.002689748187064, 8.570718518321954, 8.367468058026205, 7.67792959736898, 9.058569552899293, 8.882101680515294, 11.816910226662275, 7.6093789351437495, 9.804398815835606, 9.583985101252239, 10.252096737104425, 8.92761644756087, 7.231768655190682, 8.261148494147937, 7.8546457917766945, 8.743806945228757, 8.560116867691729, 12.31231136799699, 9.914307189247303, 8.23473708210795, 12.790985229738073, 8.723409821483871, 9.574578833342542, 9.43314365437812, 8.257361610618224, 10.317139171432942, 8.792780852629525, 8.012285402720918, 8.186707035655202, 9.14830158692532, 10.329910517806821, 10.077502380358826, 8.047962420640605, 7.905011616649968, 10.688717726534126, 9.044463829179614, 8.934741961408912, 
9.432926689630838, 8.748470178407437, 8.783214706392013, 8.75460034069625, 7.822045220718941, 10.703837579075957, 8.621767934013137, 8.47771836640275, 8.99590810342142, 8.068886681309797, 8.341964114461966, 9.591314804401238, 8.254469944154078, 7.517449049776807, 9.075694805580456, 10.082878788015295, 7.922099697777529, 9.118471067299403, 7.898774274474209, 9.871832773252333, 10.331625820707973, 8.101229482601779, 9.762446698992838, 7.599721926109046, 9.260848245633616, 8.179214841858181, 9.048337500366483, 7.662492743842515, 7.239308519880746, 9.35336277573206, 10.490436834696158, 11.649782248744323, 7.989766528161437, 8.233244528084304, 9.300968519788373, 10.71737996880339, 8.0408218636835, 9.770330980262797, 7.6918735820409205, 7.090466724110408, 8.551474884827698, 8.52817717581957, 9.485064453599328, 7.276638355421168, 7.57449748264411, 8.102701445686362, 7.836847342998863, 7.843167313166515, 7.525836823040023, 7.740825803320405, 8.713830047685784, 9.792171749335626, 8.016558659294292, 8.610337193959802, 7.686509048710983, 9.516502085468327, 10.450471221322273, 8.348033287161595, 9.556392602770192, 7.781825021112967, 10.923251038424292, 8.470613674900342, 10.067497385232187, 7.920456182927356, 7.851225729254214, 8.613958357779168, 9.104481812711125, 7.897823300468376, 9.749845856935135, 7.789243080732912, 8.150814651554468, 8.602292362184084, 9.985818601955131, 11.159751382360108, 8.36451358706994, 9.480154582946545, 9.854160281233208, 7.19064442345934, 8.727376333325964, 9.522816918170163, 7.5008887006696625, 8.092487273786547, 9.071248041206662, 9.40282856897173, 9.080910590540642, 8.849019511708898, 9.133735811641197, 8.018770621603759, 13.09527537765517, 9.040053788870111, 8.609976156260206, 7.747228134046389, 9.16401223484431, 8.970995373059257],\n [20.683005333450055, 18.267736682905213, 17.175032848509982, 16.60911631788564, 15.33279020706126, 14.2215223516022, 14.088247479421296, 13.820287981222355, 14.33759390378888, 16.57775253032724, 
17.252324298790555, 13.833293897159491, 17.138546523751703, 17.701345358302802, 16.730840546039712, 16.897332184731702, 16.29854767077804, 15.66586265169082, 14.558544323068794, 15.271527791695647, 12.42172447160138, 13.281138652182447, 13.458847690806772, 12.407042484908006, 10.50202882930347, 8.190709251129691, 7.478885795748316, 8.932342672444413, 9.277858272805942, 6.894662867255597, 8.807467146265735, 10.436447157552362, 8.233855471666104, 8.522757113852792, 7.338482181906378, 7.181966997371922, 6.736307850425244, 9.118641191802494, 7.534461812252117, 7.021232235720462, 6.7671043381630795, 5.965023965107337, 7.6146350584072175, 8.883849174158678, 7.466796517104723, 9.691455128539971, 3.851496787446084, 8.389158335768672, 7.799112755977195, 6.134688002500012, 7.708104093147763, 7.993215009150312, 7.143972444646884, 8.793013158747055, 6.366706547986155, 4.878618358616566, 6.83256986969982, 7.414474022264097, 6.238341518383238, 9.698000520854874, 8.30588951462028, 5.417520854991893, 9.804951799923835, 6.987375710696394, 6.852768985889441, 5.772803169960699, 6.464898717898582, 7.065771876470555, 6.658555366228665, 5.891064171143182, 6.332885020995347, 7.673825566228463, 6.114835766670566, 6.684139361808645, 8.980665216483798, 7.580936457002063, 8.726756575972171, 7.39548455067962, 7.913542696523183, 8.203722901269103, 6.218932974966063, 6.568534371845129, 6.866931322606477, 7.219306920984255, 5.860022108974549, 7.172460379203403, 5.867755561139791, 6.50357450769631, 8.133988285227398, 7.626390659236641, 7.865328181651307, 7.893275069492033, 6.9848105563181075, 5.855869013839226, 6.550972479964267, 5.696300435726472, 7.607125363737651, 8.26236962638436, 8.16128025460234, 7.784832070949528, 6.249083984112179, 8.380552172663155, 7.091722074157455, 8.179251176781678, 5.475054475621989, 5.673672358294827, 8.355988106918629, 8.119036543590228, 6.433107884177927, 6.131493887625068, 8.388165922333409, 5.410761095042206, 6.66015666521853, 6.72655509905609, 
6.849279948729544, 8.649846850543616, 6.426090517005289, 9.088055649382042, 5.620559799850784, 8.358840568775085, 5.848266295028982, 9.960780305017563, 8.990225084829786, 8.028416077095914, 7.090799491178961, 7.672201533949532, 7.918028847854404, 5.115240423796817, 7.797132308154474, 7.8177755733567125, 9.569372167049185, 7.932459801661724, 6.556512352860484, 6.978798208359354, 7.205724112192511, 6.357070839737896, 8.160568371498796, 7.83652865809797, 5.361529289332873, 6.132988451177973, 5.315110775108044, 7.830817917143794, 7.374843960298627, 6.3410116947039015, 8.594900011777003, 8.111893403771079, 7.231072551951223, 4.86169966620579, 9.71278444604642, 7.359797884149192, 7.508125654493484, 6.939272181948796, 7.329666423222715, 8.507522574627632, 6.757625314343968, 7.459286909154, 7.614089808644679, 7.359928546111833, 7.816163623814883, 8.05977502973244, 6.376558608300571, 7.030146713320054, 8.14434945576826, 7.357057707938525, 6.148924789501268, 6.891055666494754, 7.0345404664271864, 8.0134582464101, 8.231510376752711, 6.426786250899349, 5.573985388728724, 6.320284083257476, 8.079385962833218, 6.828510458185883, 7.1651698744635075, 7.451355302779589, 7.472398478325829, 6.26357442005399, 6.546009763086556, 5.591878251762002, 7.5040406848509456, 7.8941600605558415, 6.885625409184911, 6.874799861732833, 6.74644371833238, 8.412316342331914, 7.475877859266048, 6.928096800605699, 9.835571162917416, 8.661415060347343, 5.283641840552192, 9.019498325907291, 5.865331497049222, 7.1932358665570355, 7.30503683666938, 4.663862168489629, 5.11743679618586, 10.70800811171637, 7.491798466426066, 7.165062071396071]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_ae8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_ae8_FASHION = np.array([[18.712848126625353, 17.026705806977112, 14.493331278658019, 15.384846467130794, 14.296125408407736, 14.26728987455375, 13.235956375213691, 13.642490514403134, 12.628129097433778, 12.580980307322015, 12.486039937182948, 13.173723711838242, 11.115425723805727, 9.904431023032178, 11.671377947215737, 10.930665890927383, 10.405467480140032, 10.264690053851728, 11.096566057993675, 9.919179866081857, 9.588209260088233, 10.92943022768969, 9.777015659707292, 12.170628403906653, 9.216602820219594, 10.346935660218959, 13.898694459839772, 11.55599408093733, 9.78475173702373, 11.034433765016363, 8.573462957480102, 10.632923338732795, 9.998647973931837, 11.227806124520043, 11.810286169936315, 10.650705446270106, 10.64505434091775, 10.935839016808107, 9.339274297114681, 10.06929458287948, 10.427739321350575, 8.788614505465636, 9.973650618311728, 10.374305364535068, 8.993463554081355, 9.650100384966708, 7.874339202952801, 11.179674855564567, 11.154285176277385, 9.691137890738242, 12.422566019543739, 10.551329708522074, 9.459284822639946, 10.529926191330016, 8.511942244902933, 10.408492692391786, 9.600036766620036, 10.638732604791974, 9.472980003468372, 10.727403609642199, 9.767448001461673, 9.668911579291809, 11.440596230981008, 10.052538118614656, 8.976556185267341, 10.397969717644338, 10.04337771655426, 9.42798140123906, 9.838054196332088, 8.804127914983113, 8.646572944665172, 9.285110662263332, 9.750791375567708, 8.918642459597883, 11.07924059288286, 11.606655740668053, 9.442105886795671, 7.852390660671382, 9.410406280698393, 7.903005365526704, 10.450954974797218, 9.69885469734169, 9.987648480943617, 8.766986690246718, 9.202970464136422, 8.034507349043121, 10.222707561848988, 9.894183192589004, 9.365979852461498, 8.45207586133659, 9.26822822672579, 9.734032625124131, 8.712048475653425, 9.442530915185635, 8.149079311896713, 8.53117023258566, 
8.308872976550896, 8.557615618858812, 9.334598577271704, 11.331440843432157, 8.562942494897207, 7.90026789359747, 9.997297245842987, 8.911267731783784, 8.427353667076936, 9.209326522338568, 11.60971413924858, 11.915255010846362, 12.038280370532954, 8.432232279978424, 8.424570692618504, 9.42193533861766, 9.311017435776742, 8.272797283779441, 8.394662755625502, 11.837514575820137, 11.051401051863028, 9.679453465384153, 8.607595410052317, 12.95650456251789, 8.96422172705352, 8.711173645501345, 9.421036815444774, 10.029615271846323, 10.918203703855882, 9.23530361770149, 8.93403443975954, 9.401228609879995, 7.683417019928715, 8.014505268842539, 8.818538945936595, 9.659802581799115, 8.231549001737624, 10.444057395918627, 10.276555077634915, 9.403493739534039, 8.032365918068665, 11.33747400810304, 9.197955323436302, 10.981673861431329, 9.453145527028813, 7.741260238009232, 8.530640753449843, 9.7029046152892, 9.091250741381918, 9.93513270613483, 9.332711625064984, 10.828087179003504, 8.823229772360088, 7.785429706166966, 8.618998320097035, 8.619313103950242, 8.576235060719235, 7.159591782912968, 9.333821859881834, 6.9736199923040605, 9.165265289894089, 8.150026867902012, 8.286188397988429, 11.05757562301829, 10.97509985663988, 8.494433280185376, 10.557318392601369, 10.156171548049324, 8.093195459172644, 8.665540189392559, 9.358289442847104, 9.521221792170806, 9.284083453267607, 8.61300450878218, 7.830002225139488, 8.502006883983626, 7.823442956300858, 9.713492970583143, 8.243281501024526, 7.1182464771261, 7.317494878592048, 8.92399096842727, 8.34782654605435, 9.744047041549553, 8.753975645426449, 9.912844366862691, 7.8830338218735045, 7.496679427633059, 10.29405209660884, 8.19655105245017, 8.77491705692589, 9.56310303047758, 8.95216828150052, 9.360699818045822, 7.958748739932429, 7.3794577622806745, 8.73766949546878, 12.505167841009284, 7.224289266862777, 7.160061111541247, 9.849302528166927, 8.461600847024421, 11.02001444328986, 8.204937229489273],\n [17.724969493705974, 
16.985321609279367, 13.392438740242687, 13.564257290188259, 13.222004792231791, 13.800715638623068, 14.263207568909433, 13.532110504066344, 12.138030001158716, 12.21461063476483, 11.34829601113098, 13.132145365501252, 10.613878003819227, 9.470936716464223, 10.123757112991772, 10.09239045464374, 10.308041286798865, 10.078326511878345, 8.021032049876634, 10.625488871474145, 9.473769342359292, 11.436981463784491, 10.362919844286884, 8.986481426520276, 10.45515190524284, 8.67400700707833, 10.238661986080123, 10.114664468218377, 12.083806762971559, 10.489738553101917, 8.563467474739872, 10.862515512417456, 9.681733090396149, 10.078009855519891, 11.531668521869483, 11.28860693706361, 12.693756213225425, 9.409139652619109, 11.046992476804764, 10.03807813971731, 9.078128046591322, 9.873187133060522, 10.166749727368595, 10.001658140848086, 9.66968779126388, 9.295609120198012, 9.795449434998282, 10.136556597964749, 11.881866124372682, 9.764040289593533, 10.04661892828681, 9.955879825113133, 11.04227130755816, 10.912306112213448, 10.585900071960392, 11.899309983758876, 12.01635137095045, 11.57111754620914, 9.362224756146297, 8.513982863267216, 8.944029584711348, 9.578053290544796, 11.517908759957947, 12.573331933436496, 9.42891619986523, 12.052347630059534, 9.874895159416516, 8.054566944204137, 9.508572084059486, 8.672714435844977, 10.82688476465491, 10.838726121590994, 8.949386243137582, 10.396830813977878, 10.26649658713702, 10.304681945254549, 9.814345683989115, 7.017067604462374, 8.86338626013271, 8.917905716700938, 11.08977394635223, 8.80169674675162, 12.322944173431637, 10.231818986960084, 10.341563460819152, 12.42950081732582, 11.174671663489482, 11.440379917075518, 11.123435851452435, 11.53657637417139, 12.103199913874928, 10.753969581042744, 10.533579277749904, 11.118593298179924, 13.029586219183377, 8.970946232158354, 10.39408348605849, 9.01202135153644, 11.814140337811121, 10.468594327889168, 10.09229837265605, 10.72123017519997, 9.786154962736086, 
11.280651046568527, 8.763589363699392, 11.795843973774124, 12.350980083392521, 9.36697614310966, 9.3654150163396, 11.368070186345216, 9.714042035665056, 7.89805449387597, 10.701833613295438, 9.797942296016021, 10.214916778019479, 10.960430620186132, 8.055356113803638, 8.382256715276968, 9.51196865260292, 9.247102981657857, 10.389871449471697, 9.441858433486036, 9.334453540205061, 10.342629181837951, 9.937443451125475, 7.981245145246529, 7.580792686542374, 10.258417832560697, 10.120090492413194, 9.027770791661279, 9.135664443037866, 9.298832343138823, 9.925349742595548, 7.792714768266973, 9.048115931245947, 10.182160458255312, 13.871433313577539, 8.76603769853222, 8.800176260285307, 9.140590595418772, 8.523446387649066, 10.268253785486175, 11.06647027665753, 8.253600107767529, 8.926886915558601, 9.675302703739298, 8.448095257364335, 8.225971580463636, 8.366788984502524, 10.155319180178003, 13.848494156123913, 9.904055350752012, 11.990208000885058, 11.75994178975892, 10.111496943063448, 9.148428512400015, 11.47459288717779, 10.025561164623397, 8.984303252101506, 8.903071037980423, 11.42451651226219, 9.429827687829391, 9.850557808236871, 11.464714491099045, 11.5440769861382, 9.128967995229814, 9.651729305352388, 10.94298735151194, 9.63001790940849, 11.547309396955596, 7.919442803713278, 10.500564806911825, 9.14141238071253, 8.523114230805874, 11.052144586543875, 11.422262338367009, 8.877243965090843, 9.445237640478592, 10.37428096363661, 8.375390424113167, 8.611343806072805, 10.316941181931693, 10.61887164860755, 7.382933587444922, 8.069172618849178, 8.784733843679572, 8.725760362619928, 8.36850912615156, 10.449249068605226, 9.626636031123569, 11.631516776373171, 10.07975235609016, 8.420706613794852, 9.848247146752307, 9.549419119152644, 9.318807142870538, 12.520942016608585, 8.433621417070286, 11.209178717892792, 8.985184200511135],\n [18.168971213705127, 18.09948798448156, 20.356476414423568, 18.06441332431552, 17.433959664331457, 17.316568842268182, 
17.20544006748239, 17.204380194563853, 17.41315606766121, 17.09672293957003, 16.885882741339667, 16.47039635087509, 15.446551357793064, 14.598054558917354, 14.187669758890106, 12.243291233487943, 12.148629802665017, 12.01464207699033, 13.387321522877306, 11.983924416435668, 11.9708256098371, 12.217342495035991, 11.318116975930675, 12.61740802059251, 11.490766938893163, 10.221522512993173, 10.130144752058511, 10.60446666208203, 9.48898946077257, 8.576695821159669, 11.408827550761083, 10.132648896282712, 11.714523302452937, 10.303288211942666, 9.949521237082545, 11.289459530234836, 11.840400836257002, 10.780235270187308, 9.643443517446299, 9.137861705698183, 8.56922684990849, 8.547990642638062, 8.627133843092555, 9.617239715710843, 8.344269202505762, 7.861309571736697, 9.618654760779219, 10.098397573964943, 9.799242300459248, 8.504567494158124, 8.984759968537539, 10.75409765331483, 7.777162932806218, 7.531454612334114, 7.486395593439431, 10.485101164322918, 7.664861773945254, 7.7802009384724835, 8.313461627928938, 7.4864221482038165, 7.021961736158062, 8.272104289315019, 7.292786069126491, 6.662093626093378, 8.949340781975687, 10.188138400715184, 9.620029512524633, 7.87617349674253, 9.046259666483742, 10.051492176356152, 9.326473118887444, 7.401651051079849, 8.992446316041772, 9.987851532594028, 8.257822228733502, 8.75049608154607, 7.842148118774588, 7.484392833047435, 7.8629025521316676, 7.679279184739714, 12.829370090937456, 8.437284935116018, 7.5709183014882555, 7.387329282414164, 8.433496877550013, 9.027676329084725, 8.568178440509758, 7.388605621205098, 9.474135268791201, 8.332935417250061, 6.019995627357835, 8.927135202232805, 9.352696040070555, 8.613651781599517, 8.702240746224566, 6.09329798541564, 8.46619599589795, 7.205960020907471, 7.670244238062496, 9.538394506253727, 9.265786471533367, 7.994167495529957, 9.862553637696603, 9.44713031167138, 8.495765314665855, 7.908799346976214, 9.38648420239414, 6.623996696631651, 7.5528484807130045, 8.447652012255231, 
8.499073196551626, 6.294311163900188, 8.336424120908685, 5.86098248922095, 6.930701114658679, 7.054226025924941, 6.819043885103816, 9.358721905245893, 6.495838355797971, 8.027054499807878, 9.489459717239372, 6.052992573184167, 11.62899813898797, 6.898877563342017, 7.919918612230186, 6.571944966122923, 8.547183665594849, 7.473995162875985, 8.879902736795254, 7.957722258151804, 9.292010353074367, 9.989534966411613, 7.737363495851887, 6.845568836119702, 8.981189548477822, 7.612392534974124, 8.72142136521589, 5.395089376824843, 7.364219161525932, 6.727400209240946, 6.123065782359484, 8.00469342288095, 9.217753995104315, 7.482754327344144, 5.982746921992949, 7.565787731966314, 7.62423409700392, 7.953105077375218, 9.729174484509429, 7.173076087132787, 8.092955316186691, 7.685622098792884, 6.677182441966579, 5.910984482008669, 9.135849938540927, 8.043489456944014, 7.7626862812564, 5.474339235270639, 6.966963360673835, 6.861549758839532, 6.742956895868598, 7.664568307114971, 7.710724496034348, 9.435872121244195, 5.936069354600976, 7.776332933398961, 6.5268498880727215, 6.036250955999785, 6.078944244051717, 6.709997709270294, 6.1584608938677174, 5.423060175386657, 8.190868214972877, 6.519586486769419, 7.079455374824206, 8.279982791988205, 9.483779303281262, 6.657745531027846, 6.537360427442257, 8.201429782266016, 8.681957167558688, 8.424265808454738, 7.162886013996535, 9.532281127773695, 6.726542148547179, 9.346198450389041, 10.61059261678538, 7.523942486195809, 9.74094793439675, 8.758048432695627, 7.565901899282078, 7.606596839387631, 6.342056774953504, 6.662866632358967, 9.571574907847253, 6.419377220878184, 6.0948494427310544, 7.801866246737841, 8.498661111158714, 8.637575188381325],\n [15.47237529007874, 15.121920558377452, 15.734468656753677, 15.902123677655107, 16.14164915023064, 12.984558881159014, 12.156157481906165, 12.230702560681928, 14.126056699083271, 12.890215032269717, 11.400863736589566, 10.942808490696466, 13.02780033729369, 11.428769940722177, 
10.93785078187039, 10.670345343649261, 8.811132493201097, 8.913502382017644, 11.11064772838931, 9.087542163495932, 8.76604478113914, 10.41579057791804, 9.775088696223651, 8.921462555617842, 8.821401703152448, 9.13229418127733, 9.110627990719586, 7.84784612485433, 9.3089240448395, 8.965189265476615, 8.540753795369884, 8.10537213523649, 8.780767058671406, 9.062713081061041, 7.821457145502602, 8.150268040108793, 8.300125020769297, 8.8605264174679, 8.949460405068477, 7.335061085982462, 8.107558925250586, 8.082393369472355, 9.421629979617803, 10.46901593550262, 9.757299614512625, 8.083929750601031, 8.23299662293339, 8.227109925951304, 9.511416262144254, 8.61334586525843, 7.523771817565371, 7.907270331565664, 8.562934589884971, 8.78644863441816, 7.7988906316225615, 6.741492434806218, 8.351690426302623, 7.639616581069282, 9.595872003444136, 8.272931183667156, 9.29933451723061, 7.949202666851371, 8.611141298820021, 9.899842853248867, 9.424413229064463, 9.194373232065193, 9.170300594355348, 8.458104380320453, 10.032953595224555, 10.002689748187064, 8.570718518321954, 8.367468058026205, 7.67792959736898, 9.058569552899293, 8.882101680515294, 11.816910226662275, 7.6093789351437495, 9.804398815835606, 9.583985101252239, 10.252096737104425, 8.92761644756087, 7.231768655190682, 8.261148494147937, 7.8546457917766945, 8.743806945228757, 8.560116867691729, 12.31231136799699, 9.914307189247303, 8.23473708210795, 12.790985229738073, 8.723409821483871, 9.574578833342542, 9.43314365437812, 8.257361610618224, 10.317139171432942, 8.792780852629525, 8.012285402720918, 8.186707035655202, 9.14830158692532, 10.329910517806821, 10.077502380358826, 8.047962420640605, 7.905011616649968, 10.688717726534126, 9.044463829179614, 8.934741961408912, 9.432926689630838, 8.748470178407437, 8.783214706392013, 8.75460034069625, 7.822045220718941, 10.703837579075957, 8.621767934013137, 8.47771836640275, 8.99590810342142, 8.068886681309797, 8.341964114461966, 9.591314804401238, 8.254469944154078, 
7.517449049776807, 9.075694805580456, 10.082878788015295, 7.922099697777529, 9.118471067299403, 7.898774274474209, 9.871832773252333, 10.331625820707973, 8.101229482601779, 9.762446698992838, 7.599721926109046, 9.260848245633616, 8.179214841858181, 9.048337500366483, 7.662492743842515, 7.239308519880746, 9.35336277573206, 10.490436834696158, 11.649782248744323, 7.989766528161437, 8.233244528084304, 9.300968519788373, 10.71737996880339, 8.0408218636835, 9.770330980262797, 7.6918735820409205, 7.090466724110408, 8.551474884827698, 8.52817717581957, 9.485064453599328, 7.276638355421168, 7.57449748264411, 8.102701445686362, 7.836847342998863, 7.843167313166515, 7.525836823040023, 7.740825803320405, 8.713830047685784, 9.792171749335626, 8.016558659294292, 8.610337193959802, 7.686509048710983, 9.516502085468327, 10.450471221322273, 8.348033287161595, 9.556392602770192, 7.781825021112967, 10.923251038424292, 8.470613674900342, 10.067497385232187, 7.920456182927356, 7.851225729254214, 8.613958357779168, 9.104481812711125, 7.897823300468376, 9.749845856935135, 7.789243080732912, 8.150814651554468, 8.602292362184084, 9.985818601955131, 11.159751382360108, 8.36451358706994, 9.480154582946545, 9.854160281233208, 7.19064442345934, 8.727376333325964, 9.522816918170163, 7.5008887006696625, 8.092487273786547, 9.071248041206662, 9.40282856897173, 9.080910590540642, 8.849019511708898, 9.133735811641197, 8.018770621603759, 13.09527537765517, 9.040053788870111, 8.609976156260206, 7.747228134046389, 9.16401223484431, 8.970995373059257],\n [20.683005333450055, 18.267736682905213, 17.175032848509982, 16.60911631788564, 15.33279020706126, 14.2215223516022, 14.088247479421296, 13.820287981222355, 14.33759390378888, 16.57775253032724, 17.252324298790555, 13.833293897159491, 17.138546523751703, 17.701345358302802, 16.730840546039712, 16.897332184731702, 16.29854767077804, 15.66586265169082, 14.558544323068794, 15.271527791695647, 12.42172447160138, 13.281138652182447, 13.458847690806772, 
12.407042484908006, 10.50202882930347, 8.190709251129691, 7.478885795748316, 8.932342672444413, 9.277858272805942, 6.894662867255597, 8.807467146265735, 10.436447157552362, 8.233855471666104, 8.522757113852792, 7.338482181906378, 7.181966997371922, 6.736307850425244, 9.118641191802494, 7.534461812252117, 7.021232235720462, 6.7671043381630795, 5.965023965107337, 7.6146350584072175, 8.883849174158678, 7.466796517104723, 9.691455128539971, 3.851496787446084, 8.389158335768672, 7.799112755977195, 6.134688002500012, 7.708104093147763, 7.993215009150312, 7.143972444646884, 8.793013158747055, 6.366706547986155, 4.878618358616566, 6.83256986969982, 7.414474022264097, 6.238341518383238, 9.698000520854874, 8.30588951462028, 5.417520854991893, 9.804951799923835, 6.987375710696394, 6.852768985889441, 5.772803169960699, 6.464898717898582, 7.065771876470555, 6.658555366228665, 5.891064171143182, 6.332885020995347, 7.673825566228463, 6.114835766670566, 6.684139361808645, 8.980665216483798, 7.580936457002063, 8.726756575972171, 7.39548455067962, 7.913542696523183, 8.203722901269103, 6.218932974966063, 6.568534371845129, 6.866931322606477, 7.219306920984255, 5.860022108974549, 7.172460379203403, 5.867755561139791, 6.50357450769631, 8.133988285227398, 7.626390659236641, 7.865328181651307, 7.893275069492033, 6.9848105563181075, 5.855869013839226, 6.550972479964267, 5.696300435726472, 7.607125363737651, 8.26236962638436, 8.16128025460234, 7.784832070949528, 6.249083984112179, 8.380552172663155, 7.091722074157455, 8.179251176781678, 5.475054475621989, 5.673672358294827, 8.355988106918629, 8.119036543590228, 6.433107884177927, 6.131493887625068, 8.388165922333409, 5.410761095042206, 6.66015666521853, 6.72655509905609, 6.849279948729544, 8.649846850543616, 6.426090517005289, 9.088055649382042, 5.620559799850784, 8.358840568775085, 5.848266295028982, 9.960780305017563, 8.990225084829786, 8.028416077095914, 7.090799491178961, 7.672201533949532, 7.918028847854404, 5.115240423796817, 
7.797132308154474, 7.8177755733567125, 9.569372167049185, 7.932459801661724, 6.556512352860484, 6.978798208359354, 7.205724112192511, 6.357070839737896, 8.160568371498796, 7.83652865809797, 5.361529289332873, 6.132988451177973, 5.315110775108044, 7.830817917143794, 7.374843960298627, 6.3410116947039015, 8.594900011777003, 8.111893403771079, 7.231072551951223, 4.86169966620579, 9.71278444604642, 7.359797884149192, 7.508125654493484, 6.939272181948796, 7.329666423222715, 8.507522574627632, 6.757625314343968, 7.459286909154, 7.614089808644679, 7.359928546111833, 7.816163623814883, 8.05977502973244, 6.376558608300571, 7.030146713320054, 8.14434945576826, 7.357057707938525, 6.148924789501268, 6.891055666494754, 7.0345404664271864, 8.0134582464101, 8.231510376752711, 6.426786250899349, 5.573985388728724, 6.320284083257476, 8.079385962833218, 6.828510458185883, 7.1651698744635075, 7.451355302779589, 7.472398478325829, 6.26357442005399, 6.546009763086556, 5.591878251762002, 7.5040406848509456, 7.8941600605558415, 6.885625409184911, 6.874799861732833, 6.74644371833238, 8.412316342331914, 7.475877859266048, 6.928096800605699, 9.835571162917416, 8.661415060347343, 5.283641840552192, 9.019498325907291, 5.865331497049222, 7.1932358665570355, 7.30503683666938, 4.663862168489629, 5.11743679618586, 10.70800811171637, 7.491798466426066, 7.165062071396071]])\nQCNN_pca16_FASHION = np.array([[15.835578057472704, 16.094499711923053, 15.781128756712544, 16.606914045934715, 15.579901912166468, 15.80259579476616, 15.880206667978085, 15.093962443308323, 14.477600324084023, 13.562250738502538, 13.90726874051306, 14.948828654598247, 13.8821519857618, 14.487904644975616, 13.380870254033963, 12.997376259647888, 14.835336246410264, 12.147875737631729, 12.718309281813475, 13.622099115599836, 13.334100417508953, 13.803288931281323, 12.40381171069822, 12.374368747957499, 11.982369338377097, 14.058434616988956, 10.774410601814594, 11.302168680574121, 11.302483024324378, 11.157402452498497, 
10.919930614377643, 10.747950419779151, 6.624600712356559, 11.595747634249578, 8.870556160917682, 11.485168932064235, 9.084920497555563, 8.298629242677857, 10.48959822292189, 7.912963906612884, 7.203717270862793, 8.284099424621882, 9.71495859884348, 8.848090655598332, 9.980634379607913, 8.946205530742064, 10.527879126069877, 9.713679577857874, 7.059646576849257, 11.265857510114754, 6.70851805874549, 8.386281494626347, 10.192805944936259, 7.924344705199933, 10.146990199471709, 8.140791144937099, 9.57388231949292, 6.645576286654185, 8.76557702604543, 8.178185869528457, 7.370487420738141, 7.574402155180798, 7.835517795658859, 8.25763972686547, 9.862443403114362, 10.570278036178788, 6.1936489438408024, 6.985936198359627, 8.818788832253462, 10.776367994637912, 12.668335661529504, 9.954209527943767, 10.75095790430579, 6.48536803964386, 8.904483242870501, 8.726351352998293, 8.908628291845675, 10.531547108042862, 8.465741948377675, 6.9079222757859275, 7.135871572364123, 9.550659019602401, 7.764593732485292, 9.391206058422387, 9.136770230467945, 6.391256505479871, 9.431429573043555, 9.88003078247211, 13.113661651734665, 9.394949117125966, 6.790250549953463, 7.96248130623432, 10.187006933259243, 10.112923677482195, 7.257247683816389, 11.296913504590561, 6.040167914767909, 4.89512402884543, 9.191186771710091, 6.7304425926323255, 13.494683113689277, 7.768412138123139, 13.342840112823682, 9.164068498869817, 8.456496177919957, 9.995573610762376, 7.718092587762777, 12.88472711300399, 9.421323552490763, 11.438242285365941, 9.166239399950308, 9.103938221795788, 10.467718805490652, 10.677051892394793, 8.94612956754597, 11.951836328854235, 7.205156897620786, 9.342176508193363, 10.39429894226471, 7.227103185182163, 9.491655237206876, 8.177810749932974, 10.707653830697902, 11.064543601271358, 9.869123075624042, 10.469811306209765, 7.589940747374616, 9.43393501841338, 8.318213153429486, 9.00148241510562, 9.112794178807732, 7.80209669655523, 5.707250597116932, 9.421050408117106, 
10.246608113741676, 8.017371992026156, 9.877205333648394, 10.204169265135125, 6.765556485028936, 9.895107482730168, 7.165799691567091, 8.076738584906439, 9.470086716597978, 6.810724868201463, 6.830205925391157, 20.56586660466806, 10.19514208623488, 8.23649941380675, 8.486155435803504, 8.054952848957308, 9.418609634148854, 11.107625503706872, 10.54138829975945, 10.778808492732214, 7.212193639045053, 11.829429562913832, 9.546346067351271, 8.259227734051104, 11.498970688490154, 9.563812507653592, 10.230251263989588, 10.932324823496984, 11.719048279683838, 9.576545479578458, 10.090381888614717, 6.872110547120437, 9.101473042629248, 6.987684326335396, 13.358116981019545, 10.372075448751419, 8.725125033167764, 7.9412093988983194, 10.34698511557538, 9.21828061364868, 8.728266970471347, 9.578355916143424, 13.009136910556581, 12.15119303684984, 11.169746498486504, 8.091942268666317, 8.99204232766467, 8.290926144456131, 8.147941040954299, 12.412616475941928, 8.766373911672984, 8.562814728166536, 7.460908483139848, 10.018084995636382, 7.922262803574408, 8.906114040913298, 8.94470762894532, 8.075181897902088, 13.973533998795855, 7.363904443282757, 6.812062370769613, 7.559692235414283, 6.836889827187756, 9.88239432212883, 10.459818996771096, 9.905940422926713],\n [15.684648105072474, 16.319093477472403, 14.85393289013616, 15.370941794109017, 13.746335173393325, 11.708500915151387, 14.960903821753917, 12.102671503574063, 10.169830108764483, 12.707899014232177, 10.295598908573352, 11.5294925550275, 10.3940118950235, 8.615156897668426, 13.998643865479764, 10.894923482874344, 10.653818872471732, 8.84472542935161, 10.331838299975445, 11.170290166813595, 14.83053315235911, 10.104983674304016, 9.734194170259036, 8.879250753693073, 11.254449869084487, 9.411572364368217, 13.508833008992433, 13.700438229371445, 9.588984948195257, 7.293510483301357, 14.045995270710518, 14.094643061910611, 11.06569658683474, 10.90570316988258, 14.927942338726947, 11.398443009373814, 7.252804512368241, 
7.624672983673706, 13.779719739067646, 11.175889661646181, 6.379356416044427, 7.559069478026385, 10.539286376090004, 7.328050677329964, 12.164782739058218, 12.327000794310598, 10.024627364820859, 12.52178787522241, 11.563428496152335, 8.346766927822483, 9.62776066110205, 7.780960717994579, 7.2326744005393975, 14.878942847051427, 12.40807314798496, 8.308361091526242, 12.69242258274123, 9.10395407196278, 8.972486487308055, 12.199607897537982, 12.093748879768487, 10.459094670547115, 13.54208189002777, 8.939377097173592, 10.283503996462532, 7.992470815413437, 9.879817192240468, 11.852355677775062, 8.263484839788795, 11.139171868727045, 8.582851637711565, 7.688760947418979, 13.16017479445809, 6.66364964692711, 9.545639064218681, 10.880441371828681, 16.04266091879192, 7.336457701116979, 5.437501549079247, 10.396313804673118, 11.192198183235316, 8.909207415714858, 10.351211049297218, 14.798728163585603, 9.751122488414156, 9.985660225114641, 8.74055156295637, 14.061404038879939, 14.318045881737564, 12.055952507053345, 13.763540752917562, 12.62518299471035, 9.697879140221065, 13.1391524849862, 9.28656191864442, 7.914869726620654, 6.0999482578766, 6.177410305318499, 9.349303813744811, 8.097426901652458, 6.457606729943572, 7.497144106428058, 7.7849642682528835, 7.590110548635438, 10.688805260983383, 7.512280507542261, 7.881341511090873, 10.078061221353458, 9.81563805735599, 11.860282051474861, 10.893753174664756, 9.25034173160697, 7.5607651199971615, 12.438754415398847, 11.136635879577323, 11.071376123530941, 10.987520868691732, 9.549169930502716, 6.767147721015631, 10.955589704107755, 15.728865005406481, 11.29450976485513, 8.401104028601553, 14.743304644437716, 11.062972989724955, 8.664360777017947, 9.414484449189676, 10.233172256600744, 10.148758642445324, 11.230157491982862, 10.996707968799303, 9.7495499560665, 11.050476818380652, 11.63820030498926, 9.168508688422985, 7.068291807903766, 10.614490188232795, 14.082565206360925, 11.326792771793412, 15.883881123919545, 
10.411489282864672, 8.976110452892682, 10.770188128071515, 9.59104620910803, 8.392906672209808, 8.736944142846209, 8.200211964039518, 9.545913286968512, 14.884669820444572, 13.204229170454688, 12.447257418138104, 11.533408780073781, 18.906757705026997, 12.737214161970384, 10.493580392129186, 11.001236038673087, 12.897085638024254, 10.81422659092342, 7.930345294136909, 11.690297530289905, 9.792844154138006, 10.922421880072184, 12.426771686522823, 8.704494283673766, 10.137940601393991, 11.710437847876689, 6.470855697732518, 9.12267556479345, 10.956413111332422, 9.613224281955008, 13.572429826883184, 10.099665424531393, 8.231481898316046, 7.517487973645029, 9.048538548624766, 12.2968529044401, 12.722643475427882, 13.056227808639612, 13.58651775251408, 12.6169751124243, 11.249273814749893, 13.04093468623825, 7.066125701579749, 13.447236100781847, 11.652721912402269, 11.180775409066355, 10.08290908138505, 8.44317898092026, 12.764215279842535, 8.12837715041915, 9.728055180758277, 10.72031017126853, 12.79204681735087, 8.778835931140025, 10.692884579462865, 8.81068279206634, 13.584721473617284, 8.083897171152715, 7.274662990849004, 6.644168858421618],\n [17.3763402158999, 17.465381683975014, 16.693580382002107, 16.5494405826277, 15.812431726209171, 15.970261074983569, 14.492570027836585, 12.707863682883403, 12.135437373864963, 10.544118187722935, 13.108040092531091, 12.620547284302756, 10.766403031062987, 9.98137271949238, 11.281551458608378, 11.255237384695075, 9.006537592577002, 12.580419254497778, 9.059446739268006, 8.663204933274756, 9.22396353666398, 7.674410599971726, 9.549922785361291, 8.604652187224106, 9.735299961370183, 10.061220743440476, 17.312683664880502, 13.050297120837508, 24.713270092770998, 8.613622187260061, 8.5944472431004, 8.675417771599712, 9.144167451884883, 8.24028947209169, 8.77292591944392, 10.04538372230688, 8.590389860380721, 8.533546580122959, 9.923985353113139, 7.147118874882419, 11.406879884363546, 7.347940752162352, 8.294617655442016, 
9.386313539577898, 9.217575759844342, 8.709196914744549, 8.868133148405615, 10.659756794829711, 9.376359625413485, 6.8081827267415145, 9.483586453250767, 9.319716097218912, 8.729800648382916, 7.7435890099514815, 9.682449511692615, 8.590680534425903, 8.510697217932286, 12.890933108086557, 10.228244158792151, 11.396695612034693, 8.684636352103583, 10.217305512024812, 9.937927350433604, 7.66408972084202, 8.528613040058096, 8.156978432506191, 8.989390591313246, 8.879240912479501, 10.695322334614442, 8.889878429385483, 11.950886704001222, 8.475606309239735, 8.308708228374034, 8.350053425953272, 7.058050173138914, 10.242890467856771, 8.664876401480393, 5.9913073402192385, 8.649908663449317, 8.052262249098444, 11.192757816538343, 10.066776014012751, 11.01587464012332, 9.921612254802868, 7.354681472672452, 9.041918687816407, 9.28032765886085, 9.219980700011291, 8.801012183221125, 6.645051896590689, 6.4032357403259805, 11.91204409198507, 7.138786768164556, 10.545791358404292, 6.748276615033419, 7.56033195088718, 11.52383776705684, 11.787371331136981, 9.158476078356747, 10.539331205341407, 9.02574530127472, 9.552037104848818, 10.21852363954266, 9.30117505037342, 9.101927488238706, 8.66205065426535, 9.331205733619392, 7.856206364495721, 9.66402073772104, 9.368278574870972, 11.19303241742949, 9.43125978476612, 8.910473249364337, 8.044953555850793, 9.387228140922355, 7.7720300297566745, 8.934320491100479, 7.831657589667519, 7.464291216346058, 12.673969017592785, 8.244700608084102, 9.212999017241621, 8.6944618697249, 8.566637169671695, 8.668876474592196, 9.97448187908918, 7.458204311491999, 7.953072709490485, 9.383046481178509, 8.804838473977448, 7.71976300755978, 9.442322477912517, 8.496304768004336, 8.368884464393588, 6.167135657602622, 8.369300017783795, 6.633508067948844, 8.894825538435457, 10.298315186275913, 9.283520309149251, 7.96586104229123, 8.533394823830207, 8.380027341373053, 9.679845187626208, 7.755918530896894, 7.620099781922749, 11.310145435856542, 
9.942570648657178, 7.655835277622718, 9.081679773588569, 7.732196624144153, 5.252372628014775, 7.32746648251859, 11.5041363496484, 8.756641527193167, 8.160399437272542, 10.171135065059358, 9.45960602213474, 10.988472590368216, 10.680118456214156, 7.913404917510096, 10.877655245372615, 11.853023674282005, 7.586104901564608, 7.188530667526484, 12.106209522127424, 10.280272449409768, 9.041884147737974, 6.82162157045741, 11.428480760701584, 7.7632480042035406, 10.673004820733588, 8.674732846453859, 9.398743426819067, 8.333325395210675, 8.315290944754508, 7.89036971904872, 8.668543545123253, 8.5402103501377, 8.78336348359503, 8.504475499768839, 10.415028154761302, 9.226544240030776, 7.181668438443649, 8.922850841378333, 10.601374359496207, 11.999989441166043, 7.816561328093369, 7.479117996399882, 7.4294629359150965, 7.33314750149915, 10.627727659449642, 8.16325624076094, 8.637943431213298, 12.163922318128334, 9.920861664533366, 9.425288226043333, 8.944781677085096, 12.582666793265604, 10.141553690333312],\n [16.65513602819292, 15.823981589458242, 15.422252163469551, 15.258143092605039, 14.646413921434366, 14.354066540362528, 13.13743892158798, 12.847811595801845, 12.723155433396835, 12.210958315743909, 12.68974714606964, 11.735741112378935, 10.941097341533865, 11.321134653770866, 11.15048917573322, 9.48651443875823, 12.029192705162776, 8.872267491723171, 8.615034929106166, 7.951440820104183, 5.4430580476616015, 7.648037679752995, 7.151478699527949, 13.921436263662926, 7.3654042937514435, 11.201710073181319, 7.719664134559113, 9.103684868725257, 6.940987299850096, 10.777022325518306, 8.316624722863708, 8.501251029553755, 9.40263790824705, 8.343384089298304, 10.130707416611603, 7.171463534933684, 9.946790718150766, 10.519015515554553, 12.15604776066794, 9.906513527624636, 9.015464416935332, 7.894256853803812, 9.547193670575602, 9.933481369866838, 5.945803325621449, 9.094305054829103, 8.881636608479033, 8.245375438759616, 8.575874380211774, 9.452000743214155, 
7.605391902303943, 7.378027443995133, 10.196631033761323, 13.449246962316685, 9.288733589457452, 9.126301687840753, 7.382629612365059, 7.927929939672349, 10.353192417188904, 8.71150535642135, 5.706528315653079, 9.691396272384887, 8.32099219329515, 9.859202115142319, 10.637323024020302, 9.274664782296187, 8.177645531458092, 8.337593632715274, 9.167767333476462, 7.969372969360523, 8.08543935953597, 7.87278126848897, 8.64390266529729, 6.636612587938079, 8.63481956991534, 12.116362627814055, 10.648343396677932, 8.569140486227951, 8.161534579348944, 6.196054356586272, 11.523692905129106, 5.493795005501367, 7.77217197803562, 10.395134924713334, 7.768964988864032, 10.169430923831467, 14.997013023415523, 8.490971119632414, 10.72958084061941, 8.618234204680217, 8.227626444084725, 10.668271846805217, 8.574391915817799, 10.011547979349096, 11.952868577610582, 11.043082092042324, 6.97371540613131, 9.744282397830899, 8.774246516546263, 9.002833735948233, 11.008402095404895, 9.519068950709832, 9.220442341817026, 12.348990491853966, 12.44359643406186, 8.911333184160174, 7.141516777054673, 9.17365017323204, 7.449608410068299, 9.116046221834365, 9.659628503282596, 10.974615380540818, 7.380519604144077, 12.866420520553927, 8.567933639093267, 7.277701372901979, 10.303240331093384, 8.628201122870047, 9.110736265254328, 8.27136849988656, 12.707438023287969, 7.976085571504812, 8.553684847073729, 9.637782303371957, 10.172893908817935, 8.190127652125124, 8.280274484192491, 8.241502590932113, 9.579871584167188, 7.989303350421207, 9.827839160366663, 7.745356402634669, 8.780519526157004, 11.04218987259776, 9.391658588643981, 8.780933356683297, 8.717319759603924, 8.48869602273149, 8.346725152203918, 7.654948860540628, 5.742559718495124, 10.905428111225822, 11.640652677031763, 9.787754779071095, 8.751553989042103, 8.302296599071967, 9.089792277118132, 9.973971206404876, 9.672431442820137, 7.754158722217721, 10.910693306152362, 10.95965656310865, 8.475280727959216, 9.313799711584283, 
9.632528600539805, 7.704256751095142, 7.246621632104317, 7.044471305707654, 8.146041739427515, 7.799983713527615, 7.847964177000058, 11.376737709349339, 11.787589196385706, 8.405982422181351, 9.466331711956387, 7.3542577659456825, 7.90111468211505, 7.4959686308495925, 11.560738266049325, 8.05893921460889, 8.073402399850586, 9.591198955996918, 7.8636082929299045, 10.415309064767584, 9.296592231605388, 7.696981134830109, 6.868420329320034, 13.716569307694842, 16.078606281085115, 9.196563348184926, 10.404183216369914, 10.903686180005451, 9.247560546005579, 12.984419276190382, 8.529952043738241, 9.088912195200233, 8.808552458718236, 10.784336513646437, 8.073979623173667, 8.617790039585959, 7.628613470124751, 9.890780456425302, 11.405321599593954, 11.511484790385394, 9.597632139843556, 7.4425721905664215, 7.858902896557257, 10.625917086231654, 8.553663873625531, 7.33996887725474],\n [16.11327258158127, 16.00059640041787, 14.620337901871778, 16.720538105543525, 13.977390220888859, 12.51852096936117, 13.805974998049399, 12.117558789569031, 12.042935430058465, 10.8024256895446, 11.865676492967589, 11.720447835397534, 13.051874693107735, 9.465342443779997, 8.214977443023159, 9.800711465383182, 15.838942987483883, 9.610058918590786, 9.418045037744086, 10.298069079501342, 6.996951591240167, 11.566778008222107, 15.14220280156793, 7.790332750187694, 10.7129601654711, 8.213186447176877, 9.323899187667436, 9.564265722060128, 13.846977763301776, 8.345976667882741, 7.68943191270598, 7.208786148167796, 8.569368254790033, 12.383388478801553, 10.736120912141306, 7.704998764545269, 11.162877791789988, 8.77194535691064, 10.364324769606093, 9.712368374025225, 9.506329829445304, 8.423615136469511, 8.077187497097507, 14.73981083031366, 10.919927045028066, 8.231525461953781, 9.176084725400788, 11.685326215035905, 7.902501306676448, 9.518247192059615, 9.667356930166251, 10.175080793292967, 9.59479890310814, 9.003077265739286, 8.069603520170933, 9.048542624167686, 10.277167181963932, 
11.397083899216504, 6.259733891339257, 8.747925250655374, 7.3492613900899055, 8.201718385741241, 9.105589384560474, 12.226496797791173, 8.686335249981163, 9.5801775660523, 10.521077184957973, 9.764747891437203, 9.510947252318005, 8.477507956065285, 9.845029946795046, 7.462311478093125, 10.27979235371629, 7.165917360648856, 14.336554990097907, 10.502270696826676, 8.999526152367874, 9.205230417861957, 11.492290594536044, 9.909399714625318, 11.44685377752685, 9.021005135404133, 8.732962673205561, 7.061366482492443, 6.901540498784995, 10.880795190630238, 7.757645794538955, 10.418272006186584, 7.578652760919779, 10.13090673771482, 11.898893511948026, 9.238555219896325, 10.741194017683949, 11.119931540519842, 8.23185317767386, 8.720564378941724, 6.697800931689298, 9.855694447904444, 6.972577608447699, 6.599268849826786, 8.239548071537872, 8.89586130931171, 9.472121192694436, 10.54678601608819, 5.6498494004278985, 9.270965374863076, 10.285799990098965, 11.02495284765393, 9.001033027276726, 9.264061639848517, 11.042795732999949, 8.444376163141998, 9.887177495691907, 7.882491521088401, 10.716944586108632, 11.325951781944655, 10.493639693683603, 8.478578226194402, 10.29567045223303, 10.957657792797058, 9.272722331806012, 8.163403149819501, 7.0616117533532785, 8.643798415424252, 6.111881519333135, 8.809373587252047, 12.392004588143251, 6.364369112179225, 7.888487099004201, 8.015182282065895, 10.030028716471113, 8.742634921676869, 9.400439112863438, 9.000186568429907, 8.388998719377964, 10.66083738101429, 9.58453351194431, 8.964562682195387, 11.192760786845557, 7.992138322201374, 6.809417199501597, 10.42324705119013, 12.14905068118972, 8.110846391800502, 8.96550204567695, 7.5126588223032025, 7.925510007250605, 8.327912317358743, 10.049615766989783, 6.000887525013875, 8.904128564839644, 7.05036267970005, 9.31903564178668, 10.224230624044436, 8.180820151626763, 8.66623687010023, 8.072048068249721, 8.215128778865491, 9.22775619427417, 11.390872771320925, 11.430101718782783, 
9.274129685825036, 5.5681232537793095, 6.990895232089772, 7.039798108618637, 8.333993875035286, 6.585170557725001, 8.851221077518417, 10.664800408539884, 8.389573003946674, 12.982994860103693, 7.534352806666141, 8.759155207077598, 8.421770044984868, 8.827205737111997, 6.936369660568447, 7.395694728886182, 9.126252218870023, 7.271645996621227, 7.685863577040834, 8.071927770855446, 11.175564762599665, 6.7133497367474675, 10.29121862131937, 10.420355593755843, 8.59880484432011, 8.489355850287682, 9.987958707789451, 7.018553485563951, 10.794832214305016, 11.08143195504709, 11.783973030823343, 8.902849850666286, 8.50666038798698, 6.52454624614959, 7.335885402990751, 7.799905121827317, 10.121116465519925, 8.464009303102694, 8.258384374410458]])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_pca16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_pca16_FASHION = np.array([[15.835578057472704, 16.094499711923053, 15.781128756712544, 16.606914045934715, 15.579901912166468, 15.80259579476616, 15.880206667978085, 15.093962443308323, 14.477600324084023, 13.562250738502538, 13.90726874051306, 14.948828654598247, 13.8821519857618, 14.487904644975616, 13.380870254033963, 12.997376259647888, 14.835336246410264, 12.147875737631729, 12.718309281813475, 13.622099115599836, 13.334100417508953, 13.803288931281323, 12.40381171069822, 12.374368747957499, 11.982369338377097, 14.058434616988956, 10.774410601814594, 11.302168680574121, 11.302483024324378, 11.157402452498497, 10.919930614377643, 10.747950419779151, 6.624600712356559, 11.595747634249578, 8.870556160917682, 11.485168932064235, 9.084920497555563, 8.298629242677857, 10.48959822292189, 7.912963906612884, 7.203717270862793, 
8.284099424621882, 9.71495859884348, 8.848090655598332, 9.980634379607913, 8.946205530742064, 10.527879126069877, 9.713679577857874, 7.059646576849257, 11.265857510114754, 6.70851805874549, 8.386281494626347, 10.192805944936259, 7.924344705199933, 10.146990199471709, 8.140791144937099, 9.57388231949292, 6.645576286654185, 8.76557702604543, 8.178185869528457, 7.370487420738141, 7.574402155180798, 7.835517795658859, 8.25763972686547, 9.862443403114362, 10.570278036178788, 6.1936489438408024, 6.985936198359627, 8.818788832253462, 10.776367994637912, 12.668335661529504, 9.954209527943767, 10.75095790430579, 6.48536803964386, 8.904483242870501, 8.726351352998293, 8.908628291845675, 10.531547108042862, 8.465741948377675, 6.9079222757859275, 7.135871572364123, 9.550659019602401, 7.764593732485292, 9.391206058422387, 9.136770230467945, 6.391256505479871, 9.431429573043555, 9.88003078247211, 13.113661651734665, 9.394949117125966, 6.790250549953463, 7.96248130623432, 10.187006933259243, 10.112923677482195, 7.257247683816389, 11.296913504590561, 6.040167914767909, 4.89512402884543, 9.191186771710091, 6.7304425926323255, 13.494683113689277, 7.768412138123139, 13.342840112823682, 9.164068498869817, 8.456496177919957, 9.995573610762376, 7.718092587762777, 12.88472711300399, 9.421323552490763, 11.438242285365941, 9.166239399950308, 9.103938221795788, 10.467718805490652, 10.677051892394793, 8.94612956754597, 11.951836328854235, 7.205156897620786, 9.342176508193363, 10.39429894226471, 7.227103185182163, 9.491655237206876, 8.177810749932974, 10.707653830697902, 11.064543601271358, 9.869123075624042, 10.469811306209765, 7.589940747374616, 9.43393501841338, 8.318213153429486, 9.00148241510562, 9.112794178807732, 7.80209669655523, 5.707250597116932, 9.421050408117106, 10.246608113741676, 8.017371992026156, 9.877205333648394, 10.204169265135125, 6.765556485028936, 9.895107482730168, 7.165799691567091, 8.076738584906439, 9.470086716597978, 6.810724868201463, 6.830205925391157, 
20.56586660466806, 10.19514208623488, 8.23649941380675, 8.486155435803504, 8.054952848957308, 9.418609634148854, 11.107625503706872, 10.54138829975945, 10.778808492732214, 7.212193639045053, 11.829429562913832, 9.546346067351271, 8.259227734051104, 11.498970688490154, 9.563812507653592, 10.230251263989588, 10.932324823496984, 11.719048279683838, 9.576545479578458, 10.090381888614717, 6.872110547120437, 9.101473042629248, 6.987684326335396, 13.358116981019545, 10.372075448751419, 8.725125033167764, 7.9412093988983194, 10.34698511557538, 9.21828061364868, 8.728266970471347, 9.578355916143424, 13.009136910556581, 12.15119303684984, 11.169746498486504, 8.091942268666317, 8.99204232766467, 8.290926144456131, 8.147941040954299, 12.412616475941928, 8.766373911672984, 8.562814728166536, 7.460908483139848, 10.018084995636382, 7.922262803574408, 8.906114040913298, 8.94470762894532, 8.075181897902088, 13.973533998795855, 7.363904443282757, 6.812062370769613, 7.559692235414283, 6.836889827187756, 9.88239432212883, 10.459818996771096, 9.905940422926713],\n [15.684648105072474, 16.319093477472403, 14.85393289013616, 15.370941794109017, 13.746335173393325, 11.708500915151387, 14.960903821753917, 12.102671503574063, 10.169830108764483, 12.707899014232177, 10.295598908573352, 11.5294925550275, 10.3940118950235, 8.615156897668426, 13.998643865479764, 10.894923482874344, 10.653818872471732, 8.84472542935161, 10.331838299975445, 11.170290166813595, 14.83053315235911, 10.104983674304016, 9.734194170259036, 8.879250753693073, 11.254449869084487, 9.411572364368217, 13.508833008992433, 13.700438229371445, 9.588984948195257, 7.293510483301357, 14.045995270710518, 14.094643061910611, 11.06569658683474, 10.90570316988258, 14.927942338726947, 11.398443009373814, 7.252804512368241, 7.624672983673706, 13.779719739067646, 11.175889661646181, 6.379356416044427, 7.559069478026385, 10.539286376090004, 7.328050677329964, 12.164782739058218, 12.327000794310598, 10.024627364820859, 12.52178787522241, 
11.563428496152335, 8.346766927822483, 9.62776066110205, 7.780960717994579, 7.2326744005393975, 14.878942847051427, 12.40807314798496, 8.308361091526242, 12.69242258274123, 9.10395407196278, 8.972486487308055, 12.199607897537982, 12.093748879768487, 10.459094670547115, 13.54208189002777, 8.939377097173592, 10.283503996462532, 7.992470815413437, 9.879817192240468, 11.852355677775062, 8.263484839788795, 11.139171868727045, 8.582851637711565, 7.688760947418979, 13.16017479445809, 6.66364964692711, 9.545639064218681, 10.880441371828681, 16.04266091879192, 7.336457701116979, 5.437501549079247, 10.396313804673118, 11.192198183235316, 8.909207415714858, 10.351211049297218, 14.798728163585603, 9.751122488414156, 9.985660225114641, 8.74055156295637, 14.061404038879939, 14.318045881737564, 12.055952507053345, 13.763540752917562, 12.62518299471035, 9.697879140221065, 13.1391524849862, 9.28656191864442, 7.914869726620654, 6.0999482578766, 6.177410305318499, 9.349303813744811, 8.097426901652458, 6.457606729943572, 7.497144106428058, 7.7849642682528835, 7.590110548635438, 10.688805260983383, 7.512280507542261, 7.881341511090873, 10.078061221353458, 9.81563805735599, 11.860282051474861, 10.893753174664756, 9.25034173160697, 7.5607651199971615, 12.438754415398847, 11.136635879577323, 11.071376123530941, 10.987520868691732, 9.549169930502716, 6.767147721015631, 10.955589704107755, 15.728865005406481, 11.29450976485513, 8.401104028601553, 14.743304644437716, 11.062972989724955, 8.664360777017947, 9.414484449189676, 10.233172256600744, 10.148758642445324, 11.230157491982862, 10.996707968799303, 9.7495499560665, 11.050476818380652, 11.63820030498926, 9.168508688422985, 7.068291807903766, 10.614490188232795, 14.082565206360925, 11.326792771793412, 15.883881123919545, 10.411489282864672, 8.976110452892682, 10.770188128071515, 9.59104620910803, 8.392906672209808, 8.736944142846209, 8.200211964039518, 9.545913286968512, 14.884669820444572, 13.204229170454688, 12.447257418138104, 
11.533408780073781, 18.906757705026997, 12.737214161970384, 10.493580392129186, 11.001236038673087, 12.897085638024254, 10.81422659092342, 7.930345294136909, 11.690297530289905, 9.792844154138006, 10.922421880072184, 12.426771686522823, 8.704494283673766, 10.137940601393991, 11.710437847876689, 6.470855697732518, 9.12267556479345, 10.956413111332422, 9.613224281955008, 13.572429826883184, 10.099665424531393, 8.231481898316046, 7.517487973645029, 9.048538548624766, 12.2968529044401, 12.722643475427882, 13.056227808639612, 13.58651775251408, 12.6169751124243, 11.249273814749893, 13.04093468623825, 7.066125701579749, 13.447236100781847, 11.652721912402269, 11.180775409066355, 10.08290908138505, 8.44317898092026, 12.764215279842535, 8.12837715041915, 9.728055180758277, 10.72031017126853, 12.79204681735087, 8.778835931140025, 10.692884579462865, 8.81068279206634, 13.584721473617284, 8.083897171152715, 7.274662990849004, 6.644168858421618],\n [17.3763402158999, 17.465381683975014, 16.693580382002107, 16.5494405826277, 15.812431726209171, 15.970261074983569, 14.492570027836585, 12.707863682883403, 12.135437373864963, 10.544118187722935, 13.108040092531091, 12.620547284302756, 10.766403031062987, 9.98137271949238, 11.281551458608378, 11.255237384695075, 9.006537592577002, 12.580419254497778, 9.059446739268006, 8.663204933274756, 9.22396353666398, 7.674410599971726, 9.549922785361291, 8.604652187224106, 9.735299961370183, 10.061220743440476, 17.312683664880502, 13.050297120837508, 24.713270092770998, 8.613622187260061, 8.5944472431004, 8.675417771599712, 9.144167451884883, 8.24028947209169, 8.77292591944392, 10.04538372230688, 8.590389860380721, 8.533546580122959, 9.923985353113139, 7.147118874882419, 11.406879884363546, 7.347940752162352, 8.294617655442016, 9.386313539577898, 9.217575759844342, 8.709196914744549, 8.868133148405615, 10.659756794829711, 9.376359625413485, 6.8081827267415145, 9.483586453250767, 9.319716097218912, 8.729800648382916, 7.7435890099514815, 
9.682449511692615, 8.590680534425903, 8.510697217932286, 12.890933108086557, 10.228244158792151, 11.396695612034693, 8.684636352103583, 10.217305512024812, 9.937927350433604, 7.66408972084202, 8.528613040058096, 8.156978432506191, 8.989390591313246, 8.879240912479501, 10.695322334614442, 8.889878429385483, 11.950886704001222, 8.475606309239735, 8.308708228374034, 8.350053425953272, 7.058050173138914, 10.242890467856771, 8.664876401480393, 5.9913073402192385, 8.649908663449317, 8.052262249098444, 11.192757816538343, 10.066776014012751, 11.01587464012332, 9.921612254802868, 7.354681472672452, 9.041918687816407, 9.28032765886085, 9.219980700011291, 8.801012183221125, 6.645051896590689, 6.4032357403259805, 11.91204409198507, 7.138786768164556, 10.545791358404292, 6.748276615033419, 7.56033195088718, 11.52383776705684, 11.787371331136981, 9.158476078356747, 10.539331205341407, 9.02574530127472, 9.552037104848818, 10.21852363954266, 9.30117505037342, 9.101927488238706, 8.66205065426535, 9.331205733619392, 7.856206364495721, 9.66402073772104, 9.368278574870972, 11.19303241742949, 9.43125978476612, 8.910473249364337, 8.044953555850793, 9.387228140922355, 7.7720300297566745, 8.934320491100479, 7.831657589667519, 7.464291216346058, 12.673969017592785, 8.244700608084102, 9.212999017241621, 8.6944618697249, 8.566637169671695, 8.668876474592196, 9.97448187908918, 7.458204311491999, 7.953072709490485, 9.383046481178509, 8.804838473977448, 7.71976300755978, 9.442322477912517, 8.496304768004336, 8.368884464393588, 6.167135657602622, 8.369300017783795, 6.633508067948844, 8.894825538435457, 10.298315186275913, 9.283520309149251, 7.96586104229123, 8.533394823830207, 8.380027341373053, 9.679845187626208, 7.755918530896894, 7.620099781922749, 11.310145435856542, 9.942570648657178, 7.655835277622718, 9.081679773588569, 7.732196624144153, 5.252372628014775, 7.32746648251859, 11.5041363496484, 8.756641527193167, 8.160399437272542, 10.171135065059358, 9.45960602213474, 10.988472590368216, 
10.680118456214156, 7.913404917510096, 10.877655245372615, 11.853023674282005, 7.586104901564608, 7.188530667526484, 12.106209522127424, 10.280272449409768, 9.041884147737974, 6.82162157045741, 11.428480760701584, 7.7632480042035406, 10.673004820733588, 8.674732846453859, 9.398743426819067, 8.333325395210675, 8.315290944754508, 7.89036971904872, 8.668543545123253, 8.5402103501377, 8.78336348359503, 8.504475499768839, 10.415028154761302, 9.226544240030776, 7.181668438443649, 8.922850841378333, 10.601374359496207, 11.999989441166043, 7.816561328093369, 7.479117996399882, 7.4294629359150965, 7.33314750149915, 10.627727659449642, 8.16325624076094, 8.637943431213298, 12.163922318128334, 9.920861664533366, 9.425288226043333, 8.944781677085096, 12.582666793265604, 10.141553690333312],\n [16.65513602819292, 15.823981589458242, 15.422252163469551, 15.258143092605039, 14.646413921434366, 14.354066540362528, 13.13743892158798, 12.847811595801845, 12.723155433396835, 12.210958315743909, 12.68974714606964, 11.735741112378935, 10.941097341533865, 11.321134653770866, 11.15048917573322, 9.48651443875823, 12.029192705162776, 8.872267491723171, 8.615034929106166, 7.951440820104183, 5.4430580476616015, 7.648037679752995, 7.151478699527949, 13.921436263662926, 7.3654042937514435, 11.201710073181319, 7.719664134559113, 9.103684868725257, 6.940987299850096, 10.777022325518306, 8.316624722863708, 8.501251029553755, 9.40263790824705, 8.343384089298304, 10.130707416611603, 7.171463534933684, 9.946790718150766, 10.519015515554553, 12.15604776066794, 9.906513527624636, 9.015464416935332, 7.894256853803812, 9.547193670575602, 9.933481369866838, 5.945803325621449, 9.094305054829103, 8.881636608479033, 8.245375438759616, 8.575874380211774, 9.452000743214155, 7.605391902303943, 7.378027443995133, 10.196631033761323, 13.449246962316685, 9.288733589457452, 9.126301687840753, 7.382629612365059, 7.927929939672349, 10.353192417188904, 8.71150535642135, 5.706528315653079, 9.691396272384887, 
8.32099219329515, 9.859202115142319, 10.637323024020302, 9.274664782296187, 8.177645531458092, 8.337593632715274, 9.167767333476462, 7.969372969360523, 8.08543935953597, 7.87278126848897, 8.64390266529729, 6.636612587938079, 8.63481956991534, 12.116362627814055, 10.648343396677932, 8.569140486227951, 8.161534579348944, 6.196054356586272, 11.523692905129106, 5.493795005501367, 7.77217197803562, 10.395134924713334, 7.768964988864032, 10.169430923831467, 14.997013023415523, 8.490971119632414, 10.72958084061941, 8.618234204680217, 8.227626444084725, 10.668271846805217, 8.574391915817799, 10.011547979349096, 11.952868577610582, 11.043082092042324, 6.97371540613131, 9.744282397830899, 8.774246516546263, 9.002833735948233, 11.008402095404895, 9.519068950709832, 9.220442341817026, 12.348990491853966, 12.44359643406186, 8.911333184160174, 7.141516777054673, 9.17365017323204, 7.449608410068299, 9.116046221834365, 9.659628503282596, 10.974615380540818, 7.380519604144077, 12.866420520553927, 8.567933639093267, 7.277701372901979, 10.303240331093384, 8.628201122870047, 9.110736265254328, 8.27136849988656, 12.707438023287969, 7.976085571504812, 8.553684847073729, 9.637782303371957, 10.172893908817935, 8.190127652125124, 8.280274484192491, 8.241502590932113, 9.579871584167188, 7.989303350421207, 9.827839160366663, 7.745356402634669, 8.780519526157004, 11.04218987259776, 9.391658588643981, 8.780933356683297, 8.717319759603924, 8.48869602273149, 8.346725152203918, 7.654948860540628, 5.742559718495124, 10.905428111225822, 11.640652677031763, 9.787754779071095, 8.751553989042103, 8.302296599071967, 9.089792277118132, 9.973971206404876, 9.672431442820137, 7.754158722217721, 10.910693306152362, 10.95965656310865, 8.475280727959216, 9.313799711584283, 9.632528600539805, 7.704256751095142, 7.246621632104317, 7.044471305707654, 8.146041739427515, 7.799983713527615, 7.847964177000058, 11.376737709349339, 11.787589196385706, 8.405982422181351, 9.466331711956387, 7.3542577659456825, 
7.90111468211505, 7.4959686308495925, 11.560738266049325, 8.05893921460889, 8.073402399850586, 9.591198955996918, 7.8636082929299045, 10.415309064767584, 9.296592231605388, 7.696981134830109, 6.868420329320034, 13.716569307694842, 16.078606281085115, 9.196563348184926, 10.404183216369914, 10.903686180005451, 9.247560546005579, 12.984419276190382, 8.529952043738241, 9.088912195200233, 8.808552458718236, 10.784336513646437, 8.073979623173667, 8.617790039585959, 7.628613470124751, 9.890780456425302, 11.405321599593954, 11.511484790385394, 9.597632139843556, 7.4425721905664215, 7.858902896557257, 10.625917086231654, 8.553663873625531, 7.33996887725474],\n [16.11327258158127, 16.00059640041787, 14.620337901871778, 16.720538105543525, 13.977390220888859, 12.51852096936117, 13.805974998049399, 12.117558789569031, 12.042935430058465, 10.8024256895446, 11.865676492967589, 11.720447835397534, 13.051874693107735, 9.465342443779997, 8.214977443023159, 9.800711465383182, 15.838942987483883, 9.610058918590786, 9.418045037744086, 10.298069079501342, 6.996951591240167, 11.566778008222107, 15.14220280156793, 7.790332750187694, 10.7129601654711, 8.213186447176877, 9.323899187667436, 9.564265722060128, 13.846977763301776, 8.345976667882741, 7.68943191270598, 7.208786148167796, 8.569368254790033, 12.383388478801553, 10.736120912141306, 7.704998764545269, 11.162877791789988, 8.77194535691064, 10.364324769606093, 9.712368374025225, 9.506329829445304, 8.423615136469511, 8.077187497097507, 14.73981083031366, 10.919927045028066, 8.231525461953781, 9.176084725400788, 11.685326215035905, 7.902501306676448, 9.518247192059615, 9.667356930166251, 10.175080793292967, 9.59479890310814, 9.003077265739286, 8.069603520170933, 9.048542624167686, 10.277167181963932, 11.397083899216504, 6.259733891339257, 8.747925250655374, 7.3492613900899055, 8.201718385741241, 9.105589384560474, 12.226496797791173, 8.686335249981163, 9.5801775660523, 10.521077184957973, 9.764747891437203, 9.510947252318005, 
8.477507956065285, 9.845029946795046, 7.462311478093125, 10.27979235371629, 7.165917360648856, 14.336554990097907, 10.502270696826676, 8.999526152367874, 9.205230417861957, 11.492290594536044, 9.909399714625318, 11.44685377752685, 9.021005135404133, 8.732962673205561, 7.061366482492443, 6.901540498784995, 10.880795190630238, 7.757645794538955, 10.418272006186584, 7.578652760919779, 10.13090673771482, 11.898893511948026, 9.238555219896325, 10.741194017683949, 11.119931540519842, 8.23185317767386, 8.720564378941724, 6.697800931689298, 9.855694447904444, 6.972577608447699, 6.599268849826786, 8.239548071537872, 8.89586130931171, 9.472121192694436, 10.54678601608819, 5.6498494004278985, 9.270965374863076, 10.285799990098965, 11.02495284765393, 9.001033027276726, 9.264061639848517, 11.042795732999949, 8.444376163141998, 9.887177495691907, 7.882491521088401, 10.716944586108632, 11.325951781944655, 10.493639693683603, 8.478578226194402, 10.29567045223303, 10.957657792797058, 9.272722331806012, 8.163403149819501, 7.0616117533532785, 8.643798415424252, 6.111881519333135, 8.809373587252047, 12.392004588143251, 6.364369112179225, 7.888487099004201, 8.015182282065895, 10.030028716471113, 8.742634921676869, 9.400439112863438, 9.000186568429907, 8.388998719377964, 10.66083738101429, 9.58453351194431, 8.964562682195387, 11.192760786845557, 7.992138322201374, 6.809417199501597, 10.42324705119013, 12.14905068118972, 8.110846391800502, 8.96550204567695, 7.5126588223032025, 7.925510007250605, 8.327912317358743, 10.049615766989783, 6.000887525013875, 8.904128564839644, 7.05036267970005, 9.31903564178668, 10.224230624044436, 8.180820151626763, 8.66623687010023, 8.072048068249721, 8.215128778865491, 9.22775619427417, 11.390872771320925, 11.430101718782783, 9.274129685825036, 5.5681232537793095, 6.990895232089772, 7.039798108618637, 8.333993875035286, 6.585170557725001, 8.851221077518417, 10.664800408539884, 8.389573003946674, 12.982994860103693, 7.534352806666141, 8.759155207077598, 
8.421770044984868, 8.827205737111997, 6.936369660568447, 7.395694728886182, 9.126252218870023, 7.271645996621227, 7.685863577040834, 8.071927770855446, 11.175564762599665, 6.7133497367474675, 10.29121862131937, 10.420355593755843, 8.59880484432011, 8.489355850287682, 9.987958707789451, 7.018553485563951, 10.794832214305016, 11.08143195504709, 11.783973030823343, 8.902849850666286, 8.50666038798698, 6.52454624614959, 7.335885402990751, 7.799905121827317, 10.121116465519925, 8.464009303102694, 8.258384374410458]])\nQCNN_ae16_FASHION = np.array([[16.168533905262052, 16.35363877128826, 15.86870718385021, 15.707332475975521, 14.797734186646124, 14.743276289026284, 14.227691240214325, 13.798205459901089, 13.953956731015401, 13.681660809708813, 12.73123894958906, 15.964761550298995, 13.013347927840908, 12.203452739930569, 12.411145619887435, 13.695706276168211, 13.883146665149807, 11.670957109505169, 15.558135233849589, 12.850943094406201, 13.436917947090004, 10.90114916068677, 11.86373993517938, 13.646669167244456, 12.724107971280151, 12.077338276967973, 10.451553161145164, 12.844133986001813, 13.270574174617494, 13.460806497508186, 10.556897801520856, 10.758087020785624, 12.873379168568162, 10.931458408052151, 11.171526503215913, 12.606909894936368, 11.306793976204023, 11.82715552946807, 11.824597634043862, 10.710670128842992, 11.741547217153258, 12.636995932459813, 14.017497059562459, 11.78242160979226, 9.383470292799187, 11.771617904596958, 13.259516713206303, 10.565354272389788, 10.294510241869235, 10.70268728964846, 12.645605581392616, 12.098603909594768, 12.028696072364122, 12.769225915684933, 11.140989450715372, 10.996466259837119, 11.183094678045403, 12.248421590982424, 11.28234755776755, 10.208234542613688, 9.252309776243331, 11.37581026257861, 12.883144352902384, 10.142768118934695, 9.437914161958204, 9.657957190694924, 11.071623515668513, 11.500063922069788, 9.439620534960019, 9.509882115142174, 8.593414015339349, 8.801624609748526, 8.928881155973267, 
12.036563950499687, 10.943167906379216, 11.269784376596421, 12.304603075439061, 9.857951866632652, 10.735827612303451, 10.451627333433823, 10.451899109544849, 9.940869015088834, 11.665593700379608, 12.242940514866227, 12.070632002979629, 10.197934027644711, 9.286616596212186, 10.533136745784144, 9.760247161692782, 9.880055238592215, 12.83558782458182, 12.654097421826311, 10.513012565329428, 11.73917803827573, 10.65117493853059, 11.668486538723133, 12.313391408761408, 9.831648012770927, 10.472354816080403, 8.96863814368618, 10.006113751336246, 9.771116018929066, 10.885304443911453, 10.029846245028276, 10.676657933471418, 9.979008788984775, 10.476773359348652, 12.342211364919223, 9.89094187667906, 9.64510934059656, 10.77294784951734, 13.403209679887764, 10.690937408830688, 10.960813691123038, 9.861444222598111, 10.184373099787273, 11.014738749419298, 11.206998160894276, 10.423054676882657, 12.063789653841464, 10.316876719715191, 9.457300263118638, 11.28646455337976, 10.350305769006399, 11.142621639479364, 10.992947287186722, 9.607125851708187, 11.071486411410058, 10.34291419928263, 9.505877032225563, 10.41673917013119, 12.914892033395024, 9.18017566873471, 9.930208399547837, 10.08001357645496, 12.19603487913572, 9.619776620942911, 9.262974531161962, 11.804390985993157, 11.148749745597398, 10.884290981104112, 10.955399847139347, 10.788183874397822, 9.820042304249867, 12.192016251533957, 9.572696230480023, 13.964958871765589, 10.058259839090669, 9.007535322107357, 11.632826994096883, 9.947815112278764, 9.794339774262667, 11.908838036804324, 10.10737074324803, 11.14400843479954, 9.705111761267746, 9.068346316256184, 9.873061445954102, 9.624160462710556, 10.64389266141783, 9.88576137173499, 9.394167090343158, 8.715859861881828, 10.49729504942295, 10.110902852059892, 10.08086236435305, 10.214236030749937, 11.954859899033343, 10.490587011233991, 9.509503670592325, 9.489321503527524, 8.85276251568906, 9.117529719191504, 12.465157286612548, 10.978357883040216, 
10.491853016845898, 10.578713021653176, 7.837052336313976, 9.416648517497125, 10.617842368182863, 10.965015552312883, 12.298223522644944, 10.602387273567814, 9.859470886299945, 10.298730449598242, 9.925282888790132, 9.3844640583219, 10.593014592250816, 8.865255810879171, 11.245228464444258, 12.145540172736428, 12.08742637427548, 10.194323067676553, 11.564371343705924, 10.696575222763038, 9.948179481862597, 11.507125865570302, 10.78764796074029, 11.088778112226478, 10.716982556742423],\n [16.934873981770718, 16.20387656729803, 16.581963289053025, 16.090240554366098, 15.903818940984902, 14.757712441975627, 15.08276094075051, 14.458133605371163, 14.175481014769161, 14.36333920635297, 13.16692192560422, 13.101477978886129, 13.67589003832726, 12.885059996221118, 11.666040037129445, 10.423096293741082, 10.224791748217822, 9.959922991341964, 8.634563474065153, 8.602342981547608, 8.622992754841626, 10.041286225001786, 8.689434003995682, 7.571720593958701, 7.863470865017623, 8.821026862611605, 8.432979985619689, 6.751635793984621, 8.071928070882118, 6.744943095645349, 9.02911291257321, 9.345769878328042, 10.841873851712965, 10.282057976910881, 9.292547691528213, 8.118776492447928, 8.73224163267046, 8.020308751269916, 6.838003733962189, 6.872901036919048, 8.140868405380193, 7.717937437401024, 7.401956062148155, 7.608443110014859, 5.446979131797351, 6.731283582241682, 7.712593836504903, 9.680621708558434, 7.424165044922366, 7.514610706883776, 5.706050785254357, 7.877985338233364, 8.991686926031681, 7.660067040064281, 7.713695590338443, 6.054850293708736, 7.9775475022696405, 5.8408002547211995, 10.11344436847423, 8.67727463898918, 5.8407365607832755, 9.44114338602329, 8.125484704027759, 8.556455127032034, 9.742525174370595, 10.417035827783803, 8.235097982551277, 7.595035845791096, 6.246197056463312, 8.723519471151151, 7.744863966380206, 7.5176579258681855, 5.881066235285239, 7.998351938222555, 10.278982672518042, 8.62189813510403, 8.4506418280589, 7.36803727031597, 
8.981271203532593, 8.290412174696849, 9.589863072726844, 8.139082998297157, 6.232288194815809, 9.111311937720394, 7.1910080381259, 8.524498163972918, 8.18681241977441, 7.894288750485701, 6.988601225685789, 7.561246797134458, 7.490388603462786, 8.087500479320951, 7.91126972101667, 6.9350230267020985, 6.9585425001896395, 9.26692520676806, 7.208059821306057, 8.364991513827478, 8.36016170618454, 7.8119951782713395, 10.414247784635586, 7.336580671836466, 6.7753014730783185, 6.3884697273019935, 7.58103693305997, 9.342329709411837, 7.831973602851348, 8.204168611735334, 9.801764275953929, 7.357052027089204, 9.213299844229232, 7.950855558035624, 9.55325559100182, 8.574250885350777, 6.838542844538051, 7.790933046572799, 7.303315540931703, 8.255175734360698, 7.261447519281447, 6.733629677221911, 7.439310521563123, 7.0486072873619605, 8.413300336737972, 6.954603369584885, 7.262921436546864, 5.754046166026076, 9.165044530695157, 8.321220839998674, 6.780631769047393, 8.835880488936862, 7.744916673412439, 7.812645680353854, 6.843583913678806, 6.947013944736711, 7.995283829616596, 9.86300071985878, 11.295904984672124, 5.943990393469122, 8.799044975880225, 6.898760296988971, 7.20193863613897, 9.190967021152543, 7.591554129119344, 7.449896041105425, 8.96367944440784, 7.590917645766649, 7.265664445831402, 10.568510755101878, 8.338331916346142, 7.657091384186253, 7.225681787424967, 8.467886863837734, 6.597161477034116, 11.409348695377048, 7.584466664105653, 6.082936598969789, 7.709078558976194, 8.534894298997823, 8.766497695404793, 7.24080138688792, 6.906326469105638, 10.49664094240543, 8.355629881281924, 8.089335266348675, 8.167517150858856, 8.878839256565344, 8.540127314467256, 6.321328662629648, 7.595069640436064, 7.7520154080414025, 6.749911075510355, 8.993437827570133, 6.947390102761641, 6.964546393875183, 6.586186712857802, 7.073752518629127, 8.143067572595868, 7.802710820352373, 7.949247098797616, 8.030663990061948, 6.342885234086354, 9.553570460710633, 10.07346774856329, 
8.48551145195774, 7.871554163127499, 7.446800801658883, 6.87164171781296, 7.011797553233216, 9.078810620881052, 9.21643763318028, 7.438002496436309, 8.230976604912593, 9.54470709512294, 7.198480165252349, 9.110198612142982, 6.975644951033088, 5.275322801141798, 7.730460953171377, 10.240213642338828, 10.423130961677955],\n [29.869171085128713, 19.247820411251052, 20.716513354117065, 17.203499054123416, 17.214943179386356, 16.640822892907117, 18.10095543313283, 16.86799606066329, 17.640523192696143, 17.661105031669074, 16.40445732394639, 15.092492078274033, 14.970111270546932, 14.43044815571158, 13.939989367440548, 12.2745385392881, 14.488928229821015, 13.571115690814828, 12.879516386081795, 12.231847250544615, 11.717506959093138, 11.697538695295497, 10.325569858479396, 11.011922770422128, 13.115180137568979, 9.434167142832532, 14.274780053582255, 9.075947439985136, 9.418746079758106, 9.635349361023431, 8.753159705951475, 8.735548391071598, 8.503996122016009, 8.681057545259506, 9.392809925955337, 7.833005402182007, 8.068901791086644, 8.413984577121191, 8.557535434739034, 6.515264530024305, 7.8750451068350396, 9.845217123299703, 8.147225832063707, 9.535009654798609, 7.563175019309073, 9.72905938306348, 9.954615349350151, 6.539990159215163, 7.774642501683633, 9.342080522955643, 8.706154951121809, 8.77820011854369, 7.959949113489947, 7.606393967469446, 7.997298076290153, 8.999795867816022, 7.704336030125127, 9.921730461252329, 7.710497040586374, 9.037263391207649, 8.524660061042164, 7.686226514382502, 8.268926879449538, 10.692810857929153, 8.748281662618755, 8.941097051332044, 11.311388829815453, 7.093331715167289, 9.08879950138175, 8.109978597912692, 7.468923758352063, 8.717974867228566, 8.807814287220195, 10.133352839073995, 7.225586112836936, 6.974236116645971, 7.781483196193429, 10.19892773093788, 8.675218449262276, 9.123241884303155, 8.19535904439145, 10.763400024287572, 9.632406621741211, 8.625949128701354, 9.364745944010217, 7.142501629535395, 8.608691372410672, 
9.280442005809542, 7.680960465705023, 7.918875387913574, 7.954810543883512, 7.971300551271394, 8.883387321573398, 8.897002886225255, 7.873000546392299, 8.145451438631149, 7.739016470611203, 9.656070513145577, 8.036064038758433, 9.84944029379927, 8.620933847883212, 9.575451474981278, 7.507402916136, 9.19948788290408, 7.7531797548369665, 9.207136064526626, 9.105279808102075, 7.170490886561373, 8.570782004767535, 8.17610475846743, 7.413965885137827, 11.024234373246864, 8.567594922954006, 8.535600246380364, 8.90652590836521, 8.261669485202114, 10.713365392258572, 9.934607363879989, 6.741165973477654, 7.615163251426394, 9.50121164750382, 8.321270154765196, 8.54678548933741, 7.461167396806383, 8.066939974182253, 8.442563259386697, 8.570841583902322, 7.288593772152833, 6.880075112991428, 8.424939338092065, 8.469137890797018, 9.806779896811364, 10.061269318492641, 10.49225751139659, 9.270021518740217, 9.021589451108696, 8.628322890146432, 8.269033433218032, 7.681699348858471, 9.191990983280034, 8.271960762544781, 8.92775588015976, 7.976308077916293, 11.530820146311191, 7.230056142452939, 7.409625408944511, 9.59192644615762, 7.443465747090689, 10.688918562929848, 9.700388177220596, 7.525374998416934, 7.767213612904375, 7.888102771179124, 8.367940708304895, 9.915789677787767, 7.322926508666237, 9.168251414547424, 9.107957269773067, 7.146290924292758, 9.035143711011639, 7.948637405506762, 8.71529787791062, 7.962468785464406, 7.2207234990946745, 8.152049047742215, 7.769928589884074, 8.207945747597492, 8.700083810198583, 7.472680684869939, 10.094570175229663, 8.320306720679291, 11.37375235144043, 9.219795090527011, 8.50053945767189, 7.90656775349191, 7.8694288382503395, 7.672449443161977, 8.333833559246905, 6.67095677133731, 6.8398654640045, 7.172181255733293, 7.626863113669965, 9.307733125117528, 9.424800306779806, 8.403776850313472, 8.098866653724713, 7.183390604660435, 7.42300280211306, 8.027493597533773, 9.89109554750821, 7.720458715942417, 8.790386343336674, 
7.5871601865210625, 9.116273231407298, 9.077693929591135, 8.318471651193148, 10.407633656164673, 9.40954439682548, 9.120347207685533, 7.343604530395195],\n [18.92542477451782, 19.275629907805403, 17.471137200082993, 18.72563923597236, 17.749420686583115, 16.90808329437192, 17.063589282901845, 16.241380185704816, 15.36392022559027, 14.94660699294393, 13.912030337313238, 13.416221109926896, 13.709713674866666, 13.852681265122579, 13.420808673373088, 13.201577451974305, 13.223618862191232, 11.66239626698383, 12.419554528149895, 13.498076476098145, 12.080064594014853, 12.526599259557052, 11.70103249578385, 15.437811112275128, 12.166130567961808, 11.555700469674086, 11.715406267988435, 12.485514841063534, 12.127889781986177, 12.347796237765943, 11.917564950225833, 16.207599705057163, 13.706550806239166, 11.04616132167589, 10.983347710825575, 11.925618475664669, 11.440239403373372, 14.292224868828505, 12.136822441472756, 12.529623571565747, 13.065987587713751, 14.17922610904978, 12.179823701700538, 11.975261946874527, 10.06363320636632, 11.08223410750279, 11.614476967618012, 10.36858707507102, 12.480123143095772, 12.380873801995453, 11.135590475599116, 14.91370032149542, 12.370671315113299, 13.761809663513713, 11.031526997238608, 10.427609194304086, 9.769410975988162, 10.673138275405503, 11.07343486489805, 10.598282777726064, 10.028544153282303, 12.456910597589928, 9.411974723791706, 10.560151549508818, 14.586897443207988, 9.592677784976452, 11.82782493838238, 10.076632508327474, 10.19752765696013, 8.956515659326582, 9.626815326647938, 10.56585017786478, 10.062025898243801, 8.567175381986862, 10.710256838739916, 10.3764282635568, 11.137971254381714, 8.937837753425736, 10.129680582594462, 15.370915519910653, 9.562205922116942, 11.024838618004706, 9.569255179777247, 7.830602013928761, 9.959580081161638, 10.538081523256077, 10.466898906762072, 11.506342848372466, 10.598320708504728, 10.992026834199141, 10.963893149495444, 8.825962408545273, 11.525526122680422, 
11.0260184525542, 10.443193743602958, 9.807413284968378, 10.668988218443657, 8.984439839544136, 9.083718688107803, 9.073111257041539, 9.210207579887538, 9.93818623695558, 8.071062082849698, 8.903269180912275, 9.97391472202903, 10.804265582914677, 8.623291430990674, 9.401453362160336, 9.787632126485489, 9.072967241077345, 8.931613096003572, 7.420972774169009, 10.748363822873772, 10.025341379618633, 11.852767495534827, 10.799243515111527, 9.129231192196704, 10.473864672849547, 10.335961923235852, 9.306967077852688, 10.034464353003578, 11.681991384587839, 9.649723770822916, 10.947736382403685, 8.586739607113469, 10.005963097851593, 10.035342603750681, 9.602602442173577, 14.361518003645552, 11.328374880366507, 9.928856456979387, 9.721385712333829, 9.770669547391911, 13.614860442614793, 9.366698063905595, 9.928641818179978, 9.653711698888696, 7.4300429560722385, 11.217797229258485, 10.72431298186289, 7.467642368863637, 8.971570454681991, 8.549965767538584, 9.407493275739359, 10.528392867450778, 7.974295577989083, 10.219822187765827, 10.372566501215653, 8.440210177657706, 9.51912575051376, 8.682077206889756, 9.706553781906859, 11.130780643068825, 7.171653205795305, 11.564530043471716, 9.534322949575465, 9.112239377042227, 8.164320944914019, 7.492729995974972, 7.582309128187825, 7.647315653669804, 9.123270793064075, 10.062457638193482, 10.31679195149326, 9.055152218370864, 7.856555884864121, 9.96791560344558, 7.218287757242914, 8.229691690038566, 8.619631018437145, 9.31461317813508, 8.701351625863655, 11.020230280554676, 10.497006819077145, 10.356605828896036, 9.176425006812952, 9.310547923304911, 8.107680508414317, 8.606889331078822, 7.925078332719253, 10.932890749279707, 10.143359702633756, 7.669977009412438, 9.39589196464282, 9.695925362084324, 9.218120739163735, 9.424649625033663, 9.363369215162816, 11.267245842936587, 8.419435861136535, 7.131981143033376, 9.419276477771131, 8.630991202023736, 10.768282280735411, 6.909893105733801, 8.368833438976184, 
8.772950463993755, 12.07185317714497, 8.54804676963937, 8.66387752114025],\n [17.266577105638163, 17.37678074779175, 16.546289668708628, 14.817395171108211, 16.9185797697574, 16.778835667720575, 15.625721309294644, 15.539578788768072, 17.60328210932527, 16.537912283522708, 14.73700364229281, 15.375289012222929, 15.47579757800776, 15.72210860561612, 14.49258738093842, 14.340787828370257, 15.445570616123922, 14.379315342044107, 15.612630429974372, 14.95906470703494, 15.951449925028731, 14.287221474224074, 14.392801854265816, 13.50968957044579, 16.0346217646537, 16.065210894611333, 13.452812510942032, 13.692202787570366, 12.712223525165145, 11.647594046121132, 11.555688396523362, 11.041394599378718, 11.340535582092015, 12.87122469678418, 11.160250855978273, 11.888195239943025, 10.08631863795053, 10.44987928222506, 11.292054780934054, 9.56983952656372, 13.23573698944273, 9.15382289554587, 10.226144823316712, 8.499792244585155, 8.941894522595291, 12.407558759188484, 9.576915009692652, 9.532967643877054, 9.691448495352635, 9.463402724554408, 7.987612187584988, 8.816423999705847, 13.052594445059077, 11.627579386348312, 9.725816543830737, 10.224496206948439, 9.590600251442686, 10.81928076216993, 10.593560177781331, 9.601309305318953, 8.595452990116502, 9.712966578999625, 8.039835137914423, 10.148264602432198, 10.107884816171785, 9.098800490110802, 14.414178715607624, 9.621932685301733, 9.66025542152395, 9.059291922109237, 7.8561911191467075, 11.003357927599152, 10.107299727244268, 9.590516656178476, 9.783279900893943, 9.662904388066831, 9.305142703591446, 8.636324141089586, 12.170842130977945, 7.082944913673979, 9.806206178879547, 10.160523553836873, 8.985338418985801, 10.224925509328992, 7.818499685880952, 10.740396983740856, 9.243107804993711, 7.73055872268374, 8.144975756751371, 10.053742368792928, 8.522909964571312, 10.432096948166322, 10.06396215170603, 9.714080284337694, 7.445726020821557, 9.866777490466985, 10.243687181598931, 9.167481243250455, 9.081712581384723, 
9.059541037754721, 8.74948953059817, 8.625060024886263, 9.303699042334186, 8.826005731697181, 10.240633829536277, 9.14639910149468, 9.770141042862203, 8.2000613153184, 8.860101372546286, 7.487198786200657, 8.281702877580948, 12.881142788744135, 8.867968052678139, 8.227751271626786, 9.406695357759235, 8.09059403432903, 7.69092361992629, 7.33967601725349, 8.845876068394304, 8.387177907802162, 9.006247722772171, 9.641493087757967, 8.23328291883457, 6.876592775797417, 9.951155014112825, 13.155882148822469, 8.41575103091348, 8.341409214890705, 8.523471717279442, 7.874433628195658, 8.273522895947075, 7.578443855485955, 7.700422645659718, 9.795986349160515, 7.943555760951561, 9.041446694361873, 8.073638238840427, 8.899934503688483, 9.002352867624069, 10.240448164523244, 9.7563901799707, 7.801473103606336, 8.592678787188632, 8.549454176831071, 8.268825682508181, 8.138327299814197, 9.108589220043099, 10.509261097419104, 6.9861493327951205, 8.372907782567282, 8.395579959091037, 7.140667222646346, 8.712018372837424, 8.350371454734367, 8.146105396056289, 7.2158100659772675, 8.280453003390043, 9.839106878108126, 8.850328269208353, 9.536994855270635, 7.278474965294019, 8.015550639732956, 9.286258898579081, 9.58372688982397, 7.180390089576769, 8.08906650639738, 10.07275269393014, 7.146805861044028, 9.152466536905466, 7.37786494592921, 8.49204192491858, 8.930002975844022, 9.871683758483845, 7.205875714782072, 7.615310253258645, 10.456191712577809, 9.19551228756415, 7.9805618374208676, 7.800618005631044, 8.727294287164508, 6.304038882596863, 9.633017438894116, 10.822627794260098, 10.931045869695053, 9.282886303997424, 7.9785568612491815, 8.44190265811056, 9.095802016074453, 7.892242840014909, 8.174756970039711, 11.078451028460798, 8.526886055425843, 7.186685920546122, 9.099564344943769, 8.814161456413448, 10.357844520719977, 8.080582954598881, 8.54631141138036, 7.97076017079595, 10.661481512399412]])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "QCNN_ae16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "QCNN_ae16_FASHION = np.array([[16.168533905262052, 16.35363877128826, 15.86870718385021, 15.707332475975521, 14.797734186646124, 14.743276289026284, 14.227691240214325, 13.798205459901089, 13.953956731015401, 13.681660809708813, 12.73123894958906, 15.964761550298995, 13.013347927840908, 12.203452739930569, 12.411145619887435, 13.695706276168211, 13.883146665149807, 11.670957109505169, 15.558135233849589, 12.850943094406201, 13.436917947090004, 10.90114916068677, 11.86373993517938, 13.646669167244456, 12.724107971280151, 12.077338276967973, 10.451553161145164, 12.844133986001813, 13.270574174617494, 13.460806497508186, 10.556897801520856, 10.758087020785624, 12.873379168568162, 10.931458408052151, 11.171526503215913, 12.606909894936368, 11.306793976204023, 11.82715552946807, 11.824597634043862, 10.710670128842992, 11.741547217153258, 12.636995932459813, 14.017497059562459, 11.78242160979226, 9.383470292799187, 11.771617904596958, 13.259516713206303, 10.565354272389788, 10.294510241869235, 10.70268728964846, 12.645605581392616, 12.098603909594768, 12.028696072364122, 12.769225915684933, 11.140989450715372, 10.996466259837119, 11.183094678045403, 12.248421590982424, 11.28234755776755, 10.208234542613688, 9.252309776243331, 11.37581026257861, 12.883144352902384, 10.142768118934695, 9.437914161958204, 9.657957190694924, 11.071623515668513, 11.500063922069788, 9.439620534960019, 9.509882115142174, 8.593414015339349, 8.801624609748526, 8.928881155973267, 12.036563950499687, 10.943167906379216, 11.269784376596421, 12.304603075439061, 9.857951866632652, 10.735827612303451, 10.451627333433823, 
10.451899109544849, 9.940869015088834, 11.665593700379608, 12.242940514866227, 12.070632002979629, 10.197934027644711, 9.286616596212186, 10.533136745784144, 9.760247161692782, 9.880055238592215, 12.83558782458182, 12.654097421826311, 10.513012565329428, 11.73917803827573, 10.65117493853059, 11.668486538723133, 12.313391408761408, 9.831648012770927, 10.472354816080403, 8.96863814368618, 10.006113751336246, 9.771116018929066, 10.885304443911453, 10.029846245028276, 10.676657933471418, 9.979008788984775, 10.476773359348652, 12.342211364919223, 9.89094187667906, 9.64510934059656, 10.77294784951734, 13.403209679887764, 10.690937408830688, 10.960813691123038, 9.861444222598111, 10.184373099787273, 11.014738749419298, 11.206998160894276, 10.423054676882657, 12.063789653841464, 10.316876719715191, 9.457300263118638, 11.28646455337976, 10.350305769006399, 11.142621639479364, 10.992947287186722, 9.607125851708187, 11.071486411410058, 10.34291419928263, 9.505877032225563, 10.41673917013119, 12.914892033395024, 9.18017566873471, 9.930208399547837, 10.08001357645496, 12.19603487913572, 9.619776620942911, 9.262974531161962, 11.804390985993157, 11.148749745597398, 10.884290981104112, 10.955399847139347, 10.788183874397822, 9.820042304249867, 12.192016251533957, 9.572696230480023, 13.964958871765589, 10.058259839090669, 9.007535322107357, 11.632826994096883, 9.947815112278764, 9.794339774262667, 11.908838036804324, 10.10737074324803, 11.14400843479954, 9.705111761267746, 9.068346316256184, 9.873061445954102, 9.624160462710556, 10.64389266141783, 9.88576137173499, 9.394167090343158, 8.715859861881828, 10.49729504942295, 10.110902852059892, 10.08086236435305, 10.214236030749937, 11.954859899033343, 10.490587011233991, 9.509503670592325, 9.489321503527524, 8.85276251568906, 9.117529719191504, 12.465157286612548, 10.978357883040216, 10.491853016845898, 10.578713021653176, 7.837052336313976, 9.416648517497125, 10.617842368182863, 10.965015552312883, 12.298223522644944, 
10.602387273567814, 9.859470886299945, 10.298730449598242, 9.925282888790132, 9.3844640583219, 10.593014592250816, 8.865255810879171, 11.245228464444258, 12.145540172736428, 12.08742637427548, 10.194323067676553, 11.564371343705924, 10.696575222763038, 9.948179481862597, 11.507125865570302, 10.78764796074029, 11.088778112226478, 10.716982556742423],\n [16.934873981770718, 16.20387656729803, 16.581963289053025, 16.090240554366098, 15.903818940984902, 14.757712441975627, 15.08276094075051, 14.458133605371163, 14.175481014769161, 14.36333920635297, 13.16692192560422, 13.101477978886129, 13.67589003832726, 12.885059996221118, 11.666040037129445, 10.423096293741082, 10.224791748217822, 9.959922991341964, 8.634563474065153, 8.602342981547608, 8.622992754841626, 10.041286225001786, 8.689434003995682, 7.571720593958701, 7.863470865017623, 8.821026862611605, 8.432979985619689, 6.751635793984621, 8.071928070882118, 6.744943095645349, 9.02911291257321, 9.345769878328042, 10.841873851712965, 10.282057976910881, 9.292547691528213, 8.118776492447928, 8.73224163267046, 8.020308751269916, 6.838003733962189, 6.872901036919048, 8.140868405380193, 7.717937437401024, 7.401956062148155, 7.608443110014859, 5.446979131797351, 6.731283582241682, 7.712593836504903, 9.680621708558434, 7.424165044922366, 7.514610706883776, 5.706050785254357, 7.877985338233364, 8.991686926031681, 7.660067040064281, 7.713695590338443, 6.054850293708736, 7.9775475022696405, 5.8408002547211995, 10.11344436847423, 8.67727463898918, 5.8407365607832755, 9.44114338602329, 8.125484704027759, 8.556455127032034, 9.742525174370595, 10.417035827783803, 8.235097982551277, 7.595035845791096, 6.246197056463312, 8.723519471151151, 7.744863966380206, 7.5176579258681855, 5.881066235285239, 7.998351938222555, 10.278982672518042, 8.62189813510403, 8.4506418280589, 7.36803727031597, 8.981271203532593, 8.290412174696849, 9.589863072726844, 8.139082998297157, 6.232288194815809, 9.111311937720394, 7.1910080381259, 8.524498163972918, 
8.18681241977441, 7.894288750485701, 6.988601225685789, 7.561246797134458, 7.490388603462786, 8.087500479320951, 7.91126972101667, 6.9350230267020985, 6.9585425001896395, 9.26692520676806, 7.208059821306057, 8.364991513827478, 8.36016170618454, 7.8119951782713395, 10.414247784635586, 7.336580671836466, 6.7753014730783185, 6.3884697273019935, 7.58103693305997, 9.342329709411837, 7.831973602851348, 8.204168611735334, 9.801764275953929, 7.357052027089204, 9.213299844229232, 7.950855558035624, 9.55325559100182, 8.574250885350777, 6.838542844538051, 7.790933046572799, 7.303315540931703, 8.255175734360698, 7.261447519281447, 6.733629677221911, 7.439310521563123, 7.0486072873619605, 8.413300336737972, 6.954603369584885, 7.262921436546864, 5.754046166026076, 9.165044530695157, 8.321220839998674, 6.780631769047393, 8.835880488936862, 7.744916673412439, 7.812645680353854, 6.843583913678806, 6.947013944736711, 7.995283829616596, 9.86300071985878, 11.295904984672124, 5.943990393469122, 8.799044975880225, 6.898760296988971, 7.20193863613897, 9.190967021152543, 7.591554129119344, 7.449896041105425, 8.96367944440784, 7.590917645766649, 7.265664445831402, 10.568510755101878, 8.338331916346142, 7.657091384186253, 7.225681787424967, 8.467886863837734, 6.597161477034116, 11.409348695377048, 7.584466664105653, 6.082936598969789, 7.709078558976194, 8.534894298997823, 8.766497695404793, 7.24080138688792, 6.906326469105638, 10.49664094240543, 8.355629881281924, 8.089335266348675, 8.167517150858856, 8.878839256565344, 8.540127314467256, 6.321328662629648, 7.595069640436064, 7.7520154080414025, 6.749911075510355, 8.993437827570133, 6.947390102761641, 6.964546393875183, 6.586186712857802, 7.073752518629127, 8.143067572595868, 7.802710820352373, 7.949247098797616, 8.030663990061948, 6.342885234086354, 9.553570460710633, 10.07346774856329, 8.48551145195774, 7.871554163127499, 7.446800801658883, 6.87164171781296, 7.011797553233216, 9.078810620881052, 9.21643763318028, 7.438002496436309, 
8.230976604912593, 9.54470709512294, 7.198480165252349, 9.110198612142982, 6.975644951033088, 5.275322801141798, 7.730460953171377, 10.240213642338828, 10.423130961677955],\n [29.869171085128713, 19.247820411251052, 20.716513354117065, 17.203499054123416, 17.214943179386356, 16.640822892907117, 18.10095543313283, 16.86799606066329, 17.640523192696143, 17.661105031669074, 16.40445732394639, 15.092492078274033, 14.970111270546932, 14.43044815571158, 13.939989367440548, 12.2745385392881, 14.488928229821015, 13.571115690814828, 12.879516386081795, 12.231847250544615, 11.717506959093138, 11.697538695295497, 10.325569858479396, 11.011922770422128, 13.115180137568979, 9.434167142832532, 14.274780053582255, 9.075947439985136, 9.418746079758106, 9.635349361023431, 8.753159705951475, 8.735548391071598, 8.503996122016009, 8.681057545259506, 9.392809925955337, 7.833005402182007, 8.068901791086644, 8.413984577121191, 8.557535434739034, 6.515264530024305, 7.8750451068350396, 9.845217123299703, 8.147225832063707, 9.535009654798609, 7.563175019309073, 9.72905938306348, 9.954615349350151, 6.539990159215163, 7.774642501683633, 9.342080522955643, 8.706154951121809, 8.77820011854369, 7.959949113489947, 7.606393967469446, 7.997298076290153, 8.999795867816022, 7.704336030125127, 9.921730461252329, 7.710497040586374, 9.037263391207649, 8.524660061042164, 7.686226514382502, 8.268926879449538, 10.692810857929153, 8.748281662618755, 8.941097051332044, 11.311388829815453, 7.093331715167289, 9.08879950138175, 8.109978597912692, 7.468923758352063, 8.717974867228566, 8.807814287220195, 10.133352839073995, 7.225586112836936, 6.974236116645971, 7.781483196193429, 10.19892773093788, 8.675218449262276, 9.123241884303155, 8.19535904439145, 10.763400024287572, 9.632406621741211, 8.625949128701354, 9.364745944010217, 7.142501629535395, 8.608691372410672, 9.280442005809542, 7.680960465705023, 7.918875387913574, 7.954810543883512, 7.971300551271394, 8.883387321573398, 8.897002886225255, 
7.873000546392299, 8.145451438631149, 7.739016470611203, 9.656070513145577, 8.036064038758433, 9.84944029379927, 8.620933847883212, 9.575451474981278, 7.507402916136, 9.19948788290408, 7.7531797548369665, 9.207136064526626, 9.105279808102075, 7.170490886561373, 8.570782004767535, 8.17610475846743, 7.413965885137827, 11.024234373246864, 8.567594922954006, 8.535600246380364, 8.90652590836521, 8.261669485202114, 10.713365392258572, 9.934607363879989, 6.741165973477654, 7.615163251426394, 9.50121164750382, 8.321270154765196, 8.54678548933741, 7.461167396806383, 8.066939974182253, 8.442563259386697, 8.570841583902322, 7.288593772152833, 6.880075112991428, 8.424939338092065, 8.469137890797018, 9.806779896811364, 10.061269318492641, 10.49225751139659, 9.270021518740217, 9.021589451108696, 8.628322890146432, 8.269033433218032, 7.681699348858471, 9.191990983280034, 8.271960762544781, 8.92775588015976, 7.976308077916293, 11.530820146311191, 7.230056142452939, 7.409625408944511, 9.59192644615762, 7.443465747090689, 10.688918562929848, 9.700388177220596, 7.525374998416934, 7.767213612904375, 7.888102771179124, 8.367940708304895, 9.915789677787767, 7.322926508666237, 9.168251414547424, 9.107957269773067, 7.146290924292758, 9.035143711011639, 7.948637405506762, 8.71529787791062, 7.962468785464406, 7.2207234990946745, 8.152049047742215, 7.769928589884074, 8.207945747597492, 8.700083810198583, 7.472680684869939, 10.094570175229663, 8.320306720679291, 11.37375235144043, 9.219795090527011, 8.50053945767189, 7.90656775349191, 7.8694288382503395, 7.672449443161977, 8.333833559246905, 6.67095677133731, 6.8398654640045, 7.172181255733293, 7.626863113669965, 9.307733125117528, 9.424800306779806, 8.403776850313472, 8.098866653724713, 7.183390604660435, 7.42300280211306, 8.027493597533773, 9.89109554750821, 7.720458715942417, 8.790386343336674, 7.5871601865210625, 9.116273231407298, 9.077693929591135, 8.318471651193148, 10.407633656164673, 9.40954439682548, 9.120347207685533, 
7.343604530395195],\n [18.92542477451782, 19.275629907805403, 17.471137200082993, 18.72563923597236, 17.749420686583115, 16.90808329437192, 17.063589282901845, 16.241380185704816, 15.36392022559027, 14.94660699294393, 13.912030337313238, 13.416221109926896, 13.709713674866666, 13.852681265122579, 13.420808673373088, 13.201577451974305, 13.223618862191232, 11.66239626698383, 12.419554528149895, 13.498076476098145, 12.080064594014853, 12.526599259557052, 11.70103249578385, 15.437811112275128, 12.166130567961808, 11.555700469674086, 11.715406267988435, 12.485514841063534, 12.127889781986177, 12.347796237765943, 11.917564950225833, 16.207599705057163, 13.706550806239166, 11.04616132167589, 10.983347710825575, 11.925618475664669, 11.440239403373372, 14.292224868828505, 12.136822441472756, 12.529623571565747, 13.065987587713751, 14.17922610904978, 12.179823701700538, 11.975261946874527, 10.06363320636632, 11.08223410750279, 11.614476967618012, 10.36858707507102, 12.480123143095772, 12.380873801995453, 11.135590475599116, 14.91370032149542, 12.370671315113299, 13.761809663513713, 11.031526997238608, 10.427609194304086, 9.769410975988162, 10.673138275405503, 11.07343486489805, 10.598282777726064, 10.028544153282303, 12.456910597589928, 9.411974723791706, 10.560151549508818, 14.586897443207988, 9.592677784976452, 11.82782493838238, 10.076632508327474, 10.19752765696013, 8.956515659326582, 9.626815326647938, 10.56585017786478, 10.062025898243801, 8.567175381986862, 10.710256838739916, 10.3764282635568, 11.137971254381714, 8.937837753425736, 10.129680582594462, 15.370915519910653, 9.562205922116942, 11.024838618004706, 9.569255179777247, 7.830602013928761, 9.959580081161638, 10.538081523256077, 10.466898906762072, 11.506342848372466, 10.598320708504728, 10.992026834199141, 10.963893149495444, 8.825962408545273, 11.525526122680422, 11.0260184525542, 10.443193743602958, 9.807413284968378, 10.668988218443657, 8.984439839544136, 9.083718688107803, 9.073111257041539, 
9.210207579887538, 9.93818623695558, 8.071062082849698, 8.903269180912275, 9.97391472202903, 10.804265582914677, 8.623291430990674, 9.401453362160336, 9.787632126485489, 9.072967241077345, 8.931613096003572, 7.420972774169009, 10.748363822873772, 10.025341379618633, 11.852767495534827, 10.799243515111527, 9.129231192196704, 10.473864672849547, 10.335961923235852, 9.306967077852688, 10.034464353003578, 11.681991384587839, 9.649723770822916, 10.947736382403685, 8.586739607113469, 10.005963097851593, 10.035342603750681, 9.602602442173577, 14.361518003645552, 11.328374880366507, 9.928856456979387, 9.721385712333829, 9.770669547391911, 13.614860442614793, 9.366698063905595, 9.928641818179978, 9.653711698888696, 7.4300429560722385, 11.217797229258485, 10.72431298186289, 7.467642368863637, 8.971570454681991, 8.549965767538584, 9.407493275739359, 10.528392867450778, 7.974295577989083, 10.219822187765827, 10.372566501215653, 8.440210177657706, 9.51912575051376, 8.682077206889756, 9.706553781906859, 11.130780643068825, 7.171653205795305, 11.564530043471716, 9.534322949575465, 9.112239377042227, 8.164320944914019, 7.492729995974972, 7.582309128187825, 7.647315653669804, 9.123270793064075, 10.062457638193482, 10.31679195149326, 9.055152218370864, 7.856555884864121, 9.96791560344558, 7.218287757242914, 8.229691690038566, 8.619631018437145, 9.31461317813508, 8.701351625863655, 11.020230280554676, 10.497006819077145, 10.356605828896036, 9.176425006812952, 9.310547923304911, 8.107680508414317, 8.606889331078822, 7.925078332719253, 10.932890749279707, 10.143359702633756, 7.669977009412438, 9.39589196464282, 9.695925362084324, 9.218120739163735, 9.424649625033663, 9.363369215162816, 11.267245842936587, 8.419435861136535, 7.131981143033376, 9.419276477771131, 8.630991202023736, 10.768282280735411, 6.909893105733801, 8.368833438976184, 8.772950463993755, 12.07185317714497, 8.54804676963937, 8.66387752114025],\n [17.266577105638163, 17.37678074779175, 16.546289668708628, 
14.817395171108211, 16.9185797697574, 16.778835667720575, 15.625721309294644, 15.539578788768072, 17.60328210932527, 16.537912283522708, 14.73700364229281, 15.375289012222929, 15.47579757800776, 15.72210860561612, 14.49258738093842, 14.340787828370257, 15.445570616123922, 14.379315342044107, 15.612630429974372, 14.95906470703494, 15.951449925028731, 14.287221474224074, 14.392801854265816, 13.50968957044579, 16.0346217646537, 16.065210894611333, 13.452812510942032, 13.692202787570366, 12.712223525165145, 11.647594046121132, 11.555688396523362, 11.041394599378718, 11.340535582092015, 12.87122469678418, 11.160250855978273, 11.888195239943025, 10.08631863795053, 10.44987928222506, 11.292054780934054, 9.56983952656372, 13.23573698944273, 9.15382289554587, 10.226144823316712, 8.499792244585155, 8.941894522595291, 12.407558759188484, 9.576915009692652, 9.532967643877054, 9.691448495352635, 9.463402724554408, 7.987612187584988, 8.816423999705847, 13.052594445059077, 11.627579386348312, 9.725816543830737, 10.224496206948439, 9.590600251442686, 10.81928076216993, 10.593560177781331, 9.601309305318953, 8.595452990116502, 9.712966578999625, 8.039835137914423, 10.148264602432198, 10.107884816171785, 9.098800490110802, 14.414178715607624, 9.621932685301733, 9.66025542152395, 9.059291922109237, 7.8561911191467075, 11.003357927599152, 10.107299727244268, 9.590516656178476, 9.783279900893943, 9.662904388066831, 9.305142703591446, 8.636324141089586, 12.170842130977945, 7.082944913673979, 9.806206178879547, 10.160523553836873, 8.985338418985801, 10.224925509328992, 7.818499685880952, 10.740396983740856, 9.243107804993711, 7.73055872268374, 8.144975756751371, 10.053742368792928, 8.522909964571312, 10.432096948166322, 10.06396215170603, 9.714080284337694, 7.445726020821557, 9.866777490466985, 10.243687181598931, 9.167481243250455, 9.081712581384723, 9.059541037754721, 8.74948953059817, 8.625060024886263, 9.303699042334186, 8.826005731697181, 10.240633829536277, 9.14639910149468, 
9.770141042862203, 8.2000613153184, 8.860101372546286, 7.487198786200657, 8.281702877580948, 12.881142788744135, 8.867968052678139, 8.227751271626786, 9.406695357759235, 8.09059403432903, 7.69092361992629, 7.33967601725349, 8.845876068394304, 8.387177907802162, 9.006247722772171, 9.641493087757967, 8.23328291883457, 6.876592775797417, 9.951155014112825, 13.155882148822469, 8.41575103091348, 8.341409214890705, 8.523471717279442, 7.874433628195658, 8.273522895947075, 7.578443855485955, 7.700422645659718, 9.795986349160515, 7.943555760951561, 9.041446694361873, 8.073638238840427, 8.899934503688483, 9.002352867624069, 10.240448164523244, 9.7563901799707, 7.801473103606336, 8.592678787188632, 8.549454176831071, 8.268825682508181, 8.138327299814197, 9.108589220043099, 10.509261097419104, 6.9861493327951205, 8.372907782567282, 8.395579959091037, 7.140667222646346, 8.712018372837424, 8.350371454734367, 8.146105396056289, 7.2158100659772675, 8.280453003390043, 9.839106878108126, 8.850328269208353, 9.536994855270635, 7.278474965294019, 8.015550639732956, 9.286258898579081, 9.58372688982397, 7.180390089576769, 8.08906650639738, 10.07275269393014, 7.146805861044028, 9.152466536905466, 7.37786494592921, 8.49204192491858, 8.930002975844022, 9.871683758483845, 7.205875714782072, 7.615310253258645, 10.456191712577809, 9.19551228756415, 7.9805618374208676, 7.800618005631044, 8.727294287164508, 6.304038882596863, 9.633017438894116, 10.822627794260098, 10.931045869695053, 9.282886303997424, 7.9785568612491815, 8.44190265811056, 9.095802016074453, 7.892242840014909, 8.174756970039711, 11.078451028460798, 8.526886055425843, 7.186685920546122, 9.099564344943769, 8.814161456413448, 10.357844520719977, 8.080582954598881, 8.54631141138036, 7.97076017079595, 10.661481512399412]])\nQCNN_pca8_MNIST, QCNN_ae8_MNIST = QCNN_pca8_MNIST / 25, QCNN_ae8_MNIST / 25\nQCNN_pca8_FASHION, QCNN_ae8_FASHION = QCNN_pca8_FASHION / 25, QCNN_ae8_FASHION / 25\nQCNN_pca16_MNIST, QCNN_ae16_MNIST = 
QCNN_pca16_MNIST / 25, QCNN_ae16_MNIST / 25\nQCNN_pca16_FASHION, QCNN_ae16_FASHION = QCNN_pca16_FASHION / 25, QCNN_ae16_FASHION / 25\nloss_history_QCNN_pca8_MNIST_mean, loss_history_QCNN_pca8_MNIST_std = mean_std(QCNN_pca8_MNIST)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "n", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "n = 1\ndef plot_loss_history(Encodings, datasets):\n for Encoding in Encodings:\n if Encoding == 'PCA8':\n if datasets == 'MNIST':\n loss_history_QCNN_mean = loss_history_QCNN_pca8_MNIST_mean[::n]\n loss_history_QCNN_std = loss_history_QCNN_pca8_MNIST_std[::n]\n loss_history_CNN_mean = loss_history_CNN_pca8_MNIST_mean[::n]\n loss_history_CNN_std = loss_history_CNN_pca8_MNIST_std[::n]\n else:", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "datasets", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "datasets = 'MNIST'\nEncodings = ['AutoEnc8', 'PCA16']\nplot_loss_history(Encodings, datasets)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "Encodings", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "peekOfCode": "Encodings = ['AutoEnc8', 'PCA16']\nplot_loss_history(Encodings, datasets)", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN.loss_history_plot", + "documentation": {} + }, + { + "label": "TTN_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_pca30_mnist_mse = np.array([0.9829787234042553, 0.9749408983451536, 0.9754137115839243, 0.9820330969267139, 0.9810874704491725])\nTTN_ae30_mnist_mse = np.array([0.6657210401891253, 0.7673758865248227, 0.7900709219858156, 0.826950354609929, 0.8326241134751773])\nTTN_pca32_mnist_mse = np.array([0.7229314420803783, 0.7304964539007093, 0.7172576832151301, 0.6671394799054373, 0.5957446808510638])\nTTN_ae32_mnist_mse = np.array([0.9011820330969267, 0.8879432624113475, 0.8401891252955083, 0.915839243498818, 0.875177304964539])\nU5_pca30_mnist_mse = np.array([0.9801418439716312, 0.9810874704491725, 0.9839243498817967, 0.9829787234042553, 0.9825059101654846])\nU5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae30_mnist_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_ae30_mnist_mse = np.array([0.6657210401891253, 0.7673758865248227, 0.7900709219858156, 0.826950354609929, 0.8326241134751773])\nTTN_pca32_mnist_mse = np.array([0.7229314420803783, 0.7304964539007093, 0.7172576832151301, 0.6671394799054373, 0.5957446808510638])\nTTN_ae32_mnist_mse = np.array([0.9011820330969267, 0.8879432624113475, 0.8401891252955083, 0.915839243498818, 0.875177304964539])\nU5_pca30_mnist_mse = np.array([0.9801418439716312, 0.9810874704491725, 0.9839243498817967, 0.9829787234042553, 0.9825059101654846])\nU5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + 
"peekOfCode": "TTN_pca32_mnist_mse = np.array([0.7229314420803783, 0.7304964539007093, 0.7172576832151301, 0.6671394799054373, 0.5957446808510638])\nTTN_ae32_mnist_mse = np.array([0.9011820330969267, 0.8879432624113475, 0.8401891252955083, 0.915839243498818, 0.875177304964539])\nU5_pca30_mnist_mse = np.array([0.9801418439716312, 0.9810874704491725, 0.9839243498817967, 0.9829787234042553, 0.9825059101654846])\nU5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_ae32_mnist_mse = np.array([0.9011820330969267, 0.8879432624113475, 0.8401891252955083, 0.915839243498818, 0.875177304964539])\nU5_pca30_mnist_mse = np.array([0.9801418439716312, 0.9810874704491725, 
0.9839243498817967, 0.9829787234042553, 0.9825059101654846])\nU5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_pca30_mnist_mse = np.array([0.9801418439716312, 0.9810874704491725, 0.9839243498817967, 0.9829787234042553, 0.9825059101654846])\nU5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = 
np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_ae30_mnist_mse = np.array([0.9196217494089834, 0.9475177304964539, 0.8567375886524823, 0.8926713947990543, 0.8950354609929078])\nU5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 
0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_pca32_mnist_mse = np.array([0.7853427895981088, 0.7262411347517731, 0.7801418439716312, 0.7356973995271867, 0.7030732860520095])\nU5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 
0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_ae32_mnist_mse = np.array([0.9796690307328605, 0.9650118203309692, 0.9895981087470449, 0.992434988179669, 0.9328605200945627])\nU6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = 
np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_pca30_mnist_mse = np.array([0.9829787234042553, 0.9820330969267139, 0.9843971631205674, 0.9825059101654846, 0.9843971631205674])\nU6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 
0.8784869976359339])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_ae30_mnist_mse = np.array([0.8978723404255319, 0.8846335697399527, 0.9087470449172577, 0.8449172576832151, 0.9191489361702128])\nU6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca32_mnist_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_pca32_mnist_mse = np.array([0.8009456264775414, 0.8056737588652483, 0.7952718676122932, 0.7687943262411348, 0.816548463356974])\nU6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + 
"peekOfCode": "U6_ae32_mnist_mse = np.array([0.9862884160756501, 0.9498817966903074, 0.9555555555555556, 0.9862884160756501, 0.9522458628841608])\nU9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_pca30_mnist_mse = np.array([0.9612293144208038, 0.9196217494089834, 0.9721040189125295, 0.957919621749409, 0.9234042553191489])\nU9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 
0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_ae30_mnist_mse = np.array([0.8066193853427897, 0.775886524822695, 0.8368794326241135, 0.7375886524822695, 0.8439716312056738])\nU9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = 
np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_pca32_mnist_mse = np.array([0.6033096926713948, 0.6699763593380614, 0.6458628841607565, 0.6085106382978723, 0.5815602836879432])\nU9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 
0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_ae32_mnist_mse = np.array([0.8335697399527187, 0.9546099290780142, 0.9754137115839243, 0.9224586288416076, 0.9815602836879432])\nU13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = 
np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_pca30_mnist_mse = np.array([0.9801418439716312, 0.9825059101654846, 0.9829787234042553, 0.9782505910165484, 0.9744680851063829])\nU13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 
0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_ae30_mnist_mse = np.array([0.8780141843971632, 0.8652482269503546, 0.9347517730496454, 0.9640661938534278, 0.8784869976359339])\nU13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = 
np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_pca32_mnist_mse = np.array([0.6794326241134752, 0.6964539007092199, 0.7087470449172577, 0.7096926713947991, 0.7148936170212766])\nU13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_ae32_mnist_mse = np.array([0.9560283687943263, 0.9692671394799054, 0.9739952718676123, 0.9517730496453901, 0.9791962174940898])\nU14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_pca30_mnist_mse = np.array([0.9825059101654846, 0.9806146572104019, 0.9810874704491725, 0.9791962174940898, 0.9787234042553191])\nU14_ae30_mnist_mse = np.array([0.8666666666666667, 0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_ae30_mnist_mse = np.array([0.8666666666666667, 
0.9035460992907801, 0.8023640661938535, 0.9352245862884161, 0.816548463356974])\nU14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_pca32_mnist_mse = np.array([0.7144208037825059, 0.7971631205673759, 0.6411347517730497, 0.7791962174940898, 0.6988179669030733])\nU14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 
0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_ae32_mnist_mse = np.array([0.9205673758865248, 0.9555555555555556, 0.9423167848699764, 0.9205673758865248, 0.9366430260047282])\nU15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 
0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_pca30_mnist_mse = np.array([0.9777777777777777, 0.9810874704491725, 0.9829787234042553, 0.9825059101654846, 0.9820330969267139])\nU15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 
0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_ae30_mnist_mse = np.array([0.8463356973995272, 0.8321513002364066, 0.8529550827423168, 0.8482269503546099, 0.8397163120567376])\nU15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 
0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_pca32_mnist_mse = np.array([0.7333333333333333, 0.810401891252955, 0.775886524822695, 0.7550827423167848, 0.7460992907801418])\nU15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = 
np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_ae32_mnist_mse = np.array([0.9621749408983451, 0.8865248226950354, 0.9782505910165484, 0.9810874704491725, 0.9555555555555556])\nSO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 
0.9820330969267139])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_pca30_mnist_mse = np.array([0.9872340425531915, 0.9820330969267139, 0.983451536643026, 0.9801418439716312, 0.9829787234042553])\nSO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae30_mnist_mse", + "kind": 5, + 
"importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_ae30_mnist_mse = np.array([0.9210401891252955, 0.9234042553191489, 0.9342789598108747, 0.7522458628841607, 0.8132387706855791])\nSO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_pca32_mnist_mse = np.array([0.7877068557919622, 0.7952718676122932, 0.7777777777777778, 0.75177304964539, 0.7583924349881797])\nSO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_ae32_mnist_mse = np.array([0.991016548463357, 0.9470449172576832, 0.9451536643026005, 0.9796690307328605, 0.9891252955082742])\nSU4_pca30_mnist_mse = 
np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_pca30_mnist_mse = np.array([0.983451536643026, 0.9810874704491725, 0.9839243498817967, 0.9810874704491725, 0.9796690307328605])\nSU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 
0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_ae30_mnist_mse = np.array([0.900709219858156, 0.8208037825059101, 0.9068557919621749, 0.9286052009456265, 0.7924349881796691])\nSU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 
0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_pca32_mnist_mse = np.array([0.8047281323877069, 0.8052009456264776, 0.7787234042553192, 0.7295508274231679, 0.7877068557919622])\nSU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae32_mnist_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_ae32_mnist_mse = np.array([0.992434988179669, 0.9957446808510638, 0.9872340425531915, 0.975886524822695, 0.9366430260047282])\nSU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_pca30_mnist_mse = np.array([0.9829787234042553, 0.9791962174940898, 0.9806146572104019, 0.9810874704491725, 0.9820330969267139])\nSU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 
0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_ae30_mnist_mse = np.array([0.9101654846335697, 0.8468085106382979, 0.858628841607565, 0.9196217494089834, 0.8893617021276595])\nSU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_pca32_mnist_mse = np.array([0.7565011820330969, 0.7635933806146572, 0.8231678486997636, 0.7635933806146572, 0.7650118203309693])\nSU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_ae32_mnist_mse = np.array([0.9744680851063829, 0.9815602836879432, 0.9905437352245863, 0.9938534278959811, 0.9612293144208038])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = 
np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_pca30_fashion_mse = np.array([0.8475, 0.837, 0.8575, 0.8065, 0.846])\nTTN_ae30_fashion_mse = np.array([0.828, 0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_ae30_fashion_mse = np.array([0.828, 
0.856, 0.78, 0.776, 0.897])\nTTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_pca32_fashion_mse = np.array([0.583, 0.6525, 0.6495, 0.671, 0.665])\nTTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { 
+ "label": "TTN_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "TTN_ae32_fashion_mse = np.array([0.9055, 0.9105, 0.92, 0.908, 0.8805])\nU5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_pca30_fashion_mse = np.array([0.8575, 0.8725, 0.879, 0.837, 0.889])\nU5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 
0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_ae30_fashion_mse = np.array([0.891, 0.9405, 0.9155, 0.8575, 0.8785])\nU5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_pca32_fashion_mse = np.array([0.6565, 0.657, 0.6595, 0.661, 0.6795])\nU5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 
0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U5_ae32_fashion_mse = np.array([0.946, 0.9395, 0.9275, 0.947, 0.932])\nU6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca30_fashion_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_pca30_fashion_mse = np.array([0.8365, 0.8755, 0.8785, 0.8615, 0.86])\nU6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_ae30_fashion_mse = np.array([0.9355, 0.936, 0.9225, 0.845, 0.9395])\nU6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 
0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_pca32_fashion_mse = np.array([0.6665, 0.6615, 0.67, 0.6665, 0.6625])\nU6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U6_ae32_fashion_mse = np.array([0.9115, 0.9465, 0.9195, 0.948, 0.936])\nU9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 
0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_pca30_fashion_mse = np.array([0.8645, 0.844, 0.7625, 0.8755, 0.8675])\nU9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_ae30_fashion_mse = np.array([0.836, 0.781, 0.8685, 0.8735, 0.746])\nU9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_pca32_fashion_mse = np.array([0.623, 0.644, 0.612, 0.6265, 0.649])\nU9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 
0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U9_ae32_fashion_mse = np.array([0.8705, 0.8615, 0.771, 0.904, 0.8935])\nU13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_pca30_fashion_mse = np.array([0.8575, 0.8525, 0.8315, 0.8365, 0.8555])\nU13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 
0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_ae30_fashion_mse = np.array([0.855, 0.921, 0.94, 0.884, 0.8195])\nU13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_pca32_fashion_mse = np.array([0.681, 0.6405, 0.622, 0.6485, 0.6445])\nU13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U13_ae32_fashion_mse = np.array([0.912, 0.913, 0.9045, 0.9115, 0.919])\nU14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 
0.8565, 0.9045, 0.8455])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_pca30_fashion_mse = np.array([0.8165, 0.8685, 0.855, 0.8475, 0.8395])\nU14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_ae30_fashion_mse = np.array([0.831, 0.918, 0.8715, 0.805, 0.871])\nU14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 
0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U14_pca32_fashion_mse = np.array([0.6585,0.6155, 0.67, 0.6485, 0.61])\nU14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + 
"peekOfCode": "U14_ae32_fashion_mse = np.array([0.9035, 0.9155, 0.9345, 0.938, 0.9275])\nU15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_pca30_fashion_mse = np.array([0.833, 0.861, 0.8535, 0.867, 0.862])\nU15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_ae30_fashion_mse = np.array([0.8115, 0.8695, 0.9135, 0.925, 0.8855])\nU15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_pca32_fashion_mse = np.array([0.639, 0.663, 0.6695, 0.639, 0.6655])\nU15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 
0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "U15_ae32_fashion_mse = np.array([0.905, 0.904, 0.9065, 0.935, 0.935])\nSO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + 
"peekOfCode": "SO4_pca30_fashion_mse = np.array([0.9105, 0.8695, 0.8565, 0.9045, 0.8455])\nSO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_ae30_fashion_mse = np.array([0.8815, 0.8845, 0.914, 0.9055, 0.8405])\nSO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_pca32_fashion_mse = np.array([0.656, 0.645, 0.6505, 0.663, 0.6705])\nSO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SO4_ae32_fashion_mse = np.array([0.937, 0.9245, 0.958, 0.943, 0.9515])\nSU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 
0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_pca30_fashion_mse = np.array([0.877, 0.8845, 0.9205, 0.889, 0.858])\nSU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_ae30_fashion_mse = np.array([0.8655, 0.926, 0.87, 0.829, 
0.922])\nSU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_pca32_fashion_mse = np.array([0.631, 0.6615, 0.6515, 0.664, 0.6395])\nSU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae32_fashion_mse", + 
"kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_ae32_fashion_mse = np.array([0.9405, 0.9665, 0.9415, 0.949, 0.9165])\nSU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_pca30_fashion_mse = np.array([0.8575, 0.8975, 0.887, 0.8995, 0.9015])\nSU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + 
str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_ae30_fashion_mse = np.array([0.877, 0.8355, 0.8305, 0.9295, 0.885])\nSU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_pca32_fashion_mse = np.array([0.669, 0.6415, 0.6595, 0.5955, 0.669])\nSU4_1_ae32_fashion_mse = 
np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")\nprint(\"U5 PCA30 \" + str(U5_pca30_mnist_mse.mean()) + \" +/- \" + str(U5_pca30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "SU4_1_ae32_fashion_mse = np.array([0.943, 0.934, 0.9365, 0.926, 0.9065])\nprint(\"HAE and HDE result with CrossEntropy\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")\nprint(\"U5 PCA30 \" + str(U5_pca30_mnist_mse.mean()) + \" +/- \" + str(U5_pca30_mnist_mse.std()))\nprint(\"U5 AE30 \" + str(U5_ae30_mnist_mse.mean()) + \" +/- \" + str(U5_ae30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, 
+ { + "label": "HDE_PCA_MNIST_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_PCA_MNIST_mean = np.array([TTN_pca32_mnist_mse.mean(), U5_pca32_mnist_mse.mean(), U6_pca32_mnist_mse.mean(),\n U9_pca32_mnist_mse.mean(), U13_pca32_mnist_mse.mean(), U14_pca32_mnist_mse.mean(),\n U15_pca32_mnist_mse.mean(), SO4_pca32_mnist_mse.mean(), SU4_pca32_mnist_mse.mean(),\n SU4_1_pca32_mnist_mse.mean()]).mean()\nHDE_PCA_MNIST_std = np.array([TTN_pca32_mnist_mse.std(), U5_pca32_mnist_mse.std(), U6_pca32_mnist_mse.std(),\n U9_pca32_mnist_mse.std(), U13_pca32_mnist_mse.std(), U14_pca32_mnist_mse.std(),\n U15_pca32_mnist_mse.std(), SO4_pca32_mnist_mse.std(), SU4_pca32_mnist_mse.std(),\n SU4_1_pca32_mnist_mse.std()]).mean()\nHDE_AE_MNIST_mean = np.array([TTN_ae32_mnist_mse.mean(), U5_ae32_mnist_mse.mean(), U6_ae32_mnist_mse.mean(),\n U9_ae32_mnist_mse.mean(), U13_ae32_mnist_mse.mean(), U14_ae32_mnist_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_PCA_MNIST_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_PCA_MNIST_std = np.array([TTN_pca32_mnist_mse.std(), U5_pca32_mnist_mse.std(), U6_pca32_mnist_mse.std(),\n U9_pca32_mnist_mse.std(), U13_pca32_mnist_mse.std(), U14_pca32_mnist_mse.std(),\n U15_pca32_mnist_mse.std(), SO4_pca32_mnist_mse.std(), SU4_pca32_mnist_mse.std(),\n SU4_1_pca32_mnist_mse.std()]).mean()\nHDE_AE_MNIST_mean = np.array([TTN_ae32_mnist_mse.mean(), U5_ae32_mnist_mse.mean(), 
U6_ae32_mnist_mse.mean(),\n U9_ae32_mnist_mse.mean(), U13_ae32_mnist_mse.mean(), U14_ae32_mnist_mse.mean(),\n U15_ae32_mnist_mse.mean(), SO4_ae32_mnist_mse.mean(), SU4_ae32_mnist_mse.mean(),\n SU4_1_ae32_mnist_mse.mean()]).mean()\nHDE_AE_MNIST_std = np.array([TTN_ae32_mnist_mse.std(), U5_ae32_mnist_mse.std(), U6_ae32_mnist_mse.std(),\n U9_ae32_mnist_mse.std(), U13_ae32_mnist_mse.std(), U14_ae32_mnist_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_AE_MNIST_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_AE_MNIST_mean = np.array([TTN_ae32_mnist_mse.mean(), U5_ae32_mnist_mse.mean(), U6_ae32_mnist_mse.mean(),\n U9_ae32_mnist_mse.mean(), U13_ae32_mnist_mse.mean(), U14_ae32_mnist_mse.mean(),\n U15_ae32_mnist_mse.mean(), SO4_ae32_mnist_mse.mean(), SU4_ae32_mnist_mse.mean(),\n SU4_1_ae32_mnist_mse.mean()]).mean()\nHDE_AE_MNIST_std = np.array([TTN_ae32_mnist_mse.std(), U5_ae32_mnist_mse.std(), U6_ae32_mnist_mse.std(),\n U9_ae32_mnist_mse.std(), U13_ae32_mnist_mse.std(), U14_ae32_mnist_mse.std(),\n U15_ae32_mnist_mse.std(), SO4_ae32_mnist_mse.std(), SU4_ae32_mnist_mse.std(),\n SU4_1_ae32_mnist_mse.std()]).mean()\nHDE_PCA_FASHION_mean = np.array([TTN_pca32_fashion_mse.mean(), U5_pca32_fashion_mse.mean(), U6_pca32_fashion_mse.mean(),\n U9_pca32_fashion_mse.mean(), U13_pca32_fashion_mse.mean(), U14_pca32_fashion_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_AE_MNIST_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", 
+ "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_AE_MNIST_std = np.array([TTN_ae32_mnist_mse.std(), U5_ae32_mnist_mse.std(), U6_ae32_mnist_mse.std(),\n U9_ae32_mnist_mse.std(), U13_ae32_mnist_mse.std(), U14_ae32_mnist_mse.std(),\n U15_ae32_mnist_mse.std(), SO4_ae32_mnist_mse.std(), SU4_ae32_mnist_mse.std(),\n SU4_1_ae32_mnist_mse.std()]).mean()\nHDE_PCA_FASHION_mean = np.array([TTN_pca32_fashion_mse.mean(), U5_pca32_fashion_mse.mean(), U6_pca32_fashion_mse.mean(),\n U9_pca32_fashion_mse.mean(), U13_pca32_fashion_mse.mean(), U14_pca32_fashion_mse.mean(),\n U15_pca32_fashion_mse.mean(), SO4_pca32_fashion_mse.mean(), SU4_pca32_fashion_mse.mean(),\n SU4_1_pca32_fashion_mse.mean()]).mean()\nHDE_PCA_FASHION_std = np.array([TTN_pca32_fashion_mse.std(), U5_pca32_fashion_mse.std(), U6_pca32_fashion_mse.std(),\n U9_pca32_fashion_mse.std(), U13_pca32_fashion_mse.std(), U14_pca32_fashion_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_PCA_FASHION_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_PCA_FASHION_mean = np.array([TTN_pca32_fashion_mse.mean(), U5_pca32_fashion_mse.mean(), U6_pca32_fashion_mse.mean(),\n U9_pca32_fashion_mse.mean(), U13_pca32_fashion_mse.mean(), U14_pca32_fashion_mse.mean(),\n U15_pca32_fashion_mse.mean(), SO4_pca32_fashion_mse.mean(), SU4_pca32_fashion_mse.mean(),\n SU4_1_pca32_fashion_mse.mean()]).mean()\nHDE_PCA_FASHION_std = np.array([TTN_pca32_fashion_mse.std(), U5_pca32_fashion_mse.std(), U6_pca32_fashion_mse.std(),\n U9_pca32_fashion_mse.std(), U13_pca32_fashion_mse.std(), U14_pca32_fashion_mse.std(),\n 
U15_pca32_fashion_mse.std(), SO4_pca32_fashion_mse.std(), SU4_pca32_fashion_mse.std(),\n SU4_1_pca32_fashion_mse.std()]).mean()\nHDE_AE_FASHION_mean = np.array([TTN_ae32_fashion_mse.mean(), U5_ae32_fashion_mse.mean(), U6_ae32_fashion_mse.mean(),\n U9_ae32_fashion_mse.mean(), U13_ae32_fashion_mse.mean(), U14_ae32_fashion_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_PCA_FASHION_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_PCA_FASHION_std = np.array([TTN_pca32_fashion_mse.std(), U5_pca32_fashion_mse.std(), U6_pca32_fashion_mse.std(),\n U9_pca32_fashion_mse.std(), U13_pca32_fashion_mse.std(), U14_pca32_fashion_mse.std(),\n U15_pca32_fashion_mse.std(), SO4_pca32_fashion_mse.std(), SU4_pca32_fashion_mse.std(),\n SU4_1_pca32_fashion_mse.std()]).mean()\nHDE_AE_FASHION_mean = np.array([TTN_ae32_fashion_mse.mean(), U5_ae32_fashion_mse.mean(), U6_ae32_fashion_mse.mean(),\n U9_ae32_fashion_mse.mean(), U13_ae32_fashion_mse.mean(), U14_ae32_fashion_mse.mean(),\n U15_ae32_fashion_mse.mean(), SO4_ae32_fashion_mse.mean(), SU4_ae32_fashion_mse.mean(),\n SU4_1_ae32_fashion_mse.mean()]).mean()\nHDE_AE_FASHION_std = np.array([TTN_ae32_fashion_mse.std(), U5_ae32_fashion_mse.std(), U6_ae32_fashion_mse.std(),\n U9_ae32_fashion_mse.std(), U13_ae32_fashion_mse.std(), U14_ae32_fashion_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_AE_FASHION_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_AE_FASHION_mean = np.array([TTN_ae32_fashion_mse.mean(), U5_ae32_fashion_mse.mean(), U6_ae32_fashion_mse.mean(),\n U9_ae32_fashion_mse.mean(), U13_ae32_fashion_mse.mean(), U14_ae32_fashion_mse.mean(),\n U15_ae32_fashion_mse.mean(), SO4_ae32_fashion_mse.mean(), SU4_ae32_fashion_mse.mean(),\n SU4_1_ae32_fashion_mse.mean()]).mean()\nHDE_AE_FASHION_std = np.array([TTN_ae32_fashion_mse.std(), U5_ae32_fashion_mse.std(), U6_ae32_fashion_mse.std(),\n U9_ae32_fashion_mse.std(), U13_ae32_fashion_mse.std(), U14_ae32_fashion_mse.std(),\n U15_ae32_fashion_mse.std(), SO4_ae32_fashion_mse.std(), SU4_ae32_fashion_mse.std(),\n SU4_1_ae32_fashion_mse.std()]).mean()\n# HAE mean for PCA and AE\nHAE_PCA_MNIST_mean = np.array([TTN_pca30_mnist_mse.mean(), U5_pca30_mnist_mse.mean(), U6_pca30_mnist_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HDE_AE_FASHION_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HDE_AE_FASHION_std = np.array([TTN_ae32_fashion_mse.std(), U5_ae32_fashion_mse.std(), U6_ae32_fashion_mse.std(),\n U9_ae32_fashion_mse.std(), U13_ae32_fashion_mse.std(), U14_ae32_fashion_mse.std(),\n U15_ae32_fashion_mse.std(), SO4_ae32_fashion_mse.std(), SU4_ae32_fashion_mse.std(),\n SU4_1_ae32_fashion_mse.std()]).mean()\n# HAE mean for PCA and AE\nHAE_PCA_MNIST_mean = np.array([TTN_pca30_mnist_mse.mean(), U5_pca30_mnist_mse.mean(), U6_pca30_mnist_mse.mean(),\n U9_pca30_mnist_mse.mean(), U13_pca30_mnist_mse.mean(), U14_pca30_mnist_mse.mean(),\n U15_pca30_mnist_mse.mean(), SO4_pca30_mnist_mse.mean(), 
SU4_pca30_mnist_mse.mean(),\n SU4_1_pca30_mnist_mse.mean()]).mean()\nHAE_PCA_MNIST_std = np.array([TTN_pca30_mnist_mse.std(), U5_pca30_mnist_mse.std(), U6_pca30_mnist_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_PCA_MNIST_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_PCA_MNIST_mean = np.array([TTN_pca30_mnist_mse.mean(), U5_pca30_mnist_mse.mean(), U6_pca30_mnist_mse.mean(),\n U9_pca30_mnist_mse.mean(), U13_pca30_mnist_mse.mean(), U14_pca30_mnist_mse.mean(),\n U15_pca30_mnist_mse.mean(), SO4_pca30_mnist_mse.mean(), SU4_pca30_mnist_mse.mean(),\n SU4_1_pca30_mnist_mse.mean()]).mean()\nHAE_PCA_MNIST_std = np.array([TTN_pca30_mnist_mse.std(), U5_pca30_mnist_mse.std(), U6_pca30_mnist_mse.std(),\n U9_pca30_mnist_mse.std(), U13_pca30_mnist_mse.std(), U14_pca30_mnist_mse.std(),\n U15_pca30_mnist_mse.std(), SO4_pca30_mnist_mse.std(), SU4_pca30_mnist_mse.std(),\n SU4_1_pca30_mnist_mse.std()]).mean()\nHAE_AE_MNIST_mean = np.array([TTN_ae30_mnist_mse.mean(), U5_ae30_mnist_mse.mean(), U6_ae30_mnist_mse.mean(),\n U9_ae30_mnist_mse.mean(), U13_ae30_mnist_mse.mean(), U14_ae30_mnist_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_PCA_MNIST_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_PCA_MNIST_std = np.array([TTN_pca30_mnist_mse.std(), U5_pca30_mnist_mse.std(), 
U6_pca30_mnist_mse.std(),\n U9_pca30_mnist_mse.std(), U13_pca30_mnist_mse.std(), U14_pca30_mnist_mse.std(),\n U15_pca30_mnist_mse.std(), SO4_pca30_mnist_mse.std(), SU4_pca30_mnist_mse.std(),\n SU4_1_pca30_mnist_mse.std()]).mean()\nHAE_AE_MNIST_mean = np.array([TTN_ae30_mnist_mse.mean(), U5_ae30_mnist_mse.mean(), U6_ae30_mnist_mse.mean(),\n U9_ae30_mnist_mse.mean(), U13_ae30_mnist_mse.mean(), U14_ae30_mnist_mse.mean(),\n U15_ae30_mnist_mse.mean(), SO4_ae30_mnist_mse.mean(), SU4_ae30_mnist_mse.mean(),\n SU4_1_ae30_mnist_mse.mean()]).mean()\nHAE_AE_MNIST_std = np.array([TTN_ae30_mnist_mse.std(), U5_ae30_mnist_mse.std(), U6_ae30_mnist_mse.std(),\n U9_ae30_mnist_mse.std(), U13_ae30_mnist_mse.std(), U14_ae30_mnist_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_AE_MNIST_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_AE_MNIST_mean = np.array([TTN_ae30_mnist_mse.mean(), U5_ae30_mnist_mse.mean(), U6_ae30_mnist_mse.mean(),\n U9_ae30_mnist_mse.mean(), U13_ae30_mnist_mse.mean(), U14_ae30_mnist_mse.mean(),\n U15_ae30_mnist_mse.mean(), SO4_ae30_mnist_mse.mean(), SU4_ae30_mnist_mse.mean(),\n SU4_1_ae30_mnist_mse.mean()]).mean()\nHAE_AE_MNIST_std = np.array([TTN_ae30_mnist_mse.std(), U5_ae30_mnist_mse.std(), U6_ae30_mnist_mse.std(),\n U9_ae30_mnist_mse.std(), U13_ae30_mnist_mse.std(), U14_ae30_mnist_mse.std(),\n U15_ae30_mnist_mse.std(), SO4_ae30_mnist_mse.std(), SU4_ae30_mnist_mse.std(),\n SU4_1_ae30_mnist_mse.std()]).mean()\nHAE_PCA_FASHION_mean = np.array([TTN_pca30_fashion_mse.mean(), U5_pca30_fashion_mse.mean(), U6_pca30_fashion_mse.mean(),\n U9_pca30_fashion_mse.mean(), U13_pca30_fashion_mse.mean(), 
U14_pca30_fashion_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_AE_MNIST_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_AE_MNIST_std = np.array([TTN_ae30_mnist_mse.std(), U5_ae30_mnist_mse.std(), U6_ae30_mnist_mse.std(),\n U9_ae30_mnist_mse.std(), U13_ae30_mnist_mse.std(), U14_ae30_mnist_mse.std(),\n U15_ae30_mnist_mse.std(), SO4_ae30_mnist_mse.std(), SU4_ae30_mnist_mse.std(),\n SU4_1_ae30_mnist_mse.std()]).mean()\nHAE_PCA_FASHION_mean = np.array([TTN_pca30_fashion_mse.mean(), U5_pca30_fashion_mse.mean(), U6_pca30_fashion_mse.mean(),\n U9_pca30_fashion_mse.mean(), U13_pca30_fashion_mse.mean(), U14_pca30_fashion_mse.mean(),\n U15_pca30_fashion_mse.mean(), SO4_pca30_fashion_mse.mean(), SU4_pca30_fashion_mse.mean(),\n SU4_1_pca30_fashion_mse.mean()]).mean()\nHAE_PCA_FASHION_std = np.array([TTN_pca30_fashion_mse.std(), U5_pca30_fashion_mse.std(), U6_pca30_fashion_mse.std(),\n U9_pca30_fashion_mse.std(), U13_pca30_fashion_mse.std(), U14_pca30_fashion_mse.std(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_PCA_FASHION_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_PCA_FASHION_mean = np.array([TTN_pca30_fashion_mse.mean(), U5_pca30_fashion_mse.mean(), U6_pca30_fashion_mse.mean(),\n U9_pca30_fashion_mse.mean(), U13_pca30_fashion_mse.mean(), U14_pca30_fashion_mse.mean(),\n 
U15_pca30_fashion_mse.mean(), SO4_pca30_fashion_mse.mean(), SU4_pca30_fashion_mse.mean(),\n SU4_1_pca30_fashion_mse.mean()]).mean()\nHAE_PCA_FASHION_std = np.array([TTN_pca30_fashion_mse.std(), U5_pca30_fashion_mse.std(), U6_pca30_fashion_mse.std(),\n U9_pca30_fashion_mse.std(), U13_pca30_fashion_mse.std(), U14_pca30_fashion_mse.std(),\n U15_pca30_fashion_mse.std(), SO4_pca30_fashion_mse.std(), SU4_pca30_fashion_mse.std(),\n SU4_1_pca30_fashion_mse.std()]).mean()\nHAE_AE_FASHION_mean = np.array([TTN_ae30_fashion_mse.mean(), U5_ae30_fashion_mse.mean(), U6_ae30_fashion_mse.mean(),\n U9_ae30_fashion_mse.mean(), U13_ae30_fashion_mse.mean(), U14_ae30_fashion_mse.mean(),", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_PCA_FASHION_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_PCA_FASHION_std = np.array([TTN_pca30_fashion_mse.std(), U5_pca30_fashion_mse.std(), U6_pca30_fashion_mse.std(),\n U9_pca30_fashion_mse.std(), U13_pca30_fashion_mse.std(), U14_pca30_fashion_mse.std(),\n U15_pca30_fashion_mse.std(), SO4_pca30_fashion_mse.std(), SU4_pca30_fashion_mse.std(),\n SU4_1_pca30_fashion_mse.std()]).mean()\nHAE_AE_FASHION_mean = np.array([TTN_ae30_fashion_mse.mean(), U5_ae30_fashion_mse.mean(), U6_ae30_fashion_mse.mean(),\n U9_ae30_fashion_mse.mean(), U13_ae30_fashion_mse.mean(), U14_ae30_fashion_mse.mean(),\n U15_ae30_fashion_mse.mean(), SO4_ae30_fashion_mse.mean(), SU4_ae30_fashion_mse.mean(),\n SU4_1_ae30_fashion_mse.mean()]).mean()\nHAE_AE_FASHION_std = np.array([TTN_ae30_fashion_mse.std(), U5_ae30_fashion_mse.std(), U6_ae30_fashion_mse.std(),\n U9_ae30_fashion_mse.std(), U13_ae30_fashion_mse.std(), U14_ae30_fashion_mse.std(),", + 
"detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_AE_FASHION_mean", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_AE_FASHION_mean = np.array([TTN_ae30_fashion_mse.mean(), U5_ae30_fashion_mse.mean(), U6_ae30_fashion_mse.mean(),\n U9_ae30_fashion_mse.mean(), U13_ae30_fashion_mse.mean(), U14_ae30_fashion_mse.mean(),\n U15_ae30_fashion_mse.mean(), SO4_ae30_fashion_mse.mean(), SU4_ae30_fashion_mse.mean(),\n SU4_1_ae30_fashion_mse.mean()]).mean()\nHAE_AE_FASHION_std = np.array([TTN_ae30_fashion_mse.std(), U5_ae30_fashion_mse.std(), U6_ae30_fashion_mse.std(),\n U9_ae30_fashion_mse.std(), U13_ae30_fashion_mse.std(), U14_ae30_fashion_mse.std(),\n U15_ae30_fashion_mse.std(), SO4_ae30_fashion_mse.std(), SU4_ae30_fashion_mse.std(),\n SU4_1_ae30_fashion_mse.std()]).mean()\nprint(\"\\n\")\nprint(\"Mean Values for PCA and AE Encodings: \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "HAE_AE_FASHION_std", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "peekOfCode": "HAE_AE_FASHION_std = np.array([TTN_ae30_fashion_mse.std(), U5_ae30_fashion_mse.std(), U6_ae30_fashion_mse.std(),\n U9_ae30_fashion_mse.std(), U13_ae30_fashion_mse.std(), U14_ae30_fashion_mse.std(),\n U15_ae30_fashion_mse.std(), SO4_ae30_fashion_mse.std(), SU4_ae30_fashion_mse.std(),\n SU4_1_ae30_fashion_mse.std()]).mean()\nprint(\"\\n\")\nprint(\"Mean Values for PCA and AE 
Encodings: \")\nprint(\"HAE PCA MNIST : \" + str(HAE_PCA_MNIST_mean) + \" +\\- \" + str(HAE_PCA_MNIST_std))\nprint(\"HAE AE MNIST: \" + str(HAE_AE_MNIST_mean) + \" +\\- \" + str(HAE_AE_MNIST_std))\nprint(\"HDE PCA MNIST : \" + str(HDE_PCA_MNIST_mean) + \" +\\- \" + str(HDE_PCA_MNIST_std))\nprint(\"HDE AE MNIST: \" + str(HDE_AE_MNIST_mean) + \" +\\- \" + str(HDE_AE_MNIST_std))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.CrossEntropy_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_pca30_mnist_mse = np.array([0.9858156028368794, 0.9839243498817967, 0.9839243498817967, 0.9796690307328605, 0.9546099290780142])\nTTN_ae30_mnist_mse = np.array([0.8591016548463357, 0.7848699763593381, 0.7408983451536643, 0.6865248226950355, 0.8222222222222222])\nTTN_pca32_mnist_mse = np.array([0.6099290780141844, 0.6189125295508274, 0.6335697399527187, 0.6061465721040189, 0.5456264775413712])\nTTN_ae32_mnist_mse = np.array([0.8439716312056738, 0.900709219858156, 0.807565011820331, 0.8387706855791962, 0.7801418439716312])\nU5_pca30_mnist_mse = np.array([0.983451536643026, 0.983451536643026, 0.9787234042553191, 0.9820330969267139, 0.9791962174940898])\nU5_ae30_mnist_mse = np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 
0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_ae30_mnist_mse = np.array([0.8591016548463357, 0.7848699763593381, 0.7408983451536643, 0.6865248226950355, 0.8222222222222222])\nTTN_pca32_mnist_mse = np.array([0.6099290780141844, 0.6189125295508274, 0.6335697399527187, 0.6061465721040189, 0.5456264775413712])\nTTN_ae32_mnist_mse = np.array([0.8439716312056738, 0.900709219858156, 0.807565011820331, 0.8387706855791962, 0.7801418439716312])\nU5_pca30_mnist_mse = np.array([0.983451536643026, 0.983451536643026, 0.9787234042553191, 0.9820330969267139, 0.9791962174940898])\nU5_ae30_mnist_mse = np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_pca32_mnist_mse = np.array([0.6099290780141844, 0.6189125295508274, 0.6335697399527187, 0.6061465721040189, 0.5456264775413712])\nTTN_ae32_mnist_mse = np.array([0.8439716312056738, 0.900709219858156, 0.807565011820331, 0.8387706855791962, 0.7801418439716312])\nU5_pca30_mnist_mse = np.array([0.983451536643026, 0.983451536643026, 0.9787234042553191, 0.9820330969267139, 0.9791962174940898])\nU5_ae30_mnist_mse = np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_ae32_mnist_mse = np.array([0.8439716312056738, 0.900709219858156, 0.807565011820331, 0.8387706855791962, 0.7801418439716312])\nU5_pca30_mnist_mse = np.array([0.983451536643026, 0.983451536643026, 0.9787234042553191, 0.9820330969267139, 0.9791962174940898])\nU5_ae30_mnist_mse = np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_pca30_mnist_mse = np.array([0.983451536643026, 0.983451536643026, 0.9787234042553191, 0.9820330969267139, 0.9791962174940898])\nU5_ae30_mnist_mse = 
np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_ae30_mnist_mse = np.array([0.8600472813238771, 0.8250591016548463, 0.7650118203309693, 0.7754137115839244, 0.7848699763593381])\nU5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 
0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_pca32_mnist_mse = np.array([0.7040189125295508, 0.6628841607565011, 0.7536643026004728, 0.642080378250591, 0.6822695035460993])\nU5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 
0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_ae32_mnist_mse = np.array([0.9791962174940898, 0.9427895981087471, 0.9806146572104019, 0.9621749408983451, 0.8704491725768322])\nU6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 
0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_pca30_mnist_mse = np.array([0.9815602836879432, 0.9820330969267139, 0.9810874704491725, 0.9829787234042553, 0.9839243498817967])\nU6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 
0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_ae30_mnist_mse = np.array([0.8326241134751773, 0.8146572104018912, 0.9371158392434988, 0.8349881796690307, 0.8865248226950354])\nU6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_pca32_mnist_mse = np.array([0.6832151300236406, 0.7347517730496453, 0.6217494089834515, 0.7040189125295508, 0.6803782505910165])\nU6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_ae32_mnist_mse = np.array([0.9276595744680851, 0.91725768321513, 0.9574468085106383, 0.9867612293144208, 0.9111111111111111])\nU9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_pca30_mnist_mse = np.array([0.9304964539007092, 0.9286052009456265, 0.8926713947990543, 
0.975886524822695, 0.9205673758865248])\nU9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_ae30_mnist_mse = np.array([0.8047281323877069, 0.8444444444444444, 0.6723404255319149, 0.7612293144208038, 0.8520094562647754])\nU9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 
0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_pca32_mnist_mse = np.array([0.46477541371158393, 0.557919621749409, 0.5801418439716312, 0.5234042553191489, 0.5947990543735224])\nU9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 
0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_ae32_mnist_mse = np.array([0.7144208037825059, 0.8978723404255319, 0.8680851063829788, 0.9286052009456265, 0.9650118203309692])\nU13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = 
np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_pca30_mnist_mse = np.array([0.9806146572104019, 0.9829787234042553, 0.9754137115839243, 0.9787234042553191, 0.9801418439716312])\nU13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 
0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_ae30_mnist_mse = np.array([0.8699763593380615, 0.8680851063829788, 0.873758865248227, 0.8222222222222222, 0.840661938534279])\nU13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 
0.7314420803782505, 0.6539007092198581])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_pca32_mnist_mse = np.array([0.6959810874704492, 0.608983451536643, 0.6463356973995272, 0.6245862884160757, 0.5858156028368794])\nU13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae32_mnist_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_ae32_mnist_mse = np.array([0.941371158392435, 0.9673758865248226, 0.948936170212766, 0.9361702127659575, 0.9479905437352246])\nU14_pca30_mnist_mse = np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_pca30_mnist_mse = 
np.array([0.9782505910165484, 0.9820330969267139, 0.9796690307328605, 0.9787234042553191, 0.9801418439716312])\nU14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_ae30_mnist_mse = np.array([0.8321513002364066, 0.8534278959810875, 0.7692671394799054, 0.7943262411347518, 0.8992907801418439])\nU14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 
0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_pca32_mnist_mse = np.array([0.6690307328605201, 0.6699763593380614, 0.6108747044917258, 0.6638297872340425, 0.6704491725768321])\nU14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 
0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_ae32_mnist_mse = np.array([0.9073286052009456, 0.9281323877068558, 0.858628841607565, 0.9645390070921985, 0.9295508274231679])\nU15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 
0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_pca30_mnist_mse = np.array([0.9773049645390071, 0.983451536643026, 0.9815602836879432, 0.984869976359338, 0.9853427895981087])\nU15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 
0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_ae30_mnist_mse = np.array([0.8099290780141843, 0.7721040189125296, 0.8491725768321513, 0.8799054373522459, 0.8624113475177305])\nU15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = 
np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_pca32_mnist_mse = np.array([0.6382978723404256, 0.8028368794326242, 0.6614657210401891, 0.7314420803782505, 0.6539007092198581])\nU15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + 
"label": "U15_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_ae32_mnist_mse = np.array([0.9508274231678487, 0.88274231678487, 0.9721040189125295, 0.9148936170212766, 0.9886524822695035])\nSO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.MSE_Hybrid", + "peekOfCode": "SO4_pca30_mnist_mse = np.array([0.9853427895981087, 0.984869976359338, 0.9825059101654846, 0.9829787234042553, 0.9791962174940898])\nSO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_ae30_mnist_mse = np.array([0.8851063829787233, 0.8340425531914893, 0.8028368794326242, 0.8888888888888888, 0.9196217494089834])\nSO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 
0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_pca32_mnist_mse = np.array([0.7026004728132388, 0.7829787234042553, 0.7687943262411348, 0.7754137115839244, 0.6898345153664303])\nSO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = 
np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_ae32_mnist_mse = np.array([0.9588652482269504, 0.9555555555555556, 0.9853427895981087, 0.9002364066193853, 0.9408983451536643])\nSU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 
0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_pca30_mnist_mse = np.array([0.9843971631205674, 0.9862884160756501, 0.9891252955082742, 0.9829787234042553, 0.9843971631205674])\nSU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 
0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_ae30_mnist_mse = np.array([0.8728132387706856, 0.9290780141843972, 0.7527186761229314, 0.8614657210401891, 0.83451536643026])\nSU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_pca32_mnist_mse = np.array([0.7881796690307329, 0.7583924349881797, 0.7952718676122932, 0.782033096926714, 0.7470449172576832])\nSU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_ae32_mnist_mse = np.array([0.9773049645390071, 0.9517730496453901, 0.991016548463357, 0.9905437352245863, 0.9508274231678487])\nSU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = 
np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_pca30_mnist_mse = np.array([0.983451536643026, 0.9829787234042553, 0.9872340425531915, 0.9815602836879432, 0.9782505910165484])\nSU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + 
"documentation": {} + }, + { + "label": "SU4_1_ae30_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_ae30_mnist_mse = np.array([0.8841607565011821, 0.8401891252955083, 0.8973995271867612, 0.8808510638297873, 0.7943262411347518])\nSU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_pca32_mnist_mse = np.array([0.7588652482269503, 0.7711583924349882, 0.7635933806146572, 0.7588652482269503, 0.7446808510638298])\nSU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 
0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae32_mnist_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_ae32_mnist_mse = np.array([0.975886524822695, 0.9773049645390071, 0.9858156028368794, 0.9385342789598109, 0.9560283687943263])\n# Fashion MNIST\nTTN_pca30_fashion_mse = np.array([0.835, 0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_pca30_fashion_mse = np.array([0.835, 
0.8595, 0.835, 0.8545, 0.824])\nTTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_ae30_fashion_mse = np.array([0.748, 0.855, 0.87, 0.8495, 0.834])\nTTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_pca32_fashion_mse", + 
"kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_pca32_fashion_mse = np.array([0.549, 0.6065, 0.5735, 0.5845, 0.615])\nTTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "TTN_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "TTN_ae32_fashion_mse = np.array([0.901, 0.9055, 0.864, 0.8975, 0.917])\nU5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = 
np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_pca30_fashion_mse = np.array([0.844, 0.8365, 0.854, 0.855, 0.8385])\nU5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_ae30_fashion_mse = np.array([0.8955, 0.8295, 0.9205, 0.91, 0.896])\nU5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = 
np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_pca32_fashion_mse = np.array([0.646, 0.6545, 0.64, 0.6675, 0.614])\nU5_ae32_fashion_mse = np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U5_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U5_ae32_fashion_mse = 
np.array([0.913, 0.9035, 0.9245, 0.951, 0.9025])\nU6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_pca30_fashion_mse = np.array([0.8815, 0.8805, 0.8345, 0.877, 0.8515])\nU6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": 
"U6_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_ae30_fashion_mse = np.array([0.7245, 0.9295, 0.8845, 0.87, 0.909])\nU6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_pca32_fashion_mse = np.array([0.6415, 0.6885, 0.6285, 0.6215, 0.6655])\nU6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 
0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U6_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U6_ae32_fashion_mse = np.array([0.931, 0.9405, 0.92, 0.933, 0.9225])\nU9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_pca30_fashion_mse = np.array([0.805, 0.7855, 0.799, 0.844, 0.708])\nU9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 
0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_ae30_fashion_mse = np.array([0.8235, 0.822, 0.841, 0.8785, 0.725])\nU9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid 
Embedding.MSE_Hybrid", + "peekOfCode": "U9_pca32_fashion_mse = np.array([0.487, 0.54, 0.547, 0.5325, 0.5975])\nU9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U9_ae32_fashion_mse = np.array([0.873, 0.9255, 0.8685, 0.8535, 0.9135])\nU13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_pca30_fashion_mse = np.array([0.855, 0.859, 0.8165, 0.8235, 0.845])\nU13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_ae30_fashion_mse = np.array([0.823, 0.904, 0.8785, 0.9115, 0.9255])\nU13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 
0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_pca32_fashion_mse = np.array([0.6275, 0.6135, 0.6325, 0.595, 0.656])\nU13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U13_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U13_ae32_fashion_mse = np.array([0.929, 0.9415, 0.8615, 0.8055, 0.8805])\nU14_pca30_fashion_mse = np.array([0.8515, 0.824, 
0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_pca30_fashion_mse = np.array([0.8515, 0.824, 0.828, 0.8175, 0.9125])\nU14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae30_fashion_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_ae30_fashion_mse = np.array([0.8295, 0.8625, 0.8435, 0.7805, 0.927])\nU14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_pca32_fashion_mse = np.array([0.6025, 0.6075, 0.6125, 0.613, 0.5875])\nU14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 
0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U14_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U14_ae32_fashion_mse = np.array([0.901, 0.91, 0.8895, 0.9415, 0.8915])\nU15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_pca30_fashion_mse = np.array([0.897, 0.844, 0.876, 0.8215, 0.862])\nU15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = 
np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_ae30_fashion_mse = np.array([0.8685, 0.811, 0.8395, 0.893, 0.8965])\nU15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": 
"U15_pca32_fashion_mse = np.array([0.599, 0.581, 0.6035, 0.6665, 0.6075])\nU15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U15_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "U15_ae32_fashion_mse = np.array([0.891, 0.893, 0.9215, 0.9, 0.9015])\nSO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + 
{ + "label": "SO4_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_pca30_fashion_mse = np.array([0.8435, 0.8255, 0.845, 0.8515, 0.8555])\nSO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_ae30_fashion_mse = np.array([0.871, 0.905, 0.8215, 0.8005, 0.8905])\nSO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 
0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_pca32_fashion_mse = np.array([0.6585, 0.6205, 0.626, 0.617, 0.672])\nSO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SO4_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SO4_ae32_fashion_mse = np.array([0.9285, 0.936, 0.886, 0.9275, 0.907])\nSU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 
0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_pca30_fashion_mse = np.array([0.8615, 0.8985, 0.89, 0.8975, 0.87])\nSU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_ae30_fashion_mse = np.array([0.937, 0.873, 0.916, 0.9135, 
0.899])\nSU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_pca32_fashion_mse = np.array([0.696, 0.673, 0.672, 0.646, 0.655])\nSU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_ae32_fashion_mse", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_ae32_fashion_mse = np.array([0.9135, 0.9115, 0.936, 0.94, 0.94])\nSU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_pca30_fashion_mse = np.array([0.89, 0.8995, 0.9095, 0.8655, 0.902])\nSU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + 
str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae30_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_ae30_fashion_mse = np.array([0.901, 0.949, 0.9145, 0.8095, 0.8845])\nSU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_pca32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_pca32_fashion_mse = np.array([0.629, 0.633, 0.688, 0.6875, 0.6425])\nSU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + 
str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")\nprint(\"U5 PCA30 \" + str(U5_pca30_mnist_mse.mean()) + \" +/- \" + str(U5_pca30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "SU4_1_ae32_fashion_mse", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "peekOfCode": "SU4_1_ae32_fashion_mse = np.array([0.9365, 0.919, 0.9495, 0.935, 0.9045])\nprint(\"HAE and HDE result with MSE\")\nprint(\"MNIST DATASET: \")\nprint(\"TTN PCA30 \" + str(TTN_pca30_mnist_mse.mean()) + \" +/- \" + str(TTN_pca30_mnist_mse.std()))\nprint(\"TTN AE30 \" + str(TTN_ae30_mnist_mse.mean()) + \" +/- \" + str(TTN_ae30_mnist_mse.std()))\nprint(\"TTN PCA32 \" + str(TTN_pca32_mnist_mse.mean()) + \" +/- \" + str(TTN_pca32_mnist_mse.std()))\nprint(\"TTN AE32 \" + str(TTN_ae32_mnist_mse.mean()) + \" +/- \" + str(TTN_ae32_mnist_mse.std()))\nprint(\"\\n\")\nprint(\"U5 PCA30 \" + str(U5_pca30_mnist_mse.mean()) + \" +/- \" + str(U5_pca30_mnist_mse.std()))\nprint(\"U5 AE30 \" + str(U5_ae30_mnist_mse.mean()) + \" +/- \" + str(U5_ae30_mnist_mse.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid Embedding.MSE_Hybrid", + "documentation": {} + }, + { + "label": "U9_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_resize256_MNIST = np.array([0.8969267139479905, 0.866193853427896, 0.9016548463356974, 0.9437352245862884, 0.9111111111111111])\nU9_PCA8_MNIST = np.array([0.9763593380614657, 0.9825059101654846, 0.9782505910165484, 0.9829787234042553, 0.9810874704491725])\nU9_AE8_MNIST = np.array([0.9744680851063829, 0.9895981087470449, 0.9952718676122931, 0.9494089834515367, 0.9068557919621749])\nU9_PCA16_MNIST = np.array([0.9763593380614657, 0.9763593380614657, 0.9796690307328605, 0.9763593380614657, 0.9763593380614657])\nU9_AE16_MNIST = np.array([0.8326241134751773, 0.9219858156028369, 0.9101654846335697, 0.8581560283687943, 0.7990543735224587])\nSU4_resize256_MNIST = np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_PCA8_MNIST = np.array([0.9763593380614657, 0.9825059101654846, 0.9782505910165484, 0.9829787234042553, 
0.9810874704491725])\nU9_AE8_MNIST = np.array([0.9744680851063829, 0.9895981087470449, 0.9952718676122931, 0.9494089834515367, 0.9068557919621749])\nU9_PCA16_MNIST = np.array([0.9763593380614657, 0.9763593380614657, 0.9796690307328605, 0.9763593380614657, 0.9763593380614657])\nU9_AE16_MNIST = np.array([0.8326241134751773, 0.9219858156028369, 0.9101654846335697, 0.8581560283687943, 0.7990543735224587])\nSU4_resize256_MNIST = np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_AE8_MNIST = np.array([0.9744680851063829, 0.9895981087470449, 0.9952718676122931, 0.9494089834515367, 0.9068557919621749])\nU9_PCA16_MNIST = np.array([0.9763593380614657, 0.9763593380614657, 0.9796690307328605, 0.9763593380614657, 0.9763593380614657])\nU9_AE16_MNIST = np.array([0.8326241134751773, 0.9219858156028369, 0.9101654846335697, 0.8581560283687943, 0.7990543735224587])\nSU4_resize256_MNIST = 
np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_PCA16_MNIST = np.array([0.9763593380614657, 0.9763593380614657, 0.9796690307328605, 0.9763593380614657, 0.9763593380614657])\nU9_AE16_MNIST = np.array([0.8326241134751773, 0.9219858156028369, 0.9101654846335697, 0.8581560283687943, 0.7990543735224587])\nSU4_resize256_MNIST = np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 
0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_AE16_MNIST = np.array([0.8326241134751773, 0.9219858156028369, 0.9101654846335697, 0.8581560283687943, 0.7990543735224587])\nSU4_resize256_MNIST = np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + 
"documentation": {} + }, + { + "label": "SU4_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_resize256_MNIST = np.array([0.9806146572104019, 0.9891252955082742, 0.9739952718676123, 0.9881796690307328, 0.9877068557919622])\nSU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_PCA8_MNIST = np.array([0.983451536643026, 0.9796690307328605, 0.9796690307328605, 0.9806146572104019, 0.984869976359338])\nSU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 
0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_AE8_MNIST = np.array([0.9791962174940898, 0.9891252955082742, 0.9531914893617022, 0.968321513002364, 0.9309692671394799])\nSU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 
0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_PCA16_MNIST = np.array([0.9787234042553191, 0.9754137115839243, 0.9862884160756501, 0.9782505910165484, 0.9810874704491725])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_AE16_MNIST = np.array([0.9739952718676123, 0.983451536643026, 0.9626477541371158, 0.9872340425531915, 0.9735224586288416])\nU9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 
0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_resize256_FASHION = np.array([0.913, 0.9025, 0.888, 0.9295, 0.9195])\nU9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 
1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_PCA8_FASHION = np.array([0.8665, 0.866, 0.856, 0.8575, 0.866])\nU9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_AE8_FASHION = np.array([0.9335, 0.8895, 0.883, 0.8425, 0.8695])\nU9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result 
with U_9: \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_PCA16_FASHION = np.array([0.7475, 0.8275, 0.872, 0.8745, 0.842])\nU9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "U9_AE16_FASHION = np.array([0.935, 0.903, 0.9055, 0.7765, 0.8405])\nSU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION 
= np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_resize256_FASHION = np.array([0.926, 0.8985, 0.9155, 0.929, 0.914])\nSU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + 
"peekOfCode": "SU4_PCA8_FASHION = np.array([0.923, 0.9265, 0.92, 0.9095, 0.921])\nSU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_AE8_FASHION = np.array([0.899, 0.9305, 0.9125, 0.9375, 0.9455])\nSU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))", + 
"detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_PCA16_FASHION = np.array([0.8835, 0.919, 0.923, 0.8925, 0.917])\nSU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "SU4_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "peekOfCode": "SU4_AE16_FASHION = np.array([0.9395, 0.9115, 0.9345, 0.94, 0.94])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with CrossEntropyLoss\")\nprint(\"Result with U_9: \")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + 
str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"\\n\")\nprint(\"Result with SU4: \")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(crossentropy)", + "documentation": {} + }, + { + "label": "U9_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_resize256_MNIST = np.array([0.858628841607565, 0.9460992907801419, 0.9281323877068558, 0.798581560283688, 0.7976359338061466])\nU9_AE8_MNIST = np.array([0.851063829787234, 0.9550827423167849, 0.9309692671394799, 0.968321513002364, 0.7111111111111111])\nU9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 
0.9139479905437352, 0.9385342789598109, 0.9702127659574468])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_AE8_MNIST = np.array([0.851063829787234, 0.9550827423167849, 0.9309692671394799, 0.968321513002364, 0.7111111111111111])\nU9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 
1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 
0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 
0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_AE8_MNIST", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 
0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_AE16_MNIST", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = 
np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 
0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "U9_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "U9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 
0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_AE8_FASHION", 
+ "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- 
\" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "SU4_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "peekOfCode": "SU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure with MSELoss\\n\")\nprint(\"Result with U_9: 
\\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")\nprint(\"resize256: \" + str(SU4_resize256_MNIST.mean()) +\" +/- \"+ str(SU4_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN 1D.QCNN_1D_chain_result(mse)", + "documentation": {} + }, + { + "label": "CNN_resize256_adam", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_resize256_adam = np.array([0.96, 0.9585, 0.5, 0.9535, 0.9525])\nCNN_pca8_adam = np.array([0.925, 0.91, 0.8745, 0.8985, 0.935])\nCNN_ae8_adam = np.array([0.928, 0.9495, 0.9495, 0.9215, 0.9395])\nCNN_pca16_adam = np.array([0.906, 0.944, 0.909, 0.9465, 0.9195])\nCNN_ae16_adam = np.array([0.961, 0.952, 0.949, 0.903, 0.9455])\nCNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_pca8_adam", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + 
"peekOfCode": "CNN_pca8_adam = np.array([0.925, 0.91, 0.8745, 0.8985, 0.935])\nCNN_ae8_adam = np.array([0.928, 0.9495, 0.9495, 0.9215, 0.9395])\nCNN_pca16_adam = np.array([0.906, 0.944, 0.909, 0.9465, 0.9195])\nCNN_ae16_adam = np.array([0.961, 0.952, 0.949, 0.903, 0.9455])\nCNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_ae8_adam", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_ae8_adam = np.array([0.928, 0.9495, 0.9495, 0.9215, 0.9395])\nCNN_pca16_adam = np.array([0.906, 0.944, 0.909, 0.9465, 0.9195])\nCNN_ae16_adam = np.array([0.961, 0.952, 0.949, 0.903, 0.9455])\nCNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_pca16_adam", + 
"kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_pca16_adam = np.array([0.906, 0.944, 0.909, 0.9465, 0.9195])\nCNN_ae16_adam = np.array([0.961, 0.952, 0.949, 0.903, 0.9455])\nCNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_ae16_adam", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_ae16_adam = np.array([0.961, 0.952, 0.949, 0.903, 0.9455])\nCNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) 
result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_resize256_nesterov", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_resize256_nesterov = np.array([0.8715, 0.936, 0.945, 0.9535, 0.948])\nCNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))\nprint(\"AE16(ADAM) result: \" + str(CNN_ae16_adam.mean()) + \" +/- \" + str(CNN_ae16_adam.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_pca8_nesterov", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_pca8_nesterov = np.array([0.5, 0.908, 0.8005, 0.6215, 0.737])\nCNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = 
np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))\nprint(\"AE16(ADAM) result: \" + str(CNN_ae16_adam.mean()) + \" +/- \" + str(CNN_ae16_adam.std()))\nprint(\"\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_ae8_nesterov", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_ae8_nesterov = np.array([0.708, 0.6145, 0.5905, 0.7565, 0.7385])\nCNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))\nprint(\"AE16(ADAM) result: \" + str(CNN_ae16_adam.mean()) + \" +/- \" + str(CNN_ae16_adam.std()))\nprint(\"\\n\")\nprint(\"Resize256(nesterov) result: \" + str(CNN_resize256_nesterov.mean()) + \" +/- \" + str(CNN_resize256_nesterov.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_pca16_nesterov", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_pca16_nesterov = np.array([0.7045, 0.5, 0.788, 0.851, 0.5])\nCNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))\nprint(\"AE16(ADAM) result: \" + str(CNN_ae16_adam.mean()) + \" +/- \" + str(CNN_ae16_adam.std()))\nprint(\"\\n\")\nprint(\"Resize256(nesterov) result: \" + str(CNN_resize256_nesterov.mean()) + \" +/- \" + str(CNN_resize256_nesterov.std()))\nprint(\"PCA8(nesterov) result: \" + str(CNN_pca8_nesterov.mean()) + \" +/- \" + str(CNN_pca8_nesterov.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "CNN_ae16_nesterov", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "peekOfCode": "CNN_ae16_nesterov = np.array([0.828, 0.5005, 0.8855, 0.9155, 0.5])\nprint(\"Resize256(ADAM) result: \" + str(CNN_resize256_adam.mean()) + \" +/- \" + str(CNN_resize256_adam.std()))\nprint(\"PCA8(ADAM) result: \" + str(CNN_pca8_adam.mean()) + \" +/- \" + str(CNN_pca8_adam.std()))\nprint(\"AE8(ADAM) result: \" + str(CNN_ae8_adam.mean()) + \" +/- \" + str(CNN_ae8_adam.std()))\nprint(\"PCA16(ADAM) result: \" + str(CNN_pca16_adam.mean()) + \" +/- \" + str(CNN_pca16_adam.std()))\nprint(\"AE16(ADAM) result: \" + str(CNN_ae16_adam.mean()) + \" +/- \" + 
str(CNN_ae16_adam.std()))\nprint(\"\\n\")\nprint(\"Resize256(nesterov) result: \" + str(CNN_resize256_nesterov.mean()) + \" +/- \" + str(CNN_resize256_nesterov.std()))\nprint(\"PCA8(nesterov) result: \" + str(CNN_pca8_nesterov.mean()) + \" +/- \" + str(CNN_pca8_nesterov.std()))\nprint(\"AE8(nesterov) result: \" + str(CNN_ae8_nesterov.mean()) + \" +/- \" + str(CNN_ae8_nesterov.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.CNN_result", + "documentation": {} + }, + { + "label": "AE12_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE12_1 = np.array([0.8822695035460993, 0.9744680851063829, 0.9536643026004729, 0.9891252955082742, 0.9593380614657211])\nAE12_2 = np.array([0.9621749408983451, 0.9862884160756501, 0.8884160756501182, 0.9702127659574468, 0.9527186761229315])\nAE12_3 = np.array([0.9328605200945627, 0.9366430260047282, 0.9791962174940898, 0.9569739952718677, 0.9375886524822695])\nAE12_4 = np.array([0.9773049645390071, 0.9612293144208038, 0.9494089834515367, 0.9385342789598109, 0.8888888888888888])\nPCA12_1 = np.array([0.9843971631205674, 0.9810874704491725, 0.9735224586288416, 0.984869976359338, 0.9839243498817967])\nPCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + 
"documentation": {} + }, + { + "label": "AE12_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE12_2 = np.array([0.9621749408983451, 0.9862884160756501, 0.8884160756501182, 0.9702127659574468, 0.9527186761229315])\nAE12_3 = np.array([0.9328605200945627, 0.9366430260047282, 0.9791962174940898, 0.9569739952718677, 0.9375886524822695])\nAE12_4 = np.array([0.9773049645390071, 0.9612293144208038, 0.9494089834515367, 0.9385342789598109, 0.8888888888888888])\nPCA12_1 = np.array([0.9843971631205674, 0.9810874704491725, 0.9735224586288416, 0.984869976359338, 0.9839243498817967])\nPCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE12_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE12_3 = np.array([0.9328605200945627, 0.9366430260047282, 0.9791962174940898, 0.9569739952718677, 0.9375886524822695])\nAE12_4 = np.array([0.9773049645390071, 0.9612293144208038, 0.9494089834515367, 0.9385342789598109, 
0.8888888888888888])\nPCA12_1 = np.array([0.9843971631205674, 0.9810874704491725, 0.9735224586288416, 0.984869976359338, 0.9839243498817967])\nPCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), AE12_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE12_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE12_4 = np.array([0.9773049645390071, 0.9612293144208038, 0.9494089834515367, 0.9385342789598109, 0.8888888888888888])\nPCA12_1 = np.array([0.9843971631205674, 0.9810874704491725, 0.9735224586288416, 0.984869976359338, 0.9839243498817967])\nPCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), 
AE12_4.std()\nPCA12_1_mean, PCA12_1_std = PCA12_1.mean(), PCA12_1.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA12_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA12_1 = np.array([0.9843971631205674, 0.9810874704491725, 0.9735224586288416, 0.984869976359338, 0.9839243498817967])\nPCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), AE12_4.std()\nPCA12_1_mean, PCA12_1_std = PCA12_1.mean(), PCA12_1.std()\nPCA12_2_mean, PCA12_2_std = PCA12_2.mean(), PCA12_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA12_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA12_2 = np.array([0.9843971631205674, 0.984869976359338, 0.9725768321513002, 0.9829787234042553, 0.9839243498817967])\nPCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 
0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), AE12_4.std()\nPCA12_1_mean, PCA12_1_std = PCA12_1.mean(), PCA12_1.std()\nPCA12_2_mean, PCA12_2_std = PCA12_2.mean(), PCA12_2.std()\nPCA12_3_mean, PCA12_3_std = PCA12_3.mean(), PCA12_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA12_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA12_3 = np.array([0.9763593380614657, 0.9810874704491725, 0.9810874704491725, 0.9815602836879432, 0.9839243498817967])\nPCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), AE12_4.std()\nPCA12_1_mean, PCA12_1_std = PCA12_1.mean(), PCA12_1.std()\nPCA12_2_mean, PCA12_2_std = PCA12_2.mean(), PCA12_2.std()\nPCA12_3_mean, PCA12_3_std = PCA12_3.mean(), PCA12_3.std()\nPCA12_4_mean, PCA12_4_std = PCA12_4.mean(), PCA12_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA12_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA12_4 = np.array([0.9617021276595744, 0.9815602836879432, 0.9791962174940898, 0.9825059101654846, 0.9806146572104019])\nAE12_1_mean, AE12_1_std = AE12_1.mean(), AE12_1.std()\nAE12_2_mean, AE12_2_std = AE12_2.mean(), AE12_2.std()\nAE12_3_mean, AE12_3_std = AE12_3.mean(), AE12_3.std()\nAE12_4_mean, AE12_4_std = AE12_4.mean(), AE12_4.std()\nPCA12_1_mean, PCA12_1_std = PCA12_1.mean(), PCA12_1.std()\nPCA12_2_mean, PCA12_2_std = PCA12_2.mean(), PCA12_2.std()\nPCA12_3_mean, PCA12_3_std = PCA12_3.mean(), PCA12_3.std()\nPCA12_4_mean, PCA12_4_std = PCA12_4.mean(), PCA12_4.std()\n######### Amplitude Hybrid 2 #########", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE16_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE16_1 = np.array([0.9947990543735225, 0.9612293144208038, 0.9319148936170213, 0.9626477541371158, 0.9905437352245863])\nAE16_2 = np.array([0.9328605200945627, 0.9938534278959811, 0.9593380614657211, 0.9479905437352246, 0.9938534278959811])\nAE16_3 = np.array([0.9356973995271868, 0.984869976359338, 0.9460992907801419, 0.9801418439716312, 0.9815602836879432])\nAE16_4 = np.array([0.932387706855792, 0.9650118203309692, 0.984869976359338, 0.9484633569739953, 0.9290780141843972])\nPCA16_1 = np.array([0.775886524822695, 0.7621749408983451, 0.7602836879432624, 0.7021276595744681, 0.8033096926713948])\nPCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 
0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE16_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE16_2 = np.array([0.9328605200945627, 0.9938534278959811, 0.9593380614657211, 0.9479905437352246, 0.9938534278959811])\nAE16_3 = np.array([0.9356973995271868, 0.984869976359338, 0.9460992907801419, 0.9801418439716312, 0.9815602836879432])\nAE16_4 = np.array([0.932387706855792, 0.9650118203309692, 0.984869976359338, 0.9484633569739953, 0.9290780141843972])\nPCA16_1 = np.array([0.775886524822695, 0.7621749408983451, 0.7602836879432624, 0.7021276595744681, 0.8033096926713948])\nPCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE16_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + 
"description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE16_3 = np.array([0.9356973995271868, 0.984869976359338, 0.9460992907801419, 0.9801418439716312, 0.9815602836879432])\nAE16_4 = np.array([0.932387706855792, 0.9650118203309692, 0.984869976359338, 0.9484633569739953, 0.9290780141843972])\nPCA16_1 = np.array([0.775886524822695, 0.7621749408983451, 0.7602836879432624, 0.7021276595744681, 0.8033096926713948])\nPCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE16_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE16_4 = np.array([0.932387706855792, 0.9650118203309692, 0.984869976359338, 0.9484633569739953, 0.9290780141843972])\nPCA16_1 = np.array([0.775886524822695, 0.7621749408983451, 0.7602836879432624, 0.7021276595744681, 0.8033096926713948])\nPCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 
0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()\nPCA16_1_mean, PCA16_1_std = PCA16_1.mean(), PCA16_1.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA16_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA16_1 = np.array([0.775886524822695, 0.7621749408983451, 0.7602836879432624, 0.7021276595744681, 0.8033096926713948])\nPCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()\nPCA16_1_mean, PCA16_1_std = PCA16_1.mean(), PCA16_1.std()\nPCA16_2_mean, PCA16_2_std = PCA16_2.mean(), PCA16_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA16_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA16_2 = np.array([0.7621749408983451, 0.7267139479905438, 0.7947990543735225, 0.7460992907801418, 0.7536643026004728])\nPCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()\nPCA16_1_mean, PCA16_1_std = PCA16_1.mean(), PCA16_1.std()\nPCA16_2_mean, PCA16_2_std = PCA16_2.mean(), PCA16_2.std()\nPCA16_3_mean, PCA16_3_std = PCA16_3.mean(), PCA16_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA16_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA16_3 = np.array([0.7087470449172577, 0.7924349881796691, 0.7219858156028369, 0.7773049645390071, 0.7692671394799054])\nPCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()\nPCA16_1_mean, PCA16_1_std = PCA16_1.mean(), PCA16_1.std()\nPCA16_2_mean, PCA16_2_std = PCA16_2.mean(), PCA16_2.std()\nPCA16_3_mean, PCA16_3_std = PCA16_3.mean(), PCA16_3.std()\nPCA16_4_mean, PCA16_4_std = 
PCA16_4.mean(), PCA16_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA16_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA16_4 = np.array([0.7210401891252955, 0.7569739952718676, 0.7016548463356974, 0.7635933806146572, 0.7683215130023641])\nAE16_1_mean, AE16_1_std = AE16_1.mean(), AE16_1.std()\nAE16_2_mean, AE16_2_std = AE16_2.mean(), AE16_2.std()\nAE16_3_mean, AE16_3_std = AE16_3.mean(), AE16_3.std()\nAE16_4_mean, AE16_4_std = AE16_4.mean(), AE16_4.std()\nPCA16_1_mean, PCA16_1_std = PCA16_1.mean(), PCA16_1.std()\nPCA16_2_mean, PCA16_2_std = PCA16_2.mean(), PCA16_2.std()\nPCA16_3_mean, PCA16_3_std = PCA16_3.mean(), PCA16_3.std()\nPCA16_4_mean, PCA16_4_std = PCA16_4.mean(), PCA16_4.std()\n######### Angular Hybrid 4 #########", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE30_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE30_1 = np.array([0.8832151300236407, 0.8581560283687943, 0.941371158392435, 0.8132387706855791, 0.9352245862884161])\nAE30_2 = np.array([0.9016548463356974, 0.9281323877068558, 0.8033096926713948, 0.9177304964539007, 0.8917257683215131])\nAE30_3 = np.array([0.9186761229314421, 0.8988179669030733, 0.9427895981087471, 0.9125295508274232, 0.9177304964539007])\nAE30_4 = np.array([0.9304964539007092, 0.9177304964539007, 0.9035460992907801, 0.950354609929078, 
0.9144208037825059])\nPCA30_1 = np.array([0.9843971631205674, 0.9650118203309692, 0.9763593380614657, 0.983451536643026, 0.9229314420803783])\nPCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE30_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE30_2 = np.array([0.9016548463356974, 0.9281323877068558, 0.8033096926713948, 0.9177304964539007, 0.8917257683215131])\nAE30_3 = np.array([0.9186761229314421, 0.8988179669030733, 0.9427895981087471, 0.9125295508274232, 0.9177304964539007])\nAE30_4 = np.array([0.9304964539007092, 0.9177304964539007, 0.9035460992907801, 0.950354609929078, 0.9144208037825059])\nPCA30_1 = np.array([0.9843971631205674, 0.9650118203309692, 0.9763593380614657, 0.983451536643026, 0.9229314420803783])\nPCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), 
AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE30_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE30_3 = np.array([0.9186761229314421, 0.8988179669030733, 0.9427895981087471, 0.9125295508274232, 0.9177304964539007])\nAE30_4 = np.array([0.9304964539007092, 0.9177304964539007, 0.9035460992907801, 0.950354609929078, 0.9144208037825059])\nPCA30_1 = np.array([0.9843971631205674, 0.9650118203309692, 0.9763593380614657, 0.983451536643026, 0.9229314420803783])\nPCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE30_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE30_4 = np.array([0.9304964539007092, 0.9177304964539007, 0.9035460992907801, 0.950354609929078, 
0.9144208037825059])\nPCA30_1 = np.array([0.9843971631205674, 0.9650118203309692, 0.9763593380614657, 0.983451536643026, 0.9229314420803783])\nPCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()\nPCA30_1_mean, PCA30_1_std = PCA30_1.mean(), PCA30_1.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA30_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA30_1 = np.array([0.9843971631205674, 0.9650118203309692, 0.9763593380614657, 0.983451536643026, 0.9229314420803783])\nPCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()\nPCA30_1_mean, PCA30_1_std = PCA30_1.mean(), PCA30_1.std()\nPCA30_2_mean, 
PCA30_2_std = PCA30_2.mean(), PCA30_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA30_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA30_2 = np.array([0.9843971631205674, 0.9697399527186761, 0.9735224586288416, 0.9768321513002364, 0.983451536643026])\nPCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()\nPCA30_1_mean, PCA30_1_std = PCA30_1.mean(), PCA30_1.std()\nPCA30_2_mean, PCA30_2_std = PCA30_2.mean(), PCA30_2.std()\nPCA30_3_mean, PCA30_3_std = PCA30_3.mean(), PCA30_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA30_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA30_3 = np.array([0.9820330969267139, 0.9645390070921985, 0.966903073286052, 0.9829787234042553, 0.9820330969267139])\nPCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = 
AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()\nPCA30_1_mean, PCA30_1_std = PCA30_1.mean(), PCA30_1.std()\nPCA30_2_mean, PCA30_2_std = PCA30_2.mean(), PCA30_2.std()\nPCA30_3_mean, PCA30_3_std = PCA30_3.mean(), PCA30_3.std()\nPCA30_4_mean, PCA30_4_std = PCA30_4.mean(), PCA30_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA30_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA30_4 = np.array([0.9645390070921985, 0.9569739952718677, 0.9791962174940898, 0.9749408983451536, 0.9777777777777777])\nAE30_1_mean, AE30_1_std = AE30_1.mean(), AE30_1.std()\nAE30_2_mean, AE30_2_std = AE30_2.mean(), AE30_2.std()\nAE30_3_mean, AE30_3_std = AE30_3.mean(), AE30_3.std()\nAE30_4_mean, AE30_4_std = AE30_4.mean(), AE30_4.std()\nPCA30_1_mean, PCA30_1_std = PCA30_1.mean(), PCA30_1.std()\nPCA30_2_mean, PCA30_2_std = PCA30_2.mean(), PCA30_2.std()\nPCA30_3_mean, PCA30_3_std = PCA30_3.mean(), PCA30_3.std()\nPCA30_4_mean, PCA30_4_std = PCA30_4.mean(), PCA30_4.std()\n######### Amplitude Hybrid 4 #########", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE32_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE32_1 = np.array([0.9588652482269504, 0.9186761229314421, 0.9366430260047282, 0.9919621749408983, 0.9862884160756501])\nAE32_2 = 
np.array([0.9825059101654846, 0.9891252955082742, 0.9791962174940898, 0.9858156028368794, 0.9900709219858156])\nAE32_3 = np.array([0.9914893617021276, 0.9730496453900709, 0.9602836879432625, 0.9593380614657211, 0.9635933806146572])\nAE32_4 = np.array([0.9711583924349881, 0.9763593380614657, 0.902127659574468, 0.9773049645390071, 0.9735224586288416])\nPCA32_1 = np.array([0.791016548463357, 0.7867612293144208, 0.7749408983451537, 0.723404255319149, 0.798581560283688])\nPCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE32_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE32_2 = np.array([0.9825059101654846, 0.9891252955082742, 0.9791962174940898, 0.9858156028368794, 0.9900709219858156])\nAE32_3 = np.array([0.9914893617021276, 0.9730496453900709, 0.9602836879432625, 0.9593380614657211, 0.9635933806146572])\nAE32_4 = np.array([0.9711583924349881, 0.9763593380614657, 0.902127659574468, 0.9773049645390071, 0.9735224586288416])\nPCA32_1 = np.array([0.791016548463357, 0.7867612293144208, 0.7749408983451537, 0.723404255319149, 0.798581560283688])\nPCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = 
np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE32_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE32_3 = np.array([0.9914893617021276, 0.9730496453900709, 0.9602836879432625, 0.9593380614657211, 0.9635933806146572])\nAE32_4 = np.array([0.9711583924349881, 0.9763593380614657, 0.902127659574468, 0.9773049645390071, 0.9735224586288416])\nPCA32_1 = np.array([0.791016548463357, 0.7867612293144208, 0.7749408983451537, 0.723404255319149, 0.798581560283688])\nPCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "AE32_4", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "AE32_4 = np.array([0.9711583924349881, 0.9763593380614657, 0.902127659574468, 0.9773049645390071, 0.9735224586288416])\nPCA32_1 = np.array([0.791016548463357, 0.7867612293144208, 0.7749408983451537, 0.723404255319149, 0.798581560283688])\nPCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()\nPCA32_1_mean, PCA32_1_std = PCA32_1.mean(), PCA32_1.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA32_1", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA32_1 = np.array([0.791016548463357, 0.7867612293144208, 0.7749408983451537, 0.723404255319149, 0.798581560283688])\nPCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 
0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()\nPCA32_1_mean, PCA32_1_std = PCA32_1.mean(), PCA32_1.std()\nPCA32_2_mean, PCA32_2_std = PCA32_2.mean(), PCA32_2.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA32_2", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA32_2 = np.array([0.7153664302600473, 0.735224586288416, 0.7063829787234043, 0.7059101654846336, 0.7063829787234043])\nPCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()\nPCA32_1_mean, PCA32_1_std = PCA32_1.mean(), PCA32_1.std()\nPCA32_2_mean, PCA32_2_std = PCA32_2.mean(), PCA32_2.std()\nPCA32_3_mean, PCA32_3_std = PCA32_3.mean(), PCA32_3.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA32_3", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + 
"peekOfCode": "PCA32_3 = np.array([0.8080378250591016, 0.7470449172576832, 0.7924349881796691, 0.7711583924349882, 0.7924349881796691])\nPCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()\nPCA32_1_mean, PCA32_1_std = PCA32_1.mean(), PCA32_1.std()\nPCA32_2_mean, PCA32_2_std = PCA32_2.mean(), PCA32_2.std()\nPCA32_3_mean, PCA32_3_std = PCA32_3.mean(), PCA32_3.std()\nPCA32_4_mean, PCA32_4_std = PCA32_4.mean(), PCA32_4.std()", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "PCA32_4", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "peekOfCode": "PCA32_4 = np.array([0.7867612293144208, 0.8037825059101655, 0.7078014184397163, 0.750354609929078, 0.7825059101654847])\nAE32_1_mean, AE32_1_std = AE32_1.mean(), AE32_1.std()\nAE32_2_mean, AE32_2_std = AE32_2.mean(), AE32_2.std()\nAE32_3_mean, AE32_3_std = AE32_3.mean(), AE32_3.std()\nAE32_4_mean, AE32_4_std = AE32_4.mean(), AE32_4.std()\nPCA32_1_mean, PCA32_1_std = PCA32_1.mean(), PCA32_1.std()\nPCA32_2_mean, PCA32_2_std = PCA32_2.mean(), PCA32_2.std()\nPCA32_3_mean, PCA32_3_std = PCA32_3.mean(), PCA32_3.std()\nPCA32_4_mean, PCA32_4_std = PCA32_4.mean(), PCA32_4.std()\n###########", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.Hybrid_embedding(structure)_accuracy", + "documentation": {} + }, + { + "label": "plot_loss_history", + "kind": 2, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "def plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]\n loss_history_QCNN = loss_histories_QCNN_MNIST[i]\n loss_history_TTN = loss_histories_TTN_MNIST[i]\n elif datasets == 'fashion':\n loss_history_CNN = loss_histories_CNN_FASHION[i]\n loss_history_QCNN = loss_histories_QCNN_FASHION[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_CNN_MNIST = np.array([[0.714053750038147, 0.6757248044013977, 0.6670724749565125, 0.6759045124053955, 0.6712536811828613, 0.6944219470024109, 0.7101644277572632, 0.6841925978660583, 0.6728434562683105, 0.668999433517456, 0.6950646042823792, 0.6684354543685913, 0.6642752885818481, 0.666652500629425, 0.6504575610160828, 0.6460185050964355, 0.645703136920929, 0.6277170181274414, 0.6505084037780762, 0.6278526186943054, 0.6088988184928894, 0.6650892496109009, 0.615799605846405, 0.6277462244033813, 0.6499466896057129, 0.615242600440979, 0.6231257915496826, 0.5951676368713379, 0.5982342958450317, 0.5950978994369507, 0.5420237183570862, 0.5638814568519592, 0.545005202293396, 0.5448441505432129, 0.5539039373397827, 0.5069094300270081, 0.5558350682258606, 0.48426052927970886, 0.5421356558799744, 0.5551093816757202, 0.5049896836280823, 0.4414564371109009, 0.5025984048843384, 0.4578346908092499, 0.4678928256034851, 0.43783116340637207, 0.45159730315208435, 0.4467982351779938, 0.47169816493988037, 
0.40727710723876953, 0.4425938129425049, 0.4238426983356476, 0.4433065354824066, 0.4436638355255127, 0.30902591347694397, 0.4994756281375885, 0.4161745011806488, 0.32217615842819214, 0.4133518934249878, 0.32330527901649475, 0.3460567891597748, 0.2753203213214874, 0.4248920679092407, 0.2533189058303833, 0.33595219254493713, 0.31314578652381897, 0.3584780991077423, 0.39643460512161255, 0.36942508816719055, 0.34632256627082825, 0.43791523575782776, 0.2648601233959198, 0.2459360510110855, 0.2911376655101776, 0.3936261832714081, 0.22703512012958527, 0.34607556462287903, 0.22962436079978943, 0.31266433000564575, 0.3194691836833954, 0.3004220128059387, 0.2597625255584717, 0.27975985407829285, 0.3647230863571167, 0.2021866738796234, 0.310232013463974, 0.267425537109375, 0.20514070987701416, 0.3879457116127014, 0.2696777284145355, 0.20186719298362732, 0.19994035363197327, 0.21005916595458984, 0.20109756290912628, 0.24811165034770966, 0.26313042640686035, 0.21158762276172638, 0.23533910512924194, 0.19377802312374115, 0.26648518443107605, 0.327722430229187, 0.4033016860485077, 0.2014351487159729, 0.32094427943229675, 0.35047662258148193, 0.29127103090286255, 0.4139123260974884, 0.4130251705646515, 0.24823781847953796, 0.21104803681373596, 0.306765079498291, 0.20947876572608948, 0.17692415416240692, 0.19715525209903717, 0.35659322142601013, 0.1806756854057312, 0.13031096756458282, 0.19041313230991364, 0.18950439989566803, 0.28686586022377014, 0.24760431051254272, 0.2115887999534607, 0.2320551723241806, 0.2639220356941223, 0.26703938841819763, 0.18723364174365997, 0.3086016774177551, 0.0961514264345169, 0.5094321966171265, 0.13711600005626678, 0.12534093856811523, 0.13913847506046295, 0.228322371840477, 0.10770484060049057, 0.17475208640098572, 0.15008972585201263, 0.18664470314979553, 0.2059953659772873, 0.1808759719133377, 0.14071853458881378, 0.11081507056951523, 0.21924449503421783, 0.34293651580810547, 0.07046297937631607, 0.08555634319782257, 0.2753034830093384, 
0.18749330937862396, 0.09204571694135666, 0.1777571439743042, 0.15454773604869843, 0.30302849411964417, 0.1936318427324295, 0.1466241478919983, 0.10814632475376129, 0.28570395708084106, 0.21450716257095337, 0.2574478089809418, 0.08683276921510696, 0.12849056720733643, 0.23936748504638672, 0.35046452283859253, 0.18132348358631134, 0.06745906174182892, 0.08519738167524338, 0.16765129566192627, 0.1226348802447319, 0.4494912028312683, 0.2902643084526062, 0.14854183793067932, 0.12457065284252167, 0.08460919559001923, 0.165123850107193, 0.128731831908226, 0.2893070578575134, 0.26522427797317505, 0.14514367282390594, 0.09648929536342621, 0.3117218017578125, 0.2046540230512619, 0.08452397584915161, 0.4166283905506134, 0.2918921709060669, 0.356248676776886, 0.3074325621128082, 0.12465698271989822, 0.1480899155139923, 0.24002531170845032, 0.3299065828323364, 0.05778765305876732, 0.18016034364700317, 0.10325605422258377, 0.3677785098552704, 0.07459772378206253, 0.13220126926898956, 0.17934730648994446, 0.07940277457237244, 0.08888012170791626, 0.26546308398246765, 0.4198744595050812, 0.2377086877822876],\n [0.722859263420105, 0.721849262714386, 0.7045133113861084, 0.7295671105384827, 0.6710929274559021, 0.7143537402153015, 0.6963018178939819, 0.683974027633667, 0.7001882791519165, 0.7011011242866516, 0.6926947236061096, 0.6945953369140625, 0.6880753040313721, 0.6847305297851562, 0.6930335760116577, 0.6383837461471558, 0.6585142016410828, 0.6643711924552917, 0.670865535736084, 0.6620815992355347, 0.6525277495384216, 0.6566925048828125, 0.6603410840034485, 0.646219789981842, 0.6375677585601807, 0.7443196773529053, 0.6225131154060364, 0.653272807598114, 0.6358342170715332, 0.681471049785614, 0.6139385104179382, 0.6432995796203613, 0.6474753618240356, 0.6299405097961426, 0.6331878900527954, 0.630324125289917, 0.6321933269500732, 0.6136326789855957, 0.6219558715820312, 0.6019330024719238, 0.6050845980644226, 0.6444574594497681, 0.6327107548713684, 0.6075921058654785, 
0.5880964398384094, 0.5826204419136047, 0.5313653945922852, 0.6057491898536682, 0.6037223935127258, 0.5866155028343201, 0.5857460498809814, 0.5763251781463623, 0.5990739464759827, 0.5852251648902893, 0.5917327404022217, 0.594275951385498, 0.5615271329879761, 0.570615291595459, 0.4967952072620392, 0.4958264231681824, 0.5339020490646362, 0.5119616985321045, 0.565489649772644, 0.5976889729499817, 0.557020366191864, 0.5004237294197083, 0.5867930054664612, 0.5332251787185669, 0.6047292947769165, 0.52519690990448, 0.5135031342506409, 0.4499858617782593, 0.5737566947937012, 0.537904679775238, 0.5518447160720825, 0.5273762345314026, 0.4588744342327118, 0.48763972520828247, 0.4228086471557617, 0.6082445383071899, 0.44678717851638794, 0.5027003288269043, 0.5093401074409485, 0.4335576891899109, 0.6178605556488037, 0.5808064341545105, 0.4536117613315582, 0.46775364875793457, 0.4472064971923828, 0.45179107785224915, 0.452457994222641, 0.46096116304397583, 0.4410419464111328, 0.49289727210998535, 0.48667240142822266, 0.5219956040382385, 0.4383401870727539, 0.4490739405155182, 0.43243953585624695, 0.4779336452484131, 0.4499213397502899, 0.4945499002933502, 0.4267149269580841, 0.44638243317604065, 0.3887633979320526, 0.45998772978782654, 0.4307110905647278, 0.349350243806839, 0.4914915859699249, 0.39171886444091797, 0.3926960229873657, 0.41929078102111816, 0.3891267478466034, 0.37243568897247314, 0.3615072965621948, 0.3486050069332123, 0.39569419622421265, 0.4160010516643524, 0.395239919424057, 0.3473738133907318, 0.3690200746059418, 0.38260650634765625, 0.37693727016448975, 0.35455551743507385, 0.3153707683086395, 0.4163806438446045, 0.33833393454551697, 0.3445013165473938, 0.3172437250614166, 0.2776515483856201, 0.2830849289894104, 0.3238430917263031, 0.39875033497810364, 0.3399185240268707, 0.5361736416816711, 0.41780292987823486, 0.3610226809978485, 0.2841382920742035, 0.3309812545776367, 0.24294963479042053, 0.23834404349327087, 0.2678911089897156, 0.2697659730911255, 
0.32697904109954834, 0.37020769715309143, 0.2873440384864807, 0.20928901433944702, 0.35995548963546753, 0.25920143723487854, 0.44556766748428345, 0.2845075726509094, 0.3141847550868988, 0.3044743835926056, 0.32837897539138794, 0.24346278607845306, 0.2761305570602417, 0.2605215013027191, 0.24642413854599, 0.3306944966316223, 0.2538274824619293, 0.19690634310245514, 0.2894386947154999, 0.277312695980072, 0.2049439549446106, 0.2273852676153183, 0.23064839839935303, 0.2347431629896164, 0.3478816747665405, 0.2558739185333252, 0.22614656388759613, 0.2866215705871582, 0.29618847370147705, 0.2791079580783844, 0.2986387312412262, 0.15219609439373016, 0.2581731081008911, 0.292522132396698, 0.24959693849086761, 0.28260356187820435, 0.1927589476108551, 0.18100348114967346, 0.31418827176094055, 0.23249584436416626, 0.3172941505908966, 0.27895286679267883, 0.28041180968284607, 0.24284619092941284, 0.19051550328731537, 0.20800790190696716, 0.23449639976024628, 0.20080867409706116, 0.3689790368080139, 0.1671542525291443, 0.3172452449798584, 0.2963358163833618, 0.17572879791259766, 0.2509303092956543, 0.15980759263038635, 0.2943058907985687, 0.24765785038471222],\n [0.6766626834869385, 0.6733847856521606, 0.7268949747085571, 0.6916412115097046, 0.7079758644104004, 0.6945012807846069, 0.6860032677650452, 0.6836898922920227, 0.6823083758354187, 0.6946573853492737, 0.6738471388816833, 0.7055628299713135, 0.7074865698814392, 0.6980187296867371, 0.6752068996429443, 0.6878193616867065, 0.6858772039413452, 0.6845739483833313, 0.6781122088432312, 0.6687148213386536, 0.6551575660705566, 0.6618691086769104, 0.6707185506820679, 0.6695660948753357, 0.6733136177062988, 0.6597137451171875, 0.6290885806083679, 0.688670814037323, 0.6812517642974854, 0.6421326994895935, 0.6218180656433105, 0.6294578313827515, 0.5880572199821472, 0.6018733978271484, 0.6334924101829529, 0.7113765478134155, 0.6618599891662598, 0.6857787370681763, 0.6958155035972595, 0.6620758771896362, 0.6098875999450684, 
0.6351224780082703, 0.7133780121803284, 0.6134438514709473, 0.6188390254974365, 0.598128080368042, 0.6313796043395996, 0.5903613567352295, 0.6385324001312256, 0.5774256587028503, 0.607389509677887, 0.5639699101448059, 0.573137104511261, 0.6269221901893616, 0.5388535261154175, 0.5750295519828796, 0.6302374005317688, 0.5416637063026428, 0.5846621990203857, 0.5884502530097961, 0.5980594158172607, 0.6035245656967163, 0.5252978801727295, 0.5058540105819702, 0.4807260036468506, 0.5133909583091736, 0.6284927129745483, 0.5735365152359009, 0.5716258883476257, 0.5082230567932129, 0.5524265766143799, 0.4895395040512085, 0.4933544099330902, 0.5380381345748901, 0.5306010246276855, 0.5045926570892334, 0.5111860036849976, 0.39580845832824707, 0.46140849590301514, 0.40367892384529114, 0.5849067568778992, 0.4099409878253937, 0.46059444546699524, 0.47587355971336365, 0.36137184500694275, 0.4260672330856323, 0.46819329261779785, 0.3892982006072998, 0.4808935821056366, 0.3398974537849426, 0.37576550245285034, 0.34359949827194214, 0.2874930202960968, 0.3132879137992859, 0.3394784927368164, 0.32755810022354126, 0.3825659453868866, 0.4756413400173187, 0.31717604398727417, 0.27863574028015137, 0.3506808578968048, 0.36046916246414185, 0.44146835803985596, 0.3126901090145111, 0.2787816524505615, 0.3439459204673767, 0.37225142121315, 0.5115729570388794, 0.3006516695022583, 0.2852512001991272, 0.2545645534992218, 0.38197603821754456, 0.29596179723739624, 0.2493038922548294, 0.4055787920951843, 0.23028181493282318, 0.30714738368988037, 0.24847815930843353, 0.2578228712081909, 0.2965392768383026, 0.2399429827928543, 0.26306965947151184, 0.18465633690357208, 0.2370777726173401, 0.402022123336792, 0.2274547517299652, 0.24400709569454193, 0.2846790552139282, 0.2497858852148056, 0.2619662880897522, 0.30175483226776123, 0.22820281982421875, 0.22678717970848083, 0.36070916056632996, 0.31420132517814636, 0.4352460503578186, 0.22265543043613434, 0.34359288215637207, 0.13647298514842987, 
0.21282663941383362, 0.21635796129703522, 0.26533767580986023, 0.3534102737903595, 0.32491347193717957, 0.2294543832540512, 0.27164843678474426, 0.19885091483592987, 0.18760965764522552, 0.2540241777896881, 0.4632641077041626, 0.29165011644363403, 0.4135802984237671, 0.26327550411224365, 0.1997772455215454, 0.13516023755073547, 0.3695870637893677, 0.11264805495738983, 0.29873695969581604, 0.16909335553646088, 0.30733317136764526, 0.20011112093925476, 0.18087071180343628, 0.27615079283714294, 0.18393971025943756, 0.5226209163665771, 0.34045127034187317, 0.19876118004322052, 0.1125766783952713, 0.1594187170267105, 0.20853376388549805, 0.1732969880104065, 0.14923624694347382, 0.3410123586654663, 0.3147244155406952, 0.1611364781856537, 0.20200948417186737, 0.3477379083633423, 0.27802884578704834, 0.13795316219329834, 0.18882125616073608, 0.2493482530117035, 0.21802087128162384, 0.08311432600021362, 0.3638332784175873, 0.1593743860721588, 0.12320718169212341, 0.14631333947181702, 0.14829546213150024, 0.12080324441194534, 0.2866232395172119, 0.11320976167917252, 0.1859615445137024, 0.0962543860077858, 0.3311655521392822, 0.22131189703941345, 0.1387200504541397, 0.3458332419395447, 0.2703413665294647, 0.08891014009714127, 0.07229170948266983],\n [0.787895917892456, 0.6903800368309021, 0.7158403992652893, 0.7122862339019775, 0.6886484026908875, 0.6939958930015564, 0.6935360431671143, 0.650316059589386, 0.6544728875160217, 0.6994084715843201, 0.6894413232803345, 0.6850844621658325, 0.6804036498069763, 0.6757852435112, 0.6671650409698486, 0.6817684769630432, 0.6735156178474426, 0.6615545749664307, 0.6588905453681946, 0.645582914352417, 0.6698077917098999, 0.6574685573577881, 0.657529890537262, 0.6392114758491516, 0.649583101272583, 0.6608424186706543, 0.6229669451713562, 0.6509481072425842, 0.6281221508979797, 0.6040320992469788, 0.6476627588272095, 0.6141312718391418, 0.6132612824440002, 0.5976462364196777, 0.5747413039207458, 0.5883685946464539, 0.6068240404129028, 
0.5791706442832947, 0.5358545780181885, 0.5668087601661682, 0.5293481945991516, 0.5469346642494202, 0.5151075720787048, 0.48746681213378906, 0.49674975872039795, 0.4565601646900177, 0.4422961175441742, 0.455774188041687, 0.4999108910560608, 0.4826383888721466, 0.3973392844200134, 0.4315979480743408, 0.5005404949188232, 0.3493035137653351, 0.40820634365081787, 0.4267416000366211, 0.3546534478664398, 0.29594555497169495, 0.37569165229797363, 0.3514409363269806, 0.39161917567253113, 0.381989449262619, 0.35257411003112793, 0.2563234269618988, 0.32872822880744934, 0.3258020281791687, 0.4029194712638855, 0.27450475096702576, 0.29574957489967346, 0.22943808138370514, 0.21788370609283447, 0.1983984112739563, 0.2505156695842743, 0.17859338223934174, 0.19801589846611023, 0.33501136302948, 0.2805149555206299, 0.0791587084531784, 0.2965260446071625, 0.09564807265996933, 0.26017022132873535, 0.28739306330680847, 0.39182159304618835, 0.28442826867103577, 0.20208536088466644, 0.20845864713191986, 0.083095982670784, 0.11846388131380081, 0.2296963334083557, 0.4584989845752716, 0.2712455689907074, 0.44045203924179077, 0.24386362731456757, 0.25447797775268555, 0.17673835158348083, 0.2929280400276184, 0.2384115606546402, 0.2909412682056427, 0.3392401933670044, 0.19307689368724823, 0.27072784304618835, 0.22949834167957306, 0.09036514163017273, 0.20298032462596893, 0.1406632363796234, 0.1747269630432129, 0.32058724761009216, 0.3728943169116974, 0.3406106233596802, 0.2329806089401245, 0.11757044494152069, 0.10781583189964294, 0.1772853434085846, 0.41519486904144287, 0.2126924693584442, 0.13533304631710052, 0.10644836723804474, 0.22335217893123627, 0.3297368586063385, 0.3315538465976715, 0.43632546067237854, 0.24056647717952728, 0.37213650345802307, 0.16111552715301514, 0.05763351917266846, 0.21711736917495728, 0.13189882040023804, 0.048426304012537, 0.14040182530879974, 0.16238971054553986, 0.3589114844799042, 0.12540899217128754, 0.08508334308862686, 0.1410427689552307, 
0.18061935901641846, 0.44160178303718567, 0.3028709888458252, 0.07879309356212616, 0.17187120020389557, 0.17292766273021698, 0.22851528227329254, 0.21786810457706451, 0.142022967338562, 0.09644395858049393, 0.08551015704870224, 0.33149322867393494, 0.20092003047466278, 0.06972720474004745, 0.20957961678504944, 0.06169748678803444, 0.14879749715328217, 0.2783214747905731, 0.06366264820098877, 0.5207465291023254, 0.16419930756092072, 0.20410758256912231, 0.22914914786815643, 0.1090226098895073, 0.3697149157524109, 0.4854496121406555, 0.15068838000297546, 0.31723499298095703, 0.25102636218070984, 0.23569755256175995, 0.3954540193080902, 0.19988884031772614, 0.0597945898771286, 0.14768925309181213, 0.06746945530176163, 0.11429297178983688, 0.1333230584859848, 0.3710431158542633, 0.13212214410305023, 0.05055677890777588, 0.18356674909591675, 0.2802237272262573, 0.3557482063770294, 0.18571825325489044, 0.2556218206882477, 0.4787002503871918, 0.2715683877468109, 0.13747304677963257, 0.4504898488521576, 0.13083931803703308, 0.22413353621959686, 0.16070926189422607, 0.26609182357788086, 0.14192163944244385, 0.19613544642925262, 0.1497608721256256, 0.2040126472711563, 0.04195399209856987, 0.03975365310907364, 0.580817461013794, 0.24257855117321014, 0.3649570047855377, 0.14266571402549744, 0.2238038033246994, 0.3230799734592438, 0.09331774711608887]])\nloss_histories_QCNN_MNIST = np.array([])\nloss_histories_TTN_MNIST = np.array([])\nloss_histories_CNN_FASHION = np.array([])\nloss_histories_QCNN_FASHION = np.array([])\nloss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_QCNN_MNIST = np.array([])\nloss_histories_TTN_MNIST = np.array([])\nloss_histories_CNN_FASHION = np.array([])\nloss_histories_QCNN_FASHION = np.array([])\nloss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_TTN_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_TTN_MNIST = np.array([])\nloss_histories_CNN_FASHION = np.array([])\nloss_histories_QCNN_FASHION = np.array([])\nloss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_CNN_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_CNN_FASHION = np.array([])\nloss_histories_QCNN_FASHION = np.array([])\nloss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in 
range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]\n loss_history_QCNN = loss_histories_QCNN_MNIST[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_QCNN_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_QCNN_FASHION = np.array([])\nloss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]\n loss_history_QCNN = loss_histories_QCNN_MNIST[i]\n loss_history_TTN = loss_histories_TTN_MNIST[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "loss_histories_TTN_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "loss_histories_TTN_FASHION = np.array([])\nEncodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]\n loss_history_QCNN = loss_histories_QCNN_MNIST[i]\n loss_history_TTN = loss_histories_TTN_MNIST[i]\n elif datasets == 'fashion':", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "Encodings", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "Encodings = ['pca8', 'autoencoder8', 'pca16-compact', 'autoencoder16-compact']\ndef plot_loss_history(Encodings, datasets):\n for i in range(len(Encodings)):\n Encoding = Encodings[i]\n if datasets == 'mnist':\n loss_history_CNN = loss_histories_CNN_MNIST[i]\n loss_history_QCNN = loss_histories_QCNN_MNIST[i]\n loss_history_TTN = loss_histories_TTN_MNIST[i]\n elif datasets == 'fashion':\n loss_history_CNN = loss_histories_CNN_FASHION[i]", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "datasets", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "peekOfCode": "datasets = 'mnist'\nplot_loss_history(Encodings, datasets)", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.loss_history", + "documentation": {} + }, + { + "label": "U9_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_resize256_MNIST = np.array([0.858628841607565, 0.9460992907801419, 0.9281323877068558, 0.798581560283688, 0.7976359338061466])\nU9_AE8_MNIST = np.array([0.851063829787234, 0.9550827423167849, 0.9309692671394799, 0.968321513002364, 0.7111111111111111])\nU9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = 
np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_AE8_MNIST = np.array([0.851063829787234, 0.9550827423167849, 0.9309692671394799, 0.968321513002364, 0.7111111111111111])\nU9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 
0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_PCA8_MNIST = np.array([0.9862884160756501, 0.9782505910165484, 0.9754137115839243, 0.9782505910165484, 0.9777777777777777])\nU9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_PCA16_MNIST = np.array([0.9427895981087471, 0.9304964539007092, 0.975886524822695, 0.9687943262411347, 0.7182033096926714])\nU9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_AE16_MNIST = np.array([0.9456264775413712, 0.9711583924349881, 0.891725768321513, 0.7990543735224587, 
0.9200945626477541])\nSU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_resize256_MNIST = np.array([0.9673758865248226, 0.9650118203309692, 0.966903073286052, 0.9829787234042553, 0.9361702127659575])\nSU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 
0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_AE8_MNIST = np.array([0.957919621749409, 0.9229314420803783, 0.9867612293144208, 0.9735224586288416, 0.9919621749408983])\nSU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_PCA8_MNIST = np.array([0.9791962174940898, 0.983451536643026, 0.9829787234042553, 0.9900709219858156, 0.9777777777777777])\nSU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_PCA16_MNIST = np.array([0.9801418439716312, 0.9829787234042553, 0.9801418439716312, 0.983451536643026, 0.9768321513002364])\nSU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = 
np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_AE16_MNIST = np.array([0.9470449172576832, 0.9640661938534278, 0.9139479905437352, 0.9385342789598109, 0.9702127659574468])\nU9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_resize256_FASHION = np.array([0.7795, 0.876, 0.8555, 0.698, 0.9395])\nU9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 
0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_AE8_FASHION = np.array([0.9135, 0.7215, 0.852, 0.9035, 0.7855])\nU9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_PCA8_FASHION = np.array([0.892, 0.8435, 0.87, 0.797, 0.8405])\nU9_PCA16_FASHION = 
np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_PCA16_FASHION = np.array([0.5475, 0.7535, 0.788, 0.8305, 0.8285])\nU9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "U9_AE16_FASHION = np.array([0.649, 0.8535, 0.847, 0.9165, 0.8425])\nSU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_resize256_FASHION = np.array([0.925, 0.8935, 0.902, 0.9115, 0.9045])\nSU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_AE8_FASHION = np.array([0.8995, 0.934, 0.8435, 0.9215, 0.932])\nSU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_PCA8_FASHION = np.array([0.921, 0.916, 0.9295, 0.934, 0.9285])\nSU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + 
str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_PCA16_FASHION = np.array([0.9265, 0.926, 0.9335, 0.928, 0.914])\nSU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "SU4_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "peekOfCode": "SU4_AE16_FASHION = np.array([0.934, 0.9445, 0.8915, 0.927, 0.918])\nprint(\"Result for MNIST dataset with 1D chain QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + 
str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")\nprint(\"resize256: \" + str(SU4_resize256_MNIST.mean()) +\" +/- \"+ str(SU4_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_1D_chain_result", + "documentation": {} + }, + { + "label": "U9_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_resize256_MNIST = np.array([0.8884160756501182, 0.891725768321513, 0.9347517730496454, 0.8222222222222222, 0.8600472813238771])\nU9_AE8_MNIST = np.array([0.9598108747044918, 0.9465721040189126, 0.9527186761229315, 0.9730496453900709, 0.968321513002364])\nU9_PCA8_MNIST = np.array([0.9768321513002364, 0.9782505910165484, 0.9900709219858156, 0.9678486997635933, 0.9768321513002364])\nU9_PCA16_MNIST = np.array([0.9678486997635933, 0.9560283687943263, 0.9309692671394799, 0.9749408983451536, 0.9617021276595744])\nU9_AE16_MNIST = np.array([0.9598108747044918, 0.9276595744680851, 0.91725768321513, 0.9059101654846335, 0.9182033096926714])\nSU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 
0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_AE8_MNIST = np.array([0.9598108747044918, 0.9465721040189126, 0.9527186761229315, 0.9730496453900709, 0.968321513002364])\nU9_PCA8_MNIST = np.array([0.9768321513002364, 0.9782505910165484, 0.9900709219858156, 0.9678486997635933, 0.9768321513002364])\nU9_PCA16_MNIST = np.array([0.9678486997635933, 0.9560283687943263, 0.9309692671394799, 0.9749408983451536, 0.9617021276595744])\nU9_AE16_MNIST = np.array([0.9598108747044918, 0.9276595744680851, 0.91725768321513, 0.9059101654846335, 0.9182033096926714])\nSU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_PCA8_MNIST = np.array([0.9768321513002364, 0.9782505910165484, 0.9900709219858156, 0.9678486997635933, 0.9768321513002364])\nU9_PCA16_MNIST = np.array([0.9678486997635933, 0.9560283687943263, 0.9309692671394799, 0.9749408983451536, 0.9617021276595744])\nU9_AE16_MNIST = np.array([0.9598108747044918, 0.9276595744680851, 0.91725768321513, 0.9059101654846335, 0.9182033096926714])\nSU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_PCA16_MNIST = 
np.array([0.9678486997635933, 0.9560283687943263, 0.9309692671394799, 0.9749408983451536, 0.9617021276595744])\nU9_AE16_MNIST = np.array([0.9598108747044918, 0.9276595744680851, 0.91725768321513, 0.9059101654846335, 0.9182033096926714])\nSU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_AE16_MNIST = np.array([0.9598108747044918, 0.9276595744680851, 0.91725768321513, 0.9059101654846335, 0.9182033096926714])\nSU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 
0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_resize256_MNIST = np.array([0.9867612293144208, 0.9801418439716312, 0.9536643026004729, 0.9825059101654846, 0.9843971631205674])\nSU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_AE8_MNIST = np.array([0.9692671394799054, 0.9895981087470449, 0.9607565011820332, 0.9867612293144208, 0.9408983451536643])\nSU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_PCA8_MNIST = np.array([0.9886524822695035, 0.984869976359338, 0.9881796690307328, 0.9806146572104019, 0.983451536643026])\nSU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 
0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_PCA16_MNIST = np.array([0.9763593380614657, 0.9886524822695035, 0.9806146572104019, 0.9801418439716312, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + 
"label": "SU4_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_AE16_MNIST = np.array([0.9739952718676123, 0.9546099290780142, 0.9673758865248226, 0.941371158392435, 0.9891252955082742])\nU9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_resize256_FASHION = np.array([0.909, 0.9165, 0.889, 0.874, 0.913])\nU9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 
0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_AE8_FASHION = np.array([0.9285, 0.8135, 0.8715, 0.8485, 0.929])\nU9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_PCA8_FASHION = np.array([0.8125, 0.847, 0.884, 0.8305, 0.8075])\nU9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 
0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_PCA16_FASHION = np.array([0.778, 0.9275, 0.8925, 0.912, 0.829])\nU9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "U9_AE16_FASHION = np.array([0.9135, 0.8945, 0.871, 0.9065, 0.8365])\nSU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = 
np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_resize256_FASHION = np.array([0.908, 0.9245, 0.9165, 0.9035, 0.916])\nSU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_AE8_FASHION = np.array([0.939, 0.9325, 0.9335, 0.9315, 0.939])\nSU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_PCA8_FASHION = np.array([0.9105, 0.8495, 0.8685, 0.8585, 0.8775])\nSU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + 
str(U9_AE16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_PCA16_FASHION = np.array([0.9025, 0.8995, 0.9165, 0.884, 0.8865])\nSU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "peekOfCode": "SU4_AE16_FASHION = np.array([0.919, 0.937, 0.9345, 0.9435, 0.911])\nprint(\"Result for MNIST dataset with Double Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + 
str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")\nprint(\"resize256: \" + str(SU4_resize256_MNIST.mean()) +\" +/- \"+ str(SU4_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_double_layer_result", + "documentation": {} + }, + { + "label": "U9_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_resize256_MNIST = np.array([0.924822695035461, 0.9011820330969267, 0.9200945626477541, 0.9739952718676123, 0.9716312056737588])\nU9_AE8_MNIST = np.array([0.9408983451536643, 0.9801418439716312, 0.9706855791962175, 0.9385342789598109, 0.9229314420803783])\nU9_PCA8_MNIST = np.array([0.9546099290780142, 0.9806146572104019, 0.9773049645390071, 0.9574468085106383, 0.9517730496453901])\nU9_PCA16_MNIST = np.array([0.984869976359338, 0.9796690307328605, 0.9806146572104019, 0.9815602836879432, 0.9569739952718677])\nU9_AE16_MNIST = np.array([0.9588652482269504, 0.9092198581560283, 0.9508274231678487, 0.8846335697399527, 0.9205673758865248])\nSU4_resize256_MNIST = np.array([0.9820330969267139, 0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 
0.9678486997635933, 0.992434988179669])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_AE8_MNIST = np.array([0.9408983451536643, 0.9801418439716312, 0.9706855791962175, 0.9385342789598109, 0.9229314420803783])\nU9_PCA8_MNIST = np.array([0.9546099290780142, 0.9806146572104019, 0.9773049645390071, 0.9574468085106383, 0.9517730496453901])\nU9_PCA16_MNIST = np.array([0.984869976359338, 0.9796690307328605, 0.9806146572104019, 0.9815602836879432, 0.9569739952718677])\nU9_AE16_MNIST = np.array([0.9588652482269504, 0.9092198581560283, 0.9508274231678487, 0.8846335697399527, 0.9205673758865248])\nSU4_resize256_MNIST = np.array([0.9820330969267139, 0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_PCA8_MNIST = np.array([0.9546099290780142, 0.9806146572104019, 0.9773049645390071, 0.9574468085106383, 0.9517730496453901])\nU9_PCA16_MNIST = np.array([0.984869976359338, 0.9796690307328605, 0.9806146572104019, 0.9815602836879432, 0.9569739952718677])\nU9_AE16_MNIST = np.array([0.9588652482269504, 0.9092198581560283, 0.9508274231678487, 0.8846335697399527, 0.9205673758865248])\nSU4_resize256_MNIST = np.array([0.9820330969267139, 0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_PCA16_MNIST = np.array([0.984869976359338, 0.9796690307328605, 0.9806146572104019, 0.9815602836879432, 0.9569739952718677])\nU9_AE16_MNIST = np.array([0.9588652482269504, 0.9092198581560283, 0.9508274231678487, 0.8846335697399527, 0.9205673758865248])\nSU4_resize256_MNIST = np.array([0.9820330969267139, 
0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_AE16_MNIST = np.array([0.9588652482269504, 0.9092198581560283, 0.9508274231678487, 0.8846335697399527, 0.9205673758865248])\nSU4_resize256_MNIST = np.array([0.9820330969267139, 0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = 
np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_resize256_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_resize256_MNIST = np.array([0.9820330969267139, 0.984869976359338, 0.9801418439716312, 0.9825059101654846, 0.9867612293144208])\nSU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_AE8_MNIST = np.array([0.9749408983451536, 0.9862884160756501, 0.9867612293144208, 0.9858156028368794, 0.9872340425531915])\nSU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_PCA8_MNIST = np.array([0.984869976359338, 0.9891252955082742, 0.9810874704491725, 0.9853427895981087, 0.9791962174940898])\nSU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 
0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_PCA16_MNIST = np.array([0.9858156028368794, 0.9801418439716312, 0.984869976359338, 0.984869976359338, 0.9853427895981087])\nSU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE16_MNIST", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_AE16_MNIST = np.array([0.9725768321513002, 0.9333333333333333, 
0.9744680851063829, 0.9678486997635933, 0.992434988179669])\nU9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_resize256_FASHION = np.array([0.9225, 0.918, 0.8975, 0.9225, 0.905])\nU9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_AE8_FASHION", + "kind": 5, + "importPath": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_AE8_FASHION = np.array([0.9105, 0.8555, 0.921, 0.942, 0.927])\nU9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_PCA8_FASHION = np.array([0.868, 0.8745, 0.85, 0.828, 0.8385])\nU9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_PCA16_FASHION = np.array([0.9115, 0.847, 0.909, 0.852, 0.8605])\nU9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "U9_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "U9_AE16_FASHION = np.array([0.9245, 0.894, 0.889, 0.888, 0.9065])\nSU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: 
\\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_resize256_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_resize256_FASHION = np.array([0.9145, 0.8915, 0.9165, 0.8985, 0.8945])\nSU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_AE8_FASHION = np.array([0.9205, 0.93, 0.941, 0.931, 0.9495])\nSU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 
0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA8_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_PCA8_FASHION = np.array([0.8995, 0.882, 0.875, 0.9055, 0.881])\nSU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_PCA16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_PCA16_FASHION = np.array([0.9155, 0.8885, 0.901, 0.879, 0.881])\nSU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "SU4_AE16_FASHION", + "kind": 5, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "peekOfCode": "SU4_AE16_FASHION = np.array([0.936, 0.933, 0.937, 0.9505, 0.95])\nprint(\"Result for MNIST dataset with Triple Layer QCNN structure\\n\")\nprint(\"Result with U_9: \\n\")\nprint(\"resize256: \" + str(U9_resize256_MNIST.mean()) +\" +/- \"+ str(U9_resize256_MNIST.std()))\nprint(\"PCA8: \" + str(U9_PCA8_MNIST.mean()) + \" +/- \" + str(U9_PCA8_MNIST.std()))\nprint(\"AE8: \" + str(U9_AE8_MNIST.mean()) + \" +/- \" + str(U9_AE8_MNIST.std()))\nprint(\"PCA16: \" + str(U9_PCA16_MNIST.mean()) + \" +/- \" + str(U9_PCA16_MNIST.std()))\nprint(\"AE16: \" + str(U9_AE16_MNIST.mean()) + \" +/- \" + str(U9_AE16_MNIST.std()))\nprint(\"Result with SU4: \\n\")\nprint(\"resize256: \" + str(SU4_resize256_MNIST.mean()) +\" +/- \"+ str(SU4_resize256_MNIST.std()))", + "detail": 
"EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.Result.Data.QCNN_triple_layer_result", + "documentation": {} + }, + { + "label": "square_loss", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "peekOfCode": "def square_loss(labels, predictions):\n loss = 0\n for l, p in zip(labels, predictions):\n loss = loss + (l - p) ** 2\n loss = loss / len(labels)\n return loss\ndef cross_entropy(labels, predictions):\n loss = 0\n for l, p in zip(labels, predictions):\n c_entropy = l * (anp.log(p[l])) + (1 - l) * anp.log(1 - p[1 - l])", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "documentation": {} + }, + { + "label": "cross_entropy", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "peekOfCode": "def cross_entropy(labels, predictions):\n loss = 0\n for l, p in zip(labels, predictions):\n c_entropy = l * (anp.log(p[l])) + (1 - l) * anp.log(1 - p[1 - l])\n loss = loss + c_entropy\n return -1 * loss\ndef cost(params, X, Y, U, U_params, embedding_type, circuit, cost_fn):\n if circuit == 'QCNN':\n predictions = [QCNN_circuit.QCNN(x, params, U, U_params, embedding_type, cost_fn=cost_fn) for x in X]\n if cost_fn == 'mse':", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "documentation": {} + }, + { + "label": "cost", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "peekOfCode": "def cost(params, X, Y, U, U_params, embedding_type, circuit, cost_fn):\n if circuit == 'QCNN':\n predictions = [QCNN_circuit.QCNN(x, params, U, U_params, embedding_type, cost_fn=cost_fn) for x in X]\n if cost_fn 
== 'mse':\n loss = square_loss(Y, predictions)\n elif cost_fn == 'cross_entropy':\n loss = cross_entropy(Y, predictions)\n return loss\ndef circuit_training(X_train, Y_train, U, U_params, embedding_type, layers, circuit, cost_fn, opt_type = \"Adam\", steps=50, initial_learning_rate=0.01, batch_size=128, decay_factor=0.5, decay_steps=10):\n if circuit == 'QCNN':", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "documentation": {} + }, + { + "label": "circuit_training", + "kind": 2, + "importPath": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "description": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "peekOfCode": "def circuit_training(X_train, Y_train, U, U_params, embedding_type, layers, circuit, cost_fn, opt_type = \"Adam\", steps=50, initial_learning_rate=0.01, batch_size=128, decay_factor=0.5, decay_steps=10):\n if circuit == 'QCNN':\n if U == 'U_SU4_no_pooling' or U == 'U_SU4_1D' or U == 'U_9_1D' or U == \"U2_equiv\":\n total_params = U_params * 6 \n elif U == \"U4_equiv\":\n total_params = 5+6+5+6+ U_params + 5 +3\n else:\n total_params = U_params * 3 + 2 * 3\n opt_classes = {\"Adam\": qml.AdamOptimizer(stepsize= initial_learning_rate), \n \"Nesterov\": qml.NesterovMomentumOptimizer(stepsize=initial_learning_rate) }", + "detail": "EQNN_for_HEP_Lazaro_Diaz_Lievano.Equivariant_QCNN.training.Training", + "documentation": {} + }, + { + "label": "run_model", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def run_model(n, n_dim, n_gen, n_com, eps, lr, epochs, oracle, include_sc, asp, plot_loss=False, new_loss = True):\n #####################################################################################\n # Initialize general set up\n # initialiaze data\n angle = np.pi/3 #0.001 # infinitesimal\n U = nn.Parameter(torch.randn(1))\n rotation = 
torch.tensor([[ np.cos(angle), np.sin(angle)],\n [-np.sin(angle), np.cos(angle)]])\n data = torch.tensor(np.random.randn(n,n_dim))\n # data_t = data", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "run_model_nonlinear", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def run_model_nonlinear(n, n_dim, n_gen, eps, lr, epochs, oracle):\n #####################################################################################\n # Initialize general set up\n # initialiaze data\n data = torch.tensor(np.random.randn(n,n_dim))\n # Lie Bracket or Commutator\n def bracket(A, B):\n return A @ B - B @ A\n # Define model\n class find_nonlinear_generators(nn.Module):", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "draw_sym_vectors", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def draw_sym_vectors(M, oracle):\n plt.figure(figsize=(4,3.25)) #, dpi=100)\n # Makes the background contour:\n x_grid, y_grid = np.meshgrid(np.linspace(-2,2,101), np.linspace(-2,2,101))\n grid_points = torch.tensor(np.stack([x_grid.flatten(), y_grid.flatten()], axis=1))\n oracle_vals = oracle(grid_points).detach().numpy().reshape(x_grid.shape) # detach() if oracle is neural net. 
If not, just remove it\n print(f\"Min oracle value before normalization: {oracle_vals.min()}\")\n print(f\"Max oracle value before normalization: {oracle_vals.max()}\")\n # oracle_vals = (oracle_vals - oracle_vals.min()) / (oracle_vals.max() - oracle_vals.min())\n # # Print the range of oracle values", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "draw_vectors_nonlinear", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def draw_vectors_nonlinear(model, oracle, eps):\n plt.figure(figsize=(4,3.25)) #, dpi=100)\n # Makes the background contour:\n x_grid, y_grid = np.meshgrid(np.linspace(-2,2,101), np.linspace(-2,2,101))\n grid_points = torch.tensor(np.stack([x_grid.flatten(), y_grid.flatten()], axis=1))\n oracle_vals = oracle(grid_points).numpy().reshape(x_grid.shape)\n plt.contourf(x_grid, y_grid, oracle_vals, 32, cmap='RdBu') #, norm = mpl.colors.CenteredNorm() )\n # now make the vector field:\n # This makes the points which are the tails of the vectors\n x_grid, y_grid = np.meshgrid(np.linspace(-2,2,20), np.linspace(-2,2,20))", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "visualize_generators", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def visualize_generators(figsize, n_dim, n_gen, eps, gens_pred, rows, cols):\n # Create labels for matrix rows and columns\n ticks_gen_im =[]\n ticks_gen_im_label = []\n for i in range(n_dim):\n ticks_gen_im.append(i)\n ticks_gen_im_label.append(str(i+1))\n if rows==1 and cols==1:\n fig = plt.subplots(rows,cols,figsize=figsize)\n GEN = gens_pred[0]", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + 
"label": "visualize_generator_axes", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def visualize_generator_axes(gens_pred):\n def draw_vec(ax, v, lw, color, label):\n # Draw a vector to ax, this adds lines for the projection\n # Draw vector (0,0,0) to (v0,v1,v2)\n ax.plot([0, v[0]], [0, v[1]], [0, v[2]], color=color, lw=lw, label=label)\n # Fix (x,y) and draw a line from z=0 to z=v[2] (z component of rot vec)\n # Draw vector (v0,v1,0) to (v0,v1,v2) == straight line up... etc.\n ax.plot([v[0],v[0]], [v[1],v[1]], [0,v[2]], color='b', alpha=.25, ls='--')\n # Fix (x,z) and draw a line from y=0 to y=v[1] (y component of rot vec)\n ax.plot([v[0],v[0]], [0 ,v[1]], [0,0 ], color='b', alpha=.25, ls='--')", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "visualize_structure_constants", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def visualize_structure_constants(figsize, n_gen, n_com, struc_pred):\n if n_gen==3:\n X = torch.tensor(struc_pred.numpy())\n struc_cyclic = X\n struc_cyclic[1] = -X[1]\n commutator_labels = []\n if n_com==3:\n # Make the commutations cyclic for 3 generators\n for i in range(n_gen):\n for j in range(n_gen):", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "verify_struc_constants", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def verify_struc_constants(n_gen, struc_pred, gens_pred):\n # Lie Bracket or Commutator\n def bracket(A, B):\n return A @ B - B @ A\n if n_gen==3:\n X = torch.tensor(struc_pred.numpy())\n struc_cyclic = X\n struc_cyclic[1] 
= -X[1]\n comm_index = 0\n Cs = []", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "verify_orthogonality", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "def verify_orthogonality(gens_pred):\n def get_angle(v, w):\n # Angle between vectors\n return np.arccos( float(v @ w / (torch.norm(v) * torch.norm(w)) ))\n def get_axis(M):\n # Finds the eigenvector with min(Imaginary(eigenvalue))\n # if the matrix is a rotation matrix or a generator of rotation,s then this vector is the axis of rotation \n eig_vals, eig_vecs = torch.linalg.eig(M)\n # find the minimum arg of the minimum imaginary component\n # pass that to the transposed eigenvector array to pull the eigenvector", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "plt.rcParams[\"font.family\"]", + "kind": 5, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "plt.rcParams[\"font.family\"] = 'sans-serif'\nnp.set_printoptions(formatter={'float_kind':'{:f}'.format})\n# Choose device\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\nprint(f\"Using {device} device\")\n#####################################################################################\ndef run_model(n, n_dim, n_gen, n_com, eps, lr, epochs, oracle, include_sc, asp, plot_loss=False, new_loss = True):\n #####################################################################################\n # Initialize general set up\n # initialiaze data", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "description": 
"Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "peekOfCode": "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\nprint(f\"Using {device} device\")\n#####################################################################################\ndef run_model(n, n_dim, n_gen, n_com, eps, lr, epochs, oracle, include_sc, asp, plot_loss=False, new_loss = True):\n #####################################################################################\n # Initialize general set up\n # initialiaze data\n angle = np.pi/3 #0.001 # infinitesimal\n U = nn.Parameter(torch.randn(1))\n rotation = torch.tensor([[ np.cos(angle), np.sin(angle)],", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.sym_utils", + "documentation": {} + }, + { + "label": "GradualWarmupScheduler", + "kind": 6, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "peekOfCode": "class GradualWarmupScheduler(_LRScheduler):\n \"\"\" Gradually warm-up(increasing) learning rate in optimizer.\n Proposed in 'Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour'.\n Args:\n optimizer (Optimizer): Wrapped optimizer.\n multiplier: target learning rate = base lr * multiplier if multiplier > 1.0. if multiplier = 1.0, lr starts from 0 and ends up with the base_lr.\n warmup_epoch: target learning rate is reached at warmup_epoch, gradually\n after_scheduler: after target_epoch, use this scheduler(eg. 
ReduceLROnPlateau)\n Reference:\n https://github.com/ildoonet/pytorch-gradual-warmup-lr", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "documentation": {} + }, + { + "label": "makedir", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "peekOfCode": "def makedir(path):\n try:\n os.makedirs(path)\n except OSError:\n pass\ndef args_init(args):\n r''' Initialize seed and exp_name.\n '''\n if args.seed is None: # use random seed if not specified\n args.seed = np.random.randint(100)", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "documentation": {} + }, + { + "label": "args_init", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "peekOfCode": "def args_init(args):\n r''' Initialize seed and exp_name.\n '''\n if args.seed is None: # use random seed if not specified\n args.seed = np.random.randint(100)\n if args.exp_name == '': # use random strings if not specified\n args.exp_name = ''.join(random.choices(string.ascii_lowercase + string.digits, k=8))\n if (args.local_rank == 0): # master\n print(args)\n makedir(f\"{args.logdir}/{args.exp_name}\")", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "documentation": {} + }, + { + "label": "sum_reduce", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "peekOfCode": "def sum_reduce(num, device):\n r''' Sum the tensor across the devices.\n '''\n if not torch.is_tensor(num):\n rt = torch.tensor(num).to(device)\n else:\n rt = num.clone()\n dist.all_reduce(rt, op=dist.ReduceOp.SUM)\n return rt\nfrom torch.optim.lr_scheduler import _LRScheduler", + "detail": 
"Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "documentation": {} + }, + { + "label": "buildROC", + "kind": 2, + "importPath": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "description": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "peekOfCode": "def buildROC(labels, score, targetEff=[0.3,0.5]):\n r''' ROC curve is a plot of the true positive rate (Sensitivity) in the function of the false positive rate\n (100-Specificity) for different cut-off points of a parameter. Each point on the ROC curve represents a\n sensitivity/specificity pair corresponding to a particular decision threshold. The Area Under the ROC\n curve (AUC) is a measure of how well a parameter can distinguish between two diagnostic groups.\n '''\n if not isinstance(targetEff, list):\n targetEff = [targetEff]\n fpr, tpr, threshold = roc_curve(labels, score)\n idx = [np.argmin(np.abs(tpr - Eff)) for Eff in targetEff]", + "detail": "Lie_EQGNN_for_HEP_Jogi_Suda_Neto.notebooks.utils_lorentz", + "documentation": {} + }, + { + "label": "AucCallback", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "peekOfCode": "class AucCallback(keras.callbacks.Callback):\n \"\"\"Computes and saves the auc for given test background and signal data for every epoch\"\"\"\n def __init__(self, model, x_test_bg, x_test_signal):\n \"\"\"Creates the callback object\n Args:\n model (keras model): model that will be trained\n x_test_bg (array): test data for the background events (events the model is trained on)\n x_test_signal (array): test data for the signal events (events the model should tag as anomalous)\"\"\"\n self.model = model\n self.x_test_bg = x_test_bg", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "documentation": {} + }, + { + "label": "Convolutional_Autoencoder_12x12", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + 
"description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "peekOfCode": "class Convolutional_Autoencoder_12x12(Model):\n \"\"\"Convolutional Autoencoder model specifically for comparasion with Quantum models with a small number of parameters\"\"\"\n def __init__(self, latent_dim):\n \"\"\"Create the model with given latenspace. Will have around 3000 parameters\n Args:\n latent_dim (int): number of latent neurons\"\"\"\n super(Convolutional_Autoencoder_12x12, self).__init__()\n self.encoder = tf.keras.Sequential([\n layers.Input(shape=(12, 12, 1)),\n layers.Conv2D(4, kernel_size=4, strides=1, activation='relu', padding='same'),", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "documentation": {} + }, + { + "label": "Convolutional_Autoencoder_Large", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "peekOfCode": "class Convolutional_Autoencoder_Large(Model):\n \"\"\"Larger conv ae model for 40x40 datset with a large amount of parameters.\n The model mostly follows:\n Finke, Thorben, et al.'Autoencoders for unsupervised anomaly detection\n in high energy physics.' 
Journal of High Energy Physics 2021.6 (2021): 1-32.\"\"\"\n def __init__(self, latent_dim):\n super(Convolutional_Autoencoder_Large, self).__init__()\n self.encoder = tf.keras.Sequential([\n layers.Input(shape=(40, 40, 1)),\n layers.Conv2D(10, kernel_size=4, strides=1, padding='same'),", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "documentation": {} + }, + { + "label": "Sampling", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "peekOfCode": "class Sampling(layers.Layer):\n \"\"\"Sampling layer for a varaiational autoencoder\"\"\"\n def call(self, inputs):\n z_mean, z_log_var = inputs\n batch_dim = tf.shape(z_mean)[0]\n latent_dim = tf.shape(z_mean)[1]\n epsilon = tf.keras.backend.random_normal(shape=(batch_dim, latent_dim))\n return z_mean + tf.exp(0.5 * z_log_var) * epsilon\nclass VAE(Model):\n \"\"\"Convolutional variational autoencoder\"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "documentation": {} + }, + { + "label": "VAE", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "peekOfCode": "class VAE(Model):\n \"\"\"Convolutional variational autoencoder\"\"\"\n def __init__(self, latent_dim, input_size, **kwargs):\n \"\"\"Create the vae with variable latent dimension and input image size\n Args:\n latent_dim (int): dimension of latent space\n input_size (int): width of input images (must be dividable by four)\"\"\"\n super(VAE, self).__init__(**kwargs)\n encoder_inputs = keras.Input(shape=(input_size, input_size, 1))\n x = layers.Conv2D(32, 3, activation=\"relu\", strides=2, padding=\"same\")(encoder_inputs)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.CAE", + "documentation": {} + }, + { + "label": "PCA_reduce", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": 
"Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def PCA_reduce(data, pca_components, val_data=None, test_data=None):\n \"\"\"Reduce data dimension using pca\n Args:\n data (array): input data\n pca_components (int): number of principle components to redce to\n val_data (array): if given will be reduced just like data\n test_data (array): if given will be reduced just like data\n Returns:\n Data with reduced dimension, if val_data / test_data given, tuple of data sets\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "TruncatedPCA_reduce", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def TruncatedPCA_reduce(data, pca_components, val_data=None, test_data=None):\n \"\"\"like Pca_reduce with truncated pca\n Args:\n data (array): input data\n pca_components (int): number of principle components to redce to\n val_data (array): if given will be reduced just like data\n test_data (array): if given will be reduced just like data\n Returns:\n Data with reduced dimension, if val_data / test_data given, tuple of data sets\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "tsne_reduce", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def tsne_reduce(data, pca_components, val_data=None, test_data=None):\n \"\"\"like Pca_reduce with tsne\n Args:\n data (array): input data\n pca_components (int): number of principle components to redce to\n val_data (array): if given will be reduced just like data\n test_data (array): if given will be reduced just like data\n Returns:\n Data with reduced dimension, if val_data / 
test_data given, tuple of data sets\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "lle_reduce", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def lle_reduce(data, pca_components, val_data=None, test_data=None):\n \"\"\"like Pca_reduce with lle\n Args:\n data (array): input data\n pca_components (int): number of principle components to redce to\n val_data (array): if given will be reduced just like data\n test_data (array): if given will be reduced just like data\n Returns:\n Data with reduced dimension, if val_data / test_data given, tuple of data sets\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "input_states", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def input_states(data, data_qbits, latent_qbits):\n \"\"\"Prepare input states for tfq training of the large QAE architecture\n Args:\n data (array): input data as 2d array\n data_qubits (int): number of data qubits\n latent_qubits (int): number of qubits for the latent space\n Returns:\n cirq circuits with prepared input states\n \"\"\"\n network_qbits = data_qbits + (data_qbits - latent_qbits)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "input_states_SQAE", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "peekOfCode": "def input_states_SQAE(data, data_qbits, latent_qbits):\n \"\"\"Prepare input states for tfq training of the SQAE\n Args:\n data (array): input data as 2d 
array\n data_qubits (int): number of data qubits\n latent_qubits (int): number of qubits for the latent space\n Returns:\n cirq circuits with prepared input states\n \"\"\"\n non_latent = data_qbits - latent_qbits", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.data_preprocessing", + "documentation": {} + }, + { + "label": "HybridAE", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.HAE", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.HAE", + "peekOfCode": "class HybridAE(Model):\n def __init__(self, latent_dim, input_dim, q_input_dim, DRCs, kernel_size, stride, device, diff_method=\"adjoint\"):\n super().__init__()\n tf.keras.backend.set_floatx('float64')\n self.latent_dim = latent_dim\n self.input_dim = input_dim\n self.q_input_dim = q_input_dim\n self.dev = device\n self.DRCs = DRCs\n self.kernel_size = kernel_size", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.HAE", + "documentation": {} + }, + { + "label": "baseSQAE", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "peekOfCode": "class baseSQAE:\n \"\"\"base class for an SQAE architecture with a simple encoding\n \"\"\"\n def __init__(self, data_qbits, latent_qbits, device, diff_method=\"best\"):\n \"\"\"Create basic SQAE\n Args:\n data_qbits (int): number of qbits to upload data and use as encoder\n latent_qbits (int): number of latent qbits\n device (pennylane device): pennylane device to use for circuit evaluation\n diff_method (str): method to differentiate quantum circuit, usually \"adjoint\" ist best.", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "documentation": {} + }, + { + "label": "ConvSQAE", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "peekOfCode": "class ConvSQAE(baseSQAE):\n \"\"\"SQAE 
with a convolutional like path encoding of the data.\n \"\"\"\n def __init__(self, data_qbits, latent_qbits, device, img_dim, kernel_size, stride, DRCs, diff_method=\"best\"):\n \"\"\"Create basic convolutinal like SQAE\n Args:\n data_qbits (int): number of qbits to upload data and use as encoder\n latent_qbits (int): number of latent qbits\n device (pennylane device): pennylane device to use for circuit evaluation\n img_dim (int): dimension of the images (width)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_pennylane", + "documentation": {} + }, + { + "label": "QAE_model", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "peekOfCode": "class QAE_model(Model):\n \"\"\"large QAE model with encoder and decoder\n \"\"\"\n def __init__(self, data_qbits, latent_qbits, layers):\n \"\"\"Create large QAE\n Args:\n data_qbits (int): number of qbits to upload data and use as encoder\n latent_qbits (int): number of latent qbits\n layers (int): number of layers to use for the pqc of encoder and decoder\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "documentation": {} + }, + { + "label": "SQAE_model", + "kind": 6, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "peekOfCode": "class SQAE_model(Model):\n \"\"\"SQAE model\n \"\"\"\n def __init__(self, data_qbits, latent_qbits, layers):\n \"\"\"Create SQAE\n Args:\n data_qbits (int): number of qbits to upload data and use as encoder\n latent_qbits (int): number of latent qbits\n layers (int): number of layers to use for the pqc of encoder and decoder\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.QAE_tfq", + "documentation": {} + }, + { + "label": "test_collapse", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": 
"Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def test_collapse(x_true, x_recon):\n \"\"\"Test for mode collapse. Ratio of the difference of reconstructed and true images. Bad sign if close to zero\n Args:\n x_true (array): True images\n x_recon (array): reconstructed image\n Returns:\n Value to check the for mode collapse, small values are worse\n \"\"\"\n p = np.random.permutation(x_true.shape[0])\n x_true_shuffle = x_true[p]", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "intensity_hist", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def intensity_hist(x_true, x_recon):\n \"\"\"Compute list of ratios of intensity reconstruction\n Args:\n x_true (array): True images\n x_recon (array): reconstructed image\n Returns:\n list of ratios of intensity reconstruction sorted by the intesity of the true image\n \"\"\"\n x_true = x_true.flatten()\n x_recon = x_recon.flatten()", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "eval_recon", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def eval_recon(x_test, x_recon, lognorm=False):\n \"\"\"Evaluate the reconstruction capabilities of an autoencoder\n Args:\n x_true (array): True images\n x_recon (array): reconstructed image\n lognorm (bool): use logarithmic norm for example images\n Returns:\n evaluation plots\n \"\"\"\n x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2],1)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "eval_tagging", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + 
"peekOfCode": "def eval_tagging(x_true_background, x_recon_background, x_true_signal, x_recon_signal):\n \"\"\"Evaluate the anomaly tagging capabilities of an autoencoder\n Args:\n x_true_background (array): True images of background events\n x_recon_background (array): reconstructed images of background events\n x_true_signal (array): True images of signal events\n x_recon_signal (array): reconstructed images of signal events\n Returns:\n Plots for tagging evaluation\n \"\"\"", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "iforest_latent_eval", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def iforest_latent_eval(background_latent, signal_latent):\n \"\"\"Evaluate the anomaly tagging capabilities by using an isolation forest on the latent space representations\n Args:\n background_latent (array): latent representation of bg events\n signal_latent (array): latent represnation of signal events\n Returns:\n Plots for tagging evaluation\n \"\"\"\n clf = IsolationForest(random_state=0).fit(background_latent)\n if_pred_bg = clf.decision_function(background_latent)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "img_to_event", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def img_to_event(img):\n \"\"\"Convert event image to event format for energyflow (list of non zero pixels with position and intensity as features)\n Args:\n img (array): single jet image\n Returns:\n jet event as 2d np array\n \"\"\"\n x_dim, y_dim = img.shape\n y_pos = np.indices((x_dim, y_dim))[0]\n x_pos = np.indices((x_dim, y_dim))[1]", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "img_emd", + 
"kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def img_emd(img1, img2, R=0.4):\n \"\"\"Compute EMD for two images\n Args:\n img1 (array): first event image\n img2 (array): second event image\n R (float): Radius for emd computation\n Returns:\n EMD\n \"\"\"\n return ef.emd.emd(img_to_event(img1.reshape((img1.shape[0],img1.shape[1]))), img_to_event(img2.reshape((img2.shape[0],img2.shape[1]))), R=R)", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "avg_emd", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def avg_emd(x_true, x_recon, R=0.4):\n \"\"\"Compute Average EMD for batch of images\n Args:\n x_true (array): batch of true event images\n x_recon (array): batch of reconstructed event images\n R (float): Radius for emd computation\n Returns:\n avg EMD\n \"\"\"\n return np.mean([img_emd(x,y) for x,y in zip(x_true, x_recon)])", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "imgs_to_events", + "kind": 2, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "peekOfCode": "def imgs_to_events(imgs):\n \"\"\"Convert batch of event images to event format for energyflow (list of non zero pixels with position and intensity as features)\n Args:\n img (array): batch of jet images\n Returns:\n batch of jet events as 3d np array\n \"\"\"\n x_dim, y_dim = imgs.shape[1], imgs.shape[2]\n y_pos = np.indices((x_dim, y_dim))[0]\n x_pos = np.indices((x_dim, y_dim))[1]", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.hep_VQAE.utils", + "documentation": {} + }, + { + "label": "CLASSIFIERS", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + 
"description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "CLASSIFIERS = '''\\\nLicense :: OSI Approved\nProgramming Language :: Python :: 3.9\nTopic :: Software Development\n'''\nDISTNAME = 'hep_VQAE'\nAUTHOR = 'Tom Magorsch'\nAUTHOR_EMAIL = 'tom.magorsch@tu-dortmund.de'\nDESCRIPTION = 'Quantum (Variational) Autoencoder for hep data analysis'\nLICENSE = 'MIT'", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "DISTNAME", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "DISTNAME = 'hep_VQAE'\nAUTHOR = 'Tom Magorsch'\nAUTHOR_EMAIL = 'tom.magorsch@tu-dortmund.de'\nDESCRIPTION = 'Quantum (Variational) Autoencoder for hep data analysis'\nLICENSE = 'MIT'\nREADME = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "AUTHOR", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "AUTHOR = 'Tom Magorsch'\nAUTHOR_EMAIL = 'tom.magorsch@tu-dortmund.de'\nDESCRIPTION = 'Quantum (Variational) Autoencoder for hep data analysis'\nLICENSE = 'MIT'\nREADME = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "AUTHOR_EMAIL", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "AUTHOR_EMAIL = 'tom.magorsch@tu-dortmund.de'\nDESCRIPTION = 'Quantum (Variational) Autoencoder for hep data analysis'\nLICENSE = 
'MIT'\nREADME = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "DESCRIPTION", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "DESCRIPTION = 'Quantum (Variational) Autoencoder for hep data analysis'\nLICENSE = 'MIT'\nREADME = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "LICENSE", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "LICENSE = 'MIT'\nREADME = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "README", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "README = 'Quantum (Variational) Autoencoder for hep data analysis'\nVERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "VERSION", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": 
"Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "VERSION = '0.1.0'\nISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,\n version=VERSION,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "ISRELEASED", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "ISRELEASED = False\nPYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,\n version=VERSION,\n long_description=README,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "PYTHON_MIN_VERSION", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "PYTHON_MIN_VERSION = '3.9'\nPYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,\n version=VERSION,\n long_description=README,\n packages=PACKAGES,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "PYTHON_REQUIRES", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "PYTHON_REQUIRES = f'>={PYTHON_MIN_VERSION}'\nPACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,\n version=VERSION,\n long_description=README,\n packages=PACKAGES,\n python_requires=PYTHON_REQUIRES,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "PACKAGES", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "PACKAGES = [\n 'hep_VQAE',\n]\nmetadata = dict(\n name=DISTNAME,\n version=VERSION,\n 
long_description=README,\n packages=PACKAGES,\n python_requires=PYTHON_REQUIRES,\n author=AUTHOR,", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "metadata", + "kind": 5, + "importPath": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "description": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "peekOfCode": "metadata = dict(\n name=DISTNAME,\n version=VERSION,\n long_description=README,\n packages=PACKAGES,\n python_requires=PYTHON_REQUIRES,\n author=AUTHOR,\n author_email=AUTHOR_EMAIL,\n description=DESCRIPTION,\n classifiers=[CLASSIFIERS],", + "detail": "Quantum_AE_for_HEP_Tom_Magorsch.setup", + "documentation": {} + }, + { + "label": "one_qubit_rotation", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "description": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "peekOfCode": "def one_qubit_rotation(qubit, symbols):\n \"\"\"\n Returns Cirq gates that apply a rotation of the bloch sphere about the X,\n Y and Z axis, specified by the values in `symbols`.\n \"\"\"\n return [cirq.rx(symbols[0])(qubit),\n cirq.ry(symbols[1])(qubit),\n cirq.rz(symbols[2])(qubit)]\ndef entangling_layer(qubits):\n \"\"\"", + "detail": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "documentation": {} + }, + { + "label": "entangling_layer", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "description": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "peekOfCode": "def entangling_layer(qubits):\n \"\"\"\n Returns a layer of CZ entangling gates on `qubits` (arranged in a circular topology).\n \"\"\"\n cz_ops = [cirq.CZ(q0, q1) for q0, q1 in zip(qubits, qubits[1:])]\n cz_ops += ([cirq.CZ(qubits[0], qubits[-1])] if len(qubits) != 2 else [])\n return cz_ops\ndef generate_circuit(qubits, n_layers, input_size, use_entanglement=True, 
use_terminal_entanglement=True):\n \"\"\"Prepares a data re-uploading circuit on `qubits` with `n_layers` layers.\"\"\"\n # Number of qubits", + "detail": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "documentation": {} + }, + { + "label": "generate_circuit", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "description": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "peekOfCode": "def generate_circuit(qubits, n_layers, input_size, use_entanglement=True, use_terminal_entanglement=True):\n \"\"\"Prepares a data re-uploading circuit on `qubits` with `n_layers` layers.\"\"\"\n # Number of qubits\n n_qubits = len(qubits)\n # Number of padding\n # Zero-pad the inputs and params if it is not a multiple of 3\n padding = (3 - (input_size % 3)) % 3\n # Sympy symbols for weights and bias parameters\n params = sympy.symbols(f'theta(0:{(input_size + padding) * n_layers * n_qubits})')\n params = np.asarray(params).reshape((n_layers, n_qubits, (input_size + padding)))", + "detail": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.circuit_constructor", + "documentation": {} + }, + { + "label": "ReUploadingPQC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.data_reuploading", + "description": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.data_reuploading", + "peekOfCode": "class ReUploadingPQC(tf.keras.layers.Layer):\n def __init__(self, n_qubits, n_layers, input_size, use_entanglement=True, use_terminal_entanglement=True,\n observables=None, name=\"re-uploading_PQC\"):\n super(ReUploadingPQC, self).__init__(name=name)\n self.n_layers = n_layers\n self.n_qubits = n_qubits\n self.input_size = input_size\n self.use_entanglement = use_entanglement\n self.use_terminal_entanglement = use_terminal_entanglement\n self.main_name = name", + "detail": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.data_reuploading", + 
"documentation": {} + }, + { + "label": "QConv2D_DRC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.quantum_convolution", + "description": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.quantum_convolution", + "peekOfCode": "class QConv2D_DRC:\n def __init__(self, filters, kernel_size, strides, drc_hyperparameters, layer_id, padding=None):\n # Parameter initialization\n self.filters = filters\n self.layer_id = layer_id\n self.padding = padding\n self.n_qubits = drc_hyperparameters[\"n_qubits\"]\n self.n_layers = drc_hyperparameters[\"n_layers\"]\n if any(np.array(self.n_qubits) != 1):\n self.use_ent = drc_hyperparameters[\"use_ent\"]", + "detail": "Quantum_CNN_for_HEP_Eraraya_Ricardo_Muten.qcnn_drc.quantum_convolution", + "documentation": {} + }, + { + "label": "Chen", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.chen", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.chen", + "peekOfCode": "class Chen:\n \"\"\"\n\tAnsatz based on\n\tS. Y. C. Chen, T. C. Wei, C.Zhang, H. Yu and S. Yoo, \n\tQuantum convolutional neural networks for high energy \n\tphysics data analysis, Phys. Rev. Res. \\textbf{4} (2022) no.1, 013231\n\tdoi:10.1103/PhysRevResearch.4.013231 \n\t\"\"\"\n def __init__(self) -> None:\n super().__init__()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.chen", + "documentation": {} + }, + { + "label": "Cong", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "peekOfCode": "class Cong:\n \"\"\"\n Ansatz based on\n Cong, I., Choi, S. & Lukin, M.D. Quantum convolutional neural networks. \n Nat. Phys. 15, 1273–1278 (2019). 
https://doi.org/10.1038/s41567-019-0648-8\n \"\"\"\n def __init__(self) -> None:\n super().__init__()\n def _quantum_conv_circuit(self, bits, symbols):\n \"\"\"", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "documentation": {} + }, + { + "label": "NUM_CONV_SYMBOLS", + "kind": 5, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "peekOfCode": "NUM_CONV_SYMBOLS = 15\nNUM_POOL_SYMBOLS = 6\nclass Cong:\n \"\"\"\n Ansatz based on\n Cong, I., Choi, S. & Lukin, M.D. Quantum convolutional neural networks. \n Nat. Phys. 15, 1273–1278 (2019). https://doi.org/10.1038/s41567-019-0648-8\n \"\"\"\n def __init__(self) -> None:\n super().__init__()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "documentation": {} + }, + { + "label": "NUM_POOL_SYMBOLS", + "kind": 5, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "peekOfCode": "NUM_POOL_SYMBOLS = 6\nclass Cong:\n \"\"\"\n Ansatz based on\n Cong, I., Choi, S. & Lukin, M.D. Quantum convolutional neural networks. \n Nat. Phys. 15, 1273–1278 (2019). https://doi.org/10.1038/s41567-019-0648-8\n \"\"\"\n def __init__(self) -> None:\n super().__init__()\n def _quantum_conv_circuit(self, bits, symbols):", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.cong", + "documentation": {} + }, + { + "label": "Farhi", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.farhi", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.farhi", + "peekOfCode": "class Farhi:\n \"\"\"\n Ansatz based on\n Farhi, Edward and Hartmut Neven. \n “Classification with Quantum Neural Networks on Near Term Processors.” \n arXiv: Quantum Physics (2018): n. 
pag.\n \"\"\"\n def __init__(self) -> None:\n super().__init__()\n def _generate_gates(self):", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.farhi", + "documentation": {} + }, + { + "label": "NQubit", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.n_qubit", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.n_qubit", + "peekOfCode": "class NQubit:\n def __init__(self) -> None:\n super().__init__()\n def __single_qubit_rot(self, qubit, symbols, sparse):\n if sparse:\n return [\n cirq.Z(qubit)**symbols[0],\n cirq.Y(qubit)**symbols[1],\n cirq.Z(qubit)**symbols[2]\n ]", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.n_qubit", + "documentation": {} + }, + { + "label": "TTN", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.ttn", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.ttn", + "peekOfCode": "class TTN:\n \"\"\"\n\tAnsatz based on\n\t\"\"\"\n def __init__(self) -> None:\n super().__init__()\n def _block(self, qubits, symbols):\n assert len(qubits) == 2\n assert len(qubits) == len(symbols)\n return cirq.Circuit(", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.ttn", + "documentation": {} + }, + { + "label": "one_qubit_unitary", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def one_qubit_unitary(qubit, symbols):\n \"\"\"\n\t\tMake a Cirq circuit enacting a rotation of the bloch sphere about the X,\n\t\tY and Z axis, that depends on the values in `symbols`.\n\t\tArgs:\n\t\t\tqubit: The qubit to apply the unitary to.\n\t\t\tsymbols: a list of 3 symbols, each of which is either 0 or 1.\n\t\tReturns:\n\t\t\tA circuit with a single qubit and three gates.\n\t\t\"\"\"", + "detail": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "cz_entangling_circuit", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def cz_entangling_circuit(qubits):\n \"\"\"\n\t\tReturns a layer of CZ entangling gates on `qubits` (arranged in a circular topology).\n\t\tArgs:\n\t\t\tqubits: The qubits to entangle.\n\t\tReturns:\n\t\t\tA list of CZ gates.\n\t\t\"\"\"\n if len(qubits) == 1:\n return []", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "cnot_entangling_circuit", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def cnot_entangling_circuit(qubits):\n \"\"\"\n\t\tReturns a layer of CNOT entangling gates on `qubits` (arranged in a circular topology).\n\t\tArgs:\n\t\t\tqubits: The qubits to entangle.\n\t\tReturns:\n\t\t\tA list of CNOT gates.\n\t\t\"\"\"\n if len(qubits) == 1:\n return []", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "cluster_state_circuit", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def cluster_state_circuit(qubits):\n \"\"\"\n\t\tReturn a cluster state on the qubits in `qubits`\n\t\tArgs:\n\t\t\t\tqubits: The qubits to use in the circuit.\n\t\tReturns:\n\t\t\t\tA circuit that creates a cluster state.\n\t\t\"\"\"\n ops = [cirq.H(q) for q in qubits]\n if len(qubits) == 1:", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": 
{} + }, + { + "label": "two_qubit_unitary", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def two_qubit_unitary(qubits, symbols):\n \"\"\"\n\t\tMake a Cirq circuit that creates an arbitrary two qubit unitary.\n\t\tArgs:\n\t\t\tqubits: a list of two qubits\n\t\t\tsymbols: a list of 15 symbols, each of which is a float between 0 and 2pi.\n\t\tReturns:\n\t\t\tA circuit with a two qubit unitary.\n\t\t\"\"\"\n circuit = cirq.Circuit()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "two_qubit_pool", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "peekOfCode": "def two_qubit_pool(source_qubit, sink_qubit, symbols):\n \"\"\"\n\t\tMake a Cirq circuit to do a parameterized 'pooling' operation, which\n\t\tattempts to reduce entanglement down from two qubits to just one.\n\t\tArgs:\n\t\t\t\tsource_qubit: the qubit that is being measured\n\t\t\t\tsink_qubit: the qubit that will be measured\n\t\t\t\tsymbols: a list of 6 symbols, each of which is either 'X', 'Y', or 'Z'.\n\t\tReturns:\n\t\t\t\tA circuit that performs a two-qubit pooling operation.", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.ansatzes.utils", + "documentation": {} + }, + { + "label": "BaseDataModule", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "peekOfCode": "class BaseDataModule():\n \"\"\"\n The BaseDataModule class is a base class for all the datasets. 
It contains the basic functions that\n are common to all the datasets\n \"\"\"\n def __init__(self, args=None) -> None:\n self.args = vars(args) if args is not None else {}\n # Set the data directories\n self.data_dir = self.data_dirname() / \"downloaded\"\n self.processed_data_dir = self.data_dirname() / \"processed\"", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "TqdmUpTo", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "peekOfCode": "class TqdmUpTo(tqdm):\n \"\"\"From https://github.com/tqdm/tqdm/blob/master/examples/tqdm_wget.py\"\"\"\n def update_to(self, blocks=1, bsize=1, tsize=None):\n \"\"\"\n Parametersy_train\n ----------\n blocks: int, optional\n Number of blocks transferred so far [default: 1].\n bsize: int, optional\n Size of each block (in tqdm units) [default: 1].", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "get_stats", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "peekOfCode": "def get_stats(ds, mapping):\n ds = ds.unbatch()\n ds = ds.as_numpy_iterator()\n ds = [element for element in ds]\n x = np.array([element[0] for element in ds])\n y = np.array([element[1] for element in ds])\n x_size = x.shape\n y_size = y.shape\n max = x.max()\n min = x.min()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.base_data_module", + "documentation": {} + }, + { + "label": "ELECTRON_PHOTON_SMALL_DATASET_URL", + "kind": 5, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.constants", + "description": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.constants", + "peekOfCode": "ELECTRON_PHOTON_SMALL_DATASET_URL = 'https://github.com/ML4SCI/ML4SCI_GSoC/raw/main/QMLHEP/qcnn/electron-photon.npz'", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.constants", + "documentation": {} + }, + { + "label": "ElectronPhoton", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.electron_photon", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.electron_photon", + "peekOfCode": "class ElectronPhoton(BaseDataModule):\n \"\"\"\n Electron Photon Data module\n \"\"\"\n def __init__(self, args=None) -> None:\n super().__init__(args)\n self.dims = (32, 32, 1)\n self.output_dims = (1, )\n self.mapping = list(range(2))\n self.classes = ['Photon', 'Electron']", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.electron_photon", + "documentation": {} + }, + { + "label": "MNIST", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.mnist", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.mnist", + "peekOfCode": "class MNIST(BaseDataModule):\n \"\"\"\n MNIST Data module\n \"\"\"\n def __init__(self, args=None) -> None:\n super().__init__(args)\n self.classes = list(range(10))\n self.dims = (28, 28, 1)\n self.output_dims = (1, )\n self.mapping = list(range(10))", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.mnist", + "documentation": {} + }, + { + "label": "DataPreprocessor", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "peekOfCode": "class DataPreprocessor():\n \"\"\"\n Data Preprocessing Module\n \"\"\"\n def __init__(self, args=None) -> None:\n # Load the data and arguments\n self.args = args if args is not None else {}\n # Parse args\n 
self._labels_to_categorical = self.args.get(\"labels_to_categorical\",\n False)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "documentation": {} + }, + { + "label": "binary_filter", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "peekOfCode": "def binary_filter(d1, d2, x, y):\n \"\"\"\n It takes a dataset and two labels, and returns a dataset with only those two labels\n Args:\n d1: the first digit to filter for\n d2: the second digit to keep\n x: the data\n y: the labels\n Returns:\n the x and y values that are either d1 or d2.", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.preprocessor", + "documentation": {} + }, + { + "label": "QuarkGluon", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.quark_gluon", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.quark_gluon", + "peekOfCode": "class QuarkGluon(BaseDataModule):\n def __init__(self, args=None) -> None:\n super().__init__(args)\n self.dims = (40, 40, 1)\n self.output_dims = (1, )\n self.mapping = list(range(2))\n self.classes = ['Quark', 'Gluon']\n # Parse args\n self.args['is_binary_data'] = True\n self.filename = self.data_dir / f\"quark_gluon_{self.dataset_type}.npz\"", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.quark_gluon", + "documentation": {} + }, + { + "label": "extract_samples", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "peekOfCode": "def extract_samples(x, y, mapping, percent):\n samples_per_class = int((len(y) / len(mapping)) * percent)\n keep = []\n for i in mapping:\n keep += list(np.where(y == i)[0][:samples_per_class])\n x, y = x[keep], y[keep]\n return x, 
y\ndef create_tf_ds(x, y, batch_size):\n AUTOTUNE = tf.data.AUTOTUNE\n ds = tf.data.Dataset.from_tensor_slices((x, y))", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "create_tf_ds", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "peekOfCode": "def create_tf_ds(x, y, batch_size):\n AUTOTUNE = tf.data.AUTOTUNE\n ds = tf.data.Dataset.from_tensor_slices((x, y))\n ds = ds.shuffle(100)\n ds = ds.batch(batch_size)\n ds = ds.prefetch(AUTOTUNE)\n return ds\ndef tf_ds_to_numpy(ds):\n ds = ds.unbatch()\n ds = ds.as_numpy_iterator()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "tf_ds_to_numpy", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "peekOfCode": "def tf_ds_to_numpy(ds):\n ds = ds.unbatch()\n ds = ds.as_numpy_iterator()\n ds = [element for element in ds]\n x = np.array([x for x, _ in ds])\n y = np.array([y for _, y in ds])\n return x, y", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.data.utils", + "documentation": {} + }, + { + "label": "AmplitudeMap", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.amplitude", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.amplitude", + "peekOfCode": "class AmplitudeMap:\n def __init__(self):\n super().__init__()\n warnings.warn(\n \"AmplitudeMap currently does not normalize the input unless padding is needed.\\nUser must manually normalize the input.\"\n )\n def _beta(self, s, j, x):\n index_num = (2 * j - 1) * (2**(s - 1))\n index_den = (j - 1) * (2**s)\n num_start = index_num", + "detail": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.amplitude", + "documentation": {} + }, + { + "label": "AngleMap", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.angle", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.angle", + "peekOfCode": "class AngleMap:\n def __init__(self, gate='rx'):\n valid_gates = ['rx', 'ry', 'rz']\n if gate not in valid_gates:\n raise ValueError('gate must be one of rx, ry, rz')\n self.gate = _import_class(\"cirq.{}\".format(gate))\n def build(self, qubits, symbols):\n num_in_symbols = len(symbols)\n symbols = np.asarray(symbols).reshape((num_in_symbols))\n e_ops = [", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.angle", + "documentation": {} + }, + { + "label": "BasisMap", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.basis", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.basis", + "peekOfCode": "class BasisMap:\n def __init__(self):\n super().__init__()\n def build(self, qubits, symbols):\n num_in_symbols = len(symbols)\n symbols = np.asarray(symbols).reshape((num_in_symbols))\n e_ops = [cirq.H(q) for q in qubits]\n e_ops += [\n cirq.Z(q)**(sp.GreaterThan(symbols[i], 0))\n for i, q in enumerate(qubits)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.basis", + "documentation": {} + }, + { + "label": "DoubleAngleMap", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.double_angle", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.double_angle", + "peekOfCode": "class DoubleAngleMap:\n def __init__(self, activation='atan'):\n super().__init__()\n self.activation = getattr(sp, activation)\n def build(self, qubits, symbols):\n num_in_symbols = len(symbols)\n symbols = np.asarray(symbols).reshape((num_in_symbols))\n e_ops = [\n cirq.ry(sp.pi * 
self.activation(symbols[i]))(bit)\n for i, bit in enumerate(qubits)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.encodings.double_angle", + "documentation": {} + }, + { + "label": "NQubitPQC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.n_qubit_pqc", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.n_qubit_pqc", + "peekOfCode": "class NQubitPQC(Layer):\n def __init__(self,\n n_qubits,\n cluster_state=False,\n observable=None,\n n_layers=1,\n sparse=False,\n name='NQubitPQC'):\n super(NQubitPQC, self).__init__(name=name)\n self.n_layers = n_layers", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.n_qubit_pqc", + "documentation": {} + }, + { + "label": "QConv2D", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.qconv2d", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.qconv2d", + "peekOfCode": "class QConv2D(Layer):\n \"\"\"\n 2D Quantum convolution layer (e.g. spatial convolution over images).\n This layer creates a convolution kernel that is convolved \n with the layer input to produce a tensor of outputs. 
Finally,\n `activation` is applied to the outputs as well.\n \"\"\"\n def __init__(\n self,\n filters=1,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.qconv2d", + "documentation": {} + }, + { + "label": "TwoLayerPQC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.two_layer_pqc", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.two_layer_pqc", + "peekOfCode": "class TwoLayerPQC(Layer):\n def __init__(self,\n n_qubits,\n n_inputs,\n feature_map,\n ansatz,\n cluster_state=False,\n observable=None,\n n_layers=1,\n drc=False,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.two_layer_pqc", + "documentation": {} + }, + { + "label": "get_count_of_qubits", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def get_count_of_qubits(feature_map, input_dim):\n if feature_map == 'AmplitudeMap':\n return int(np.ceil(np.log2(np.prod(input_dim))))\n return np.prod(input_dim)\ndef get_num_in_symbols(feature_map, input_dim):\n if feature_map == 'AmplitudeMap':\n return 2**int(np.ceil(np.log2(np.prod(input_dim))))\n return np.prod(input_dim)\ndef normalize_tuple(value, name):\n error_msg = (f'The `{name}` argument must be a tuple of 2'", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "get_num_in_symbols", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def get_num_in_symbols(feature_map, input_dim):\n if feature_map == 'AmplitudeMap':\n return 2**int(np.ceil(np.log2(np.prod(input_dim))))\n return np.prod(input_dim)\ndef normalize_tuple(value, name):\n error_msg = (f'The `{name}` argument must be 
a tuple of 2'\n f'integers. Received: {value}')\n if isinstance(value, int):\n value_tuple = (value, ) * 2\n else:", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "normalize_tuple", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def normalize_tuple(value, name):\n error_msg = (f'The `{name}` argument must be a tuple of 2'\n f'integers. Received: {value}')\n if isinstance(value, int):\n value_tuple = (value, ) * 2\n else:\n try:\n value_tuple = tuple(value)\n except TypeError:\n raise ValueError(error_msg)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "normalize_padding", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def normalize_padding(value):\n if isinstance(value, (list, tuple)):\n return value\n padding = value.lower()\n if padding not in {'valid', 'same'}:\n raise ValueError(\n 'The `padding` argument must be a list/tuple or one of '\n '\"valid\", \"same\" '\n f'Received: {padding}')\n return padding", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "convolution_iters", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def convolution_iters(input_shape, kernel_size, strides, padding):\n # Calculate iterations\n input_shape = np.array(input_shape)\n kernel_size = np.array(kernel_size)\n strides = np.array(strides)\n iters = (input_shape - kernel_size) / strides + 1\n if padding == 'valid':\n 
return (int(iters[0]), int(iters[1])), constant([[0, 0], [0, 0],\n [0, 0], [0, 0]])\n elif padding == 'same':", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "stack", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def stack(func, lambda_set, intermediate=None):\n if intermediate is None:\n return stack(func, lambda_set[1:], lambda_set[0])\n if len(lambda_set) > 0:\n new_lambda = lambda x: func(intermediate(x), lambda_set[0](x))\n return stack(func, lambda_set[1:], new_lambda)\n else:\n return intermediate\ndef resolve_formulas(formulas, symbols):\n lambda_set = [resolve_formula(f, symbols) for f in formulas]", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "resolve_formulas", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def resolve_formulas(formulas, symbols):\n lambda_set = [resolve_formula(f, symbols) for f in formulas]\n stacked_ops = stack(lambda x, y: tf.concat((x, y), 0), lambda_set)\n n_formula = tf.constant([len(formulas)])\n transposed_x = lambda x: tf.transpose(\n x, perm=tf.roll(tf.range(tf.rank(x)), shift=1, axis=0))\n resolved_x = lambda x: stacked_ops(transposed_x(x))\n reshaped_x = lambda x: tf.reshape(\n resolved_x(x),\n tf.concat(", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "resolve_value", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def resolve_value(val):\n if 
isinstance(val, numbers.Number):\n return tf.constant(float(val), dtype=tf.float32)\n elif isinstance(val, (\n sympy_numbers.IntegerConstant,\n sympy_numbers.Integer,\n )):\n return tf.constant(float(val.p), dtype=tf.float32)\n elif isinstance(val,\n (sympy_numbers.RationalConstant, sympy_numbers.Rational)):", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "resolve_formula", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def resolve_formula(formula, symbols):\n # Input is a pass through type, no resolution needed: return early\n value = resolve_value(formula)\n if value is not NotImplemented:\n return lambda x: value\n # Handles 2 cases:\n # formula is a string and maps to a number in the dictionary\n # formula is a symbol and maps to a number in the dictionary\n # in both cases, return it directly.\n if formula in symbols:", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "natural_key", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def natural_key(symbol):\n '''Keys for human sorting\n Reference:\n http://nedbatchelder.com/blog/200712/human_sorting.html\n '''\n return [atoi(s) for s in re.split(r'(\\d+)', symbol.name)]\ndef symbols_in_expr_map(expr_map, to_str=False, sort_key=natural_key):\n \"\"\"Returns the set of symbols in an expression map\n Arguments:\n expr_map: cirq.ExpressionMap", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "symbols_in_expr_map", + "kind": 2, + "importPath": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def symbols_in_expr_map(expr_map, to_str=False, sort_key=natural_key):\n \"\"\"Returns the set of symbols in an expression map\n Arguments:\n expr_map: cirq.ExpressionMap\n The expression map to find the set of symbols in\n to_str: boolean, default=False\n Whether to convert symbol to strings\n sort_key: \n Sort key for the list of symbols\n Returns:", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "symbols_in_op", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def symbols_in_op(op):\n \"\"\"Returns the set of symbols associated with a parameterized gate operation.\n Arguments:\n op: cirq.Gate\n The parameterised gate operation to find the set of symbols associated with\n Returns:\n Set of symbols associated with the parameterized gate operation\n \"\"\"\n if isinstance(op, cirq.EigenGate):\n return op.exponent.free_symbols", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "atoi", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "def atoi(symbol):\n return int(symbol) if symbol.isdigit() else symbol", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "tf_ops_map", + "kind": 5, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "peekOfCode": "tf_ops_map = {\n sp.sin: 
sin,\n sp.cos: cos,\n sp.tan: tan,\n sp.asin: asin,\n sp.acos: acos,\n sp.atan: atan,\n sp.tanh: tanh,\n sp.sinh: sinh,\n sp.cosh: cosh,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.layers.utils", + "documentation": {} + }, + { + "label": "BottleneckResidual", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.bottleneck", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.bottleneck", + "peekOfCode": "class BottleneckResidual(Layer):\n \"\"\"\n Bottleneck Residual Layer for Resent model\n \"\"\"\n def __init__(self,\n num_filters=16,\n kernel_size=3,\n strides=1,\n activation='relu',\n batch_normalization=True,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.bottleneck", + "documentation": {} + }, + { + "label": "Resnet50", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.resnet50", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.resnet50", + "peekOfCode": "class Resnet50(BaseModel):\n def __init__(self, data_config, args=None):\n super(Resnet50, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Data config\n self.input_dim = data_config[\"input_dims\"]\n self.num_classes = len(data_config[\"mapping\"])\n self.base_model = ResNet50(include_top=False,\n weights='imagenet',\n input_shape=(self.input_dim))", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.resnet50", + "documentation": {} + }, + { + "label": "ResnetV1", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v1", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v1", + "peekOfCode": "class ResnetV1(BaseModel):\n \"\"\"\n Resent v1 model. 
Paper: https://arxiv.org/abs/1512.03385\n This implementation is based on https://www.geeksforgeeks.org/residual-networks-resnet-deep-learning/\n \"\"\"\n def __init__(self, data_config, args=None):\n super(ResnetV1, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Model configuration\n self.depth = self.args.get(\"resnet_depth\", 20)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v1", + "documentation": {} + }, + { + "label": "ResnetV2", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v2", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v2", + "peekOfCode": "class ResnetV2(BaseModel):\n \"\"\"\n Resent v2 model. Paper: https://arxiv.org/abs/1603.05027\n This implementation is based on https://www.geeksforgeeks.org/residual-networks-resnet-deep-learning/\n \"\"\"\n def __init__(self, data_config, args=None):\n super(ResnetV2, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Model configuration\n self.depth = self.args.get(\"resnet_depth\", 56)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.resnet.v2", + "documentation": {} + }, + { + "label": "CNN", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.cnn", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.cnn", + "peekOfCode": "class CNN(BaseModel):\n def __init__(self, data_config, args=None):\n super(CNN, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Model Configuration\n self.num_conv_layers = self.args.get('num_conv_layers', 1)\n self.conv_dims = self.args.get('conv_dims', [64])\n assert len(\n self.conv_dims\n ) == self.num_conv_layers, 'conv_dims must be a list of length num_conv_layers'", + "detail": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.cnn", + "documentation": {} + }, + { + "label": "MLP", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.mlp", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.mlp", + "peekOfCode": "class MLP(BaseModel):\n def __init__(self, data_config, args=None):\n super(MLP, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Moel Configuration\n self.num_fc_layers = self.args.get('num_fc_layers', 1)\n self.fc_dims = self.args.get('fc_dims', [128])\n assert len(\n self.fc_dims\n ) == self.num_fc_layers, 'fc_dims must be a list of length num_fc_layers'", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.classical.mlp", + "documentation": {} + }, + { + "label": "FQCNN", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.fqcnn", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.fqcnn", + "peekOfCode": "class FQCNN(QCNN):\n \"\"\"\n\tGeneral Quantum Convolutional Neural Network\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(FQCNN, self).__init__(data_config, args)\n self.args = vars(args) if args is not None else {}\n input_shape = [None] + list(self.input_dim)\n self.num_qconv_layers = self.args.get('num_qconv_layers', 1)\n self.qconv_dims = self.args.get('qconv_dims', [1])", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.fqcnn", + "documentation": {} + }, + { + "label": "QCNN", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn", + "peekOfCode": "class QCNN(BaseModel):\n \"\"\"\n\tGeneral Quantum Convolutional Neural Network\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(QCNN, 
self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Data config\n self.input_dim = data_config[\"input_dims\"]\n self.cluster_state = self.args.get(\"cluster_state\", False)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn", + "documentation": {} + }, + { + "label": "QCNNChen", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_chen", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_chen", + "peekOfCode": "class QCNNChen(BaseModel):\n \"\"\"\n\tQuantum Convolutional Neural Network.\n\tThis implementation is based on https://arxiv.org/abs/2012.12177\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(QCNNChen, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n self.fm_class = \"DoubleAngleMap\"\n self.ansatz_class = \"Chen\"", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_chen", + "documentation": {} + }, + { + "label": "QCNNCong", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_cong", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_cong", + "peekOfCode": "class QCNNCong(BaseModel):\n \"\"\"\n\tQuantum Convolutional Neural Network.\n\tThis implementation is based on https://arxiv.org/abs/2012.12177\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(QCNNCong, self).__init__(args)\n self.args = vars(args) if args is not None else {}\n # Data config\n self.input_dim = data_config[\"input_dims\"]", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_cong", + "documentation": {} + }, + { + "label": "QCNNHybrid", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_hybrid", + "description": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_hybrid", + "peekOfCode": "class QCNNHybrid(QCNN):\n \"\"\"\n\tGeneral Quantum Convolutional Neural Network\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(QCNNHybrid, self).__init__(data_config, args)\n self.args = vars(args) if args is not None else {}\n self.qconv2d_1 = QConv2D(\n filters=1,\n kernel_size=3,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_hybrid", + "documentation": {} + }, + { + "label": "QCNNSandwich", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_sandwich", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_sandwich", + "peekOfCode": "class QCNNSandwich(QCNN):\n \"\"\"\n\tGeneral Quantum Convolutional Neural Network\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(QCNNSandwich, self).__init__(data_config, args)\n self.args = vars(args) if args is not None else {}\n # Model Configuration\n self.num_conv_layers = self.args.get('num_conv_layers', 1)\n self.conv_dims = self.args.get('conv_dims', [64])", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.qcnn_sandwich", + "documentation": {} + }, + { + "label": "ResnetQ50", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.resnetq50", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.resnetq50", + "peekOfCode": "class ResnetQ50(QCNN):\n def __init__(self, data_config, args=None):\n super(ResnetQ50, self).__init__(data_config, args)\n self.args = vars(args) if args is not None else {}\n input_shape = [None] + list(self.input_dim)\n self.base_model = ResNet50(include_top=False,\n weights='imagenet',\n input_shape=(self.input_dim))\n self.base_model.trainable = False\n input_shape = self.base_model.compute_output_shape(input_shape)", + "detail": 
"Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.resnetq50", + "documentation": {} + }, + { + "label": "VQC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.vqc", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.vqc", + "peekOfCode": "class VQC(QCNN):\n \"\"\"\n\tGeneral Quantum Convolutional Neural Network\n\t\"\"\"\n def __init__(self, data_config, args=None):\n super(VQC, self).__init__(data_config, args)\n self.args = vars(args) if args is not None else {}\n if self.ansatz_class == 'NQubit':\n self.vqc = NQubitPQC(\n n_qubits=self.n_qubits,", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.quantum.vqc", + "documentation": {} + }, + { + "label": "BaseModel", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.base_model", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.base_model", + "peekOfCode": "class BaseModel(Model):\n def __init__(self, args=None):\n super().__init__()\n self.args = vars(args) if args is not None else {}\n # Loss function\n self.loss = self.args.get('loss', \"CategoricalCrossentropy\")\n self.loss_fn = getattr(losses, self.loss)()\n self.lr = self.args.get('learning_rate', 0.002)\n # Optimizer\n if self.args.get('optimizer', 'Adam') == 'Adam':", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.base_model", + "documentation": {} + }, + { + "label": "qAUC", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "peekOfCode": "class qAUC(tf.keras.metrics.AUC):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n def update_state(self, y_true, y_pred, sample_weight=None):\n y_pred_ = tf.clip_by_value(y_pred, -1, 1)\n y_pred_ = (y_pred + 1) / 2\n y_true = (y_true + 1) / 2\n 
super().update_state(y_true, y_pred_, sample_weight=sample_weight)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "documentation": {} + }, + { + "label": "custom_accuracy", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "peekOfCode": "def custom_accuracy(y_true, y_pred):\n y_true = tf.squeeze(y_true)\n y_pred = tf.map_fn(lambda x: 1.0 if x >= 0 else -1.0, y_pred)\n return tf.keras.backend.mean(tf.keras.backend.equal(y_true, y_pred))\nclass qAUC(tf.keras.metrics.AUC):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n def update_state(self, y_true, y_pred, sample_weight=None):\n y_pred_ = tf.clip_by_value(y_pred, -1, 1)\n y_pred_ = (y_pred + 1) / 2", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.models.metrics", + "documentation": {} + }, + { + "label": "ParseAction", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.utils", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.utils", + "peekOfCode": "class ParseAction(Action):\n def __call__(self, parser, namespace, values, option_string=None):\n values = list(map(int, values.split()))\n setattr(namespace, self.dest, values)", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.qml_hep_lhc.utils", + "documentation": {} + }, + { + "label": "PRMetrics", + "kind": 6, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.callbacks", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.callbacks", + "peekOfCode": "class PRMetrics(Callback):\n def __init__(self, data, use_quantum):\n self.x, self.y = tf_ds_to_numpy(data.test_ds)\n self.use_quantum = use_quantum\n self.classes = data.classes\n def on_train_end(self, logs=None):\n out = self.model.predict(self.x)\n if self.use_quantum:\n preds = map_fn(lambda x: 1.0 if x >= 0.5 else 0, 
out)\n probs = out", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.callbacks", + "documentation": {} + }, + { + "label": "get_configuration", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def get_configuration(parser, args, data, model):\n arg_grps = {}\n for group in parser._action_groups:\n group_dict = {\n a.dest: getattr(args, a.dest, None) for a in group._group_actions\n }\n arg_grps[group.title] = group_dict\n # Add additional configurations\n arg_grps['Base Model Args']['loss'] = model.loss\n arg_grps['Base Model Args']['accuracy'] = model.acc_metrics", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def main():\n # Parsing the arguments from the command line.\n parser = _setup_parser()\n args = parser.parse_args()\n # Importing the data class\n data_class = _import_class(f\"qml_hep_lhc.data.{args.data_class}\")\n # Creating a data object, and then calling the prepare_data and setup methods on it.\n data = data_class(args)\n data.prepare_data()\n data.setup()", + "detail": "Quantum_CNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "angle_circuit", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "peekOfCode": "def angle_circuit(phi):\n for j in range(4):\n qml.RX(np.pi * phi[j], wires=j)\n return [qml.expval(qml.PauliZ(j)) for j in range(4)]\ndef angle_encoding(data, sample=0):\n new_dim = data.shape[1] // 2\n out = 
np.zeros((new_dim, new_dim, 4))\n for i in range(0, data.shape[1], 2):\n for j in range(0, data.shape[1], 2):\n q_results = angle_circuit(", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "documentation": {} + }, + { + "label": "angle_encoding", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "peekOfCode": "def angle_encoding(data, sample=0):\n new_dim = data.shape[1] // 2\n out = np.zeros((new_dim, new_dim, 4))\n for i in range(0, data.shape[1], 2):\n for j in range(0, data.shape[1], 2):\n q_results = angle_circuit(\n [\n data[sample, i, j],\n data[sample, i, j+1],\n data[sample, i+1, j],", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "documentation": {} + }, + { + "label": "seed", + "kind": 5, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "peekOfCode": "seed = 42\nnum_qubits = 64 # data.shape[1]**2\ndev = qml.device(\"default.qubit\", wires=num_qubits)\n@qml.qnode(dev)\ndef angle_circuit(phi):\n for j in range(4):\n qml.RX(np.pi * phi[j], wires=j)\n return [qml.expval(qml.PauliZ(j)) for j in range(4)]\ndef angle_encoding(data, sample=0):\n new_dim = data.shape[1] // 2", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "documentation": {} + }, + { + "label": "num_qubits", + "kind": 5, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "description": 
"Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "peekOfCode": "num_qubits = 64 # data.shape[1]**2\ndev = qml.device(\"default.qubit\", wires=num_qubits)\n@qml.qnode(dev)\ndef angle_circuit(phi):\n for j in range(4):\n qml.RX(np.pi * phi[j], wires=j)\n return [qml.expval(qml.PauliZ(j)) for j in range(4)]\ndef angle_encoding(data, sample=0):\n new_dim = data.shape[1] // 2\n out = np.zeros((new_dim, new_dim, 4))", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "peekOfCode": "dev = qml.device(\"default.qubit\", wires=num_qubits)\n@qml.qnode(dev)\ndef angle_circuit(phi):\n for j in range(4):\n qml.RX(np.pi * phi[j], wires=j)\n return [qml.expval(qml.PauliZ(j)) for j in range(4)]\ndef angle_encoding(data, sample=0):\n new_dim = data.shape[1] // 2\n out = np.zeros((new_dim, new_dim, 4))\n for i in range(0, data.shape[1], 2):", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.angle_encoding_script", + "documentation": {} + }, + { + "label": "normal_random_unitary", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "peekOfCode": "def normal_random_unitary(seed=None):\n if seed is not None:\n np.random.seed(seed)\n return unitary_group.rvs(4)\ndef apply_haar_scrambling(encoded_data, num_samples, seed):\n scrambled_vectors = []\n for sample in range(num_samples):\n scrambled_vector = []\n for i in range(8):\n channels = []", + "detail": 
"Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "documentation": {} + }, + { + "label": "apply_haar_scrambling", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "peekOfCode": "def apply_haar_scrambling(encoded_data, num_samples, seed):\n scrambled_vectors = []\n for sample in range(num_samples):\n scrambled_vector = []\n for i in range(8):\n channels = []\n for j in range(8):\n U = normal_random_unitary(seed)\n scrambled_state = np.dot(U, encoded_data[sample, i, j, :])\n scrambled_state /= np.linalg.norm(scrambled_state)", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.gaussian_noising", + "documentation": {} + }, + { + "label": "sin_prob_dist", + "kind": 6, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "peekOfCode": "class sin_prob_dist(rv_continuous):\n def _pdf(self, theta):\n return 0.5 * np.sin(theta)\nsin_sampler = sin_prob_dist(a=0, b=np.pi)\n@qml.qnode(dev)\ndef haar_random_unitary():\n phi1, omega1 = 2 * np.pi * np.random.uniform(size=2)\n theta1 = sin_sampler.rvs(size=1)\n phi2, omega2 = 2 * np.pi * np.random.uniform(size=2)\n theta2 = sin_sampler.rvs(size=1)", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "documentation": {} + }, + { + "label": "haar_random_unitary", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "peekOfCode": "def 
haar_random_unitary():\n phi1, omega1 = 2 * np.pi * np.random.uniform(size=2)\n theta1 = sin_sampler.rvs(size=1)\n phi2, omega2 = 2 * np.pi * np.random.uniform(size=2)\n theta2 = sin_sampler.rvs(size=1)\n qml.Rot(phi1, theta1, omega1, wires=0)\n qml.Rot(phi2, theta2, omega2, wires=1)\n return qml.state()\ndef apply_haar_scrambling(encoded_data, num_samples, seed=None):\n scrambled_vectors = []", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "documentation": {} + }, + { + "label": "apply_haar_scrambling", + "kind": 2, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "peekOfCode": "def apply_haar_scrambling(encoded_data, num_samples, seed=None):\n scrambled_vectors = []\n new_dim = encoded_data.shape[1]\n for sample in range(num_samples):\n scrambled_vector = []\n for _ in range(new_dim):\n channels = []\n for _ in range(new_dim):\n if seed is not None:\n np.random.seed(seed)", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "peekOfCode": "dev = qml.device(\"default.qubit\", wires=2) \nclass sin_prob_dist(rv_continuous):\n def _pdf(self, theta):\n return 0.5 * np.sin(theta)\nsin_sampler = sin_prob_dist(a=0, b=np.pi)\n@qml.qnode(dev)\ndef haar_random_unitary():\n phi1, omega1 = 2 * np.pi * np.random.uniform(size=2)\n theta1 = sin_sampler.rvs(size=1)\n phi2, omega2 = 2 * np.pi * np.random.uniform(size=2)", + "detail": 
"Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "documentation": {} + }, + { + "label": "sin_sampler", + "kind": 5, + "importPath": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "description": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "peekOfCode": "sin_sampler = sin_prob_dist(a=0, b=np.pi)\n@qml.qnode(dev)\ndef haar_random_unitary():\n phi1, omega1 = 2 * np.pi * np.random.uniform(size=2)\n theta1 = sin_sampler.rvs(size=1)\n phi2, omega2 = 2 * np.pi * np.random.uniform(size=2)\n theta2 = sin_sampler.rvs(size=1)\n qml.Rot(phi1, theta1, omega1, wires=0)\n qml.Rot(phi2, theta2, omega2, wires=1)\n return qml.state()", + "detail": "Quantum_Diffusion_Model_for_HEP_Masha_Baidachna.notebooks.quantum.project.haar_noising_script", + "documentation": {} + }, + { + "label": "load_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "peekOfCode": "def load_data(filepath):\n jet_mass_data = HDF5File(filepath, 'r')\n X_jet = jet_mass_data['image']\n X_jet = np.array(X_jet)\n X_jet = torch.tensor(X_jet, dtype=torch.float32)\n # Normalize the data\n X_jet = (X_jet - X_jet.min()) / (X_jet.max() - X_jet.min())\n X_jet = X_jet.unsqueeze(1)\n X_jet_resized = nn.functional.interpolate(X_jet, size=(16, 16), mode='bilinear', align_corners=False)\n return X_jet_resized", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "documentation": {} + }, + { + "label": "get_dataloader", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "peekOfCode": "def get_dataloader(data, 
batch_size=128):\n dataset = TensorDataset(data)\n dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True)\n return dataloader\nif __name__ == \"__main__\":\n jet_images_path = 'path/to/jet-images_Mass60-100_pT250-300_R1.25_Pix25.hdf5'\n data = load_data(jet_images_path)\n dataloader = get_dataloader(data)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "peekOfCode": "device = 'cuda' if torch.cuda.is_available() else 'cpu'\ndef load_data(filepath):\n jet_mass_data = HDF5File(filepath, 'r')\n X_jet = jet_mass_data['image']\n X_jet = np.array(X_jet)\n X_jet = torch.tensor(X_jet, dtype=torch.float32)\n # Normalize the data\n X_jet = (X_jet - X_jet.min()) / (X_jet.max() - X_jet.min())\n X_jet = X_jet.unsqueeze(1)\n X_jet_resized = nn.functional.interpolate(X_jet, size=(16, 16), mode='bilinear', align_corners=False)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.data_preprocessing", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.main", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.main", + "peekOfCode": "def main(args):\n data = load_data(args.dataset_path)\n dataloader = get_dataloader(data, batch_size=args.batch_size)\n if args.model == 'vanilla':\n train_vanilla_gan(dataloader, latent_dim=args.latent_dim, lr=args.lr, n_epochs=args.epochs)\n elif args.model == 'wgan':\n train_wgan(dataloader, latent_dim=args.latent_dim, lr=args.lr, n_epochs=args.epochs)\n elif args.model == 'tv':\n train_total_variation_gan(dataloader, latent_dim=args.latent_dim, lr=args.lr, n_epochs=args.epochs)\n elif args.model == 
'perceptual':", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.main", + "documentation": {} + }, + { + "label": "PLGenerator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "class PLGenerator(nn.Module):\n def __init__(self, latent_dim):\n super(PLGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),\n nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "PLDiscriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "class PLDiscriminator(nn.Module):\n def __init__(self):\n super(PLDiscriminator, self).__init__()\n self.model = nn.Sequential(\n spectral_norm(nn.Conv2d(1, 64, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Dropout(0.3),\n spectral_norm(nn.Conv2d(64, 128, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "gradient_penalty", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "def gradient_penalty(discriminator, real_samples, fake_samples):\n alpha = torch.rand(real_samples.size(0), 1, 1, 1, device=real_samples.device)\n interpolates = alpha * real_samples + ((1 - alpha) * 
fake_samples)\n interpolates.requires_grad_(True)\n d_interpolates = discriminator(interpolates)\n fake = torch.ones(d_interpolates.size(), device=real_samples.device, requires_grad=False)\n gradients = torch.autograd.grad(\n outputs=d_interpolates,\n inputs=interpolates,\n grad_outputs=fake,", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "perceptual_loss", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "def perceptual_loss(real, fake, vgg):\n real_features = vgg(real.repeat(1, 3, 1, 1)) # Repeat channels to match VGG input\n fake_features = vgg(fake.repeat(1, 3, 1, 1))\n loss = F.mse_loss(fake_features, real_features)\n return loss\ndef calculate_fid(real_images, fake_images, batch_size=128):\n inception_model = models.inception_v3(pretrained=True, transform_input=False).to(device)\n inception_model.eval()\n def get_features(images):\n features = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "calculate_fid", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "def calculate_fid(real_images, fake_images, batch_size=128):\n inception_model = models.inception_v3(pretrained=True, transform_input=False).to(device)\n inception_model.eval()\n def get_features(images):\n features = []\n for i in range(0, len(images), batch_size):\n batch = images[i:i+batch_size].to(device)\n batch = F.interpolate(batch, size=(299, 299), mode='bilinear', align_corners=False)\n batch = batch.repeat(1, 3, 1, 1)\n with torch.no_grad():", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "plot_generated_samples", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "def plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')\n plt.show()\ndef plot_metrics(g_losses, d_losses, fid_scores):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "plot_metrics", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "def plot_metrics(g_losses, d_losses, fid_scores):\n epochs = range(1, len(g_losses) + 1)\n plt.figure(figsize=(14, 5))\n plt.subplot(1, 2, 1)\n plt.plot(epochs, g_losses, label='Generator Loss')\n plt.plot(epochs, d_losses, label='Discriminator Loss')\n plt.xlabel('Epoch')\n plt.ylabel('Loss')\n plt.legend()\n plt.title('Generator and Discriminator Losses')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "device = 'cuda' if torch.cuda.is_available() else 'cpu'\nclass PLGenerator(nn.Module):\n def __init__(self, 
latent_dim):\n super(PLGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "latent_dim", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "latent_dim = 100\nlr = 0.0002\nn_epochs = 30\nlambda_gp = 10\nlambda_perceptual = 1 # Weight for the perceptual loss term\ngenerator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "lr", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "lr = 0.0002\nn_epochs = 30\nlambda_gp = 10\nlambda_perceptual = 1 # Weight for the perceptual loss term\ngenerator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "n_epochs", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "n_epochs = 30\nlambda_gp = 10\nlambda_perceptual = 1 # Weight for the perceptual loss term\ngenerator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "lambda_gp", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "lambda_gp = 10\nlambda_perceptual = 1 # Weight for the perceptual loss term\ngenerator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "lambda_perceptual", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "lambda_perceptual = 1 # Weight for the perceptual loss term\ngenerator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = 
False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "generator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "generator = PLGenerator(latent_dim).to(device)\ndiscriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "discriminator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "discriminator = PLDiscriminator().to(device)\nvgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "vgg", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "vgg = models.vgg16(pretrained=True).features[:16].to(device).eval()\nfor param in vgg.parameters():\n param.requires_grad = False\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "optimizer_G", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "optimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "optimizer_D", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "optimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples 
= (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "g_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "g_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "d_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "peekOfCode": "d_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "fid_scores", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + 
"peekOfCode": "fid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')\n plt.show()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.perceptual_loss", + "documentation": {} + }, + { + "label": "TVGenerator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "class TVGenerator(nn.Module):\n def __init__(self, latent_dim):\n super(TVGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),\n nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "TVDiscriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "class TVDiscriminator(nn.Module):\n def __init__(self):\n super(TVDiscriminator, self).__init__()\n self.model = nn.Sequential(\n spectral_norm(nn.Conv2d(1, 64, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Dropout(0.3),\n spectral_norm(nn.Conv2d(64, 128, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "gradient_penalty", + "kind": 2, 
+ "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "def gradient_penalty(discriminator, real_samples, fake_samples):\n alpha = torch.rand(real_samples.size(0), 1, 1, 1, device=real_samples.device)\n interpolates = alpha * real_samples + ((1 - alpha) * fake_samples)\n interpolates.requires_grad_(True)\n d_interpolates = discriminator(interpolates)\n fake = torch.ones(d_interpolates.size(), device=real_samples.device, requires_grad=False)\n gradients = torch.autograd.grad(\n outputs=d_interpolates,\n inputs=interpolates,\n grad_outputs=fake,", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "total_variation_loss", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "def total_variation_loss(img):\n batch_size = img.size(0)\n h_x = img.size(2)\n w_x = img.size(3)\n count_h = (img.size(2) - 1) * img.size(3)\n count_w = img.size(2) * (img.size(3) - 1)\n h_tv = torch.pow((img[:,:,1:,:] - img[:,:,:h_x-1,:]), 2).sum()\n w_tv = torch.pow((img[:,:,:,1:] - img[:,:,:,:w_x-1]), 2).sum()\n return 2 * (h_tv / count_h + w_tv / count_w) / batch_size\ndef calculate_fid(real_images, fake_images, batch_size=128):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "calculate_fid", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "def calculate_fid(real_images, fake_images, batch_size=128):\n inception_model = models.inception_v3(pretrained=True, 
transform_input=False).to(device)\n inception_model.eval()\n def get_features(images):\n features = []\n for i in range(0, len(images), batch_size):\n batch = images[i:i+batch_size].to(device)\n batch = F.interpolate(batch, size=(299, 299), mode='bilinear', align_corners=False)\n batch = batch.repeat(1, 3, 1, 1)\n with torch.no_grad():", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "device = 'cuda' if torch.cuda.is_available() else 'cpu'\nclass TVGenerator(nn.Module):\n def __init__(self, latent_dim):\n super(TVGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "latent_dim", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "latent_dim = 100\nlr = 0.0002\nn_epochs = 30\nlambda_gp = 10\nlambda_tv = 1\ngenerator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "lr", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "lr = 0.0002\nn_epochs = 30\nlambda_gp = 10\nlambda_tv = 1\ngenerator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "n_epochs", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "n_epochs = 30\nlambda_gp = 10\nlambda_tv = 1\ngenerator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "lambda_gp", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "lambda_gp = 10\nlambda_tv = 1\ngenerator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "lambda_tv", + "kind": 5, + "importPath": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "lambda_tv = 1\ngenerator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "generator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "generator = TVGenerator(latent_dim).to(device)\ndiscriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = real_samples.to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "discriminator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "discriminator = TVDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = 
real_samples.to(device)\n batch_size = real_samples.size(0)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "optimizer_G", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "optimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = real_samples.to(device)\n batch_size = real_samples.size(0)\n optimizer_D.zero_grad()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "optimizer_D", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "optimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = real_samples.to(device)\n batch_size = real_samples.size(0)\n optimizer_D.zero_grad()\n z = torch.randn(batch_size, latent_dim, 1, 1).to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "g_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "g_losses = []\nd_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n 
real_samples = real_samples.to(device)\n batch_size = real_samples.size(0)\n optimizer_D.zero_grad()\n z = torch.randn(batch_size, latent_dim, 1, 1).to(device)\n fake_samples = generator(z).detach()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "d_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "d_losses = []\nfid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = real_samples.to(device)\n batch_size = real_samples.size(0)\n optimizer_D.zero_grad()\n z = torch.randn(batch_size, latent_dim, 1, 1).to(device)\n fake_samples = generator(z).detach()\n real_validity = discriminator(real_samples)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "fid_scores", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "peekOfCode": "fid_scores = []\nfor epoch in range(n_epochs):\n for i, (real_samples,) in enumerate(dataloader):\n real_samples = real_samples.to(device)\n batch_size = real_samples.size(0)\n optimizer_D.zero_grad()\n z = torch.randn(batch_size, latent_dim, 1, 1).to(device)\n fake_samples = generator(z).detach()\n real_validity = discriminator(real_samples)\n fake_validity = discriminator(fake_samples)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.total_variation", + "documentation": {} + }, + { + "label": "Generator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "peekOfCode": "class Generator(nn.Module):\n def __init__(self, latent_dim):\n super(Generator, self).__init__()\n self.init_size = 4 # Initial size before upsampling\n self.l1 = nn.Sequential(nn.Linear(latent_dim, 128 * self.init_size ** 2))\n self.conv_blocks = nn.Sequential(\n nn.BatchNorm2d(128),\n nn.Upsample(scale_factor=2),\n nn.Conv2d(128, 128, 3, stride=1, padding=1),\n nn.BatchNorm2d(128, 0.8),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "documentation": {} + }, + { + "label": "Discriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "peekOfCode": "class Discriminator(nn.Module):\n def __init__(self):\n super(Discriminator, self).__init__()\n self.conv_blocks = nn.Sequential(\n nn.Conv2d(1, 64, 3, 2, 1),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Conv2d(64, 128, 3, 2, 1),\n nn.BatchNorm2d(128),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Conv2d(128, 256, 3, 2, 1),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "documentation": {} + }, + { + "label": "train_vanilla_gan", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "peekOfCode": "def train_vanilla_gan(dataloader, latent_dim=100, lr=0.0002, b1=0.5, b2=0.999, n_epochs=20):\n generator = Generator(latent_dim).to(device)\n discriminator = Discriminator().to(device)\n optimizer_G = optim.Adam(generator.parameters(), lr=lr, betas=(b1, b2))\n optimizer_D = optim.Adam(discriminator.parameters(), lr=lr, betas=(b1, b2))\n adversarial_loss = nn.BCELoss()\n d_losses = []\n g_losses = []\n for epoch in range(n_epochs):\n epoch_d_loss = 0", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "peekOfCode": "device = 'cuda' if torch.cuda.is_available() else 'cpu'\nclass Generator(nn.Module):\n def __init__(self, latent_dim):\n super(Generator, self).__init__()\n self.init_size = 4 # Initial size before upsampling\n self.l1 = nn.Sequential(nn.Linear(latent_dim, 128 * self.init_size ** 2))\n self.conv_blocks = nn.Sequential(\n nn.BatchNorm2d(128),\n nn.Upsample(scale_factor=2),\n nn.Conv2d(128, 128, 3, stride=1, padding=1),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.vanilla_gan", + "documentation": {} + }, + { + "label": "WGANGenerator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "class WGANGenerator(nn.Module):\n def __init__(self, latent_dim):\n super(WGANGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),\n nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "WGANDiscriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "class WGANDiscriminator(nn.Module):\n def __init__(self):\n super(WGANDiscriminator, self).__init__()\n self.model = nn.Sequential(\n spectral_norm(nn.Conv2d(1, 64, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n 
nn.Dropout(0.3),\n spectral_norm(nn.Conv2d(64, 128, 4, 2, 1, bias=False)),\n nn.LeakyReLU(0.2, inplace=True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "gradient_penalty", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "def gradient_penalty(discriminator, real_samples, fake_samples):\n alpha = torch.rand(real_samples.size(0), 1, 1, 1, device=real_samples.device)\n interpolates = alpha * real_samples + ((1 - alpha) * fake_samples)\n interpolates.requires_grad_(True)\n d_interpolates = discriminator(interpolates)\n fake = torch.ones(d_interpolates.size(), device=real_samples.device, requires_grad=False)\n gradients = torch.autograd.grad(\n outputs=d_interpolates,\n inputs=interpolates,\n grad_outputs=fake,", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "calculate_fid", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "def calculate_fid(real_images, fake_images, batch_size=128):\n inception_model = models.inception_v3(pretrained=True, transform_input=False).to(device)\n inception_model.eval()\n def get_features(images):\n features = []\n for i in range(0, len(images), batch_size):\n batch = images[i:i+batch_size].to(device)\n batch = F.interpolate(batch, size=(299, 299), mode='bilinear', align_corners=False)\n batch = batch.repeat(1, 3, 1, 1)\n with torch.no_grad():", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "plot_generated_samples", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + 
"description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "def plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')\n plt.show()\ndef plot_metrics(g_losses, d_losses, fid_scores):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "plot_metrics", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "def plot_metrics(g_losses, d_losses, fid_scores):\n epochs = range(1, len(g_losses) + 1)\n plt.figure(figsize=(14, 5))\n plt.subplot(1, 2, 1)\n plt.plot(epochs, g_losses, label='Generator Loss')\n plt.plot(epochs, d_losses, label='Discriminator Loss')\n plt.xlabel('Epoch')\n plt.ylabel('Loss')\n plt.legend()\n plt.title('Generator and Discriminator Losses')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "device = 'cuda' if torch.cuda.is_available() else 'cpu'\nclass WGANGenerator(nn.Module):\n def __init__(self, latent_dim):\n super(WGANGenerator, self).__init__()\n self.latent_dim = latent_dim\n self.model = nn.Sequential(\n nn.ConvTranspose2d(latent_dim, 256, 4, 1, 0, bias=False),\n nn.BatchNorm2d(256),\n nn.ReLU(True),\n nn.Dropout(0.3),", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + 
}, + { + "label": "latent_dim", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "latent_dim = 100\nlr = 0.0002\nn_epochs = 30\nlambda_gp = 10\ngenerator = WGANGenerator(latent_dim).to(device)\ndiscriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "lr", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "lr = 0.0002\nn_epochs = 30\nlambda_gp = 10\ngenerator = WGANGenerator(latent_dim).to(device)\ndiscriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "n_epochs", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "n_epochs = 30\nlambda_gp = 10\ngenerator = WGANGenerator(latent_dim).to(device)\ndiscriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": 
"lambda_gp", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "lambda_gp = 10\ngenerator = WGANGenerator(latent_dim).to(device)\ndiscriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "generator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "generator = WGANGenerator(latent_dim).to(device)\ndiscriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "discriminator", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "discriminator = WGANDiscriminator().to(device)\noptimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, 
num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "optimizer_G", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "optimizer_G = optim.RMSprop(generator.parameters(), lr=lr)\noptimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "optimizer_D", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "optimizer_D = optim.RMSprop(discriminator.parameters(), lr=lr)\ng_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "g_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "g_losses = []\nd_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "d_losses", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "d_losses = []\nfid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "fid_scores", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "peekOfCode": "fid_scores = []\ndef plot_generated_samples(generator, latent_dim, num_samples=16):\n z = torch.randn(num_samples, latent_dim, 1, 1).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples, 1))\n for i in range(num_samples):\n axes[i].imshow(gen_samples[i, 0])\n axes[i].axis('off')\n plt.show()", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Classical_gans.wgan", + "documentation": {} + }, + { + "label": "Discriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.discriminator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.discriminator", + "peekOfCode": "class Discriminator(nn.Module):\n def __init__(self):\n super(Discriminator, self).__init__()\n self.conv1 = spectral_norm(nn.Conv2d(1, 64, kernel_size=3, stride=1, padding=1))\n self.conv2 = spectral_norm(nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1))\n self.conv3 = spectral_norm(nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1))\n self.fc1 = spectral_norm(nn.Linear(256 * 4 * 4, 1))\n def forward(self, x):\n x = F.leaky_relu(self.conv1(x), 0.2)\n x = F.leaky_relu(self.conv2(x), 0.2)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.discriminator", + "documentation": {} + }, + { + "label": "QuantumGenerator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "class QuantumGenerator(nn.Module):\n def __init__(self, n_qubits=4, depth=3, output_dim=16*16):\n super(QuantumGenerator, self).__init__()\n self.n_qubits = n_qubits\n self.depth = depth\n self.output_dim = output_dim\n self.params = nn.Parameter(torch.randn((depth * 2 * n_qubits,), requires_grad=True))\n self.fc1 = nn.Linear(n_qubits, 128)\n self.fc2 = nn.Linear(128, 256)\n self.fc3 = nn.Linear(256, self.output_dim)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "quantum_circuit", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "def quantum_circuit(params, data, n_qubits=4):\n depth = len(params) // (2 * n_qubits)\n for d in range(depth):\n for i in range(n_qubits):\n qml.RY(data[i], wires=i)\n qml.RY(params[d * 2 * n_qubits + i], wires=i)\n for i in range(n_qubits):\n qml.CNOT(wires=[i, (i + 1) % n_qubits])\n for i in range(n_qubits):\n qml.RZ(params[d * 2 * n_qubits + n_qubits + i], wires=i)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\nclass QuantumGenerator(nn.Module):\n def __init__(self, n_qubits=4, depth=3, output_dim=16*16):\n super(QuantumGenerator, self).__init__()\n self.n_qubits = n_qubits\n self.depth = depth\n self.output_dim = output_dim\n self.params = nn.Parameter(torch.randn((depth * 2 * n_qubits,), requires_grad=True))\n self.fc1 = nn.Linear(n_qubits, 128)\n self.fc2 = nn.Linear(128, 256)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "n_qubits", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "n_qubits = 4\ndev = qml.device('default.qubit', wires=n_qubits)\ndef quantum_circuit(params, data, n_qubits=4):\n depth = len(params) // (2 * n_qubits)\n for d in range(depth):\n for i in range(n_qubits):\n qml.RY(data[i], wires=i)\n qml.RY(params[d * 2 * n_qubits + i], wires=i)\n for i in range(n_qubits):\n 
qml.CNOT(wires=[i, (i + 1) % n_qubits])", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "dev = qml.device('default.qubit', wires=n_qubits)\ndef quantum_circuit(params, data, n_qubits=4):\n depth = len(params) // (2 * n_qubits)\n for d in range(depth):\n for i in range(n_qubits):\n qml.RY(data[i], wires=i)\n qml.RY(params[d * 2 * n_qubits + i], wires=i)\n for i in range(n_qubits):\n qml.CNOT(wires=[i, (i + 1) % n_qubits])\n for i in range(n_qubits):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "qnode", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "peekOfCode": "qnode = qml.QNode(quantum_circuit, dev)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.models.quantum_generator", + "documentation": {} + }, + { + "label": "total_variation_loss", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "peekOfCode": "def total_variation_loss(img, weight=1e-5):\n batch_size, _, height, width = img.size()\n tv_h = torch.mean(torch.abs(img[:, :, 1:, :] - img[:, :, :-1, :]))\n tv_w = torch.mean(torch.abs(img[:, :, :, 1:] - img[:, :, :, :-1]))\n return weight * (tv_h + tv_w)\ndef train_jet_gan(n_epochs=30, batch_size=64, n_qubits=4):\n jet_loader = load_jet_data(jet_images_path, batch_size)\n latent_dim = n_qubits\n gen = 
QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)\n discriminator = Discriminator().to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "documentation": {} + }, + { + "label": "train_jet_gan", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "peekOfCode": "def train_jet_gan(n_epochs=30, batch_size=64, n_qubits=4):\n jet_loader = load_jet_data(jet_images_path, batch_size)\n latent_dim = n_qubits\n gen = QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)\n discriminator = Discriminator().to(device)\n criterion = torch.nn.BCELoss()\n optimizer_gen = Adam(gen.parameters(), lr=0.0002, betas=(0.5, 0.999))\n optimizer_disc = Adam(discriminator.parameters(), lr=0.0002, betas=(0.5, 0.999))\n gen_loss_list = []\n disc_loss_list = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "peekOfCode": "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\njet_images_path = 'data/jet-images_Mass60-100_pT250-300_R1.25_Pix25.hdf5'\ndef total_variation_loss(img, weight=1e-5):\n batch_size, _, height, width = img.size()\n tv_h = torch.mean(torch.abs(img[:, :, 1:, :] - img[:, :, :-1, :]))\n tv_w = torch.mean(torch.abs(img[:, :, :, 1:] - img[:, :, :, :-1]))\n return weight * (tv_h + tv_w)\ndef train_jet_gan(n_epochs=30, batch_size=64, n_qubits=4):\n jet_loader = load_jet_data(jet_images_path, batch_size)\n latent_dim = n_qubits", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "documentation": {} + }, + { + "label": "jet_images_path", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "peekOfCode": "jet_images_path = 'data/jet-images_Mass60-100_pT250-300_R1.25_Pix25.hdf5'\ndef total_variation_loss(img, weight=1e-5):\n batch_size, _, height, width = img.size()\n tv_h = torch.mean(torch.abs(img[:, :, 1:, :] - img[:, :, :-1, :]))\n tv_w = torch.mean(torch.abs(img[:, :, :, 1:] - img[:, :, :, :-1]))\n return weight * (tv_h + tv_w)\ndef train_jet_gan(n_epochs=30, batch_size=64, n_qubits=4):\n jet_loader = load_jet_data(jet_images_path, batch_size)\n latent_dim = n_qubits\n gen = QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.jet_training", + "documentation": {} + }, + { + "label": "total_variation_loss", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "peekOfCode": "def total_variation_loss(img, weight=1e-5):\n batch_size, _, height, width = img.size()\n tv_h = torch.mean(torch.abs(img[:, :, 1:, :] - img[:, :, :-1, :]))\n tv_w = torch.mean(torch.abs(img[:, :, :, 1:] - img[:, :, :, :-1]))\n return weight * (tv_h + tv_w)\ndef train_mnist_gan(n_epochs=50, batch_size=64, n_qubits=4):\n trainloader, _ = load_mnist_data(batch_size)\n latent_dim = n_qubits\n gen = QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)\n discriminator = Discriminator().to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "documentation": {} + }, + { + "label": "train_mnist_gan", + "kind": 2, + 
"importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "peekOfCode": "def train_mnist_gan(n_epochs=50, batch_size=64, n_qubits=4):\n trainloader, _ = load_mnist_data(batch_size)\n latent_dim = n_qubits\n gen = QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)\n discriminator = Discriminator().to(device)\n criterion = torch.nn.BCELoss()\n optimizer_gen = Adam(gen.parameters(), lr=0.0002, betas=(0.5, 0.999))\n optimizer_disc = Adam(discriminator.parameters(), lr=0.0002, betas=(0.5, 0.999))\n gen_loss_list = []\n disc_loss_list = []", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "peekOfCode": "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\ndef total_variation_loss(img, weight=1e-5):\n batch_size, _, height, width = img.size()\n tv_h = torch.mean(torch.abs(img[:, :, 1:, :] - img[:, :, :-1, :]))\n tv_w = torch.mean(torch.abs(img[:, :, :, 1:] - img[:, :, :, :-1]))\n return weight * (tv_h + tv_w)\ndef train_mnist_gan(n_epochs=50, batch_size=64, n_qubits=4):\n trainloader, _ = load_mnist_data(batch_size)\n latent_dim = n_qubits\n gen = QuantumGenerator(n_qubits=n_qubits, depth=3, output_dim=16*16).to(device)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.training.mnist_training", + "documentation": {} + }, + { + "label": "load_mnist_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "peekOfCode": "def 
load_mnist_data(batch_size=64):\n transform = transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize((0.5,), (0.5,))\n ])\n trainset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)\n trainloader = DataLoader(trainset, batch_size=batch_size, shuffle=True)\n testset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)\n testloader = DataLoader(testset, batch_size=batch_size, shuffle=False)\n return trainloader, testloader", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "documentation": {} + }, + { + "label": "load_jet_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "peekOfCode": "def load_jet_data(jet_images_path, batch_size=64):\n with h5py.File(jet_images_path, 'r') as f:\n X_jet = np.array(f['image'])\n X_jet = torch.tensor(X_jet, dtype=torch.float32)\n X_jet = (X_jet - X_jet.min()) / (X_jet.max() - X_jet.min())\n X_jet = X_jet.unsqueeze(1)\n X_jet_resized = torch.nn.functional.interpolate(X_jet, size=(16, 16), mode='bilinear', align_corners=False)\n indices = random.sample(range(X_jet_resized.shape[0]), 100000)\n X_jet_sampled = X_jet_resized[indices]\n jet_dataset = TensorDataset(X_jet_sampled)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.data_loader", + "documentation": {} + }, + { + "label": "calculate_fid", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.fid_score", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.fid_score", + "peekOfCode": "def calculate_fid(real_images, fake_images, device):\n inception_model = inception_v3(pretrained=True, transform_input=False).to(device)\n inception_model.eval()\n real_images = F.interpolate(real_images, size=(299, 299), mode='bilinear')\n 
fake_images = F.interpolate(fake_images, size=(299, 299), mode='bilinear')\n real_images = real_images.repeat(1, 3, 1, 1)\n fake_images = fake_images.repeat(1, 3, 1, 1)\n def get_activations(images):\n activations = []\n with torch.no_grad():", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.fid_score", + "documentation": {} + }, + { + "label": "plot_losses", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "peekOfCode": "def plot_losses(gen_losses, disc_losses):\n plt.figure(figsize=(10, 5))\n plt.plot(gen_losses, label='Generator Loss')\n plt.plot(disc_losses, label='Discriminator Loss')\n plt.xlabel('Epochs')\n plt.ylabel('Loss')\n plt.title('Generator and Discriminator Loss During Training')\n plt.legend()\n plt.grid(True)\n plt.show()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "documentation": {} + }, + { + "label": "plot_generated_samples", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "peekOfCode": "def plot_generated_samples(generator, latent_dim, num_samples=16, device='cpu'):\n z = torch.randn(num_samples, latent_dim).to(device)\n gen_samples = generator(z).detach().cpu()\n gen_samples = (gen_samples + 1) / 2.0\n fig, axes = plt.subplots(1, num_samples, figsize=(num_samples * 2, 2))\n for i in range(num_samples):\n img = gen_samples[i, 0].numpy()\n axes[i].imshow(img, cmap='viridis')\n axes[i].axis('off')\n plt.show()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.utils.plot_utils", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.main", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.main", + "peekOfCode": "def main():\n parser = argparse.ArgumentParser(description=\"Hybrid GAN Training\")\n parser.add_argument('--dataset', type=str, required=True, choices=['mnist', 'jet'], help='Dataset to use for training (mnist or jet)')\n args = parser.parse_args()\n if args.dataset == 'mnist':\n from training.mnist_training import train_mnist_gan\n print(\"Training on MNIST dataset...\")\n train_mnist_gan()\n elif args.dataset == 'jet':\n from training.jet_training import train_jet_gan", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.Hybrid_gans.main", + "documentation": {} + }, + { + "label": "load_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "peekOfCode": "def load_data(filepath):\n with h5py.File(filepath, 'r') as jet_mass_data:\n X_jet = jet_mass_data['image']\n print(\"Original shape:\", X_jet.shape)\n X_jet = np.array(X_jet)\n X_jet = torch.tensor(X_jet, dtype=torch.float32)\n return X_jet\ndef normalize_and_resize(X, size=(16, 16)):\n X = (X - X.min()) / (X.max() - X.min())\n X = X.unsqueeze(1)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "documentation": {} + }, + { + "label": "normalize_and_resize", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "peekOfCode": "def normalize_and_resize(X, size=(16, 16)):\n X = (X - X.min()) / (X.max() - X.min())\n X = X.unsqueeze(1)\n X_resized = nn.functional.interpolate(X, size=size, mode='bilinear', align_corners=False)\n print(\"Resized shape:\", X_resized.shape)\n return 
X_resized\ndef sample_data(X, num_samples=10000):\n indices = random.sample(range(X.shape[0]), num_samples)\n X_sampled = X[indices]\n print(\"Sampled shape:\", X_sampled.shape)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "documentation": {} + }, + { + "label": "sample_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "peekOfCode": "def sample_data(X, num_samples=10000):\n indices = random.sample(range(X.shape[0]), num_samples)\n X_sampled = X[indices]\n print(\"Sampled shape:\", X_sampled.shape)\n return X_sampled\ndef apply_pca(X, n_components=2):\n X_flat = X.view(X.size(0), -1).numpy()\n pca = PCA(n_components=n_components)\n pca_data = pca.fit_transform(X_flat)\n print(f\"PCA applied with {n_components} components.\")", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "documentation": {} + }, + { + "label": "apply_pca", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "peekOfCode": "def apply_pca(X, n_components=2):\n X_flat = X.view(X.size(0), -1).numpy()\n pca = PCA(n_components=n_components)\n pca_data = pca.fit_transform(X_flat)\n print(f\"PCA applied with {n_components} components.\")\n return pca, pca_data\ndef normalize_pca(pca_data, n_components=2):\n pca_descaler = [[] for _ in range(n_components)]\n for i in range(n_components):\n if pca_data[:, i].min() < 0:", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "documentation": {} + }, + { + "label": 
"normalize_pca", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "peekOfCode": "def normalize_pca(pca_data, n_components=2):\n pca_descaler = [[] for _ in range(n_components)]\n for i in range(n_components):\n if pca_data[:, i].min() < 0:\n pca_descaler[i].append(pca_data[:, i].min())\n pca_data[:, i] += np.abs(pca_data[:, i].min())\n else:\n pca_descaler[i].append(pca_data[:, i].min())\n pca_data[:, i] -= pca_data[:, i].min()\n pca_descaler[i].append(pca_data[:, i].max())", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.data_preprocessing", + "documentation": {} + }, + { + "label": "QuantumGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "peekOfCode": "class QuantumGAN(nn.Module):\n def __init__(self, qnode, weight_shapes):\n super(QuantumGAN, self).__init__()\n self.qlayer = qml.qnn.TorchLayer(qnode, weight_shapes)\n def forward(self, x):\n return self.qlayer(x)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "documentation": {} + }, + { + "label": "create_qnode", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "peekOfCode": "def create_qnode(n_qubits=5):\n dev = qml.device(\"default.qubit\", wires=n_qubits)\n @qml.qnode(dev)\n def qnode(inputs, weights):\n qml.RY(inputs[0], wires=1)\n qml.RY(inputs[1], wires=2)\n qml.RY(inputs[2], wires=3)\n qml.RY(inputs[3], wires=4)\n qml.RX(weights['w000'], wires=1)\n 
qml.RX(weights['w001'], wires=2)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.model", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.train", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.train", + "peekOfCode": "def main(args):\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n # Data Preprocessing\n X_jet = load_data(args.data_path)\n X_jet_resized = normalize_and_resize(X_jet)\n X_jet_sampled = sample_data(X_jet_resized, num_samples=args.num_samples)\n pca, pca_data = apply_pca(X_jet_sampled, n_components=args.pca_components)\n pca_data_rot, pca_descaler = normalize_pca(pca_data, n_components=args.pca_components)\n # PCA Transformer\n pca.data_ = pca_data # Attach data to PCA object for utility functions", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.train", + "documentation": {} + }, + { + "label": "Logloss", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "peekOfCode": "class Logloss(nn.Module):\n def __init__(self):\n super(Logloss, self).__init__()\n def forward(self, output):\n return torch.mean(-torch.log(output[0]))\ndef save_sample_images(model, epoch, pca, pca_descaler, output_dir='outputs/plots', sample_interval=1, device='cpu'):\n if epoch % sample_interval == 0:\n num_samples = 10\n sample_indices = np.random.choice(len(pca.data_), num_samples, replace=False)\n samples = pca.data_[sample_indices]", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "documentation": {} + }, + { + "label": "plot_and_save_graphs", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "peekOfCode": "def plot_and_save_graphs(loss_list, output_list, epoch, output_dir='outputs/plots'):\n plt.figure(figsize=(10, 5))\n plt.subplot(1, 2, 1)\n plt.plot(loss_list, label='Loss')\n plt.xlabel('Epoch')\n plt.ylabel('Loss')\n plt.title('Loss over Epochs')\n plt.legend()\n plt.subplot(1, 2, 2)\n plt.plot(output_list, label='Output')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "documentation": {} + }, + { + "label": "descale_points", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "peekOfCode": "def descale_points(d_point, scales, tfrm):\n for col in range(d_point.shape[1]):\n d_point[:, col] *= scales[col][1]\n d_point[:, col] += scales[col][0]\n reconstruction = tfrm.inverse_transform(d_point)\n return reconstruction\nclass Logloss(nn.Module):\n def __init__(self):\n super(Logloss, self).__init__()\n def forward(self, output):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "documentation": {} + }, + { + "label": "save_sample_images", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "peekOfCode": "def save_sample_images(model, epoch, pca, pca_descaler, output_dir='outputs/plots', sample_interval=1, device='cpu'):\n if epoch % sample_interval == 0:\n num_samples = 10\n sample_indices = np.random.choice(len(pca.data_), num_samples, replace=False)\n samples = pca.data_[sample_indices]\n descaled_samples = 
descale_points(samples, pca_descaler, pca)\n fig, axes = plt.subplots(1, num_samples, figsize=(15, 2))\n for i, sample in enumerate(descaled_samples):\n axes[i].imshow(sample.reshape(16, 16), cmap='viridis')\n axes[i].axis('off')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.IQGAN_replication.src.utils", + "documentation": {} + }, + { + "label": "JetDataset", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "peekOfCode": "class JetDataset(torch.utils.data.Dataset):\n def __init__(self, data):\n self.data = torch.tensor(data, dtype=torch.float32)\n def __len__(self):\n return len(self.data)\n def __getitem__(self, idx):\n return self.data[idx]", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "documentation": {} + }, + { + "label": "load_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "peekOfCode": "def load_data(filepath):\n with h5py.File(filepath, 'r') as jet_mass_data:\n X_jet = jet_mass_data['image']\n print(\"Original shape:\", X_jet.shape)\n X_jet = np.array(X_jet)\n X_jet = torch.tensor(X_jet, dtype=torch.float32)\n return X_jet\ndef normalize_and_resize(X, size=(16, 16)):\n X = (X - X.min()) / (X.max() - X.min())\n X = X.unsqueeze(1)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "documentation": {} + }, + { + "label": "normalize_and_resize", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "peekOfCode": "def normalize_and_resize(X, size=(16, 16)):\n X = (X - X.min()) / (X.max() - X.min())\n X = X.unsqueeze(1)\n X_resized = nn.functional.interpolate(X, size=size, mode='bilinear', align_corners=False)\n print(\"Resized shape:\", X_resized.shape)\n return X_resized\ndef sample_data(X, num_samples=10000):\n indices = random.sample(range(X.shape[0]), num_samples)\n X_sampled = X[indices]\n print(\"Sampled shape:\", X_sampled.shape)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "documentation": {} + }, + { + "label": "sample_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "peekOfCode": "def sample_data(X, num_samples=10000):\n indices = random.sample(range(X.shape[0]), num_samples)\n X_sampled = X[indices]\n print(\"Sampled shape:\", X_sampled.shape)\n return X_sampled\ndef apply_pca(X, n_components=8):\n X_flat = X.view(-1, 256).numpy()\n pca = PCA(n_components=n_components)\n X_pca = pca.fit_transform(X_flat)\n print(f\"PCA applied with {n_components} components.\")", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "documentation": {} + }, + { + "label": "apply_pca", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "description": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "peekOfCode": "def apply_pca(X, n_components=8):\n X_flat = X.view(-1, 256).numpy()\n pca = PCA(n_components=n_components)\n X_pca = pca.fit_transform(X_flat)\n print(f\"PCA applied with {n_components} components.\")\n return pca, X_pca\nclass JetDataset(torch.utils.data.Dataset):\n def __init__(self, data):\n self.data = torch.tensor(data, dtype=torch.float32)\n def __len__(self):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.data_preprocessing", + "documentation": {} + }, + { + "label": "QuantumGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "peekOfCode": "class QuantumGAN(nn.Module):\n def __init__(self, qnode, weight_shapes):\n super(QuantumGAN, self).__init__()\n self.qnn = qml.qnn.TorchLayer(qnode, weight_shapes)\n def forward(self, x):\n return self.qnn(x)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "documentation": {} + }, + { + "label": "create_generator_qnode", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "peekOfCode": "def create_generator_qnode(dev, n_qubits):\n @qml.qnode(dev, interface='torch')\n def generator_qnode(inputs, weights):\n qml.AngleEmbedding(inputs, wires=range(n_qubits))\n qml.StronglyEntanglingLayers(weights, wires=range(n_qubits))\n return [qml.expval(qml.PauliZ(w)) for w in range(n_qubits)]\n return generator_qnode\ndef 
create_discriminator_qnode(dev, n_qubits):\n @qml.qnode(dev, interface='torch')\n def discriminator_qnode(inputs, weights):", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "documentation": {} + }, + { + "label": "create_discriminator_qnode", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "peekOfCode": "def create_discriminator_qnode(dev, n_qubits):\n @qml.qnode(dev, interface='torch')\n def discriminator_qnode(inputs, weights):\n qml.AngleEmbedding(inputs, wires=range(n_qubits))\n qml.StronglyEntanglingLayers(weights, wires=range(n_qubits))\n return qml.expval(qml.PauliZ(0))\n return discriminator_qnode\nclass QuantumGAN(nn.Module):\n def __init__(self, qnode, weight_shapes):\n super(QuantumGAN, self).__init__()", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.model", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.train", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.train", + "peekOfCode": "def main(args):\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n # Data Preprocessing\n X_jet = load_data(args.data_path)\n X_jet_resized = normalize_and_resize(X_jet)\n X_jet_sampled = sample_data(X_jet_resized, num_samples=args.num_samples)\n pca, X_pca = apply_pca(X_jet_sampled, n_components=args.pca_components)\n dataset = JetDataset(X_pca)\n dataloader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True)\n # Define quantum devices", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.train", + "documentation": {} + }, + { + "label": "perceptual_loss", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "peekOfCode": "def perceptual_loss(real_images, fake_images, vgg, pca):\n # Inverse PCA transformation\n real_images = pca.inverse_transform(real_images.cpu().numpy())\n fake_images = pca.inverse_transform(fake_images.cpu().detach().numpy())\n # Reshape to image format\n real_images = torch.tensor(real_images, dtype=torch.float32).view(-1, 1, 16, 16).to(real_images.device)\n fake_images = torch.tensor(fake_images, dtype=torch.float32).view(-1, 1, 16, 16).to(fake_images.device)\n # Upsample to 224x224 and replicate channels\n upsample = transforms.Resize((224, 224))\n real_images = upsample(real_images)", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "documentation": {} + }, + { + "label": "quantum_fidelity", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "peekOfCode": "def quantum_fidelity(real_data, fake_data, pca):\n n_qubits = real_data.size(1)\n dev_fidelity = qml.device('default.qubit', wires=n_qubits)\n @qml.qnode(dev_fidelity, interface='torch')\n def state_preparation(inputs):\n qml.AngleEmbedding(inputs, wires=range(n_qubits))\n return qml.state()\n real_states = state_preparation(real_data).cpu().numpy()\n fake_states = state_preparation(fake_data).cpu().detach().numpy()\n fidelities = []", + "detail": 
"Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "documentation": {} + }, + { + "label": "plot_losses", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "peekOfCode": "def plot_losses(epoch, args):\n # Implement plotting of losses\n pass # To be implemented based on how losses are stored\ndef save_models(generator, discriminator, epoch, args):\n torch.save(generator.state_dict(), f'outputs/models/generator_epoch_{epoch}.pth')\n torch.save(discriminator.state_dict(), f'outputs/models/discriminator_epoch_{epoch}.pth')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "documentation": {} + }, + { + "label": "save_models", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "description": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "peekOfCode": "def save_models(generator, discriminator, epoch, args):\n torch.save(generator.state_dict(), f'outputs/models/generator_epoch_{epoch}.pth')\n torch.save(discriminator.state_dict(), f'outputs/models/discriminator_epoch_{epoch}.pth')", + "detail": "Quantum_GAN_for_HEP_Adithya_Penagonda.Code.quantum_gans.Quantum_gans_with_perceptualQuantumloss.src.utils", + "documentation": {} + }, + { + "label": "one_qubit_unitary", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "peekOfCode": "def one_qubit_unitary(qubit, symbols):\n return cirq.Circuit(\n [cirq.rz(symbols[0])(qubit),\n cirq.ry(symbols[1])(qubit),\n 
cirq.rz(symbols[2])(qubit)]\n )\ndef two_qubit_unitary(qubits):\n cx_ops = [cirq.CX(q0, q1) for q0, q1 in zip(qubits, qubits[1:])]\n cx_ops += ([cirq.CX(qubits[-1], qubits[0])] if len(qubits) != 2 else [])\n return cx_ops", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "documentation": {} + }, + { + "label": "two_qubit_unitary", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "peekOfCode": "def two_qubit_unitary(qubits):\n cx_ops = [cirq.CX(q0, q1) for q0, q1 in zip(qubits, qubits[1:])]\n cx_ops += ([cirq.CX(qubits[-1], qubits[0])] if len(qubits) != 2 else [])\n return cx_ops\ndef pqc_circuit_for_conv(qubits,layers):\n \"\"\"\n Arguments:\n qubits(cirq.GridQubit)\n layers(number of layers)\n Returns:", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "documentation": {} + }, + { + "label": "pqc_circuit_for_conv", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "peekOfCode": "def pqc_circuit_for_conv(qubits,layers):\n \"\"\"\n Arguments:\n qubits(cirq.GridQubit)\n layers(number of layers)\n Returns:\n cirq.Circuit(parameterised circuit)\n sympy symbols for gates having input data\n sympy symbols for gates having parameters\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.demo_circuits", + "documentation": {} + }, + { + "label": "FID", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.frechet_inception_distance", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.frechet_inception_distance", + "peekOfCode": "class FID:\n def __init__(self):\n \"\"\"\n initiates the Inception model for calculating Frechet Inception Distance between two sets of images \n Wikipedia:\n \"The Fréchet inception distance (FID) is a metric used to assess the 
quality of \n images created by a generative model, like a generative adversarial network (GAN).\"\n \"\"\"\n self.model = self.get_model(input_shape=(299,299,3))\n def get_model(self,input_shape,pooling='avg'):", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.frechet_inception_distance", + "documentation": {} + }, + { + "label": "EntangledQGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.model", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.model", + "peekOfCode": "class EntangledQGAN():\n \"\"\"\n Model for Entangled Quantum Generative Adversarial Networks\n Inspiration: https://arxiv.org/abs/2105.00080\n \"\"\"\n def __init__(self, generator_model, discriminator_model,\n use_sampled=False,backend=None,name='QGAN_Model'):\n self.d_loss = []\n self.g_loss = []\n self.param_history = []", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.model", + "documentation": {} + }, + { + "label": "inputs_preprocess", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "peekOfCode": "def inputs_preprocess(inputs,filter_shape,stride,input_rows,input_cols,input_channels,padding=\"same\"):\n \"\"\"\n Processes the input images and returns patches from input\n images according to filter size and stride\n \"\"\"\n kernel_size = (1, 1) + filter_shape + (1,)\n strides = (1, 1) + stride + (1,)\n padding = padding.upper()\n batchsize = lambda x: tf.gather(tf.shape(x), 0)\n # planes = number of channels", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "documentation": {} + }, + { + "label": "get_output_shape", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "peekOfCode": "def get_output_shape(input_shape,filter_shape,stride,padding='same'):\n \"\"\"\n Returns:\n 
output shape for given input shape \n \"\"\"\n if (input_shape[0] % stride[0] == 0):\n pad_along_height = max(filter_shape[0] - stride[0], 0)\n else:\n pad_along_height = max(filter_shape[0] - (input_shape[0] % stride[0]), 0)\n if (input_shape[1] % stride[1] == 0):", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "documentation": {} + }, + { + "label": "crop_images", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "peekOfCode": "def crop_images(data,dimensions):\n \"\"\"\n Arguments: \n data(ndarray) - input images/data)\n dimensions(tuple) - required dimension of images \n Returns: \n cropped images/data\n \"\"\"\n img_size = dimensions[0]\n max_val_pix = np.argmax(np.mean(data[:, :, :], axis=0))", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.preprocess_utils", + "documentation": {} + }, + { + "label": "QGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.QGANS", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.QGANS", + "peekOfCode": "class QGAN():\n \"\"\"\n Class for creating the GANS model\n reference: https://gitlab.cern.ch/clcheng/quple/-/blob/master/quple/models/generative/qgan.py\n \"\"\"\n def __init__(self,discriminator,generator,disc_optimizer,gen_optimizer,generator_loss='negative_binary_cross_entropy'):\n self.generator_model = generator\n self.discriminator_model = discriminator\n self.d_opt = disc_optimizer\n self.g_opt = gen_optimizer", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.QGANS", + "documentation": {} + }, + { + "label": "QConv2D_layer", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.Quantum_Conv2D_layer", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.Quantum_Conv2D_layer", + "peekOfCode": "class QConv2D_layer(tf.keras.layers.Layer):\n \"\"\"\n Layer performs the convolutions similar to 
classical convolutional layers with parameterised quantum circuits as kernel\n \"\"\"\n def __init__(self,circuit_layers,filters,filter_shape,stride,seed,conv_circuit=None,parameter_sharing=True,padding='same',conv_id='',name='Quantum_Convolutional_Layer_with_padding'):\n super(QConv2D_layer,self).__init__(name=name+conv_id)\n self.layers = circuit_layers\n self.filters = filters\n self.parameter_sharing = parameter_sharing\n self.filter_shape = filter_shape", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.Quantum_Conv2D_layer", + "documentation": {} + }, + { + "label": "SwapTestLayer", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_layer", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_layer", + "peekOfCode": "class SwapTestLayer(tf.keras.layers.Layer):\n \"\"\"\n Layer outputs the fidelity between two states created using two data entries \n \"\"\"\n def __init__(self,swap_test_symbol_values,real_data_encoding_circuit=None,gen_data_encoding_circuit=None,use_sampled=False,name='Swap_Test_Layer'):\n super(SwapTestLayer,self).__init__(name=name)\n self.qubits = cirq.GridQubit.rect(8,1)\n self.input_symbols = sp.symbols('i_:'+str(swap_test_symbol_values.shape[0]))\n self.swap_test_symbols = sp.symbols('x_:'+str(len(self.qubits)/2)+'_:'+str(2))\n if real_data_encoding_circuit is not None:", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_layer", + "documentation": {} + }, + { + "label": "one_proj", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "peekOfCode": "def one_proj(a):\n \"\"\"\n returns : projection operator for a given state\n \"\"\"\n return 0.5 * (1 - cirq.Z(a))\ndef count_set_bits(n):\n \"\"\"\n returns : count of the set bits in a decimal number\n \"\"\"\n count=0", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + 
"documentation": {} + }, + { + "label": "count_set_bits", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "peekOfCode": "def count_set_bits(n):\n \"\"\"\n returns : count of the set bits in a decimal number\n \"\"\"\n count=0\n while(n>0):\n count += n&1\n n >>= 1\n return count\ndef swap_test_op(qubits_a,qubits_b):", + "detail": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "documentation": {} + }, + { + "label": "swap_test_op", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "description": "Quantum_GAN_for_HEP_Amey_Bhatuse.QGANSHEP.swap_test_utils", + "peekOfCode": "def swap_test_op(qubits_a,qubits_b):\n \"\"\"\n returns: swap test operator for the variational swap test\n \"\"\"\n ret_op = 0\n for i in range(1< None:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.pauli_block", + "documentation": {} + }, + { + "label": "RISWAPBlock", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.riswap_block", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.riswap_block", + "peekOfCode": "class RISWAPBlock(TemplateCircuitBlock):\n @staticmethod\n def RYY(circuit:'quple.ParameterisedCircuit', theta, qubits:Sequence[int]):\n circuit.RX(np.pi/2, list(qubits))\n circuit.CX(tuple(qubits))\n circuit.RZ(theta, qubits[1])\n circuit.CX(tuple(qubits))\n circuit.RX(-np.pi/2, list(qubits))\n @staticmethod\n def RXX(circuit:'quple.ParameterisedCircuit', theta, qubits:Sequence[int]):", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.riswap_block", + "documentation": {} + }, + { + "label": "TemplateCircuitBlock", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.template_circuit_block", + "description": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.template_circuit_block", + "peekOfCode": "class TemplateCircuitBlock(ABC):\n @abstractmethod\n def build(self, circuit:'quple.ParameterisedCircuit', qubits:Sequence[int]):\n '''Builds the circuit block involving the specified qubits\n '''\n raise NotImplementedError\n @property\n @abstractmethod\n def num_block_qubits(self) -> int:\n \"\"\"Returns the number of qubits in the circuit block", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.templates.template_circuit_block", + "documentation": {} + }, + { + "label": "EfficientSU2", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "peekOfCode": "class EfficientSU2(ParameterisedCircuit):\n '''The efficient SU2 parameterised circuit\n Implementation based on the Qiskit library.\n The EfficientSU2 circuit consists of layers of single qubit\n operations spanned by SU(2) abd a layer of CNOT entanglements.\n This construction is believed to be hardware efficient.\n The EfficientSU2 circuit may be used as a variational circuit\n or a model circuit in the PQC layer of a machine learning model.\n It represents a unitary U(θ) parameterised by a set of free \n parameters θ. 
The circuit may be repeated several times to ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "documentation": {} + }, + { + "label": "SU2_gate_set", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "peekOfCode": "SU2_gate_set = ['RX','RY','RZ','X','Y','Z']\nclass EfficientSU2(ParameterisedCircuit):\n '''The efficient SU2 parameterised circuit\n Implementation based on the Qiskit library.\n The EfficientSU2 circuit consists of layers of single qubit\n operations spanned by SU(2) abd a layer of CNOT entanglements.\n This construction is believed to be hardware efficient.\n The EfficientSU2 circuit may be used as a variational circuit\n or a model circuit in the PQC layer of a machine learning model.\n It represents a unitary U(θ) parameterised by a set of free ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.efficient_su2", + "documentation": {} + }, + { + "label": "ExcitationPreserving", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.excitation_preserving", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.excitation_preserving", + "peekOfCode": "class ExcitationPreserving(ParameterisedCircuit):\n '''The excitation preserving parameterised circuit\n Implementation based on the Qiskit library.\n The ExcitationPreserving circuit consists of layers \n of the two qubit Fermionic simulation, or fSim, gate set. \n Under this gate set, the σ_Xσ_X and σ_Yσ_Y couplings \n between the qubits have equal coefficients which\n conserves the number of excitations of the qubits. 
\n Algorithms performed with just Pauli Z rotations \n and fSim gates enable error mitigation techiques", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.excitation_preserving", + "documentation": {} + }, + { + "label": "IsingCoupling", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.ising_coupling", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.ising_coupling", + "peekOfCode": "class IsingCoupling(ParameterisedCircuit):\n '''The efficient SU2 parameterised circuit\n Implementation based on the Qiskit library.\n The IsingCoupling circuit consists of a layer of single-qubit\n rotation gates and a layer of two-qubit ising coupling gates.\n Such gate set is naturally implemented by trapped ions quantum\n computers.\n The IsingCoupling circuit may be used as a variational circuit\n or a model circuit in the PQC layer of a machine learning model.\n It represents a unitary U(θ) parameterised by a set of free ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.ising_coupling", + "documentation": {} + }, + { + "label": "RealAmplitudes", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.real_amplitudes", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.real_amplitudes", + "peekOfCode": "class RealAmplitudes(ParameterisedCircuit):\n '''The real amplitudes parameterised circuit\n Implementation based on the Qiskit library.\n The RealAmplitudes circuit consists of a layer\n of Pauli Y rotations acting on each qubit followed by\n a layer of CNOT entanglement on pairs of qubits under\n a given interaction graph. The resultant quantum state\n from the RealAmplitudes circuit will only have real\n amplitudes with 0 complex part. 
\n The RealAmplitudes circuit may be used as a variational", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.variational_circuits.real_amplitudes", + "documentation": {} + }, + { + "label": "construct_bell_circuit", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "peekOfCode": "def construct_bell_circuit():\n \"\"\"\n Quantum circuit to create the Bell state:\n |\\Phi ^{+}\\rangle ={\\frac {1}{{\\sqrt {2}}}}(|0\\rangle _{A}\\otimes |0\\rangle _{B}+|1\\rangle _{A}\\otimes |1\\rangle _{B})\n \"\"\"\n cq = QuantumCircuit(2, name='BellCircuit')\n cq.H(0)\n cq.CNOT((0,1))\n return cq\nbell_circuit = construct_bell_circuit()", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "documentation": {} + }, + { + "label": "bell_circuit", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "peekOfCode": "bell_circuit = construct_bell_circuit()", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.common_circuits", + "documentation": {} + }, + { + "label": "ParameterisedCircuit", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.parameterised_circuit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.parameterised_circuit", + "peekOfCode": "class ParameterisedCircuit(QuantumCircuit, ABC):\n \"\"\"Parameterised Quantum Circuit (PQC)\n The `ParameterisedCircuit` architecture consists of alternating rotation and entanglement \n layers that are repeated for a certain number of times. In both layers, parameterized \n circuit-blocks act on the circuit in a defined way. The rotation layer consists of single \n qubit gate operations (rotation blocks) that are applied to every qubit in the circuit. 
\n The entanglement layer consists of two (or multiple) qubit gate operations (entanglement blocks) \n applied to the set of qubits defined by an interaction graph.\n Common single qubit gates in the rotation layer include the Hadamard gate,\n Pauli X, Pauli Y, Pauli Z gate and the corresponding rotation gates", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.parameterised_circuit", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "peekOfCode": "class QuantumCircuit(cirq.Circuit):\n \"\"\"A wrapper for the construction of quantum circuits based on the Google cirq library \n A quantum circuit consists of a system of qubits together with a sequence of unitary operations\n (quantum gates) applied to the qubits which transform the quantum state of the system. Designing\n a quantum circuit is the basis for construction of quantum algorithms for solving problems which\n may be classically unreachable. \n In the scenario of a parameterised quantum circuit, the gate operations are parameterised by a \n symbolic expression via the sympy library. The expressions in a paramterised circuit can \n subsequently be resolved by providing a symbol to value map. Parameterised circuits are important\n for applications in machine learning alogorithms. 
They can serve as data encoding circuits where", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "documentation": {} + }, + { + "label": "kGateMapping", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "peekOfCode": "kGateMapping = {\n \"H\": cirq.H, # Hadamard gate\n \"I\": cirq.I, # one-qubit Identity gate\n \"S\": cirq.S, # Clifford S gate\n \"T\": cirq.T, # non-Clifford T gate\n 'X': cirq.X, # Pauli-X gate\n \"Y\": cirq.Y, # Pauli-Y gate\n \"Z\": cirq.Z, # Pauli-Z gate\n \"PauliX\": cirq.X, # Pauli-X gate\n \"PauliY\": cirq.Y, # Pauli-Y gate", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit_test", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit_test", + "peekOfCode": "class QuantumCircuit(ABC):\n def __init__(self, n_qubit:Union[int, Sequence[GridQubit]]=0, name:str='QuantumCircuit',\n insert_strategy:InsertStrategy=None,\n backend=None) -> None:\n \"\"\"Creates a quantum circuit\n Args:\n n_qubit: int, iterable of cirq.GridQubit\n If int, it specifies the number of qubits in the circuit.\n If iterable of cirq.GridQubit object, it specifies the exact\n qubit layout of the circuit. 
", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.quantum_circuit_test", + "documentation": {} + }, + { + "label": "QubitRegister", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.qubit_register", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.qubit_register", + "peekOfCode": "class QubitRegister():\n \"\"\"Qubit Register (the quantum analog of a classical processor register)\n A qubit register keeps track of the qubits used in a quantum circuit. \n Examples:\n --------\n >>> qr = quple.QubitRegister(5)\n >>> qr.size\n 5\n >>> qr.qubits\n [cirq.GridQubit(0, 0),", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.circuits.qubit_register", + "documentation": {} + }, + { + "label": "DefaultDict", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "peekOfCode": "class DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass QSVM:\n __DEFAULT_SVC_PARAM_GRID__ = {\n 'C': [1,2,3,4,5,8,9,10,15,20,30,50,100,200,400,800,1000],\n }\n def __init__(self, encoding_circuit:Union[\"quple.ParameterisedCircuit\", \"qiskit.QuantumCircuit\"], \n precision:Union[str,DataPrecision]='double', \n kernel_matrix_split:int=1, ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "documentation": {} + }, + { + "label": "QSVM", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "peekOfCode": "class QSVM:\n __DEFAULT_SVC_PARAM_GRID__ = {\n 'C': [1,2,3,4,5,8,9,10,15,20,30,50,100,200,400,800,1000],\n }\n def __init__(self, encoding_circuit:Union[\"quple.ParameterisedCircuit\", \"qiskit.QuantumCircuit\"], \n precision:Union[str,DataPrecision]='double', \n kernel_matrix_split:int=1, \n state_vector_event_split=None, **kwargs):\n self._attributes = 
DefaultDict({})\n self.encoding_circuit = encoding_circuit", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm", + "documentation": {} + }, + { + "label": "DefaultDict", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "peekOfCode": "class DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass QSVMLogger():\n \"\"\"Logger for the quantum support vector machine classifer (QSVM)\n \"\"\"\n def __init__(self, \n log_dir='./logs', \n filename='qsvm_'\n 'circuit_{circuit_name}_'", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "documentation": {} + }, + { + "label": "QSVMLogger", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "peekOfCode": "class QSVMLogger():\n \"\"\"Logger for the quantum support vector machine classifer (QSVM)\n \"\"\"\n def __init__(self, \n log_dir='./logs', \n filename='qsvm_'\n 'circuit_{circuit_name}_'\n 'encoder_{encoding_map}_'\n 'entanglement_{entangle_strategy}_'\n 'nqubit_{n_qubit}_'", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "documentation": {} + }, + { + "label": "logger", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "peekOfCode": "logger = logging.getLogger(__name__)\nlogger.setLevel(logging.DEBUG)\nclass DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass QSVMLogger():\n \"\"\"Logger for the quantum support vector machine classifer (QSVM)\n \"\"\"\n def __init__(self, \n log_dir='./logs', ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_logger", + "documentation": {} + }, + { + 
"label": "QSVM", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "peekOfCode": "class QSVM:\n def __init__(self):\n pass\n @staticmethod\n def get_kernel_matrix(state_vectors_left, state_vectors_right):\n return abs2(state_vectors_left.conjugate() @ state_vectors_right.T)\n @staticmethod\n def run(encoding_circuit, x_train, y_train, x_test, y_test, \n random_seed = 0, backend=None, **kwargs):\n from qiskit.aqua.algorithms import QSVM as QSVM_qiskit", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "documentation": {} + }, + { + "label": "abs2", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "peekOfCode": "def abs2(x):\n return x.real**2 + x.imag**2\nclass QSVM:\n def __init__(self):\n pass\n @staticmethod\n def get_kernel_matrix(state_vectors_left, state_vectors_right):\n return abs2(state_vectors_left.conjugate() @ state_vectors_right.T)\n @staticmethod\n def run(encoding_circuit, x_train, y_train, x_test, y_test, ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.qsvm_qiskit", + "documentation": {} + }, + { + "label": "DefaultDict", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "peekOfCode": "class DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass VQC(tf.keras.Sequential):\n \"\"\"Variational Quantum Classifier (VQC)\n The variational quantum classifier (VQC) is a hybrid quantum-classical algorithm that \n utilizes both quantum and classical resources for solving classification problems. 
The\n VQC is a kind of quantum neural network (QNN) where the model representation of the \n is based entirely on the quantum processor, with classical heuristics participating \n only as optimizers for the trainable parameters of the quantum model.", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "documentation": {} + }, + { + "label": "VQC", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "peekOfCode": "class VQC(tf.keras.Sequential):\n \"\"\"Variational Quantum Classifier (VQC)\n The variational quantum classifier (VQC) is a hybrid quantum-classical algorithm that \n utilizes both quantum and classical resources for solving classification problems. The\n VQC is a kind of quantum neural network (QNN) where the model representation of the \n is based entirely on the quantum processor, with classical heuristics participating \n only as optimizers for the trainable parameters of the quantum model.\n (Ref: https://arxiv.org/pdf/2003.02989.pdf)\n The core components of a VQC involves the construction of two quantum circuits: a data \n encoding circuit for mapping input features into the quantum state of the circuit ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "documentation": {} + }, + { + "label": "logger", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "peekOfCode": "logger = logging.getLogger(__name__)\nlogger.setLevel(logging.INFO)\nch = logging.StreamHandler()\nch.setLevel(logging.INFO)\nlogger.addHandler(ch)\nclass DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass VQC(tf.keras.Sequential):\n \"\"\"Variational Quantum Classifier (VQC)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "documentation": {} + }, + { + "label": 
"ch", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "peekOfCode": "ch = logging.StreamHandler()\nch.setLevel(logging.INFO)\nlogger.addHandler(ch)\nclass DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass VQC(tf.keras.Sequential):\n \"\"\"Variational Quantum Classifier (VQC)\n The variational quantum classifier (VQC) is a hybrid quantum-classical algorithm that \n utilizes both quantum and classical resources for solving classification problems. The", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc", + "documentation": {} + }, + { + "label": "DefaultDict", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "peekOfCode": "class DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass VQCLogger(tf.keras.callbacks.Callback):\n \"\"\"Logger for the variational quantum classifier (VQC)\n \"\"\"\n def __init__(self, log_dir='./logs', \n filename='{encoding_circuit}_{encoding_map}_'\n '{variational_circuit}_{n_qubit}_qubit_'\n '{activation}_{optimizer}_train_{train_size}_'", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "documentation": {} + }, + { + "label": "VQCLogger", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "peekOfCode": "class VQCLogger(tf.keras.callbacks.Callback):\n \"\"\"Logger for the variational quantum classifier (VQC)\n \"\"\"\n def __init__(self, log_dir='./logs', \n filename='{encoding_circuit}_{encoding_map}_'\n '{variational_circuit}_{n_qubit}_qubit_'\n '{activation}_{optimizer}_train_{train_size}_'\n 'val_{val_size}_test_{test_size}_epoch_{epochs}_'\n 
'batch_size_{batch_size}_{time}',\n keys=None,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "documentation": {} + }, + { + "label": "logger", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "peekOfCode": "logger = logging.getLogger(__name__)\nlogger.setLevel(logging.DEBUG)\nclass DefaultDict(dict):\n def __missing__(self, key):\n return key\nclass VQCLogger(tf.keras.callbacks.Callback):\n \"\"\"Logger for the variational quantum classifier (VQC)\n \"\"\"\n def __init__(self, log_dir='./logs', \n filename='{encoding_circuit}_{encoding_map}_'", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.classifiers.vqc_logger", + "documentation": {} + }, + { + "label": "symbols_in_op", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def symbols_in_op(op):\n \"\"\"Returns the set of symbols associated with a parameterized gate operation.\n Arguments:\n op: cirq.Gate\n The parameterised gate operation to find the set of symbols associated with\n Returns:\n Set of symbols associated with the parameterized gate operation\n \"\"\"\n if isinstance(op, cirq.EigenGate):\n return op.exponent.free_symbols", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "symbols_in_expr_map", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def symbols_in_expr_map(expr_map, to_str=False, sort_key=natural_key):\n \"\"\"Returns the set of symbols in an expression map\n Arguments:\n expr_map: cirq.ExpressionMap\n The expression map to find the set of symbols in\n 
to_str: boolean, default=False\n Whether to convert symbol to strings\n sort_key: \n Sort key for the list of symbols\n Returns:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_circuit_unflattened_symbols", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_circuit_unflattened_symbols(circuit:'quple.QuantumCircuit',\n to_str=True,\n sort_key=natural_key):\n \"\"\"Returns a list of unflattened parameter symbols in a circuit\n Arguments:\n circuit: quple.QuantumCircuit\n The circuit to find the unflattened parameter symbols in\n to_str: boolean, default=True\n Whether to convert symbol to strings\n sort_key:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_circuit_symbols", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_circuit_symbols(circuit, to_str=True, sort_key=natural_key):\n \"\"\"Returns a list of parameter symbols in a circuit\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to find the associated parameter symbols\n to_str: boolean, default=True\n Whether to convert symbol to strings\n sort_key:\n Sort key for the list of symbols\n Returns:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_circuit_qubits", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_circuit_qubits(circuit):\n \"\"\"Returns a list of qubits in a circuit\n Arguments:\n 
circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to find the associated qubits\n Returns:\n A list of qubits in the circuit\n \"\"\" \n all_qubits = set()\n for moment in circuit:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_circuit_symbols_in_order", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_circuit_symbols_in_order(circuit, to_str=False):\n \"\"\"Returns a list of parameter symbols in a circuit in order of creation\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to find the associated parameter symbols\n to_str: boolean, default=False\n Whether to convert the sympy symbols to strings\n Returns:\n A list of symbols in the circuit in order of creation\n \"\"\" ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "sample_final_states", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def sample_final_states(circuit, samples=1, data=None, backend=None):\n \"\"\"Samples the final states of a circuit\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to sample the final states\n samples: int\n Number of samples, default=1", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "sample_density_matrices", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def sample_density_matrices(circuit, samples=1, data=None, backend=None):\n \"\"\"Samples the density matrices of a circuit\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to sample the density matrices\n samples: int, default=1\n Number of samples", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "sample_fidelities", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def sample_fidelities(circuit, samples=1, data=None, backend=None):\n \"\"\"Samples the fidelities between two sampled final states of a circuit\n Two independent set of samples of circuit final states are first generated.\n The two set of samples are then paired according to the indices of the set.\n The fidelity between the final states in each pair of the samples is then calculated\n to obtain the sampled fidelties of the circuit. 
\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "circuit_fidelity_pdf", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def circuit_fidelity_pdf(circuit, samples=3000, bins=100, data=None, backend=None):\n \"\"\"Returns the binned probability density function from the sampled fidelties of a circuit\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data. 
\n Arguments: \n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to sample the fidelities\n samples: int, default=3000\n Number of samples", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_data_Haar", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_data_Haar(n_qubit, samples=3000, bins=100):\n \"\"\"Returns the fidelity values sampled from the Haar distribution\n Arguments:\n n_qubit: int\n Number of qubitts\n samples: int, default=3000\n Number of samples\n bins: int, default=100\n Number of bins of the Haar fidelity distribution\n Returns:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "get_pdf_Haar", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def get_pdf_Haar(n_qubit, f_values):\n \"\"\"Returns the Haar probability density function\n Arguments: \n n_qubit: integer\n Number of qubits\n f_values: list/array of float/int\n A collection of fidelity values to be sampled \n Returns:\n A numpy array of the Haar probability density function\n \"\"\" ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "circuit_fidelity_plot", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def circuit_fidelity_plot(circuit, samples=3000, bins=100, data=None, KL=True, epsilon=1e-10, backend=None): \n \"\"\"Returns a plot of the fidelity distribution for the final states sampled from a parameterised circuit 
overlayed with the Haar fidelity distrubiotn\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to sample the fidelities\n samples: int\n Number of samples", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "circuit_expressibility_measure", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def circuit_expressibility_measure(circuit, samples = 3000, bins=100, data=None, relative=False, epsilon=1e-10, backend=None):\n \"\"\"Returns the expressibility measure of a parameterised circuit.\n The expressibility measure is the KL-divergence between the sampled fidelity distribution of the parameterised circuit and the Haar distribution. 
\n Reference: https://arxiv.org/pdf/1905.10876.pdf\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to find the expressibiliy measure", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "Meyer_Wallach_measure", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def Meyer_Wallach_measure(state):\n \"\"\"Returns the Meyer Wallach measure of a quantum state\n Reference: https://arxiv.org/pdf/quant-ph/0305094.pdf\n Arguments:\n state: array like\n The quantum state to calculate the Meyer Wallach measure\n Returns:\n The Meyer Wallach measure of a quantum state\n \"\"\" \n state = np.array(state)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "circuit_entangling_measure", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def circuit_entangling_measure(circuit, samples=200, data=None, backend=None):\n \"\"\"Returns the entangling measure of a parameterised circuit\n The entangling measure of a parameterised circuit is the average Meyer Wallach \n measure of the sampled final states of the circuit.\n Reference: https://arxiv.org/pdf/1905.10876.pdf\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n 
Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "circuit_von_neumann_entropy", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def circuit_von_neumann_entropy(circuit, samples=200, data=None, backend=None):\n \"\"\"Returns the average von Neumann entropy of the sampled density matrices of a parameterised circuit\n If the circuit has parameterised gate operation, random values of\n the symbol values in the range (0, 2π) will be assigned to the gate operation.\n If `data` is given, the symbol values will be assigned according to the given data.\n Arguments:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to find the von Neumann entropy\n samples: int, default=200\n Number of samples", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "gradient_variance_test", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def gradient_variance_test(circuits, op, symbol=None):\n \"\"\"Performs the gradient variance test for a parameter symbol in a parameterised circuit\n Reference: https://www.nature.com/articles/s41467-018-07090-4\n Arguments:\n circuits: list of cirq.Circuit, quple.QuantumCircuit\n The circuits to perform the gradient variance test\n op: cirq.Gate\n The gate operation to sample the expectation value from\n symbol: str, default=None\n The parameter symbol which the values are varied in the gradient variance test.", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "has_composite_symbols", + 
"kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "peekOfCode": "def has_composite_symbols(circuit):\n if not isinstance(circuit, cirq.Circuit):\n raise ValueError(\"circuit must be a cirq.Circuit object.\")\n if (isinstance(circuit, quple.QuantumCircuit)) and (circuit.expr_map is not None):\n circuit = circuit.get_unflattened_circuit() \n for moment in circuit:\n for op in moment:\n if cirq.is_parameterized(op):\n if len(symbols_in_op(op.gate)) > 1:\n return True", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.descriptors", + "documentation": {} + }, + { + "label": "CompositeGate", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "peekOfCode": "class CompositeGate:\n pass\nclass PauliRotation(CompositeGate):\n def __init__(self, pauli_string, theta, global_shift=False):\n self.paulis = pauli_string[::-1]\n self.theta = theta\n self.indices = [i for i, pauli in enumerate(self.paulis) if pauli != 'I']\n self.global_shift = global_shift\n @staticmethod\n def change_basis(*qubits:Sequence[int],", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "documentation": {} + }, + { + "label": "PauliRotation", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "peekOfCode": "class PauliRotation(CompositeGate):\n def __init__(self, pauli_string, theta, global_shift=False):\n self.paulis = pauli_string[::-1]\n self.theta = theta\n self.indices = [i for i, pauli in enumerate(self.paulis) if pauli != 'I']\n self.global_shift = global_shift\n @staticmethod\n def change_basis(*qubits:Sequence[int],\n pauli_string:Sequence[str], inverse=False) -> None:\n # do not 
change basis if only first order pauli operator", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RXX", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "peekOfCode": "def RXX(theta: float) -> ops.XXPowGate:\n \"\"\"The XX Ising coupling gate, a native two-qubit operation in ion traps.\n A rotation around the XX axis in the two-qubit bloch sphere.\n The gate implements the following unitary:\n exp(-i θ XX) = [ cos(θ) 0 0 -isin(θ)]\n [ 0 cos(θ) -isin(θ) 0 ]\n [ 0 -isin(θ) cos(θ) 0 ]\n [-isin(θ) 0 0 cos(θ) ]\n Args:\n theta: float, sympy.Basic", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RYY", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "peekOfCode": "def RYY(theta: float) -> ops.YYPowGate:\n \"\"\"The YY Ising coupling gate\n A rotation around the YY axis in the two-qubit bloch sphere.\n The gate implements the following unitary:\n exp(-i θ YY) = [ cos(θ) 0 0 sin(θ)]\n [ 0 cos(θ) -isin(θ) 0 ]\n [ 0 -isin(θ) cos(θ) 0 ]\n [sin(θ) 0 0 cos(θ) ]\n Args:\n theta: float, sympy.Basic", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "documentation": {} + }, + { + "label": "RZZ", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "peekOfCode": "def RZZ(theta: float) -> ops.ZZPowGate:\n \"\"\"The ZZ Ising coupling gate\n A rotation around the ZZ axis in the two-qubit bloch sphere.\n The gate implements the following unitary:\n exp(-i θ ZZ) = [ exp(iθ/2) 0 0 0 ]\n [ 0 exp(-iθ/2) 0 0 ]\n [ 0 0 exp(-iθ/2) 0 ]\n [ 0 0 0 exp(iθ/2) 
]\n Args:\n rads: float, sympy.Basic", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.gate_ops", + "documentation": {} + }, + { + "label": "nearest_neighbor", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def nearest_neighbor(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The nearest neighbor(linear) interaction\n In the nearest neighbor interaction, each qubit is connected to its\n next nearest neighbor in a linear manner. \n Examples:\n # Nearest neighbor interaction graph for 4 qubit system with 2 qubit interaction\n >> nearest_neighbor(n=4, m=2) \n [(0, 1), (1, 2), (2, 3)]\n # Building a circuit of 4 qubits with a layer of CNOT gates using\n # the nearest neighbor interaction graph", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "cyclic", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def cyclic(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The cyclic(circular) interaction\n In the cyclic interaction, each qubit is connected to its\n next nearest neighbor in a circular manner.\n Examples:\n # Cyclic interaction graph for 4 qubit system with 2 qubit interaction\n >> cyclic(n=4, m=2) \n [(0, 1), (1, 2), (2, 3), (3, 0)]\n # Building a circuit of 4 qubits with a layer of CNOT gates using\n # the cyclic interaction graph", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "fully_connected", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def fully_connected(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The fully-connected(full) interaction\n In the fully-connected interaction, every distinct unordered tuple of m qubits\n are connected exactly once. \n Examples:\n # Fully connected interaction graph for 4 qubit system with 2 qubit interaction\n >> fully_connected(n=4, m=2) \n [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]\n # Building a circuit of 4 qubits with a layer of CNOT gates using\n # the fully connected interaction graph", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "all_to_all", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def all_to_all(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The all-to-all interaction\n In the fully-connected interaction, every distinct ordered tuple of m qubits\n are connected.\n Examples:\n # All to all interaction graph for 4 qubit system with 2 qubit interaction\n >> all_to_all(n=4, m=2) \n [(0, 1),\n (0, 2),\n (0, 3),", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "star", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def star(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The star interaction\n In the star interaction, the first qubit is connected to every other qubit.\n Examples:\n # Star interaction graph for 4 qubit system with 2 qubit interaction\n >> star(n=4, m=2) \n [(0, 1), (0, 2), (0, 3)]\n # Building a circuit of 4 qubits with a layer of CNOT gates using\n # the fully 
connected interaction graph\n >> cq = ParameterisedCircuit(n_qubit=4)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "alternate_linear", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def alternate_linear(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The alternate linear interaction\n In the alternate, all neiboring qubits are connected but in an alternating manner\n Examples:\n # Alternate linear graph for 5 qubit system with 2 qubit interaction\n >> alternate_linear(n=5, m=2) \n [(0, 1), (2, 3), (1, 2), (3, 4)]\n # Building a circuit of 5 qubits with a layer of CNOT gates using\n # the alternate linear interaction graph\n >> cq = ParameterisedCircuit(n_qubit=4)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "alternate_cyclic", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def alternate_cyclic(n:int, m:int=2) -> List[Tuple[int]]:\n \"\"\"The alternate linear interaction\n In the alternate, all neiboring qubits as well as the first and last qubit \n are connected in an alternating manner\n Examples:\n # Alternate cyclic graph for 5 qubit system with 2 qubit interaction\n >> alternate_cyclic(n=5, m=2) \n [(0, 1), (2, 3), (1, 2), (3, 4), (4, 0)]\n # Building a circuit of 5 qubits with a layer of CNOT gates using\n # the alternate linear interaction graph", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "filter_mesh", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def filter_mesh(n:int, m:int=2) -> List[Tuple[int]]: \n #check if number of qubit is a square number\n dimension = sqrt(n)\n if not dimension.is_integer():\n raise ValueError('The \"filter_mesh\" connectivity graphs requires number of qubits being a square number')\n dimension = int(dimension)\n horizontal_filter = [(dimension*i+j, dimension*i+j+1) for i in range(dimension) for j in range(dimension-1)]\n vertical_filter = [(dimension*j+i, dimension*(j+1)+i) for i in range(dimension) for j in range(dimension-1)]\n return horizontal_filter+vertical_filter\ndef pool_mesh(n:int, m:int=2) -> List[Tuple[int]]: ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "pool_mesh", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "def pool_mesh(n:int, m:int=2) -> List[Tuple[int]]: \n #check if number of qubit is a square number\n dimension = sqrt(n)\n if not dimension.is_integer():\n raise ValueError('The \"pool_mesh\" connectivity graphs requires number of qubits being a square number')\n dimension = int(dimension)\n if dimension % 2 != 0:\n raise ValueError('The \"pool_mesh\" connectivity graphs requires number of qubits being a square of an even number')\n horizontal_filter = [(dimension*i+j, dimension*i+j+1) for i in range(dimension) for j in range(0, dimension-1, 2)]\n vertical_filter = [(dimension*j+i, dimension*(j+1)+i) for i in range(dimension) for j in range(0, dimension-1, 2)]", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "interaction_graph", + "kind": 5, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "peekOfCode": "interaction_graph = {\n 'nearest_neighbor': nearest_neighbor,\n 'linear': nearest_neighbor,\n 'cyclic': cyclic,\n 'circular': cyclic,\n 'full': fully_connected,\n 'fully_connected': fully_connected,\n 'all_to_all': all_to_all,\n 'star': star,\n 'alternate_linear': alternate_linear,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.components.interaction_graphs", + "documentation": {} + }, + { + "label": "EncodingCircuit", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_circuit", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_circuit", + "peekOfCode": "class EncodingCircuit(ParameterisedCircuit, ABC):\n '''The data encoding circuit (or state preparation circuit)\n A quantum circuit for encoding classical data into a quantum state by embedding the\n data in the parameters of unitary operations on the circuit qubits using a suitable encoding function.\n If the qubit encoding method is used, the number of qubits in the encoding circuit should match the \n feature dimesion of the input data.\n '''\n def __init__(self, feature_dimension:int, copies:int=1,\n rotation_blocks:Optional[Union[str, cirq.Gate, Callable, TemplateCircuitBlock,\n List[str],List[cirq.Gate],List[Callable],", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_circuit", + "documentation": {} + }, + { + "label": "trial_2_qiskit", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "class trial_2_qiskit:\n def __init__(self, degree=1):\n self.degree = degree\n def __call__(self, x: np.ndarray):\n if len(x) == 1:\n coeff = x[0]**self.degree\n elif len(x) == 2:\n 
coeff = (x[0]+x[1])**self.degree/sp.pi\n else:\n raise ValueError('The encoding map \"trial_2_qiskit\" supports only up to second order feature maps')", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "polynomial", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "class polynomial:\n def __init__(self, degree=1, scale_factor=sp.pi):\n self.degree = degree\n self.scale_factor = scale_factor\n def __call__(self, x):\n coeff = (reduce(lambda m, n: m + n, x)/len(x))**self.degree/self.scale_factor\n return coeff\nclass trial_2:\n def __init__(self, degree=1):\n self.degree = degree", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "trial_2", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "class trial_2:\n def __init__(self, degree=1):\n self.degree = degree\n def __call__(self, x: np.ndarray):\n if len(x) == 1:\n coeff = x[0]**self.degree/sp.pi\n elif len(x) == 2:\n coeff = (x[0]+x[1])**self.degree/sp.pi**2\n else:\n raise ValueError('The encoding map \"trial_2\" supports only up to second order feature maps')", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "trial_0_qiskit", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def trial_0_qiskit(x: np.ndarray) -> float:\n if len(x) == 1:\n coeff = x[0]\n elif len(x) == 2:\n coeff = (x[0]+x[1])/np.pi\n else:\n raise 
ValueError('The encoding map \"trial_0\" supports only up to second order feature maps')\n return coeff\nclass trial_2_qiskit:\n def __init__(self, degree=1):", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "trial_1", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def trial_1(x: np.ndarray) -> float:\n if len(x) == 1:\n coeff = x[0]/sp.pi\n elif len(x) == 2:\n coeff = (x[0]+x[1])/sp.e**2\n elif len(x) == 3:\n coeff = (x[0]+x[1]+x[2])/sp.e**2\n elif len(x) == 4:\n coeff = (x[0]+x[1]+x[2]+x[3])/sp.e**4\n else:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "trial_0", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def trial_0(x: np.ndarray) -> float:\n if len(x) == 1:\n coeff = x[0]/sp.pi\n elif len(x) == 2:\n coeff = (x[0]+x[1])/sp.pi**2\n elif len(x) == 3:\n coeff = (x[0]+x[1]+x[2])/sp.pi**2\n elif len(x) == 4:\n coeff = (x[0]+x[1]+x[2]+x[3])/sp.pi**4\n else:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "qiskit_default", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def qiskit_default(x: np.ndarray) -> float:\n coeff = 2*x[0]/(sp.pi**2) if len(x) == 1 else 2*reduce(lambda m, n: m * n, (np.pi-x))/(sp.pi**2)\n return coeff\ndef self_product(x: np.ndarray) -> float:\n \"\"\"\n Function: \\prod_{i=0, n} x_i\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: 
data", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "self_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def self_product(x: np.ndarray) -> float:\n \"\"\"\n Function: \\prod_{i=0, n} x_i\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "shifted_self_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def shifted_self_product(x: np.ndarray) -> float:\n \"\"\"\n Function: \\prod_{i=0, n} (1-x_i)\n Domain: (-1, +1)\n Range: (0, +2)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "pi_exponent", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def pi_exponent(x: np.ndarray) -> float:\n \"\"\"\n Function: \n f(x) = x if n_dim = 1\n f(x) = pi^(|x_0-x_1|^2/8)/pi if n_dim = 2\n Domain: (-1, +1)\n Range: (pi**(-0.5), 1)\n Args:\n x: data\n Returns:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "cosine_reciprocal", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + 
"peekOfCode": "def cosine_reciprocal(x: np.ndarray) -> float:\n \"\"\"\n Function: \n f(x) = x if n_dim = 1\n f(x) = 1/(3*cos(x_0)*cos(x_1)) if n_dim = 2\n Domain: (-1, +1)\n Range: ?\n Args:\n x: data\n Returns:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "cosine_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def cosine_product(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = \\prod_{i=0, n} cos(x_i)\n Domain: (-1, +1)\n Range: (-1, +1)?\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "modified_cosine_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def modified_cosine_product(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = \\prod_{i=0, n} cos(pi*(x_i+1)/2)\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "modified_sine_cosine_alternate_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def modified_sine_cosine_alternate_product(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = sin(pi*x_0)*cos(pi*x_1)*sin(pi*x_2)*...\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "sine_cosine_alternate_product", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def sine_cosine_alternate_product(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = sin(x_0)*cos(x_1)*sin(x_2)*...\n Domain: (-1, +1)\n Range: (-1, +1)?\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "distance_measure", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def distance_measure(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = (\\prod_{i float:\n \"\"\"\n Calculate the one norm distance between the varaibles\n Function: f(x) = (\\sum_{i float:\n \"\"\"\n Calculate the two norm distance between the varaibles\n Function: f(x) = (\\sum_{i float:\n \"\"\"\n Function: f(x) = (\\sum_{i=0, n} x_i^3)/n\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "arithmetic_mean", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def arithmetic_mean(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = (\\sum_{i=0, n} x_i)/n\n Domain: (-1, +1)\n Range: (-1, +1)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "second_moment", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def second_moment(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = ((\\sum_{i=0, n} (x_i+1)**2)/n)**(1/2)\n Domain: (-1, +1)\n Range: (0, +2)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "exponential_square_sum", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def exponential_square_sum(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = exp((\\sum_{i=0, n} x_i^2/n)-1)*2\n Domain: (-1, +1)\n Range: (2*exp(-1), +2)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "exponential_cube_sum", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "def exponential_cube_sum(x: np.ndarray) -> float:\n \"\"\"\n Function: f(x) = exp((\\sum_{i=0, n} x_i^3/n)-1)*2\n Domain: (-1, +1)\n Range: (2*exp(-2), +2)\n Args:\n x: data\n Returns:\n float: the mapped value\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "encoding_map_registry", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "description": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "peekOfCode": "encoding_map_registry = {\n 'trial_0': trial_0,\n 'trial_1': trial_1,\n 'poly_1': polynomial(degree=1),\n 'poly_2': polynomial(degree=2),\n 'poly_3': polynomial(degree=3),\n 'poly_4': polynomial(degree=4),\n 'qiskit_default': qiskit_default,\n 'self_product': self_product,\n 'modified_cosine_product': modified_cosine_product, ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.encoding_maps", + "documentation": {} + }, + { + "label": "FirstOrderPauliZEncoding", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.first_order_pauli_z_encoding", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.first_order_pauli_z_encoding", + "peekOfCode": "class FirstOrderPauliZEncoding(GeneralPauliZEncoding):\n \"\"\"The first order Pauli Z encoding circuit\n An encoding circuit consisting of layers of unitary operators of the\n form exp(iψ(x)Z)H^{⊗n} where ψ is a data encoding function, Z is the \n Pauli Z operator and and x = (x_1, . . . , x_n ) are the input features\n to be encoded.\n To encode data of feature dimension n, a set of general Pauli operators \n are chosen to encode the data into an n qubit circuit. 
Each Pauli operator\n will contribute to a unitary operation \\exp(i\\sum_{s\\in S}ψ_s(x_s)Z_s)H^{⊗n}\n where S is the set of qubit indices for all qubits in the circuit.", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.first_order_pauli_z_encoding", + "documentation": {} + }, + { + "label": "GeneralPauliEncoding", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_encoding", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_encoding", + "peekOfCode": "class GeneralPauliEncoding(EncodingCircuit):\n '''The general Pauli encoding circuit\n An encoding circuit consisting of layers of unitary operators of the\n form exp(iψ(x)Σ)H^{⊗n} where ψ is a data encoding function, Σ is a\n generalized Pauli operator from the general Pauli group G_n \n which is an n-fold tensor product of Pauli operators on n\n qubits, and x = (x_1, . . . , x_n ) are the input features to be encoded.\n To encode data of feature dimension n, a set of general Pauli operators \n are chosen to encode the data into an n qubit circuit. Each Pauli operator\n will contribute to a unitary operation \\exp(i\\sum_{s\\in S}ψ_s(x_s)Σ_s) where", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_encoding", + "documentation": {} + }, + { + "label": "GeneralPauliZEncoding", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_z_encoding", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_z_encoding", + "peekOfCode": "class GeneralPauliZEncoding(GeneralPauliEncoding):\n \"\"\"The general Pauli Z encoding circuit\n An encoding circuit consisting of layers of unitary operators of the\n form exp(iψ(x)Z^{⊗k})H^{⊗n} where ψ is a data encoding function, Z^{⊗k}\n is a k fold tensor product of Pauli Z operator for some k <= n on n qubits,\n and x = (x_1, . . . 
, x_n ) are the input features to be encoded.\n To encode data of feature dimension n, a set of general Pauli operators \n are chosen to encode the data into an n qubit circuit. Each Pauli operator\n will contribute to a unitary operation \\exp(i\\sum_{s\\in S}ψ_s(x_s)Σ_s)H^{⊗n}\n where s is the indices of a subset of all qubit indices S. For a general Pauli ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.general_pauli_z_encoding", + "documentation": {} + }, + { + "label": "SecondOrderPauliZEncoding", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.second_order_pauli_z_encoding", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.second_order_pauli_z_encoding", + "peekOfCode": "class SecondOrderPauliZEncoding(GeneralPauliZEncoding):\n \"\"\"The second order Pauli Z encoding circuit\n An encoding circuit consisting of layers of unitary operators of the\n form exp(iψ(x)Z⊗Z)H^{⊗n} where ψ is a data encoding function, Z⊗Z is the \n tensor product of two Pauli Z operators and x = (x_1, . . . , x_n ) are \n the input features to be encoded.\n To encode data of feature dimension n, a set of general Pauli operators \n are chosen to encode the data into an n qubit circuit. Each Pauli operator\n will contribute to a unitary operation \\exp(i\\sum_{s\\in S}ψ_s(x_s)Z_s) where\n S is a set of 2-tuple of qubit indices determined by the interaction graph. 
", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.data_encoding.second_order_pauli_z_encoding", + "documentation": {} + }, + { + "label": "PQC", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.pqc", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.pqc", + "peekOfCode": "class PQC(tf.keras.layers.Layer):\n \"\"\"Parametrized Quantum Circuit (PQC) Layer.\n This layer is for training parameterized quantum models.\n Given a parameterized circuit, this layer initializes the parameters\n and manages them in a Keras native way.\n We start by defining a simple quantum circuit on one qubit.\n This circuit parameterizes an arbitrary rotation on the Bloch sphere in\n terms of the three angles a, b, and c:\n >>> q = cirq.GridQubit(0, 0)\n >>> (a, b, c) = sympy.symbols(\"a b c\")", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.pqc", + "documentation": {} + }, + { + "label": "QConv2D", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.qconv2d", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.qconv2d", + "peekOfCode": "class QConv2D(PQC):\n \"\"\"Quantum Convolution 2D Layer.\n Quantum convolution uses a quantum filter as the basic building block. \n It replaces the classical filter by a Parameterised Quantum Circuit (PQC) \n which scan across the local regions (the receptive field) of an image. \n A quantum filter transforms the pixel values in the local region into the \n quantum states of its data circuit via a suitable feature map, hence projecting\n the data into a higher dimensional quantum (Hilbert) space. 
The quantum states \n are then propagated to the kernel circuit of the quantum filter which undergoes a \n sequence parameterised gate operations and outputs the expectation value of a ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.layers.qconv2d", + "documentation": {} + }, + { + "label": "qAUC", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.metrics.metrics", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.metrics.metrics", + "peekOfCode": "class qAUC(tf.keras.metrics.AUC):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n def update_state(self, y_true, y_pred, sample_weight=None):\n y_pred_ = (y_pred+1)/2\n super().update_state(y_true, y_pred_, sample_weight=sample_weight)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.metrics.metrics", + "documentation": {} + }, + { + "label": "resolve_inputs", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "def resolve_inputs(formulas, symbols, inputs):\n param_dicts = pd.DataFrame(inputs, columns=symbols).to_dict(\"records\")\n return tf.convert_to_tensor(parallel_run(resolve_formulas, repeat(formulas), param_dicts))\ndef resolve_formulas(formulas, param_dict):\n return [resolve_formula(formula, param_dict) for formula in formulas]\ndef resolve_formula(formula, param_dict, recursive:bool=True, deep_eval_map=None):\n if not isinstance(param_dict, dict):\n raise ValueError(\"resolver must be a dictionary mapping the raw symbol\"\n \" to the corresponding value\")\n # Input is a pass through type, no resolution needed: return early", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "resolve_formulas", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "def resolve_formulas(formulas, param_dict):\n return [resolve_formula(formula, param_dict) for formula in formulas]\ndef resolve_formula(formula, param_dict, recursive:bool=True, deep_eval_map=None):\n if not isinstance(param_dict, dict):\n raise ValueError(\"resolver must be a dictionary mapping the raw symbol\"\n \" to the corresponding value\")\n # Input is a pass through type, no resolution needed: return early\n value = resolve_value(formula)\n if value is not NotImplemented:\n return value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "resolve_formula", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "def resolve_formula(formula, param_dict, recursive:bool=True, deep_eval_map=None):\n if not isinstance(param_dict, dict):\n raise ValueError(\"resolver must be a dictionary mapping the raw symbol\"\n \" to the corresponding value\")\n # Input is a pass through type, no resolution needed: return early\n value = resolve_value(formula)\n if value is not NotImplemented:\n return value\n # Handles 2 cases:\n # formula is a string and maps to a number in the dictionary", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "resolve_value", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "def resolve_value(val: Any):\n if isinstance(val, numbers.Number) and not isinstance(val, sympy.Basic):\n return val\n elif isinstance(val, tf.Tensor):\n return val \n elif 
isinstance(val, sympy_numbers.IntegerConstant):\n return val.p\n elif isinstance(val, sympy_numbers.RationalConstant):\n return val.p / val.q\n elif val == sympy.pi:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "_RecursionFlag", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "_RecursionFlag = object()\ntf_ops_map = {\n sympy.sin: tf.sin,\n sympy.cos: tf.cos,\n sympy.tan: tf.tan,\n sympy.asin: tf.asin,\n sympy.acos: tf.acos,\n sympy.atan: tf.atan,\n sympy.atan2: tf.atan2,\n sympy.cosh: tf.cosh,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "tf_ops_map", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "peekOfCode": "tf_ops_map = {\n sympy.sin: tf.sin,\n sympy.cos: tf.cos,\n sympy.tan: tf.tan,\n sympy.asin: tf.asin,\n sympy.acos: tf.acos,\n sympy.atan: tf.atan,\n sympy.atan2: tf.atan2,\n sympy.cosh: tf.cosh,\n sympy.tanh: tf.tanh,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.resolvers", + "documentation": {} + }, + { + "label": "stack", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "def stack(func, lambda_set, intermediate=None):\n if intermediate is None:\n return stack(func, lambda_set[1:], lambda_set[0])\n if len(lambda_set) > 0:\n new_lambda = lambda x:func(intermediate(x), lambda_set[0](x))\n return stack(func, lambda_set[1:], new_lambda)\n else:\n return intermediate\ndef resolve_value(val):\n if isinstance(val, numbers.Number) and not 
isinstance(val, sympy.Basic):", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "resolve_value", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "def resolve_value(val):\n if isinstance(val, numbers.Number) and not isinstance(val, sympy.Basic):\n return tf.constant(float(val), dtype=tf.float32)\n elif isinstance(val, (sympy_numbers.IntegerConstant, sympy_numbers.Integer)):\n return tf.constant(float(val.p), dtype=tf.float32)\n elif isinstance(val, (sympy_numbers.RationalConstant, sympy_numbers.Rational)):\n return tf.divide(tf.constant(val.p, dtype=tf.float32), tf.constant(val.q, dtype=tf.float32))\n elif val == sympy.pi:\n return tf.constant(np.pi, dtype=tf.float32)\n else:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "resolve_formulas", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "def resolve_formulas(formulas, symbols):\n lambda_set = [resolve_formula(f, symbols) for f in formulas]\n stacked_ops = stack(lambda x, y:tf.concat((x, y), 0), lambda_set)\n n_formula = tf.constant([len(formulas)])\n transposed_x = lambda x: tf.transpose(x, perm=tf.roll(tf.range(tf.rank(x)), shift=1, axis=0))\n resolved_x = lambda x: stacked_ops(transposed_x(x))\n reshaped_x = lambda x: tf.reshape(resolved_x(x), \n tf.concat((n_formula, tf.strided_slice(tf.shape(x), begin=[0], end=[-1])), axis=0))\n transformed_x = lambda x: tf.transpose(reshaped_x(x), perm=tf.roll(tf.range(tf.rank(x)), shift=-1, axis=0))\n return transformed_x", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + 
"documentation": {} + }, + { + "label": "resolve_formulas_legacy", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "def resolve_formulas_legacy(formulas, symbols):\n lambda_set = [resolve_formula(f, symbols) for f in formulas]\n stacked_ops = stack(lambda x, y:tf.concat((x, y), 0), lambda_set)\n n_formula = len(formulas)\n transformed_x = lambda x: tf.transpose(tf.reshape(stacked_ops(tf.transpose(x)), (n_formula, tf.gather(tf.shape(x), 0))))\n return transformed_x \ndef resolve_formula(formula, symbols):\n # Input is a pass through type, no resolution needed: return early\n value = resolve_value(formula)\n if value is not NotImplemented:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "resolve_formula", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "def resolve_formula(formula, symbols):\n # Input is a pass through type, no resolution needed: return early\n value = resolve_value(formula)\n if value is not NotImplemented:\n return lambda x:value\n # Handles 2 cases:\n # formula is a string and maps to a number in the dictionary\n # formula is a symbol and maps to a number in the dictionary\n # in both cases, return it directly.\n if formula in symbols:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "tf_ops_map", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "peekOfCode": "tf_ops_map = {\n sympy.sin: tf.sin,\n sympy.cos: tf.cos,\n sympy.tan: tf.tan,\n 
sympy.asin: tf.asin,\n sympy.acos: tf.acos,\n sympy.atan: tf.atan,\n sympy.atan2: tf.atan2,\n sympy.cosh: tf.cosh,\n sympy.tanh: tf.tanh,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_resolvers", + "documentation": {} + }, + { + "label": "get_paddings", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "peekOfCode": "def get_paddings(input_shape, filter_shape, strides):\n \"\"\"\n Arguments:\n input_shape:\n `(rows, cols)`\n filter_shape:\n `(rows, cols)`\n strides:\n `(rows, cols)`\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "documentation": {} + }, + { + "label": "get_padded_shape", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "peekOfCode": "def get_padded_shape(input_shape, filter_shape, strides, padding=\"same\"):\n \"\"\"\n Arguments:\n input_shape:\n `(rows, cols)`\n filter_shape:\n `(rows, cols)`\n strides:\n `(rows, cols)`\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "documentation": {} + }, + { + "label": "get_output_shape", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "peekOfCode": "def get_output_shape(input_shape, filter_shape, strides, padding=\"same\"):\n \"\"\"\n Arguments:\n input_shape:\n `(rows, cols)`\n filter_shape:\n `(rows, cols)`\n strides:\n `(rows, cols)`\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.tfq.tf_utils", + "documentation": {} + }, + { + "label": "CirqInterface", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.cirq_interface", + "description": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.cirq_interface", + "peekOfCode": "class CirqInterface(ABC):\n __GATE_MAPPING__ = {\n \"H\": cirq.H, # Hadamard gate\n \"I\": cirq.I, # one-qubit Identity gate\n \"S\": cirq.S, # Clifford S gate\n \"T\": cirq.T, # non-Clifford T gate\n 'X': cirq.X, # Pauli-X gate\n \"Y\": cirq.Y, # Pauli-Y gate\n \"Z\": cirq.Z, # Pauli-Z gate\n \"PauliX\": cirq.X, # Pauli-X gate", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.cirq_interface", + "documentation": {} + }, + { + "label": "QuantumPlatform", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "peekOfCode": "class QuantumPlatform(Enum):\n\tCIRQ = 1\n\tQISKIT = 2\n\tBRACKET = 3", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "documentation": {} + }, + { + "label": "\tCIRQ", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "peekOfCode": "\tCIRQ = 1\n\tQISKIT = 2\n\tBRACKET = 3", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "documentation": {} + }, + { + "label": "\tQISKIT", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "peekOfCode": "\tQISKIT = 2\n\tBRACKET = 3", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "documentation": {} + }, + { + "label": "\tBRACKET", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "peekOfCode": "\tBRACKET = 3", + "detail": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.interface.interface_ctrl", + "documentation": {} + }, + { + "label": "QGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qgan", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qgan", + "peekOfCode": "class QGAN(AbstractModel):\n \"\"\"Quantum Generative Adversarial Network (QGAN)\n \"\"\" \n def __init__(self, generator:Model, \n discriminator:Model,\n latent_dim:Optional[Union[int, Tuple]]=None,\n n_disc:int=1,\n epochs:int=100, \n batch_size:int=32,\n optimizer:Optional[Union[str, Dict]]=None,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qgan", + "documentation": {} + }, + { + "label": "QWGAN", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qwgan", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qwgan", + "peekOfCode": "class QWGAN(QGAN):\n \"\"\"Quantum Wasserstein Generative Adversarial Network (QWGAN)\n \"\"\" \n def __init__(self, generator:Model,\n discriminator:Model,\n latent_dim:Optional[Union[int, Tuple]]=None,\n n_disc:int=3,\n epochs:int=100, \n batch_size:int=32,\n optimizer:Optional[Union[str, Dict]]=None,", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.generative.qwgan", + "documentation": {} + }, + { + "label": "AbstractModel", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.abstract_model", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.abstract_model", + "peekOfCode": "class AbstractModel(ABC):\n \"\"\"Abstract class for building quantum models\"\"\"\n def __init__(self, name:Optional[str]=None,\n random_state:Optional[int]=None, \n checkpoint_dir:Optional[str]=None,\n checkpoint_interval:int=10,\n checkpoint_max_to_keep:Optional[int]=None):\n \"\"\"Instantiate the quantum model\n Arguments:\n name: (Optional) string", + "detail": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.models.abstract_model", + "documentation": {} + }, + { + "label": "FeatureMaps01", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "peekOfCode": "class FeatureMaps01(FeatureMap):\n \"\"\"\n Mapping data with the second order expansion followed by entangling gates.\n Refer to https://arxiv.org/pdf/1804.11326.pdf for details.\n \"\"\"\n CONFIGURATION = {\n 'name': 'MultiVariableMap',\n 'description': 'Second order expansion for feature map',\n 'input_schema': {\n '$schema': 'http://json-schema.org/schema#',", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "documentation": {} + }, + { + "label": "FeatureMapDev", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "peekOfCode": "class FeatureMapDev(PauliFeatureMap):\n def pauli_evolution(self, pauli_string, time):\n \"\"\"Get the evolution block for the given pauli string.\"\"\"\n # for some reason this is in reversed order\n pauli_string = pauli_string[::-1]\n # trim the pauli string if identities are included\n trimmed = []\n indices = []\n for i, pauli in enumerate(pauli_string):\n if pauli != 'I':", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "documentation": {} + }, + { + "label": "FeatureMaps02", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "peekOfCode": "class FeatureMaps02(PauliFeatureMap):\n def __init__(self,\n feature_dimension: Optional[int] = None,\n reps: int = 2,\n entanglement: Union[str, List[List[int]], Callable[[int], List[int]]] = 'full',\n 
paulis: Optional[List[str]] = None,\n data_map_func: Optional[Callable[[np.ndarray], float]] = None,\n parameter_prefix: str = 'x',\n insert_barriers: bool = False,\n degree: int= 2", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.feature_maps", + "documentation": {} + }, + { + "label": "construct_circuit", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def construct_circuit(x, feature_map):\n q = QuantumRegister(feature_map.num_qubits, 'q')\n c = ClassicalRegister(feature_map.num_qubits, 'c')\n qc = QuantumCircuit(q, c)\n # write input state from sample distribution\n if isinstance(feature_map, FeatureMap):\n qc += feature_map.construct_circuit(x, q)\n else:\n raise ValueError('Only FeatureMap istance is allowed')\n return qc", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "execute", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def execute(circuits, quantum_instance):\n return quantum_instance.execute(circuits, had_transpiled=True)\ndef assign_parameters(param_values, param_names, feature_map):\n return feature_map.assign_parameters({param_names: param_values})\ndef batch_assign_parameters(param_values, param_names, feature_map):\n return [feature_map.assign_parameters({param_names: val}) for val in param_values]\ndef get_qiskit_state_vectors_deprecated(quantum_instance, feature_map, x, batchsize=100):\n if not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n q = QuantumRegister(feature_map.num_qubits, 'q')", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + 
{ + "label": "assign_parameters", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def assign_parameters(param_values, param_names, feature_map):\n return feature_map.assign_parameters({param_names: param_values})\ndef batch_assign_parameters(param_values, param_names, feature_map):\n return [feature_map.assign_parameters({param_names: val}) for val in param_values]\ndef get_qiskit_state_vectors_deprecated(quantum_instance, feature_map, x, batchsize=100):\n if not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n q = QuantumRegister(feature_map.num_qubits, 'q')\n c = ClassicalRegister(feature_map.num_qubits, 'c')\n qc = QuantumCircuit(q, c)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "batch_assign_parameters", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def batch_assign_parameters(param_values, param_names, feature_map):\n return [feature_map.assign_parameters({param_names: val}) for val in param_values]\ndef get_qiskit_state_vectors_deprecated(quantum_instance, feature_map, x, batchsize=100):\n if not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n q = QuantumRegister(feature_map.num_qubits, 'q')\n c = ClassicalRegister(feature_map.num_qubits, 'c')\n qc = QuantumCircuit(q, c)\n if isinstance(feature_map, QuantumCircuit):\n use_parameterized_circuits = True", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "get_qiskit_state_vectors_deprecated", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def get_qiskit_state_vectors_deprecated(quantum_instance, feature_map, x, batchsize=100):\n if not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n q = QuantumRegister(feature_map.num_qubits, 'q')\n c = ClassicalRegister(feature_map.num_qubits, 'c')\n qc = QuantumCircuit(q, c)\n if isinstance(feature_map, QuantumCircuit):\n use_parameterized_circuits = True\n else:\n use_parameterized_circuits = feature_map.support_parameterized_circuit ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "get_qiskit_state_vectors_test", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def get_qiskit_state_vectors_test(quantum_instance, feature_map, x, batchsize=100):\n if not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n q = QuantumRegister(feature_map.num_qubits, 'q')\n c = ClassicalRegister(feature_map.num_qubits, 'c')\n qc = QuantumCircuit(q, c)\n feature_map_params = ParameterVector('x', feature_map.feature_dimension)\n parameterized_circuit = QSVM._construct_circuit(\n (feature_map_params, feature_map_params), feature_map, False,\n is_statevector_sim=True)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "get_qiskit_state_vectors", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "peekOfCode": "def get_qiskit_state_vectors(quantum_instance, feature_map, x, batchsize=None):\n if 
not quantum_instance.is_statevector:\n raise ValueError('Quantum instance must be a statevector simulator')\n n_qubit = feature_map.num_qubits\n data_size = x.shape[0]\n q = QuantumRegister(n_qubit, 'q')\n c = ClassicalRegister(n_qubit, 'c')\n qc = QuantumCircuit(q, c)\n feature_map_params = ParameterVector('x', feature_map.feature_dimension)\n parameterized_circuit = QSVM._construct_circuit(", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.qiskit_interface.tools", + "documentation": {} + }, + { + "label": "train_val_test_split", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "peekOfCode": "def train_val_test_split(x:np.ndarray, y:np.ndarray, train_size=None, val_size=None,\n test_size=None, stratify=None, shuffle=True, \n random_state=None, weight:np.ndarray=None):\n \"\"\"Split inputs into the training, validation and test set based on sklearn train_test_split\n Allowed inputs are lists, numpy arrays, scipy-sparse matrices or pandas dataframes.\n Args:\n x, y: sequence of indexables with same length / shape[0]\n train_size: float or int, default=None\n If float, should be between 0.0 and 1.0 and represent the proportion of the dataset to include in the train split. If int, represents the absolute number of train samples. 
If both train_size, val_size and test_size are None, it will be set to 0.7.\n val_size: float or int, default=None", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "documentation": {} + }, + { + "label": "prepare_train_val_test", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "peekOfCode": "def prepare_train_val_test(x:np.ndarray, y:np.ndarray, train_size=None, val_size=None,\n test_size=None, preprocessors=None, shuffle=True, stratify=None,\n random_state=None, weight=None):\n \"\"\"Prepares training, validation and test datasets from inputs with proprocessing \n Allowed inputs are lists, numpy arrays, scipy-sparse matrices or pandas dataframes.\n Args:\n x, y: sequence of indexables with same length / shape[0]\n train_size: float or int, default=None\n If float, should be between 0.0 and 1.0 and represent the proportion of the dataset to include in the train split. If int, represents the absolute number of train samples. 
If both train_size, val_size and test_size are None, it will be set to 0.7.\n val_size: float or int, default=None", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "documentation": {} + }, + { + "label": "rescale_data", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "peekOfCode": "def rescale_data(*data, val_range:Tuple=(0, 1.)):\n min_value = np.min([np.min(d) for d in data])\n max_value = np.max([np.max(d) for d in data])\n range_min = val_range[0]\n range_max = val_range[1]\n rescaled_data = tuple([(((d-min_value)/(max_value-min_value))*(range_max-range_min))+range_min for d in data])\n if len(rescaled_data) == 1:\n return rescaled_data[0]\n return rescaled_data\ndef crop_images(images:np.ndarray, dimension:Tuple[int]):", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "documentation": {} + }, + { + "label": "crop_images", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "peekOfCode": "def crop_images(images:np.ndarray, dimension:Tuple[int]):\n \"\"\"Crop the central region of an image (2D array) or multiple images (3D array) to the required dimension\n Arguments:\n image: numpy.ndarray\n In case of a 2D array, it represents the pixel values of an image with shape (rows, cols)\n In case of a 3D array, it represents a batch of images with shape (batchsize, rows, cols)\n dimension: Tuple[int]\n A 2-tuple specifying the (width, height) of the cropped frame\n Returns:\n Image(s) cropped to the specified dimension", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.data_preparation", + "documentation": {} + }, + { + "label": "DataPrecision", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + 
"description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "peekOfCode": "class DataPrecision(Enum):\n single = 1\n double = 2\n@numba.vectorize([numba.float64(numba.complex128),numba.float32(numba.complex64)])\ndef abs2(x):\n # element-wise modulus square\n return x.real**2 + x.imag**2\ndef gramian_matrix(A:np.ndarray, B:np.ndarray):\n # The Gramian matrix\n # G = AB*", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "documentation": {} + }, + { + "label": "abs2", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "peekOfCode": "def abs2(x):\n # element-wise modulus square\n return x.real**2 + x.imag**2\ndef gramian_matrix(A:np.ndarray, B:np.ndarray):\n # The Gramian matrix\n # G = AB*\n return A @ B.conjugate().T\ndef split_gramian_matrix(A:np.ndarray, B:np.ndarray, n_split:int=1):\n if n_split == 1:\n return gramian_matrix(A, B)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "documentation": {} + }, + { + "label": "gramian_matrix", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "peekOfCode": "def gramian_matrix(A:np.ndarray, B:np.ndarray):\n # The Gramian matrix\n # G = AB*\n return A @ B.conjugate().T\ndef split_gramian_matrix(A:np.ndarray, B:np.ndarray, n_split:int=1):\n if n_split == 1:\n return gramian_matrix(A, B)\n # output dimension\n dimension = A.shape[0]\n if A.shape != B.shape:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "documentation": {} + }, + { + "label": "split_gramian_matrix", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "peekOfCode": "def split_gramian_matrix(A:np.ndarray, B:np.ndarray, n_split:int=1):\n 
if n_split == 1:\n return gramian_matrix(A, B)\n # output dimension\n dimension = A.shape[0]\n if A.shape != B.shape:\n raise ValueError('Input matrices must have the same shape')\n if A.dtype != B.dtype:\n raise ValueError('Input matrices must have the same data type')\n if dimension % n_split != 0:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.mathext", + "documentation": {} + }, + { + "label": "set_global_random_seed", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.setup", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.setup", + "peekOfCode": "def set_global_random_seed(seed:int=None, tf_seed=True):\n \"\"\"Set global random seed for various python modules such as random, numpy and tensorflow\n \"\"\"\n random.seed(seed)\n np.random.seed(seed)\n if tf_seed:\n import tensorflow as tf\n tf.random.set_seed(seed)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.setup", + "documentation": {} + }, + { + "label": "ByteExpression", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "class ByteExpression(Enum):\n KB = 1\n MB = 2\n GB = 3\n TB = 4\n PB = 5\ndef get_cpu_count():\n return multiprocessing.cpu_count()\ndef get_gpu_count():\n import torch", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_cpu_count", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def get_cpu_count():\n return multiprocessing.cpu_count()\ndef get_gpu_count():\n import torch\n return torch.cuda.device_count()\ndef get_gpu_device_names():\n import torch\n return [torch.cuda.get_device_name(i) for i in range(get_gpu_count)]\ndef get_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n 
return psutil.virtual_memory().total/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_gpu_count", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def get_gpu_count():\n import torch\n return torch.cuda.device_count()\ndef get_gpu_device_names():\n import torch\n return [torch.cuda.get_device_name(i) for i in range(get_gpu_count)]\ndef get_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().total/1024**byte_expr.value\ndef get_available_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().available/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_gpu_device_names", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def get_gpu_device_names():\n import torch\n return [torch.cuda.get_device_name(i) for i in range(get_gpu_count)]\ndef get_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().total/1024**byte_expr.value\ndef get_available_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().available/1024**byte_expr.value\ndef get_used_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().used/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_virtual_memory", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def 
get_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().total/1024**byte_expr.value\ndef get_available_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().available/1024**byte_expr.value\ndef get_used_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().used/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_available_virtual_memory", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def get_available_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().available/1024**byte_expr.value\ndef get_used_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().used/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "get_used_virtual_memory", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "peekOfCode": "def get_used_virtual_memory(byte_expr:ByteExpression = ByteExpression.GB):\n return psutil.virtual_memory().used/1024**byte_expr.value", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.sysinfo", + "documentation": {} + }, + { + "label": "replace_symbol_in_op", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def replace_symbol_in_op(op, old_symbol:sp.Symbol, new_symbol:sp.Symbol) -> None:\n \"\"\"Replace symbols in a parameterised gate operation with new symbols\n Args:\n op: gate operation to which the associated 
symbols are replaced\n old_symbol: the original symbol associated with the gate operation\n new_symbol: the new symbol which the original symbol is to be replaced by\n \"\"\"\n if isinstance(op, cirq.EigenGate):\n if old_symbol in op.exponent.free_symbols:\n op._exponent = op.exponent.subs(old_symbol, new_symbol)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "resolve_expression_map_conflicts", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def resolve_expression_map_conflicts(old_expr_map, new_expr_map):\n \"\"\"Recovers reflattened expressions from a new expression map given the old expression map\n Args:\n old_expr_map: cirq.ExpressionMap\n The original expression map which maps flattened expressions to unflattened expressions\n new_expr_map: cirq.ExpressionMap\n The new expression map which is possibly reflattened from a flattened expressions\n Returns:\n An expression map with reflattened expressions recovered\n \"\"\" ", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "pqc_symbol_map", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def pqc_symbol_map(circuit:cirq.Circuit, symbols_map) -> cirq.Circuit:\n \"\"\"Maps the old symbols in a circuit with the new ones\n Args:\n circuit: cirq.Circuit, quple.QuantumCircuit\n The circuit to map the parameter symbols\n symbols_map:\n A dictionary that maps old parameter symbols to new ones\n Returns:\n A new circuit with the old parameter symbols replaced by the new ones\n \"\"\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "merge_pqc", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def merge_pqc(circuits:List[cirq.Circuit], symbol:str='θ') -> cirq.Circuit:\n \"\"\"Merges a list of parameterized circuit and updates the parameter symbols\n Circuits are merged in the order they are listed. The set of symbols in one circuit \n will be treated as distinct from the set of symbols in another circuit. The merged\n circuit will have all parameter symbols of the form {symbol}_{index}. \n Args:\n circuits: list of cirq.Circuit or quple.QuantumCircuit\n The circuits to merge\n symbol: str\n The parameter symbol prefix for the merged circuit", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "plot_stacked_roc_curve", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def plot_stacked_roc_curve(fpr_list:List[np.ndarray], \n tpr_list:List[np.ndarray], labels:List[str]=None, title:str='ROC Curve', with_auc:bool=True):\n \"\"\"Plots multiple roc curves stacked together\n Args:\n fpr: list/array of numpy array\n A collection of arrays containing the false positive rates for \n different experiments\n tpr: list/array of numpy array\n A collection of arrays containing the false positive rates for \n different experiments", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "plot_roc_curve", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def plot_roc_curve(fpr:np.ndarray, tpr:np.ndarray, label:str='', title:str='ROC Curve', with_auc:bool=True):\n \"\"\"Plots a roc curve\n Args:\n fpr: numpy array\n An array containing the false positive rates\n tpr: numpy 
array\n An array containing the true positive rates\n label: str\n Label of the curve\n title: str", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "atoi", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def atoi(symbol):\n return int(symbol) if symbol.isdigit() else symbol\ndef natural_key(symbol):\n '''Keys for human sorting\n Reference:\n http://nedbatchelder.com/blog/200712/human_sorting.html\n '''\n return [ atoi(s) for s in re.split(r'(\\d+)', symbol.name) ]\ndef get_unique_symbols(symbols, sort_key=natural_key):\n unique_symbols = set(symbols)", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "natural_key", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def natural_key(symbol):\n '''Keys for human sorting\n Reference:\n http://nedbatchelder.com/blog/200712/human_sorting.html\n '''\n return [ atoi(s) for s in re.split(r'(\\d+)', symbol.name) ]\ndef get_unique_symbols(symbols, sort_key=natural_key):\n unique_symbols = set(symbols)\n return sorted(list(unique_symbols), key=sort_key)\ndef parallel_run(func, *iterables, max_workers=None):", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "get_unique_symbols", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def get_unique_symbols(symbols, sort_key=natural_key):\n unique_symbols = set(symbols)\n return sorted(list(unique_symbols), key=sort_key)\ndef parallel_run(func, *iterables, max_workers=None):\n max_workers = max_workers or quple.MAX_WORKERS \n with 
ProcessPoolExecutor(max_workers) as executor:\n result = executor.map(func, *iterables)\n return [i for i in result]\ndef execute_multi_tasks(func, *iterables, parallel):\n if parallel == 0:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "parallel_run", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def parallel_run(func, *iterables, max_workers=None):\n max_workers = max_workers or quple.MAX_WORKERS \n with ProcessPoolExecutor(max_workers) as executor:\n result = executor.map(func, *iterables)\n return [i for i in result]\ndef execute_multi_tasks(func, *iterables, parallel):\n if parallel == 0:\n result = []\n for args in zip(*iterables):\n result.append(func(*args))", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "execute_multi_tasks", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def execute_multi_tasks(func, *iterables, parallel):\n if parallel == 0:\n result = []\n for args in zip(*iterables):\n result.append(func(*args))\n return result\n else:\n if parallel == -1:\n max_workers = get_cpu_count()\n else:", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "batching", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def batching(l:List, n:int):\n for i in range(0, len(l), n):\n yield l[i:i + n]\ndef flatten_list(l):\n return [item for sublist in l for item in sublist]", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "flatten_list", + 
"kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "peekOfCode": "def flatten_list(l):\n return [item for sublist in l for item in sublist]", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.utils", + "documentation": {} + }, + { + "label": "visualize_images", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.visualization", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.visualization", + "peekOfCode": "def visualize_images(images:\"numpy.ndarray\", columns:int=8, subplots_options:Optional[Dict]=None,\n labels:Optional[\"numpy.ndarray\"]=None, label_map:Optional[Dict]=None):\n \"\"\"Visualize a set of 2D images in a grid\n Arguments:\n images: numpy.ndarray\n A 3D numpy array representing batches of images of shape (batchsize, rows, cols).\n colunms: int\n Number of coumns in the grid.\n subplots_options: (Optional) dict\n A dictionary containing subplots options.", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple.utils.visualization", + "documentation": {} + }, + { + "label": "__version__", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple._version", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple._version", + "peekOfCode": "__version__ = \"0.9.0\"", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.quple._version", + "documentation": {} + }, + { + "label": "verstrline", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "peekOfCode": "verstrline = open(VERSIONFILE, \"rt\").read()\nVSRE = r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\"\nmo = re.search(VSRE, verstrline, re.M)\nif mo:\n verstr = mo.group(1)\nelse:\n raise RuntimeError(\"Unable to find version string in %s.\" % (VERSIONFILE,)) \nsetuptools.setup(\n name=\"quple\", # Replace with your own username\n version=verstr,", + 
"detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "documentation": {} + }, + { + "label": "VSRE", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "peekOfCode": "VSRE = r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\"\nmo = re.search(VSRE, verstrline, re.M)\nif mo:\n verstr = mo.group(1)\nelse:\n raise RuntimeError(\"Unable to find version string in %s.\" % (VERSIONFILE,)) \nsetuptools.setup(\n name=\"quple\", # Replace with your own username\n version=verstr,\n author=\"Alkaid Cheng\",", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "documentation": {} + }, + { + "label": "mo", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "description": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "peekOfCode": "mo = re.search(VSRE, verstrline, re.M)\nif mo:\n verstr = mo.group(1)\nelse:\n raise RuntimeError(\"Unable to find version string in %s.\" % (VERSIONFILE,)) \nsetuptools.setup(\n name=\"quple\", # Replace with your own username\n version=verstr,\n author=\"Alkaid Cheng\",\n author_email=\"chi.lung.cheng@cern.ch\",", + "detail": "Quantum_GAN_for_HEP_Chi_Lung_Cheng.setup", + "documentation": {} + }, + { + "label": "frechet_distance", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "peekOfCode": "def frechet_distance(real_data, generated_data):\n mu_r = np.mean(real_data)\n mu_g = np.mean(generated_data)\n var_r = np.var(real_data)\n var_g = np.var(generated_data)\n mean_diff = mu_r - mu_g\n cov_mean = np.sqrt(var_r * var_g)\n distance = mean_diff**2 + var_r + var_g - 2 * cov_mean\n return distance\ndef FID(real_data, generated_data):", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "documentation": {} + }, + { + "label": "FID", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "peekOfCode": "def FID(real_data, generated_data):\n mu_r = np.mean(real_data, axis=0)\n mu_g = np.mean(generated_data, axis=0)\n C_r = np.cov(real_data, rowvar=False)\n C_g = np.cov(generated_data, rowvar=False)\n mean_diff = mu_r - mu_g\n cov_mean = sqrtm(C_r.dot(C_g))\n if np.iscomplexobj(cov_mean):\n cov_mean = cov_mean.real\n distance = mean_diff.dot(mean_diff) + np.trace(C_r + C_g - 2*cov_mean)", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "documentation": {} + }, + { + "label": "relative_entropy", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "peekOfCode": "def relative_entropy(real_data, generated_data):\n return np.sum(rel_entr(real_data, generated_data))", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.metrics", + "documentation": {} + }, + { + "label": "Dataset", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "class Dataset(Dataset):\n def __init__(self, probs):\n self.data = torch.tensor(probs, dtype=torch.float32) \n def __len__(self):\n return len(self.data)\n def __getitem__(self, idx):\n return self.data[idx]\nclass Discriminator(nn.Module):\n \"\"\"Fully connected classical discriminator\"\"\"\n def __init__(self, input_shape, layers):", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "documentation": {} + }, + { + "label": "Discriminator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": 
"Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "class Discriminator(nn.Module):\n \"\"\"Fully connected classical discriminator\"\"\"\n def __init__(self, input_shape, layers):\n super().__init__()\n self.input_shape = input_shape\n self.layers = layers\n self.model = self.set_model()\n def set_model(self):\n input = self.input_shape\n modules = []", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "documentation": {} + }, + { + "label": "QuantumAnsatz", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "class QuantumAnsatz:\n def __init__(self, n_qubits, q_depth) -> None:\n self.n_qubits = n_qubits\n self.q_depth = q_depth\n self.dev = qml.device(\"default.qubit\", wires=n_qubits)\n def circuit(self):\n @qml.qnode(self.dev, diff_method=\"backprop\")\n def quantum_circuit(weights):\n weights = weights.reshape(self.q_depth, self.n_qubits)\n # Initialise latent vectors", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "documentation": {} + }, + { + "label": "QuantumGenerator", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "class QuantumGenerator(nn.Module):\n \"\"\"Quantum generator class for the patch method\"\"\"\n def __init__(self, total_qubits, auxiliar_qubits, circuit_depth, quantum_circuit, partial_measure=False, states_range=(0, )):\n \"\"\"\n Args:\n n_generators (int): Number of sub-generators to be used in the patch method.\n q_delta (float, optional): Spread of the random distribution for parameter initialisation.\n \"\"\"\n super().__init__()\n self.n_qubits = total_qubits", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + 
"documentation": {} + }, + { + "label": "plot_training_progress", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "def plot_training_progress(epoch, iterations, metric_1, metric_2, generator, real_data, dist_shape: tuple[int, int]):\n # we don't plot if we don't have enough data\n if len(metric_1) < 2:\n return\n clear_output(wait=True)\n fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(25, 6))\n # Metric 1\n ax1.set_title(\"metric 1\", fontsize=15)\n ax1.plot(iterations, metric_1, color=\"royalblue\", linewidth=3)\n ax1.set_xlabel(\"Epoch\")", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "documentation": {} + }, + { + "label": "model_training", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "peekOfCode": "def model_training(discriminator: Discriminator, generator: QuantumGenerator, probability_distribution: np.array, dist_shape: tuple[int, int],\n device: torch.device, criterion: Module, disc_optimizer: Optimizer, gen_optimizer: Optimizer, metrics: list, epochs: int,\n batch_size: int) -> None:\n gen_loss = []\n disc_loss = []\n metric_1 = []\n metric_2 = []\n iterations = []\n real_labels = torch.full((1,), 1.0, dtype=torch.float, device=device)\n fake_labels = torch.full((1,), 0.0, dtype=torch.float, device=device)", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qGAN", + "documentation": {} + }, + { + "label": "ModelTest", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qg_test", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qg_test", + "peekOfCode": "class ModelTest:\n def __init__(self, test_id, num_qubits, num_aux_qubits, circuit_depth, \n 
rotations, num_generators, generator_lr, discriminator_lr, \n batch_size, num_samples, num_epochs, y, channel, optimizer, \n resolution) -> None:\n self.test_id = test_id\n self.num_qubits = num_qubits\n self.num_aux_qubits = num_aux_qubits\n self.circuit_depth = circuit_depth\n self.rotations = rotations", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.qg_test", + "documentation": {} + }, + { + "label": "MonteCarlo", + "kind": 6, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.simulations", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.simulations", + "peekOfCode": "class MonteCarlo:\n \"\"\"\n A class to simulate various probabilistic experiments such as rolling dice, coin tosses,\n particle decay, and random walks.\n Methods:\n -------\n rolling_dice(n_samples: int) -> np.array:\n Simulate rolling two six-sided dice and return the sum of the results.\n coin_toss(n_samples: int, coins: int) -> np.array:\n Simulate tossing a specified number of coins and return the number of heads for each set of tosses.", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.src.python_package.simulations", + "documentation": {} + }, + { + "label": "read_json_files", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "def read_json_files(directory):\n results = []\n # Read all files in the specified directory\n for filename in os.listdir(directory):\n if filename.endswith('.json'):\n filepath = os.path.join(directory, filename)\n with open(filepath, 'r') as file:\n data = json.load(file)\n results.append(data)\n # Sort results based on 'id' key ", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "generate_markdown_table", + "kind": 2, + "importPath": 
"Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "def generate_markdown_table(data):\n # Table headers\n headers = data[0].keys()\n markdown = \"| \" + \" | \".join(headers) + \" |\\n\"\n markdown += \"|---\" * len(headers) + \"|\\n\"\n # Add data rows\n for entry in data:\n row = \"| \" + \" | \".join(str(value) for key, value in entry.items()) + \" |\"\n markdown += row + \"\\n\"\n return markdown", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "save_to_readme", + "kind": 2, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "def save_to_readme(markdown, introduction, filename=\"/home/reyguadarrama/GSoC/tests/ECAL_tests/README.md\"):\n with open(filename, 'w') as file:\n file.write(introduction + \"\\n\\n\" + markdown)\n# Use the defined functions to read, generate, and save the data\ndirectory = \"/home/reyguadarrama/GSoC/tests/ECAL_tests/log\"\nintroduction_text = \"\"\"\n
\n# **Summary of ECAL Channel Test Results**\n---\n
", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "directory", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "directory = \"/home/reyguadarrama/GSoC/tests/ECAL_tests/log\"\nintroduction_text = \"\"\"\n
\n# **Summary of ECAL Channel Test Results**\n---\n
\nThis README file includes a summary of test results for the various parameters explored in my experiments.\nEach entry in the table represents a specific configuration and its outcomes. The model used in the training is the proposed by \n[He-Liang et.al.](https://arxiv.org/abs/2010.06201), this model consist in a set of feature qubits which will represent the distribution\nand a set of auxiliar qubits which gives the model more freedom, a post-processing is performed over the circuit output, first is divided by", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "introduction_text", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "introduction_text = \"\"\"\n
\n# **Summary of ECAL Channel Test Results**\n---\n
\nThis README file includes a summary of test results for the various parameters explored in my experiments.\nEach entry in the table represents a specific configuration and its outcomes. The model used in the training is the proposed by \n[He-Liang et.al.](https://arxiv.org/abs/2010.06201), this model consist in a set of feature qubits which will represent the distribution\nand a set of auxiliar qubits which gives the model more freedom, a post-processing is performed over the circuit output, first is divided by\na number $y \\in [0, 1]$ which allows the circuit output to take values larger than 1 and fix the limitation of the maximum sum of the output ", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "data", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "data = read_json_files(directory)\nmarkdown_table = generate_markdown_table(data)\nsave_to_readme(markdown_table, introduction_text)\nprint(\"The README.md has been updated with the results table.\")", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "markdown_table", + "kind": 5, + "importPath": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "description": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "peekOfCode": "markdown_table = generate_markdown_table(data)\nsave_to_readme(markdown_table, introduction_text)\nprint(\"The README.md has been updated with the results table.\")", + "detail": "Quantum_GAN_for_HEP_Luis_Rey_Guadarrama.tests.ECAL_tests.summary", + "documentation": {} + }, + { + "label": "DHCEModel", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + 
"peekOfCode": "class DHCEModel(tf.keras.Model):\n\tdef __init__(self, in_shape):\n\t\tsuper().__init__()\n\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):\n\t\tx = self.qml_layer(input_tensor)\n\t\treturn x\n\tdef build_graph(self, raw_shape):\n\t\tx = tf.keras.Input(shape=raw_shape)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "circ", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "def circ(x, weights):\n\tc = tc.Circuit(1)\n\tz = x * weights[0] + weights[1]\n\tc.rx(0, theta=z[0])\n\tc.ry(0, theta=z[1])\n\tc.rz(0, theta=z[2])\n\tc.ry(0, theta=z[3])\n\treturn tc.backend.real(c.expectation_ps(z=[0]))\ndef hinge_accuracy(y_true, y_pred):\n\ty_true = tf.squeeze(y_true) > 0.0", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "hinge_accuracy", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "def hinge_accuracy(y_true, y_pred):\n\ty_true = tf.squeeze(y_true) > 0.0\n\ty_pred = tf.squeeze(y_pred) > 0.0\n\tresult = tf.cast(y_true == y_pred, tf.float32)\n\treturn tf.reduce_mean(result)\nclass DHCEModel(tf.keras.Model):\n\tdef __init__(self, in_shape):\n\t\tsuper().__init__()\n\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "DATASET_PATH", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "DATASET_PATH = Path(__file__).parents[1] / 'data/downloaded/'\ndef circ(x, weights):\n\tc = tc.Circuit(1)\n\tz = x * weights[0] + weights[1]\n\tc.rx(0, theta=z[0])\n\tc.ry(0, theta=z[1])\n\tc.rz(0, theta=z[2])\n\tc.ry(0, theta=z[3])\n\treturn tc.backend.real(c.expectation_ps(z=[0]))\ndef hinge_accuracy(y_true, y_pred):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tc", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tc = tc.Circuit(1)\n\tz = x * weights[0] + weights[1]\n\tc.rx(0, theta=z[0])\n\tc.ry(0, theta=z[1])\n\tc.rz(0, theta=z[2])\n\tc.ry(0, theta=z[3])\n\treturn tc.backend.real(c.expectation_ps(z=[0]))\ndef hinge_accuracy(y_true, y_pred):\n\ty_true = tf.squeeze(y_true) > 0.0\n\ty_pred = tf.squeeze(y_pred) > 0.0", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tz", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tz = x * weights[0] + weights[1]\n\tc.rx(0, theta=z[0])\n\tc.ry(0, theta=z[1])\n\tc.rz(0, theta=z[2])\n\tc.ry(0, theta=z[3])\n\treturn tc.backend.real(c.expectation_ps(z=[0]))\ndef hinge_accuracy(y_true, y_pred):\n\ty_true = tf.squeeze(y_true) > 0.0\n\ty_pred = tf.squeeze(y_pred) > 0.0\n\tresult = tf.cast(y_true == y_pred, tf.float32)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ty_true", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ty_true = tf.squeeze(y_true) > 0.0\n\ty_pred = tf.squeeze(y_pred) > 0.0\n\tresult = tf.cast(y_true == y_pred, 
tf.float32)\n\treturn tf.reduce_mean(result)\nclass DHCEModel(tf.keras.Model):\n\tdef __init__(self, in_shape):\n\t\tsuper().__init__()\n\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ty_pred", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ty_pred = tf.squeeze(y_pred) > 0.0\n\tresult = tf.cast(y_true == y_pred, tf.float32)\n\treturn tf.reduce_mean(result)\nclass DHCEModel(tf.keras.Model):\n\tdef __init__(self, in_shape):\n\t\tsuper().__init__()\n\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):\n\t\tx = self.qml_layer(input_tensor)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tresult", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tresult = tf.cast(y_true == y_pred, tf.float32)\n\treturn tf.reduce_mean(result)\nclass DHCEModel(tf.keras.Model):\n\tdef __init__(self, in_shape):\n\t\tsuper().__init__()\n\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):\n\t\tx = self.qml_layer(input_tensor)\n\t\treturn x", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tself.qml_layer", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tself.qml_layer = tc.keras.QuantumLayer(circ, weights_shape=[2, in_shape])\n\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):\n\t\tx = self.qml_layer(input_tensor)\n\t\treturn x\n\tdef build_graph(self, raw_shape):\n\t\tx = tf.keras.Input(shape=raw_shape)\n\t\treturn tf.keras.Model(inputs=[x], outputs=self.call(x))\nif __name__ == \"__main__\":\n\ttu_dataset = torch_geometric.datasets.TUDataset(root=DATASET_PATH, name=\"MUTAG\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tself.qml_layer.compute_output_shape", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tself.qml_layer.compute_output_shape = lambda input_shape: (input_shape[0], 1)\n\tdef call(self, input_tensor):\n\t\tx = self.qml_layer(input_tensor)\n\t\treturn x\n\tdef build_graph(self, raw_shape):\n\t\tx = tf.keras.Input(shape=raw_shape)\n\t\treturn tf.keras.Model(inputs=[x], outputs=self.call(x))\nif __name__ == \"__main__\":\n\ttu_dataset = torch_geometric.datasets.TUDataset(root=DATASET_PATH, name=\"MUTAG\")\n\ttu_dataset.shuffle()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tx = self.qml_layer(input_tensor)\n\t\treturn x\n\tdef build_graph(self, raw_shape):\n\t\tx = tf.keras.Input(shape=raw_shape)\n\t\treturn tf.keras.Model(inputs=[x], outputs=self.call(x))\nif __name__ == \"__main__\":\n\ttu_dataset = torch_geometric.datasets.TUDataset(root=DATASET_PATH, name=\"MUTAG\")\n\ttu_dataset.shuffle()\n\t# Generate DHCE data\n\ttemp_en_graphs = []", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tx = tf.keras.Input(shape=raw_shape)\n\t\treturn tf.keras.Model(inputs=[x], outputs=self.call(x))\nif __name__ == \"__main__\":\n\ttu_dataset = torch_geometric.datasets.TUDataset(root=DATASET_PATH, name=\"MUTAG\")\n\ttu_dataset.shuffle()\n\t# Generate DHCE data\n\ttemp_en_graphs = []\n\tdata = tu_dataset[0]\n\tgraph = to_networkx(data).to_undirected()\n\tsmax = 0", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ttu_dataset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ttu_dataset = torch_geometric.datasets.TUDataset(root=DATASET_PATH, name=\"MUTAG\")\n\ttu_dataset.shuffle()\n\t# Generate DHCE data\n\ttemp_en_graphs = []\n\tdata = tu_dataset[0]\n\tgraph = to_networkx(data).to_undirected()\n\tsmax = 0\n\ty = []\n\tfor data in tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ttemp_en_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ttemp_en_graphs = []\n\tdata = tu_dataset[0]\n\tgraph = to_networkx(data).to_undirected()\n\tsmax = 0\n\ty = []\n\tfor data in tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tdata", + "kind": 5, 
+ "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tdata = tu_dataset[0]\n\tgraph = to_networkx(data).to_undirected()\n\tsmax = 0\n\ty = []\n\tfor data in tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tgraph", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tgraph = to_networkx(data).to_undirected()\n\tsmax = 0\n\ty = []\n\tfor data in tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []\n\tfor data in temp_en_graphs:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tsmax", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tsmax = 0\n\ty = []\n\tfor data in tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []\n\tfor data in temp_en_graphs:\n\t\ten_graphs.append(data + [data[-1]]*(smax - len(data)))", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ty", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ty = []\n\tfor data in 
tu_dataset:\n\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []\n\tfor data in temp_en_graphs:\n\t\ten_graphs.append(data + [data[-1]]*(smax - len(data)))\n\ten_graphs = np.array(en_graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tgraph", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tgraph = to_networkx(data).to_undirected()\n\t\ty.append(int(data.y[0]))\n\t\ttemp_en_graphs.append(get_dhce_data(graph))\n\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []\n\tfor data in temp_en_graphs:\n\t\ten_graphs.append(data + [data[-1]]*(smax - len(data)))\n\ten_graphs = np.array(en_graphs)\n\ty = 2*np.array(y) - 1\n\t# train test split", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\t\tsmax", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\t\tsmax = max(smax, len(temp_en_graphs[-1]))\n\ten_graphs = []\n\tfor data in temp_en_graphs:\n\t\ten_graphs.append(data + [data[-1]]*(smax - len(data)))\n\ten_graphs = np.array(en_graphs)\n\ty = 2*np.array(y) - 1\n\t# train test split\n\tX_train, X_test, y_train, y_test = train_test_split(en_graphs, y, test_size=0.1, random_state=42, stratify=y)\n\tin_shape = en_graphs.shape[-1]\n\tmodel = DHCEModel(in_shape)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ten_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ten_graphs = []\n\tfor 
data in temp_en_graphs:\n\t\ten_graphs.append(data + [data[-1]]*(smax - len(data)))\n\ten_graphs = np.array(en_graphs)\n\ty = 2*np.array(y) - 1\n\t# train test split\n\tX_train, X_test, y_train, y_test = train_test_split(en_graphs, y, test_size=0.1, random_state=42, stratify=y)\n\tin_shape = en_graphs.shape[-1]\n\tmodel = DHCEModel(in_shape)\n\tmodel.compile(", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ten_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ten_graphs = np.array(en_graphs)\n\ty = 2*np.array(y) - 1\n\t# train test split\n\tX_train, X_test, y_train, y_test = train_test_split(en_graphs, y, test_size=0.1, random_state=42, stratify=y)\n\tin_shape = en_graphs.shape[-1]\n\tmodel = DHCEModel(in_shape)\n\tmodel.compile(\n\t\tloss=tf.keras.losses.Hinge(),\n\t\toptimizer=tf.keras.optimizers.Adam(0.01),\n\t\tmetrics=[hinge_accuracy],", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\ty", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\ty = 2*np.array(y) - 1\n\t# train test split\n\tX_train, X_test, y_train, y_test = train_test_split(en_graphs, y, test_size=0.1, random_state=42, stratify=y)\n\tin_shape = en_graphs.shape[-1]\n\tmodel = DHCEModel(in_shape)\n\tmodel.compile(\n\t\tloss=tf.keras.losses.Hinge(),\n\t\toptimizer=tf.keras.optimizers.Adam(0.01),\n\t\tmetrics=[hinge_accuracy],\n\t)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tin_shape", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tin_shape = 
en_graphs.shape[-1]\n\tmodel = DHCEModel(in_shape)\n\tmodel.compile(\n\t\tloss=tf.keras.losses.Hinge(),\n\t\toptimizer=tf.keras.optimizers.Adam(0.01),\n\t\tmetrics=[hinge_accuracy],\n\t)\n\tprint(model.build_graph(in_shape).summary())\n\tEPOCHS = 50\n\tBATCH_SIZE = 16", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tmodel", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tmodel = DHCEModel(in_shape)\n\tmodel.compile(\n\t\tloss=tf.keras.losses.Hinge(),\n\t\toptimizer=tf.keras.optimizers.Adam(0.01),\n\t\tmetrics=[hinge_accuracy],\n\t)\n\tprint(model.build_graph(in_shape).summary())\n\tEPOCHS = 50\n\tBATCH_SIZE = 16\n\tqnn_history = model.fit(", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tEPOCHS", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tEPOCHS = 50\n\tBATCH_SIZE = 16\n\tqnn_history = model.fit(\n\t\tX_train, y_train,\n\t\tbatch_size=BATCH_SIZE,\n\t\tepochs=EPOCHS,\n\t\tverbose=1,\n\t\tvalidation_data=(X_test, y_test)\n\t)\n\tqnn_results = model.evaluate(X_test, y_test)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tBATCH_SIZE", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tBATCH_SIZE = 16\n\tqnn_history = model.fit(\n\t\tX_train, y_train,\n\t\tbatch_size=BATCH_SIZE,\n\t\tepochs=EPOCHS,\n\t\tverbose=1,\n\t\tvalidation_data=(X_test, y_test)\n\t)\n\tqnn_results = model.evaluate(X_test, y_test)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tqnn_history", + 
"kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tqnn_history = model.fit(\n\t\tX_train, y_train,\n\t\tbatch_size=BATCH_SIZE,\n\t\tepochs=EPOCHS,\n\t\tverbose=1,\n\t\tvalidation_data=(X_test, y_test)\n\t)\n\tqnn_results = model.evaluate(X_test, y_test)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "\tqnn_results", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "peekOfCode": "\tqnn_results = model.evaluate(X_test, y_test)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.qnn", + "documentation": {} + }, + { + "label": "node_hindex_centrality", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "peekOfCode": "def node_hindex_centrality(graph):\n numNode = graph.number_of_nodes()\n TotalDeg = np.array(list(graph.degree(graph.nodes())))[:, 1]\n Nei = [list(iter(graph[i])) for i in graph.nodes()]\n Hn = []\n Hi = [0] * numNode\n Hn.append(TotalDeg)\n for inter in range(1, numNode):\n Hitmp = np.array(Hn[inter - 1])\n for iNode in range(0, numNode):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "documentation": {} + }, + { + "label": "Entropy_Shannon", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "peekOfCode": "def Entropy_Shannon(X):\n # For discrete data\n data = np.unique(X)\n numData = len(data)\n Frequency = [0] * numData\n for index in range(numData):\n Frequency[index] = np.sum(X == data[index])\n P = Frequency / np.sum(Frequency)\n H = -np.sum(P * np.log2(P))\n return H", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + 
"documentation": {} + }, + { + "label": "get_dhce_data", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "peekOfCode": "def get_dhce_data(graph):\n Hn = node_hindex_centrality(graph)\n maxDim = Hn.shape[0]\n EnGraph = [0] * maxDim\n for ih in range(maxDim):\n EnGraph[ih] = Entropy_Shannon(Hn[ih])\n return EnGraph", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.dhce.utils", + "documentation": {} + }, + { + "label": "get_config", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "def get_config():\n\t\"\"\"Get the default hyperparameter configuration.\"\"\"\n\tconfig = ml_collections.ConfigDict()\n\tconfig.n_hops = 3\n\tconfig.learning_rate = 0.001\n\tconfig.batch_size = 16\n\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig = ml_collections.ConfigDict()\n\tconfig.n_hops = 3\n\tconfig.learning_rate = 0.001\n\tconfig.batch_size = 16\n\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig.n_hops", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig.n_hops = 3\n\tconfig.learning_rate = 0.001\n\tconfig.batch_size = 16\n\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig.learning_rate", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig.learning_rate = 0.001\n\tconfig.batch_size = 16\n\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig.batch_size", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig.batch_size = 16\n\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig.num_epochs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig.num_epochs = 50\n\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "\tconfig.split", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "peekOfCode": "\tconfig.split = 150\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.config", + "documentation": {} + }, + { + "label": "MUTAGDataset", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "class MUTAGDataset(Dataset):\n\t\"\"\"Mutag dataset.\"\"\"\n\tdef __init__(self, data_dir, n_hops=3):\n\t\t# Pad nodes and 
edges\n\t\ttransform = T.Compose([T.Pad(28, 66)])\n\t\tself.tu_dataset = torch_geometric.datasets.TUDataset(root=data_dir, name=\"MUTAG\", transform=transform)\n\t\tself.tu_dataset.shuffle()\n\t\tself.ego_dataset = get_ego_dataset(self.tu_dataset, n_hops)\n\t\tself.max_ego_nodes = self.ego_dataset.shape[3]\n\tdef __len__(self):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "get_ego_dataset", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "def get_ego_dataset(dataset, n_hops):\n\tmax_nodes = int(dataset.get_summary().num_nodes.max) # maximum number of nodes in the dataset\n\tk = n_hops\n\t# Prepare ego dataset\n\ttemp_dataset = []\n\tmax_ego_nodes = 0\n\tfor data in dataset:\n\t\tn_nodes = data.num_nodes\n\t\tego_nodes = []\n\t\tfor node in range(n_nodes):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\tmax_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\tmax_nodes = int(dataset.get_summary().num_nodes.max) # maximum number of nodes in the dataset\n\tk = n_hops\n\t# Prepare ego dataset\n\ttemp_dataset = []\n\tmax_ego_nodes = 0\n\tfor data in dataset:\n\t\tn_nodes = data.num_nodes\n\t\tego_nodes = []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\tk", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\tk = n_hops\n\t# Prepare ego dataset\n\ttemp_dataset = []\n\tmax_ego_nodes = 0\n\tfor data in dataset:\n\t\tn_nodes = data.num_nodes\n\t\tego_nodes 
= []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:\n\t\t\t\tsubset, edge_index, _, _ = k_hop_subgraph(node,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\ttemp_dataset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\ttemp_dataset = []\n\tmax_ego_nodes = 0\n\tfor data in dataset:\n\t\tn_nodes = data.num_nodes\n\t\tego_nodes = []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:\n\t\t\t\tsubset, edge_index, _, _ = k_hop_subgraph(node,\n\t\t\t\t\t\t\t\t\t\t\t\t\tk,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdata.edge_index,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\tmax_ego_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\tmax_ego_nodes = 0\n\tfor data in dataset:\n\t\tn_nodes = data.num_nodes\n\t\tego_nodes = []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:\n\t\t\t\tsubset, edge_index, _, _ = k_hop_subgraph(node,\n\t\t\t\t\t\t\t\t\t\t\t\t\tk,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdata.edge_index,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdirected=False)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\tn_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\tn_nodes = data.num_nodes\n\t\tego_nodes = []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:\n\t\t\t\tsubset, edge_index, _, _ = k_hop_subgraph(node,\n\t\t\t\t\t\t\t\t\t\t\t\t\tk,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdata.edge_index,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdirected=False)\n\t\t\texcept:\n\t\t\t\tsubset = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + 
"documentation": {} + }, + { + "label": "\t\tego_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\tego_nodes = []\n\t\tfor node in range(n_nodes):\n\t\t\ttry:\n\t\t\t\tsubset, edge_index, _, _ = k_hop_subgraph(node,\n\t\t\t\t\t\t\t\t\t\t\t\t\tk,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdata.edge_index,\n\t\t\t\t\t\t\t\t\t\t\t\t\tdirected=False)\n\t\t\texcept:\n\t\t\t\tsubset = []\n\t\t\t\tedge_index = torch.tensor([])", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tsubset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tsubset = []\n\t\t\t\tedge_index = torch.tensor([])\n\t\t\tn_subset_nodes = len(subset)\n\t\t\tif n_subset_nodes:\n\t\t\t\tG = nx.Graph()\n\t\t\t\tG.add_edges_from(edge_index.numpy().T)\n\t\t\t\tpaths = nx.single_source_shortest_path_length(G, node, cutoff=k)\n\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tedge_index", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tedge_index = torch.tensor([])\n\t\t\tn_subset_nodes = len(subset)\n\t\t\tif n_subset_nodes:\n\t\t\t\tG = nx.Graph()\n\t\t\t\tG.add_edges_from(edge_index.numpy().T)\n\t\t\t\tpaths = nx.single_source_shortest_path_length(G, node, cutoff=k)\n\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\tn_subset_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\tn_subset_nodes = len(subset)\n\t\t\tif n_subset_nodes:\n\t\t\t\tG = nx.Graph()\n\t\t\t\tG.add_edges_from(edge_index.numpy().T)\n\t\t\t\tpaths = nx.single_source_shortest_path_length(G, node, cutoff=k)\n\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tG", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tG = nx.Graph()\n\t\t\t\tG.add_edges_from(edge_index.numpy().T)\n\t\t\t\tpaths = nx.single_source_shortest_path_length(G, node, cutoff=k)\n\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]\n\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tpaths", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tpaths = nx.single_source_shortest_path_length(G, node, cutoff=k)\n\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + 
list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]\n\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:\n\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tnodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tnodes = np.array(list(paths.keys()))\n\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]\n\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:\n\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tdists", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tdists = np.array(list(paths.values()))\n\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]\n\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:\n\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)\n\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\thop_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\thop_nodes = [\n\t\t\t\t\t[node] + list(nodes[np.where(dists == hop)[0]]) for hop in range(1, k + 1)\n\t\t\t\t]\n\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:\n\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)\n\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])\n\t\ttemp_dataset.append(ego_nodes)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\thop_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\thop_nodes = np.array(list(zip_longest(*hop_nodes, fillvalue=max_nodes+1))).T\n\t\t\telse:\n\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)\n\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])\n\t\ttemp_dataset.append(ego_nodes)\n\t# Pad with maximum ego nodes\n\tego_dataset = np.stack([np.stack([\n\t\tnp.pad(ego_nodes, ((0, 0), (0, max_ego_nodes - ego_nodes.shape[-1])),", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\tdists", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\tdists = np.array([])\n\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)\n\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])\n\t\ttemp_dataset.append(ego_nodes)\n\t# Pad with maximum ego nodes\n\tego_dataset = np.stack([np.stack([\n\t\tnp.pad(ego_nodes, ((0, 0), (0, max_ego_nodes - ego_nodes.shape[-1])),\n\t\t\t constant_values=max_nodes+1) for ego_nodes in data\n\t]) for data 
in temp_dataset]).astype(np.int32)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\t\thop_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\t\thop_nodes = np.array([np.array([])]*k)\n\t\t\tego_nodes.append(hop_nodes)\n\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])\n\t\ttemp_dataset.append(ego_nodes)\n\t# Pad with maximum ego nodes\n\tego_dataset = np.stack([np.stack([\n\t\tnp.pad(ego_nodes, ((0, 0), (0, max_ego_nodes - ego_nodes.shape[-1])),\n\t\t\t constant_values=max_nodes+1) for ego_nodes in data\n\t]) for data in temp_dataset]).astype(np.int32)\n\treturn ego_dataset", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\t\tmax_ego_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\t\tmax_ego_nodes = max(max_ego_nodes, hop_nodes.shape[-1])\n\t\ttemp_dataset.append(ego_nodes)\n\t# Pad with maximum ego nodes\n\tego_dataset = np.stack([np.stack([\n\t\tnp.pad(ego_nodes, ((0, 0), (0, max_ego_nodes - ego_nodes.shape[-1])),\n\t\t\t constant_values=max_nodes+1) for ego_nodes in data\n\t]) for data in temp_dataset]).astype(np.int32)\n\treturn ego_dataset\n# torch compatible dataset\nclass MUTAGDataset(Dataset):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\tego_dataset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\tego_dataset = np.stack([np.stack([\n\t\tnp.pad(ego_nodes, ((0, 0), (0, max_ego_nodes - ego_nodes.shape[-1])),\n\t\t\t constant_values=max_nodes+1) for 
ego_nodes in data\n\t]) for data in temp_dataset]).astype(np.int32)\n\treturn ego_dataset\n# torch compatible dataset\nclass MUTAGDataset(Dataset):\n\t\"\"\"Mutag dataset.\"\"\"\n\tdef __init__(self, data_dir, n_hops=3):\n\t\t# Pad nodes and edges", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\ttransform", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\ttransform = T.Compose([T.Pad(28, 66)])\n\t\tself.tu_dataset = torch_geometric.datasets.TUDataset(root=data_dir, name=\"MUTAG\", transform=transform)\n\t\tself.tu_dataset.shuffle()\n\t\tself.ego_dataset = get_ego_dataset(self.tu_dataset, n_hops)\n\t\tself.max_ego_nodes = self.ego_dataset.shape[3]\n\tdef __len__(self):\n\t\treturn len(self.tu_dataset)\n\tdef __getitem__(self, idx):\n\t\tdata = self.tu_dataset[idx]\n\t\treturn {", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\tself.tu_dataset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\tself.tu_dataset = torch_geometric.datasets.TUDataset(root=data_dir, name=\"MUTAG\", transform=transform)\n\t\tself.tu_dataset.shuffle()\n\t\tself.ego_dataset = get_ego_dataset(self.tu_dataset, n_hops)\n\t\tself.max_ego_nodes = self.ego_dataset.shape[3]\n\tdef __len__(self):\n\t\treturn len(self.tu_dataset)\n\tdef __getitem__(self, idx):\n\t\tdata = self.tu_dataset[idx]\n\t\treturn {\n\t\t\t'x': data.x,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\tself.ego_dataset", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", 
+ "peekOfCode": "\t\tself.ego_dataset = get_ego_dataset(self.tu_dataset, n_hops)\n\t\tself.max_ego_nodes = self.ego_dataset.shape[3]\n\tdef __len__(self):\n\t\treturn len(self.tu_dataset)\n\tdef __getitem__(self, idx):\n\t\tdata = self.tu_dataset[idx]\n\t\treturn {\n\t\t\t'x': data.x,\n\t\t\t'ego_graphs': torch.tensor(self.ego_dataset[idx]),\n\t\t\t'y': data.y", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\tself.max_ego_nodes", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\tself.max_ego_nodes = self.ego_dataset.shape[3]\n\tdef __len__(self):\n\t\treturn len(self.tu_dataset)\n\tdef __getitem__(self, idx):\n\t\tdata = self.tu_dataset[idx]\n\t\treturn {\n\t\t\t'x': data.x,\n\t\t\t'ego_graphs': torch.tensor(self.ego_dataset[idx]),\n\t\t\t'y': data.y\n\t\t}", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "\t\tdata", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "peekOfCode": "\t\tdata = self.tu_dataset[idx]\n\t\treturn {\n\t\t\t'x': data.x,\n\t\t\t'ego_graphs': torch.tensor(self.ego_dataset[idx]),\n\t\t\t'y': data.y\n\t\t}", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.data", + "documentation": {} + }, + { + "label": "my_collate_fn", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "def my_collate_fn(batch):\n x = []\n y = []\n ego_graphs = []\n for data in batch:\n x += [data['x']]\n y += [data['y']]\n ego_graphs += [data['ego_graphs']]\n x = torch.stack(x)\n y = torch.stack(y)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + 
"documentation": {} + }, + { + "label": "get_loaders", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "def get_loaders(config):\n split = config.split\n mutag_ds = MUTAGDataset(DATASET_PATH, config.n_hops)\n split_a_size = split\n split_b_size = len(mutag_ds) - split_a_size\n train_dataset, test_dataset = random_split(\n mutag_ds, [split_a_size, split_b_size],\n generator=torch.Generator().manual_seed(42))\n batch_size = config.batch_size\n train_loader = DataLoader(train_dataset,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "graph_circ", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "def graph_circ(ego_graphs, x, lmbd, theta):\n n_qubits = ego_graphs.shape[-1]\n n_features = x.shape[-1]\n n_hops = ego_graphs.shape[-2]\n steps = n_features // 3\n readout = n_qubits\n c = tc.Circuit(n_qubits + 1)\n # Paper's implementation\n for hop in range(n_hops):\n inputs = jnp.take(x, ego_graphs[hop], axis=0)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "loss_fn", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "def loss_fn(params, x, y, ego_graphs):\n res = qpred_vmap(ego_graphs, x, params['lmbd'], params['theta'])\n res = jnp.mean(res, axis=0) # paper's implementation\n res = jnp.dot(params['w'], res) + params['b']\n logits = res\n one_hot = jax.nn.one_hot(y, 2).reshape(-1, )\n loss = optax.softmax_cross_entropy(logits=logits, labels=one_hot)\n return loss, logits\nif __name__ == \"__main__\":\n\tconfiguration = get_config()", + "detail": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "DATASET_PATH", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "DATASET_PATH = Path(__file__).parents[1] / 'data/downloaded/'\ndef my_collate_fn(batch):\n x = []\n y = []\n ego_graphs = []\n for data in batch:\n x += [data['x']]\n y += [data['y']]\n ego_graphs += [data['ego_graphs']]\n x = torch.stack(x)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "qpred_vmap", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "qpred_vmap = tc.backend.vmap(tc.backend.jit(graph_circ), vectorized_argnums=(0, ))\ndef loss_fn(params, x, y, ego_graphs):\n res = qpred_vmap(ego_graphs, x, params['lmbd'], params['theta'])\n res = jnp.mean(res, axis=0) # paper's implementation\n res = jnp.dot(params['w'], res) + params['b']\n logits = res\n one_hot = jax.nn.one_hot(y, 2).reshape(-1, )\n loss = optax.softmax_cross_entropy(logits=logits, labels=one_hot)\n return loss, logits\nif __name__ == \"__main__\":", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tconfiguration", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tconfiguration = get_config()\n\ttrain_loader, val_loader = get_loaders(configuration)\n\t# configure parameters\n\tdummy = next(iter(train_loader))\n\tprint(dummy[-1].shape)\n\tmax_nodes, _, n_qubits = dummy[-1].shape[1:]\n\tn_features = dummy[0].shape[-1]\n\tn_hops = configuration.n_hops\n\tprint(max_nodes, n_qubits, n_features)\n\tkey = jax.random.PRNGKey(0)", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tdummy", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tdummy = next(iter(train_loader))\n\tprint(dummy[-1].shape)\n\tmax_nodes, _, n_qubits = dummy[-1].shape[1:]\n\tn_features = dummy[0].shape[-1]\n\tn_hops = configuration.n_hops\n\tprint(max_nodes, n_qubits, n_features)\n\tkey = jax.random.PRNGKey(0)\n\tkey, *subkeys = jax.random.split(key, num=5)\n\tlmbd = jax.random.uniform(subkeys[0], (n_hops, n_qubits, n_features))\n\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tn_features", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tn_features = dummy[0].shape[-1]\n\tn_hops = configuration.n_hops\n\tprint(max_nodes, n_qubits, n_features)\n\tkey = jax.random.PRNGKey(0)\n\tkey, *subkeys = jax.random.split(key, num=5)\n\tlmbd = jax.random.uniform(subkeys[0], (n_hops, n_qubits, n_features))\n\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation\n\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tn_hops", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tn_hops = configuration.n_hops\n\tprint(max_nodes, n_qubits, 
n_features)\n\tkey = jax.random.PRNGKey(0)\n\tkey, *subkeys = jax.random.split(key, num=5)\n\tlmbd = jax.random.uniform(subkeys[0], (n_hops, n_qubits, n_features))\n\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation\n\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tkey", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tkey = jax.random.PRNGKey(0)\n\tkey, *subkeys = jax.random.split(key, num=5)\n\tlmbd = jax.random.uniform(subkeys[0], (n_hops, n_qubits, n_features))\n\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation\n\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tlmbd", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tlmbd = jax.random.uniform(subkeys[0], (n_hops, n_qubits, n_features))\n\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation\n\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # 
paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\ttheta", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\ttheta = jax.random.uniform(subkeys[1], (n_hops, n_qubits, 3)) # paper's implementation\n\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tw", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tw = jax.random.uniform(subkeys[2], (2, n_qubits)) # paper's implementation\n\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tb", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tb = jax.random.uniform(subkeys[3], (2,)) # paper's implementation\n\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tparams", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tparams = {'lmbd': lmbd, 'theta': theta, 'w': w, 'b': b}\n\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check\n\tprint(\"Checking model with dummy input\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\toptimizer", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\toptimizer = optax.adam(learning_rate=configuration.learning_rate)\n\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, 
vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check\n\tprint(\"Checking model with dummy input\")\n\tdummy_x = jnp.ones([1, max_nodes, 7])", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\topt_state", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\topt_state = optimizer.init(params)\n\t# Preparing VVAG using\n\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check\n\tprint(\"Checking model with dummy input\")\n\tdummy_x = jnp.ones([1, max_nodes, 7])\n\tdummy_y = jnp.ones([1, 1])", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tqml_vvag", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tqml_vvag = tc.backend.vectorized_value_and_grad(\n\t\tloss_fn, argnums=0, vectorized_argnums=(1, 2, 3), has_aux=True\n\t)\n\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check\n\tprint(\"Checking model with dummy input\")\n\tdummy_x = jnp.ones([1, max_nodes, 7])\n\tdummy_y = jnp.ones([1, 1])\n\tdummy_ego_graphs = jnp.ones([1, max_nodes, n_hops, n_qubits]).astype(jnp.int32)\n\ts = time()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tqml_vvag", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tqml_vvag = tc.backend.jit(qml_vvag)\n\t# dummy input check\n\tprint(\"Checking model with dummy input\")\n\tdummy_x = 
jnp.ones([1, max_nodes, 7])\n\tdummy_y = jnp.ones([1, 1])\n\tdummy_ego_graphs = jnp.ones([1, max_nodes, n_hops, n_qubits]).astype(jnp.int32)\n\ts = time()\n\tqml_vvag(params, dummy_x, dummy_y, dummy_ego_graphs)\n\te = time()\n\tprint(f\"Done in {e-s} s\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tdummy_x", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tdummy_x = jnp.ones([1, max_nodes, 7])\n\tdummy_y = jnp.ones([1, 1])\n\tdummy_ego_graphs = jnp.ones([1, max_nodes, n_hops, n_qubits]).astype(jnp.int32)\n\ts = time()\n\tqml_vvag(params, dummy_x, dummy_y, dummy_ego_graphs)\n\te = time()\n\tprint(f\"Done in {e-s} s\")\n\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tdummy_y", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tdummy_y = jnp.ones([1, 1])\n\tdummy_ego_graphs = jnp.ones([1, max_nodes, n_hops, n_qubits]).astype(jnp.int32)\n\ts = time()\n\tqml_vvag(params, dummy_x, dummy_y, dummy_ego_graphs)\n\te = time()\n\tprint(f\"Done in {e-s} s\")\n\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tdummy_ego_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tdummy_ego_graphs = jnp.ones([1, max_nodes, n_hops, n_qubits]).astype(jnp.int32)\n\ts = time()\n\tqml_vvag(params, dummy_x, dummy_y, 
dummy_ego_graphs)\n\te = time()\n\tprint(f\"Done in {e-s} s\")\n\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []\n\t\tepoch_accuracy = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\ts", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\ts = time()\n\tqml_vvag(params, dummy_x, dummy_y, dummy_ego_graphs)\n\te = time()\n\tprint(f\"Done in {e-s} s\")\n\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []\n\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\te", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\te = time()\n\tprint(f\"Done in {e-s} s\")\n\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []\n\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:\n\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tlosses", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tlosses = []\n\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []\n\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:\n\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:\n\t\t\t\ttepoch.set_description(f\"Epoch {epoch}\")\n\t\t\t\tx = x.numpy()", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\taccs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\taccs = []\n\tfor epoch in range(1, configuration.num_epochs + 1):\n\t\tepoch_loss = []\n\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:\n\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:\n\t\t\t\ttepoch.set_description(f\"Epoch {epoch}\")\n\t\t\t\tx = x.numpy()\n\t\t\t\ty = y.numpy()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\tepoch_loss", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\tepoch_loss = []\n\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:\n\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:\n\t\t\t\ttepoch.set_description(f\"Epoch {epoch}\")\n\t\t\t\tx = x.numpy()\n\t\t\t\ty = y.numpy()\n\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\tepoch_accuracy", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\tepoch_accuracy = []\n\t\twith tqdm(train_loader, unit='batch') as tepoch:\n\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:\n\t\t\t\ttepoch.set_description(f\"Epoch {epoch}\")\n\t\t\t\tx = x.numpy()\n\t\t\t\ty = y.numpy()\n\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\t\taccuracy = 
jnp.mean(jnp.argmax(logits, -1) == y)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\ts", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\ts = time()\n\t\t\tfor x, y, ego_graphs in tepoch:\n\t\t\t\ttepoch.set_description(f\"Epoch {epoch}\")\n\t\t\t\tx = x.numpy()\n\t\t\t\ty = y.numpy()\n\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\t\tupdates, opt_state = optimizer.update(grads, opt_state)\n\t\t\t\tparams = optax.apply_updates(params, updates)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\t\tx = x.numpy()\n\t\t\t\ty = y.numpy()\n\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\t\tupdates, opt_state = optimizer.update(grads, opt_state)\n\t\t\t\tparams = optax.apply_updates(params, updates)\n\t\t\t\tepoch_loss.append(jnp.mean(loss))\n\t\t\t\tepoch_accuracy.append(accuracy)\n\t\t\te = time()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\t\ty", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\t\ty = y.numpy()\n\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, 
ego_graphs)\n\t\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\t\tupdates, opt_state = optimizer.update(grads, opt_state)\n\t\t\t\tparams = optax.apply_updates(params, updates)\n\t\t\t\tepoch_loss.append(jnp.mean(loss))\n\t\t\t\tepoch_accuracy.append(accuracy)\n\t\t\te = time()\n\t\t\ttrain_loss = np.mean(epoch_loss)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\t\tego_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\t\tupdates, opt_state = optimizer.update(grads, opt_state)\n\t\t\t\tparams = optax.apply_updates(params, updates)\n\t\t\t\tepoch_loss.append(jnp.mean(loss))\n\t\t\t\tepoch_accuracy.append(accuracy)\n\t\t\te = time()\n\t\t\ttrain_loss = np.mean(epoch_loss)\n\t\t\ttrain_accuracy = np.mean(epoch_accuracy)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\t\taccuracy", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\t\tupdates, opt_state = optimizer.update(grads, opt_state)\n\t\t\t\tparams = optax.apply_updates(params, updates)\n\t\t\t\tepoch_loss.append(jnp.mean(loss))\n\t\t\t\tepoch_accuracy.append(accuracy)\n\t\t\te = time()\n\t\t\ttrain_loss = np.mean(epoch_loss)\n\t\t\ttrain_accuracy = np.mean(epoch_accuracy)\n\t\t\tprint(\n\t\t\tf'epoch: {epoch:3d}',", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\t\tparams", + "kind": 
5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\t\tparams = optax.apply_updates(params, updates)\n\t\t\t\tepoch_loss.append(jnp.mean(loss))\n\t\t\t\tepoch_accuracy.append(accuracy)\n\t\t\te = time()\n\t\t\ttrain_loss = np.mean(epoch_loss)\n\t\t\ttrain_accuracy = np.mean(epoch_accuracy)\n\t\t\tprint(\n\t\t\tf'epoch: {epoch:3d}',\n\t\t\tf'train_loss: {train_loss:.4f}, train_acc: {train_accuracy:.4f}',\n\t\t\tf'epoch time: {e-s:.4f}')", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\te", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\te = time()\n\t\t\ttrain_loss = np.mean(epoch_loss)\n\t\t\ttrain_accuracy = np.mean(epoch_accuracy)\n\t\t\tprint(\n\t\t\tf'epoch: {epoch:3d}',\n\t\t\tf'train_loss: {train_loss:.4f}, train_acc: {train_accuracy:.4f}',\n\t\t\tf'epoch time: {e-s:.4f}')\n\t\t\tlosses.append(train_loss)\n\t\t\taccs.append(train_accuracy)\n\t# Testing", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\ttrain_loss", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\ttrain_loss = np.mean(epoch_loss)\n\t\t\ttrain_accuracy = np.mean(epoch_accuracy)\n\t\t\tprint(\n\t\t\tf'epoch: {epoch:3d}',\n\t\t\tf'train_loss: {train_loss:.4f}, train_acc: {train_accuracy:.4f}',\n\t\t\tf'epoch time: {e-s:.4f}')\n\t\t\tlosses.append(train_loss)\n\t\t\taccs.append(train_accuracy)\n\t# Testing\n\tval_loss = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\ttrain_accuracy", + "kind": 5, + 
"importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\ttrain_accuracy = np.mean(epoch_accuracy)\n\t\t\tprint(\n\t\t\tf'epoch: {epoch:3d}',\n\t\t\tf'train_loss: {train_loss:.4f}, train_acc: {train_accuracy:.4f}',\n\t\t\tf'epoch time: {e-s:.4f}')\n\t\t\tlosses.append(train_loss)\n\t\t\taccs.append(train_accuracy)\n\t# Testing\n\tval_loss = []\n\tval_accuracy = []", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tval_loss", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tval_loss = []\n\tval_accuracy = []\n\ts = time()\n\twith tqdm(val_loader, unit='batch') as tepoch:\n\t\tfor x, y, ego_graphs in tepoch:\n\t\t\ttepoch.set_description(f\"Val\")\n\t\t\tx = x.numpy()\n\t\t\ty = y.numpy()\n\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\tval_accuracy", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\tval_accuracy = []\n\ts = time()\n\twith tqdm(val_loader, unit='batch') as tepoch:\n\t\tfor x, y, ego_graphs in tepoch:\n\t\t\ttepoch.set_description(f\"Val\")\n\t\t\tx = x.numpy()\n\t\t\ty = y.numpy()\n\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\ts", + "kind": 5, + "importPath": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\ts = time()\n\twith tqdm(val_loader, unit='batch') as tepoch:\n\t\tfor x, y, ego_graphs in tepoch:\n\t\t\ttepoch.set_description(f\"Val\")\n\t\t\tx = x.numpy()\n\t\t\ty = y.numpy()\n\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\tval_loss.append(jnp.mean(loss))", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\tx = x.numpy()\n\t\t\ty = y.numpy()\n\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\tval_loss.append(jnp.mean(loss))\n\t\t\tval_accuracy.append(accuracy)\n\te = time()\n\tprint(f\"val_loss: {np.mean(val_loss):.4f}, val_acc: {np.mean(val_accuracy):.4f}\")\n\tprint(f\"val time: {e-s:.4f}\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\ty", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\ty = y.numpy()\n\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\tval_loss.append(jnp.mean(loss))\n\t\t\tval_accuracy.append(accuracy)\n\te = time()\n\tprint(f\"val_loss: {np.mean(val_loss):.4f}, val_acc: {np.mean(val_accuracy):.4f}\")\n\tprint(f\"val time: {e-s:.4f}\")", + 
"detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\tego_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\tego_graphs = ego_graphs.numpy().astype(np.int32)\n\t\t\t(loss, logits), grads = qml_vvag(params, x, y, ego_graphs)\n\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\tval_loss.append(jnp.mean(loss))\n\t\t\tval_accuracy.append(accuracy)\n\te = time()\n\tprint(f\"val_loss: {np.mean(val_loss):.4f}, val_acc: {np.mean(val_accuracy):.4f}\")\n\tprint(f\"val time: {e-s:.4f}\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\t\t\taccuracy", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\t\t\taccuracy = jnp.mean(jnp.argmax(logits, -1) == y)\n\t\t\tval_loss.append(jnp.mean(loss))\n\t\t\tval_accuracy.append(accuracy)\n\te = time()\n\tprint(f\"val_loss: {np.mean(val_loss):.4f}, val_acc: {np.mean(val_accuracy):.4f}\")\n\tprint(f\"val time: {e-s:.4f}\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "\te", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "peekOfCode": "\te = time()\n\tprint(f\"val_loss: {np.mean(val_loss):.4f}, val_acc: {np.mean(val_accuracy):.4f}\")\n\tprint(f\"val time: {e-s:.4f}\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.ego_net.train", + "documentation": {} + }, + { + "label": "square_kernel_matrix_jax", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "peekOfCode": "def square_kernel_matrix_jax(X, kernel, assume_normalized_kernel=False):\n N = qml.math.shape(X)[0]\n if assume_normalized_kernel and N == 1:\n return qml.math.eye(1, like=qml.math.get_interface(X))\n # Compute all off-diagonal kernel values, using symmetry of the kernel matrix\n i, j = jnp.tril_indices(N)\n res = jax.vmap(kernel, in_axes=(0,0))(X[i], X[j])\n mtx = jnp.zeros((N, N)) # create an empty matrix\n mtx = mtx.at[jnp.tril_indices(N)].set(res)\n mtx = mtx + mtx.T - jnp.diag(jnp.diag(mtx))", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "documentation": {} + }, + { + "label": "kernel_matrix_jax", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "peekOfCode": "def kernel_matrix_jax(X1, X2, kernel):\n N = X1.shape[0]\n M = X2.shape[0]\n products = jnp.array(list(product(X1,X2)))\n mtx = jnp.stack(jax.vmap(kernel, in_axes=(0,0))(products[:,0,:], products[:,1,:]))\n if jnp.ndim(mtx[0]) == 0:\n return jnp.reshape(mtx, (N, M))\n return jnp.moveaxis(jnp.reshape(mtx, (N, M, qml.math.size(mtx[0]))), -1, 0)\ndef target_alignment_jax(\n X,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "documentation": {} + }, + { + "label": "target_alignment_jax", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "peekOfCode": "def target_alignment_jax(\n X,\n Y,\n kernel,\n assume_normalized_kernel=False,\n rescale_class_labels=True,\n):\n \"\"\"Kernel-target alignment between kernel and labels.\"\"\"\n K = square_kernel_matrix_jax(\n X,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.notebooks.jax_utils", + "documentation": {} + }, + { + "label": "Builder", + "kind": 6, + "importPath": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder", + "peekOfCode": "class Builder(tfds.core.GeneratorBasedBuilder):\n \"\"\"DatasetBuilder for mutag dataset.\"\"\"\n VERSION = tfds.core.Version(\"1.0.0\")\n RELEASE_NOTES = {\n \"1.0.0\": \"Initial release.\",\n }\n BUILDER_CONFIGS = [\n # `name` (and optionally `description`) are required for each config\n MutagConfig(name=\"mutag\", description=\"Classic Mutag dataset\"),\n ]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder", + "documentation": {} + }, + { + "label": "MutagTest", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder_test", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder_test", + "peekOfCode": "class MutagTest(tfds.testing.DatasetBuilderTestCase):\n \"\"\"Tests for mutag dataset.\"\"\"\n # TODO(mutag):\n DATASET_CLASS = mutag_dataset_builder.Builder\n SPLITS = {\n \"train\": 3, # Number of fake train example\n \"test\": 1, # Number of fake test example\n }\n # If you are calling `download/download_and_extract` with a dict, like:\n # dl_manager.download({'some_key': 'http://a.org/out.txt', ...})", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_builder_test", + "documentation": {} + }, + { + "label": "MutagConfig", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_config", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_config", + "peekOfCode": "class MutagConfig(tfds.core.BuilderConfig):\n \"\"\"BuilderConfig for mutag.\"\"\"\n img_size: Tuple[int, int] = (0, 0)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.mutag.mutag_dataset_config", + 
"documentation": {} + }, + { + "label": "GraphsTupleSize", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "class GraphsTupleSize(NamedTuple):\n \"\"\"Helper class to represent padding and graph sizes.\"\"\"\n n_node: int\n n_edge: int\n n_graph: int\ndef get_raw_datasets(dataset_name, config_name) -> Dict[str, tf.data.Dataset]:\n \"\"\"Returns datasets as tf.data.Dataset, organized by split.\"\"\"\n ds_builder = tfds.builder(f\"{dataset_name}/{config_name}\")\n ds_builder.download_and_prepare()\n ds_splits = [\"train\", \"test\"]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "get_raw_datasets", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def get_raw_datasets(dataset_name, config_name) -> Dict[str, tf.data.Dataset]:\n \"\"\"Returns datasets as tf.data.Dataset, organized by split.\"\"\"\n ds_builder = tfds.builder(f\"{dataset_name}/{config_name}\")\n ds_builder.download_and_prepare()\n ds_splits = [\"train\", \"test\"]\n datasets = {split: ds_builder.as_dataset(split=split) for split in ds_splits}\n return datasets\ndef get_datasets(\n dataset_name: str,\n config_name: str,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "get_datasets", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def get_datasets(\n dataset_name: str,\n config_name: str,\n batch_size: int,\n add_virtual_node: bool = True,\n add_undirected_edges: bool = True,\n 
add_self_loops: bool = True,\n) -> Dict[str, tf.data.Dataset]:\n \"\"\"Returns datasets of batched GraphsTuples, organized by split.\"\"\"\n if batch_size <= 1:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "convert_to_graphs_tuple", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def convert_to_graphs_tuple(\n graph: Dict[str, tf.Tensor], add_virtual_node: bool, add_undirected_edges: bool, add_self_loops: bool\n) -> jraph.GraphsTuple:\n \"\"\"Converts a dictionary of tf.Tensors to a GraphsTuple.\"\"\"\n num_nodes = tf.squeeze(graph[\"num_nodes\"])\n num_edges = tf.squeeze(graph[\"num_edges\"])\n nodes = graph[\"node_feats\"]\n edges = graph[\"edge_feats\"]\n edge_feature_dim = edges.shape[-1]\n labels = graph[\"label\"]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "estimate_padding_budget_for_batch_size", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def estimate_padding_budget_for_batch_size(\n dataset: tf.data.Dataset, batch_size: int, num_estimation_graphs: int\n) -> GraphsTupleSize:\n \"\"\"Estimates the padding budget for a dataset of unbatched GraphsTuples.\n Args:\n dataset: A dataset of unbatched GraphsTuples.\n batch_size: The intended batch size. 
Note that no batching is performed by\n this function.\n num_estimation_graphs: How many graphs to take from the dataset to estimate\n the distribution of number of nodes and edges per graph.", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "specs_from_graphs_tuple", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def specs_from_graphs_tuple(graph: jraph.GraphsTuple):\n \"\"\"Returns a tf.TensorSpec corresponding to this graph.\"\"\"\n def get_tensor_spec(array: np.ndarray):\n shape = list(array.shape)\n dtype = array.dtype\n return tf.TensorSpec(shape=shape, dtype=dtype)\n specs = {}\n for field in [\"nodes\", \"edges\", \"senders\", \"receivers\", \"globals\", \"n_node\", \"n_edge\"]:\n field_sample = getattr(graph, field)\n specs[field] = get_tensor_spec(field_sample)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "get_graphs_tuple_size", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "peekOfCode": "def get_graphs_tuple_size(graph: jraph.GraphsTuple):\n \"\"\"Returns the number of nodes, edges and graphs in a GraphsTuple.\"\"\"\n return GraphsTupleSize(n_node=np.sum(graph.n_node), n_edge=np.sum(graph.n_edge), n_graph=np.shape(graph.n_node)[0])", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.data.input_pipeline", + "documentation": {} + }, + { + "label": "MLP", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "class MLP(nn.Module):\n\t\"\"\"A multi-layer 
perceptron.\"\"\"\n\thidden_dim: int\n\toutput_dim: int\n\tenhance: bool = False\n\tdeterministic: bool = True\n\t@nn.compact\n\tdef __call__(self, x):\n\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "GCN", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "class GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int\n\tlatent_size: int\n\t# num_mlp_layers: int\n\tmessage_passing_steps: int\n\toutput_globals_size: int\n\t# dropout_rate: float = 0\n\t# skip_connections: bool = True\n\t# layer_norm: bool = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate = 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate = 0.2, 
deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate = 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tx = nn.Dropout(rate = 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tx = nn.Dense(features=self.hidden_dim)(x)\n\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tx = nn.relu(x)\n\t\tif self.enhance:\n\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int\n\tlatent_size: int", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tx = nn.LayerNorm()(x)\n\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int\n\tlatent_size: int\n\t# num_mlp_layers: int\n\tmessage_passing_steps: int", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tx", + "kind": 5, + "importPath": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tx = nn.Dropout(rate= 0.2, deterministic=self.deterministic)(x)\n\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int\n\tlatent_size: int\n\t# num_mlp_layers: int\n\tmessage_passing_steps: int\n\toutput_globals_size: int", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tx", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tx = nn.Dense(features=self.output_dim)(x)\n\t\treturn nn.relu(x)\nclass GCN(nn.Module):\n\t\"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n\t# input_dim: int\n\tlatent_size: int\n\t# num_mlp_layers: int\n\tmessage_passing_steps: int\n\toutput_globals_size: int\n\t# dropout_rate: float = 0", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t]", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t] = jraph.segment_mean\n\tdef pool(self, graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n\t\t\"\"\"Pooling operation, taken from Jraph.\"\"\"\n\t\t# Equivalent to jnp.sum(n_node), but JIT-able.\n\t\tsum_n_node = graphs.nodes.shape[0] # pytype: disable=attribute-error # jax-ndarray\n\t\t# To aggregate nodes from each graph to global features,\n\t\t# we first construct tensors that map the node to the corresponding graph.\n\t\t# Example: if you have `n_node=[1,2]`, we construct the tensor [0, 1, 1].\n\t\tn_graph 
= graphs.n_node.shape[0]\n\t\tnode_graph_indices = jnp.repeat(jnp.arange(n_graph), graphs.n_node, axis=0, total_repeat_length=sum_n_node)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tsum_n_node", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tsum_n_node = graphs.nodes.shape[0] # pytype: disable=attribute-error # jax-ndarray\n\t\t# To aggregate nodes from each graph to global features,\n\t\t# we first construct tensors that map the node to the corresponding graph.\n\t\t# Example: if you have `n_node=[1,2]`, we construct the tensor [0, 1, 1].\n\t\tn_graph = graphs.n_node.shape[0]\n\t\tnode_graph_indices = jnp.repeat(jnp.arange(n_graph), graphs.n_node, axis=0, total_repeat_length=sum_n_node)\n\t\t# We use the aggregation function to pool the nodes per graph.\n\t\tpooled = self.pooling_fn(\n\t\t\tgraphs.nodes, node_graph_indices, n_graph\n\t\t) # pytype: disable=wrong-arg-types # jax-ndarray", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tn_graph", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tn_graph = graphs.n_node.shape[0]\n\t\tnode_graph_indices = jnp.repeat(jnp.arange(n_graph), graphs.n_node, axis=0, total_repeat_length=sum_n_node)\n\t\t# We use the aggregation function to pool the nodes per graph.\n\t\tpooled = self.pooling_fn(\n\t\t\tgraphs.nodes, node_graph_indices, n_graph\n\t\t) # pytype: disable=wrong-arg-types # jax-ndarray\n\t\treturn graphs._replace(globals=pooled)\n\t@nn.compact\n\tdef __call__(self, graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n\t\tprint(\"Graphs: \", graphs.nodes.shape, graphs.edges.shape, 
graphs.globals.shape)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tnode_graph_indices", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tnode_graph_indices = jnp.repeat(jnp.arange(n_graph), graphs.n_node, axis=0, total_repeat_length=sum_n_node)\n\t\t# We use the aggregation function to pool the nodes per graph.\n\t\tpooled = self.pooling_fn(\n\t\t\tgraphs.nodes, node_graph_indices, n_graph\n\t\t) # pytype: disable=wrong-arg-types # jax-ndarray\n\t\treturn graphs._replace(globals=pooled)\n\t@nn.compact\n\tdef __call__(self, graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n\t\tprint(\"Graphs: \", graphs.nodes.shape, graphs.edges.shape, graphs.globals.shape)\n\t\t# We will first linearly project the original node features as 'embeddings'.", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tpooled", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tpooled = self.pooling_fn(\n\t\t\tgraphs.nodes, node_graph_indices, n_graph\n\t\t) # pytype: disable=wrong-arg-types # jax-ndarray\n\t\treturn graphs._replace(globals=pooled)\n\t@nn.compact\n\tdef __call__(self, graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n\t\tprint(\"Graphs: \", graphs.nodes.shape, graphs.edges.shape, graphs.globals.shape)\n\t\t# We will first linearly project the original node features as 'embeddings'.\n\t\tembedder = jraph.GraphMapFeatures(embed_node_fn=nn.Dense(self.latent_size))\n\t\tprocessed_graphs = embedder(graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tembedder", + "kind": 5, + 
"importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tembedder = jraph.GraphMapFeatures(embed_node_fn=nn.Dense(self.latent_size))\n\t\tprocessed_graphs = embedder(graphs)\n\t\tprint(\"After embedder\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape)\n\t\t# Now, we will apply the GCN once for each message-passing round.\n\t\tfor i in range(self.message_passing_steps):\n\t\t\tupdate_node_fn = jraph.concatenated_args(\n\t\t\t\tMLP(self.latent_size, self.latent_size, enhance=True, deterministic=self.deterministic)\n\t\t\t)\n\t\t\tgraph_conv = jraph.GraphConvolution(update_node_fn=update_node_fn, add_self_edges=True)\n\t\t\tprocessed_graphs = graph_conv(processed_graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tprocessed_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tprocessed_graphs = embedder(graphs)\n\t\tprint(\"After embedder\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape)\n\t\t# Now, we will apply the GCN once for each message-passing round.\n\t\tfor i in range(self.message_passing_steps):\n\t\t\tupdate_node_fn = jraph.concatenated_args(\n\t\t\t\tMLP(self.latent_size, self.latent_size, enhance=True, deterministic=self.deterministic)\n\t\t\t)\n\t\t\tgraph_conv = jraph.GraphConvolution(update_node_fn=update_node_fn, add_self_edges=True)\n\t\t\tprocessed_graphs = graph_conv(processed_graphs)\n\t\t\tprint(", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tupdate_node_fn", + "kind": 5, + "importPath": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tupdate_node_fn = jraph.concatenated_args(\n\t\t\t\tMLP(self.latent_size, self.latent_size, enhance=True, deterministic=self.deterministic)\n\t\t\t)\n\t\t\tgraph_conv = jraph.GraphConvolution(update_node_fn=update_node_fn, add_self_edges=True)\n\t\t\tprocessed_graphs = graph_conv(processed_graphs)\n\t\t\tprint(\n\t\t\t\tf\"After message passing {i}\",\n\t\t\t\tprocessed_graphs.nodes.shape,\n\t\t\t\tprocessed_graphs.edges.shape,\n\t\t\t\tprocessed_graphs.globals.shape,", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tgraph_conv", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tgraph_conv = jraph.GraphConvolution(update_node_fn=update_node_fn, add_self_edges=True)\n\t\t\tprocessed_graphs = graph_conv(processed_graphs)\n\t\t\tprint(\n\t\t\t\tf\"After message passing {i}\",\n\t\t\t\tprocessed_graphs.nodes.shape,\n\t\t\t\tprocessed_graphs.edges.shape,\n\t\t\t\tprocessed_graphs.globals.shape,\n\t\t\t)\n\t\t# We apply the pooling operation to get a 'global' embedding.\n\t\tprocessed_graphs = self.pool(processed_graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\t\tprocessed_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\t\tprocessed_graphs = graph_conv(processed_graphs)\n\t\t\tprint(\n\t\t\t\tf\"After message passing {i}\",\n\t\t\t\tprocessed_graphs.nodes.shape,\n\t\t\t\tprocessed_graphs.edges.shape,\n\t\t\t\tprocessed_graphs.globals.shape,\n\t\t\t)\n\t\t# We 
apply the pooling operation to get a 'global' embedding.\n\t\tprocessed_graphs = self.pool(processed_graphs)\n\t\tprint(", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tprocessed_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tprocessed_graphs = self.pool(processed_graphs)\n\t\tprint(\n\t\t\t\"After pooling\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape\n\t\t)\n\t\t# Now, we decode this to get the required output logits.\n\t\tdecoder = jraph.GraphMapFeatures(embed_global_fn=nn.Dense(self.output_globals_size))\n\t\tprocessed_graphs = decoder(processed_graphs)\n\t\tprint(\n\t\t\t\"After decoder\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape\n\t\t)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tdecoder", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tdecoder = jraph.GraphMapFeatures(embed_global_fn=nn.Dense(self.output_globals_size))\n\t\tprocessed_graphs = decoder(processed_graphs)\n\t\tprint(\n\t\t\t\"After decoder\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape\n\t\t)\n\t\treturn processed_graphs", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "\t\tprocessed_graphs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "peekOfCode": "\t\tprocessed_graphs = 
decoder(processed_graphs)\n\t\tprint(\n\t\t\t\"After decoder\", processed_graphs.nodes.shape, processed_graphs.edges.shape, processed_graphs.globals.shape\n\t\t)\n\t\treturn processed_graphs", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.gcn", + "documentation": {} + }, + { + "label": "GraphConvNet", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.graph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.graph_conv_net", + "peekOfCode": "class GraphConvNet(nn.Module):\n \"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n latent_size: int\n num_mlp_layers: int\n message_passing_steps: int\n output_globals_size: int\n dropout_rate: float = 0\n skip_connections: bool = True\n layer_norm: bool = True\n deterministic: bool = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.graph_conv_net", + "documentation": {} + }, + { + "label": "QGraphConvNet", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.qgraph_conv_net", + "peekOfCode": "class QGraphConvNet(nn.Module):\n \"\"\"A Graph Convolution Network + Pooling model defined with Jraph.\"\"\"\n latent_size: int\n num_mlp_layers: int\n message_passing_steps: int\n output_globals_size: int\n dropout_rate: float = 0\n skip_connections: bool = True\n layer_norm: bool = True\n num_qubits: int = 2", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.qgraph_conv_net", + "documentation": {} + }, + { + "label": "MLP", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "peekOfCode": "class MLP(nn.Module):\n \"\"\"A multi-layer perceptron.\"\"\"\n feature_sizes: Sequence[int]\n dropout_rate: float = 0\n deterministic: bool 
= True\n activation: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu\n @nn.compact\n def __call__(self, inputs):\n x = inputs\n for size in self.feature_sizes:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "DRQNN", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "peekOfCode": "class DRQNN(nn.Module):\n \"\"\"Data Re-uploading Quantum Neural Network\"\"\"\n num_qubits: int = 2\n num_layers: int = 1\n num_features: int = 3\n entanglement_gate: str = \"cz\"\n theta_init: Callable = nn.initializers.uniform(scale=jnp.pi)\n def setup(self):\n self.dev = qml.device('default.qubit', wires=self.num_qubits)\n self.theta = self.param('θ', self.theta_init, (self.num_layers, self.num_qubits, self.num_features, 2))", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "QMLP", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "peekOfCode": "class QMLP(nn.Module):\n \"\"\"A multi-layer perceptron.\"\"\"\n feature_sizes: Sequence[int]\n dropout_rate: float = 0\n num_qubits: int = 2\n num_layers: int = 1\n num_features: int = 3\n entanglement_gate: str = \"cz\"\n deterministic: bool = True\n activation: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "add_graphs_tuples", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "peekOfCode": "def add_graphs_tuples(graphs: jraph.GraphsTuple, other_graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n \"\"\"Adds the 
nodes, edges and global features from other_graphs to graphs.\"\"\"\n return graphs._replace(\n nodes=graphs.nodes + other_graphs.nodes,\n edges=graphs.edges + other_graphs.edges,\n globals=graphs.globals + other_graphs.globals,\n )\nclass MLP(nn.Module):\n \"\"\"A multi-layer perceptron.\"\"\"\n feature_sizes: Sequence[int]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.models.util", + "documentation": {} + }, + { + "label": "TqdmUpTo", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "peekOfCode": "class TqdmUpTo(tqdm):\n \"\"\"From https://github.com/tqdm/tqdm/blob/master/examples/tqdm_wget.py\"\"\"\n def update_to(self, blocks=1, bsize=1, tsize=None):\n \"\"\"\n This function updates the progress of a download by calculating the number of blocks and block\n size and setting the total size if provided.\n Args:\n blocks: The number of blocks that have been transferred. Defaults to 1\n bsize: bsize stands for \"block size\" and represents the size of each block being downloaded or\n uploaded. It is usually measured in bytes. 
Defaults to 1", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "documentation": {} + }, + { + "label": "download_url", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "peekOfCode": "def download_url(url, filename):\n \"\"\"\n This function downloads a file from a given URL and displays the progress using the TqdmUpTo\n library.\n Args:\n url: The URL of the file to be downloaded.\n filename: The name of the file to be saved after downloading from the given URL.\n \"\"\"\n with TqdmUpTo(unit=\"B\", unit_scale=True, unit_divisor=1024, miniters=1) as t:\n urlretrieve(url, filename, reporthook=t.update_to, data=None) # noqa: S310", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.qgnn_hep.util", + "documentation": {} + }, + { + "label": "get_config", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_graph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_graph_conv_net", + "peekOfCode": "def get_config():\n \"\"\"Get the default hyperparameter configuration.\"\"\"\n config = ml_collections.ConfigDict()\n # Optimizer.\n config.optimizer_hparams = ml_collections.ConfigDict()\n config.optimizer_hparams.optimizer = \"adam\"\n config.optimizer_hparams.learning_rate = 1e-4\n # Training hyperparameters.\n config.train_hparams = ml_collections.ConfigDict()\n config.train_hparams.model_name = \"GraphConvNet\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_graph_conv_net", + "documentation": {} + }, + { + "label": "get_config", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "def get_config():\n\t\"\"\"Get the default hyperparameter 
configuration.\"\"\"\n\tconfig = ml_collections.ConfigDict()\n\t# Optimizer.\n\tconfig.optimizer_hparams = ml_collections.ConfigDict()\n\tconfig.optimizer_hparams.optimizer = \"adam\"\n\tconfig.optimizer_hparams.learning_rate = 1e-3\n\t# Training hyperparameters.\n\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig = ml_collections.ConfigDict()\n\t# Optimizer.\n\tconfig.optimizer_hparams = ml_collections.ConfigDict()\n\tconfig.optimizer_hparams.optimizer = \"adam\"\n\tconfig.optimizer_hparams.learning_rate = 1e-3\n\t# Training hyperparameters.\n\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.optimizer_hparams", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.optimizer_hparams = ml_collections.ConfigDict()\n\tconfig.optimizer_hparams.optimizer = \"adam\"\n\tconfig.optimizer_hparams.learning_rate = 1e-3\n\t# Training hyperparameters.\n\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = 
\"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.optimizer_hparams.optimizer", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.optimizer_hparams.optimizer = \"adam\"\n\tconfig.optimizer_hparams.learning_rate = 1e-3\n\t# Training hyperparameters.\n\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.optimizer_hparams.learning_rate", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.optimizer_hparams.learning_rate = 1e-3\n\t# Training hyperparameters.\n\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000", + "detail": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams = ml_collections.ConfigDict()\n\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.model_name", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.model_name = \"QGraphConvNet\"\n\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.dataset_name", + "kind": 5, + 
"importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.dataset_name = \"mutag\"\n\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.dataset_config_name", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.dataset_config_name = \"mutag\"\n\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.batch_size", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.batch_size = 32\n\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.num_train_steps", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.num_train_steps = 15_000\n\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.log_every_steps", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.log_every_steps = 1000\n\tconfig.train_hparams.eval_every_steps = 
1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.eval_every_steps", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.eval_every_steps = 1_000\n\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.checkpoint_every_steps", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.checkpoint_every_steps = 10_000\n\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = 
ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.add_virtual_node", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.add_virtual_node = False\n\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.add_undirected_edges", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.add_undirected_edges = True\n\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + 
"documentation": {} + }, + { + "label": "\tconfig.train_hparams.add_self_loops", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.add_self_loops = True\n\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.train_hparams.load_checkpoint", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.train_hparams.load_checkpoint = False\n\t# GNN hyperparameters.\n\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + 
"peekOfCode": "\tconfig.model_hparams = ml_collections.ConfigDict()\n\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.latent_size", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.latent_size = 64\n\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.num_mlp_layers", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.num_mlp_layers = 1\n\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = 
True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.message_passing_steps", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.message_passing_steps = 2\n\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.output_globals_size", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.output_globals_size = 2\n\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.dropout_rate", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.dropout_rate = 0.1\n\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.skip_connections", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.skip_connections = True\n\tconfig.model_hparams.layer_norm = True\n\t# Quantum circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.layer_norm", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.layer_norm = True\n\t# Quantum 
circuit specific hyperparameters\n\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.num_qubits", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.num_qubits = 4\n\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.num_layers", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.num_layers = 1\n\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.num_features", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.num_features = 6\n\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": 
"Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "\tconfig.model_hparams.entanglement_gate", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "peekOfCode": "\tconfig.model_hparams.entanglement_gate = \"cz\" # \"cz\" or \"cx\"\n\treturn config", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.default_qgraph_conv_net", + "documentation": {} + }, + { + "label": "get_config", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.gcn_mutag", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.gcn_mutag", + "peekOfCode": "def get_config():\n \"\"\"Get the default hyperparameter configuration.\"\"\"\n config = ml_collections.ConfigDict()\n # Optimizer.\n config.optimizer_hparams = ml_collections.ConfigDict()\n config.optimizer_hparams.optimizer = \"adam\"\n config.optimizer_hparams.learning_rate = 1e-4\n # Training hyperparameters.\n config.train_hparams = ml_collections.ConfigDict()\n config.train_hparams.model_name = \"GCN\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.configs.gcn_mutag", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "peekOfCode": "def main(argv):\n if len(argv) > 1:\n raise app.UsageError(\"Too many command-line arguments.\")\n # Hide any GPUs from TensorFlow. 
Otherwise TF might reserve memory and make\n # it unavailable to JAX.\n tf.config.experimental.set_visible_devices([], \"GPU\")\n # This example only supports single-host training on a single device.\n logging.info(\"JAX host: %d / %d\", jax.process_index(), jax.process_count())\n logging.info(\"JAX local devices: %r\", jax.local_devices())\n # Add a note so that we can tell which task is which JAX host.", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "documentation": {} + }, + { + "label": "FLAGS", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "peekOfCode": "FLAGS = flags.FLAGS\nflags.DEFINE_string(\"workdir\", None, \"Directory to store model data.\")\nflags.DEFINE_bool(\"wandb\", False, \"Whether to log to Weights & Biases.\")\nconfig_flags.DEFINE_config_file(\n \"config\", None, \"File path to the training hyperparameter configuration.\", lock_config=True\n)\ndef main(argv):\n if len(argv) > 1:\n raise app.UsageError(\"Too many command-line arguments.\")\n # Hide any GPUs from TensorFlow. 
Otherwise TF might reserve memory and make", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.run_experiment", + "documentation": {} + }, + { + "label": "MeanAveragePrecision", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "class MeanAveragePrecision(metrics.CollectingMetric.from_outputs((\"labels\", \"logits\", \"mask\"))):\n \"\"\"Computes the mean average precision (mAP) over different tasks.\"\"\"\n def compute(self):\n # Matches the official OGB evaluation scheme for mean average precision.\n values = super().compute()\n labels = values[\"labels\"]\n logits = values[\"logits\"]\n mask = values[\"mask\"]\n assert logits.shape == labels.shape == mask.shape\n assert len(logits.shape) == 2", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "AUC", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "class AUC(metrics.CollectingMetric.from_outputs((\"labels\", \"logits\", \"mask\"))):\n \"\"\"Computes the ROC AUC Score\"\"\"\n def compute(self):\n values = super().compute()\n labels = values[\"labels\"]\n logits = values[\"logits\"]\n mask = values[\"mask\"]\n assert logits.shape == labels.shape == mask.shape\n assert len(logits.shape) == 2\n # take first column of mask", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "EvalMetrics", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "class EvalMetrics(metrics.Collection):\n accuracy: metrics.Average.from_fun(predictions_match_labels)\n loss: metrics.Average.from_output(\"loss\")\n 
mean_average_precision: MeanAveragePrecision\n auc: AUC\n@flax.struct.dataclass\nclass TrainMetrics(metrics.Collection):\n accuracy: metrics.Average.from_fun(predictions_match_labels)\n loss: metrics.Average.from_output(\"loss\")\ndef replace_globals(graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "TrainMetrics", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "class TrainMetrics(metrics.Collection):\n accuracy: metrics.Average.from_fun(predictions_match_labels)\n loss: metrics.Average.from_output(\"loss\")\ndef replace_globals(graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n \"\"\"Replaces the globals attribute with a constant feature for each graph.\"\"\"\n return graphs._replace(globals=jnp.ones([graphs.n_node.shape[0], 1]))\ndef get_predicted_logits(\n state: train_state.TrainState, graphs: jraph.GraphsTuple, rngs: Optional[Dict[str, jnp.ndarray]]\n) -> jnp.ndarray:\n \"\"\"Get predicted logits from the network for input graphs.\"\"\"", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "create_model", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def create_model(model_name: str, model_hparams: ml_collections.ConfigDict, deterministic: bool) -> nn.Module:\n model_class = import_class(f\"qgnn_hep.models.{model_name}\")\n return model_class(**model_hparams, deterministic=deterministic)\ndef create_optimizer(config: ml_collections.ConfigDict) -> optax.GradientTransformation:\n \"\"\"Creates an optimizer, as specified by the config.\"\"\"\n if config.optimizer == \"adam\":\n return optax.adam(learning_rate=config.learning_rate)\n 
if config.optimizer == \"sgd\":\n return optax.sgd(learning_rate=config.learning_rate, momentum=config.momentum)\n raise ValueError(f\"Unsupported optimizer: {config.optimizer}.\")", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "create_optimizer", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def create_optimizer(config: ml_collections.ConfigDict) -> optax.GradientTransformation:\n \"\"\"Creates an optimizer, as specified by the config.\"\"\"\n if config.optimizer == \"adam\":\n return optax.adam(learning_rate=config.learning_rate)\n if config.optimizer == \"sgd\":\n return optax.sgd(learning_rate=config.learning_rate, momentum=config.momentum)\n raise ValueError(f\"Unsupported optimizer: {config.optimizer}.\")\ndef binary_cross_entropy_with_mask(*, logits: jnp.ndarray, labels: jnp.ndarray, mask: jnp.ndarray):\n \"\"\"Binary cross entropy loss for unnormalized logits, with masked elements.\"\"\"\n assert logits.shape == labels.shape == mask.shape", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "binary_cross_entropy_with_mask", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def binary_cross_entropy_with_mask(*, logits: jnp.ndarray, labels: jnp.ndarray, mask: jnp.ndarray):\n \"\"\"Binary cross entropy loss for unnormalized logits, with masked elements.\"\"\"\n assert logits.shape == labels.shape == mask.shape\n assert len(logits.shape) == 2\n # To prevent propagation of NaNs during grad().\n # We mask over the loss for invalid targets later.\n labels = jnp.where(mask, labels, -1)\n # Numerically stable implementation of BCE loss.\n # This mimics TensorFlow's 
tf.nn.sigmoid_cross_entropy_with_logits().\n positive_logits = logits >= 0", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "predictions_match_labels", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def predictions_match_labels(*, logits: jnp.ndarray, labels: jnp.ndarray, **kwargs) -> jnp.ndarray:\n \"\"\"Returns a binary array indicating where predictions match the labels.\"\"\"\n del kwargs # Unused.\n preds = logits > 0\n return (preds == labels).astype(jnp.float32)\ndef add_prefix_to_keys(result: Dict[str, Any], prefix: str) -> Dict[str, Any]:\n \"\"\"Adds a prefix to the keys of a dict, returning a new dict.\"\"\"\n return {f\"{prefix}_{key}\": val for key, val in result.items()}\n@flax.struct.dataclass\nclass MeanAveragePrecision(metrics.CollectingMetric.from_outputs((\"labels\", \"logits\", \"mask\"))):", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "add_prefix_to_keys", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def add_prefix_to_keys(result: Dict[str, Any], prefix: str) -> Dict[str, Any]:\n \"\"\"Adds a prefix to the keys of a dict, returning a new dict.\"\"\"\n return {f\"{prefix}_{key}\": val for key, val in result.items()}\n@flax.struct.dataclass\nclass MeanAveragePrecision(metrics.CollectingMetric.from_outputs((\"labels\", \"logits\", \"mask\"))):\n \"\"\"Computes the mean average precision (mAP) over different tasks.\"\"\"\n def compute(self):\n # Matches the official OGB evaluation scheme for mean average precision.\n values = super().compute()\n labels = values[\"labels\"]", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + 
"documentation": {} + }, + { + "label": "replace_globals", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def replace_globals(graphs: jraph.GraphsTuple) -> jraph.GraphsTuple:\n \"\"\"Replaces the globals attribute with a constant feature for each graph.\"\"\"\n return graphs._replace(globals=jnp.ones([graphs.n_node.shape[0], 1]))\ndef get_predicted_logits(\n state: train_state.TrainState, graphs: jraph.GraphsTuple, rngs: Optional[Dict[str, jnp.ndarray]]\n) -> jnp.ndarray:\n \"\"\"Get predicted logits from the network for input graphs.\"\"\"\n pred_graphs = state.apply_fn(state.params, graphs, rngs=rngs)\n logits = pred_graphs.globals\n return logits", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "get_predicted_logits", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def get_predicted_logits(\n state: train_state.TrainState, graphs: jraph.GraphsTuple, rngs: Optional[Dict[str, jnp.ndarray]]\n) -> jnp.ndarray:\n \"\"\"Get predicted logits from the network for input graphs.\"\"\"\n pred_graphs = state.apply_fn(state.params, graphs, rngs=rngs)\n logits = pred_graphs.globals\n return logits\ndef get_valid_mask(labels: jnp.ndarray, graphs: jraph.GraphsTuple) -> jnp.ndarray:\n \"\"\"Gets the binary mask indicating only valid labels and graphs.\"\"\"\n # We have to ignore all NaN values - which indicate labels for which", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "get_valid_mask", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def 
get_valid_mask(labels: jnp.ndarray, graphs: jraph.GraphsTuple) -> jnp.ndarray:\n \"\"\"Gets the binary mask indicating only valid labels and graphs.\"\"\"\n # We have to ignore all NaN values - which indicate labels for which\n # the current graphs have no label.\n labels_mask = ~jnp.isnan(labels)\n # Since we have extra 'dummy' graphs in our batch due to padding, we want\n # to mask out any loss associated with the dummy graphs.\n # Since we padded with `pad_with_graphs` we can recover the mask by using\n # get_graph_padding_mask.\n graph_mask = jraph.get_graph_padding_mask(graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "train_step", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def train_step(\n state: train_state.TrainState, graphs: jraph.GraphsTuple, rngs: Dict[str, jnp.ndarray]\n) -> Tuple[train_state.TrainState, metrics.Collection]:\n \"\"\"Performs one update step over the current batch of graphs.\"\"\"\n def loss_fn(params, graphs):\n curr_state = state.replace(params=params)\n # Extract labels.\n labels = graphs.globals\n labels = jax.nn.one_hot(labels, 2)\n # Replace the global feature for graph classification.", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "evaluate_step", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def evaluate_step(\n state: train_state.TrainState,\n graphs: jraph.GraphsTuple,\n) -> metrics.Collection:\n \"\"\"Computes metrics over a set of graphs.\"\"\"\n # The target labels our model has to predict.\n labels = graphs.globals\n labels = jax.nn.one_hot(labels, 2)\n # Replace the global feature for graph classification.\n graphs = 
replace_globals(graphs)", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "evaluate_model", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def evaluate_model(\n state: train_state.TrainState, datasets: Dict[str, tf.data.Dataset], splits: Iterable[str]\n) -> Dict[str, metrics.Collection]:\n \"\"\"Evaluates the model on metrics over the specified splits.\"\"\"\n # Loop over each split independently.\n eval_metrics = {}\n for split in splits:\n split_metrics = None\n # Loop over graphs.\n for graphs in datasets[split].as_numpy_iterator():", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "train_and_evaluate", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "peekOfCode": "def train_and_evaluate(config: ml_collections.ConfigDict, workdir: str, wandb_logging: bool) -> train_state.TrainState:\n \"\"\"Execute model training and evaluation loop.\n Args:\n config: Hyperparameter configuration for training and evaluation.\n workdir: Directory where the TensorBoard summaries are written to.\n Returns:\n The train state (which includes the `.params`).\n \"\"\"\n # We only support single-host training.\n assert jax.process_count() == 1", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.train", + "documentation": {} + }, + { + "label": "import_class", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.util", + "description": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.util", + "peekOfCode": "def import_class(module_and_class_name: str) -> type:\n \"\"\"Import class from a module, e.g. 
'qgnn_hep.models.GraphConvNet'.\"\"\"\n module_name, class_name = module_and_class_name.rsplit(\".\", 1)\n module = importlib.import_module(module_name)\n class_ = getattr(module, class_name)\n return class_", + "detail": "Quantum_GNN_for_HEP_Gopal_Ramesh_Dahale.training.util", + "documentation": {} + }, + { + "label": "QGJets", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.data.load_jets", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.data.load_jets", + "peekOfCode": "class QGJets():\n def __init__(self, num_data=None):\n self.config = read_configurations(\"./config.json\")\n if num_data is None:\n self.N = self.config[\"NUM JETS\"]\n else:\n self.N = num_data\n self.data = energyflow.qg_jets.load(num_data=self.N, pad=True, ncol=4, generator='pythia',\n with_bc=False, cache_dir='~/data')\n self.path = self.config[\"PATH\"]", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.data.load_jets", + "documentation": {} + }, + { + "label": "GATConv", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.Custom_GATConv", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.Custom_GATConv", + "peekOfCode": "class GATConv(MessagePassing):\n def __init__(self, in_channels, out_channels):\n super().__init__(aggr='add') # \"Add\" aggregation (Step 5).\n# self.heads = heads\n# seld.concat = concat\n self.lin = Linear(in_channels, out_channels, bias=False)\n self.attn = Sequential(Linear(out_channels*2, 8),\n ReLU(),\n Linear(8, 1),\n LeakyReLU(0.2)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.Custom_GATConv", + "documentation": {} + }, + { + "label": "QGATConv", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.QGATConv", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.QGATConv", + "peekOfCode": "class QGATConv(MessagePassing):\n def 
__init__(self, in_channels, depth, attn_model):\n super().__init__(aggr='add') # \"Add\" aggregation (Step 5).\n self.bias = nn.Parameter(torch.empty(in_channels))\n self.reset_parameters()\n self.n_qubits = in_channels\n self.qc, _ = quantum_net(self.n_qubits, depth)\n if attn_model == \"MPS\":\n self.attn, _ = MPS(in_channels*2)\n else:", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GATConvLayers.QGATConv", + "documentation": {} + }, + { + "label": "GCNConv", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.Custom_GCNConv", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.Custom_GCNConv", + "peekOfCode": "class GCNConv(MessagePassing):\n def __init__(self, in_channels, out_channels=None, no_node_NN=False):\n super().__init__(aggr='add') # \"Add\" aggregation (Step 5).\n # to perform simple neighborhood aggregation (without NN to learn node embedding)\n self.no_NN = no_node_NN\n if not self.no_NN:\n assert out_channels == None or in_channels == out_channels\n else:\n self.lin = Linear(in_channels, out_channels, bias=False)\n self.bias = Parameter(torch.empty(out_channels))", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.Custom_GCNConv", + "documentation": {} + }, + { + "label": "QGCNConv", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.QGCNConv", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.QGCNConv", + "peekOfCode": "class QGCNConv(MessagePassing):\n def __init__(self, in_channels, n_layers):\n super().__init__(aggr='add') # \"Add\" aggregation (Step 5).\n self.bias = Parameter(torch.empty(in_channels))\n self.reset_parameters()\n self.n_qubits = in_channels\n self.n_layers = n_layers\n self.qc, _ = quantum_net(self.n_qubits, self.n_layers)\n def reset_parameters(self):\n self.bias.data.zero_()", + "detail": 
"Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.GCNConv_Layers.QGCNConv", + "documentation": {} + }, + { + "label": "MPS", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.MPS", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.MPS", + "peekOfCode": "def MPS(n_qubits, meas_qubits):\n dev = qml.device(\"default.qubit\", wires=n_qubits)\n @qml.qnode(dev, interface='torch')\n def quantum_circuit(inputs, q_weights_flat):\n \"\"\"\n The variational quantum classifier.\n \"\"\"\n # Reshape weights\n q_weights = q_weights_flat[:-1].reshape(n_qubits-1, 2)\n # Embed features in the quantum node", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.MPS", + "documentation": {} + }, + { + "label": "TTN", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.TTN", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.TTN", + "peekOfCode": "def TTN(n_qubits):\n dev = qml.device(\"default.qubit\", wires=n_qubits)\n @qml.qnode(dev, interface='torch')\n def quantum_circuit(inputs, q_weights_flat):\n \"\"\"\n The variational quantum classifier.\n \"\"\"\n # Embed features in the quantum node\n qml.AngleEmbedding(inputs, wires=range(n_qubits), rotation=\"Y\")\n n_layers = int(np.log2(n_qubits))", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_Classifiers.TTN", + "documentation": {} + }, + { + "label": "GCN", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Custom_GCN_Model", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Custom_GCN_Model", + "peekOfCode": "class GCN(Module):\n def __init__(self, GCNConvLayer, input_dims, hidden_dims, output_dims, activ_fn=LeakyReLU(0.2)):\n super().__init__()\n layers = []\n layers.append(GCNConvLayer(input_dims, hidden_dims[0]))\n for i in 
range(len(hidden_dims)-1):\n layers.append(GCNConvLayer(hidden_dims[i], hidden_dims[i+1]))\n self.layers = ModuleList(layers)\n self.activ_fn = activ_fn\n self.classifier = Linear(hidden_dims[-1], output_dims)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Custom_GCN_Model", + "documentation": {} + }, + { + "label": "GNN", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.PyTorch_GCN", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.PyTorch_GCN", + "peekOfCode": "class GNN(torch.nn.Module):\n def __init__(self, input_dims, hidden_dims, output_dims, activ_fn):\n super().__init__()\n layers = []\n layers.append(GCNConv(input_dims, hidden_dims[0]))\n for i in range(len(hidden_dims)-1):\n layers.append(GCNConv(hidden_dims[i], hidden_dims[i+1]))\n self.layers = ModuleList(layers)\n self.activ_fn = activ_fn\n self.classifier = Linear(hidden_dims[-1], output_dims)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.PyTorch_GCN", + "documentation": {} + }, + { + "label": "H_layer", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "peekOfCode": "def H_layer(nqubits):\n \"\"\"Layer of single-qubit Hadamard gates.\n \"\"\"\n for idx in range(nqubits):\n qml.Hadamard(wires=idx)\ndef encoder(w):\n \"\"\"Layer of parametrized qubit rotations around the x axis.\n \"\"\"\n for idx, element in enumerate(w):\n qml.RY(element, wires=idx)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "encoder", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "peekOfCode": "def encoder(w):\n \"\"\"Layer of parametrized qubit rotations around 
the x axis.\n \"\"\"\n for idx, element in enumerate(w):\n qml.RY(element, wires=idx)\ndef Rot_layer(gate, w):\n \"\"\"Layer of parametrized qubit rotations around the y axis.\n \"\"\"\n for idx, element in enumerate(w):\n gate(element, wires=idx)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "Rot_layer", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "peekOfCode": "def Rot_layer(gate, w):\n \"\"\"Layer of parametrized qubit rotations around the y axis.\n \"\"\"\n for idx, element in enumerate(w):\n gate(element, wires=idx)\ndef entangling_layer(nqubits):\n \"\"\"Layers of CZ and RY gates.\n \"\"\"\n for i in range(0, nqubits - 1): # Loop over even indices: i=0,2,...N-2\n qml.CNOT(wires=[i, i + 1])", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "entangling_layer", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "peekOfCode": "def entangling_layer(nqubits):\n \"\"\"Layers of CZ and RY gates.\n \"\"\"\n for i in range(0, nqubits - 1): # Loop over even indices: i=0,2,...N-2\n qml.CNOT(wires=[i, i + 1])\n qml.CNOT(wires=[nqubits-1, 0])\ndef quantum_net(n_qubits, q_depth):\n dev = qml.device(\"default.qubit\", wires=n_qubits)\n @qml.qnode(dev, interface='torch')\n def quantum_circuit(inputs, q_weights_flat):", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "quantum_net", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "description": 
"Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "peekOfCode": "def quantum_net(n_qubits, q_depth):\n dev = qml.device(\"default.qubit\", wires=n_qubits)\n @qml.qnode(dev, interface='torch')\n def quantum_circuit(inputs, q_weights_flat):\n \"\"\"\n The variational quantum circuit.\n \"\"\"\n # Reshape weights\n q_weights = q_weights_flat.reshape(q_depth, 2, n_qubits)\n # Embed features in the quantum node", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.QNN_Node_Embedding", + "documentation": {} + }, + { + "label": "QGCN", + "kind": 6, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_GCN", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_GCN", + "peekOfCode": "class QGCN(Module):\n def __init__(self, input_dims, q_depths, output_dims, activ_fn=LeakyReLU(0.2), classifier=None, readout=False):\n super().__init__()\n layers = []\n self.n_qubits = input_dims\n for q_depth in q_depths:\n nodeNN = quantum_net(self.n_qubits, q_depth)\n QGCNConv = GCNConv(self.n_qubits, nodeNN)\n layers.append(QGCNConv)\n self.layers = ModuleList(layers)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.models.Quantum_GCN", + "documentation": {} + }, + { + "label": "train_model", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "def train_model(model, optimizer, lossFn, epochs, lr, train_dataloader, val_dataloader):\n history = {'train_loss': [], 'val_loss': [],\n 'train_acc': [], 'val_acc': []}\n for epoch in range(epochs):\n train_loss, train_acc = run_model(\n model, epoch, train_dataloader, lossFn, optimizer)\n val_loss, val_acc = run_model(\n model, epoch, val_dataloader, lossFn, optimizer, train=False)\n print()\n history['train_loss'].append(train_loss)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + 
"documentation": {} + }, + { + "label": "run_model", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "def run_model(model, epoch, loader, lossFn, optimizer, train=True):\n if train:\n model.train()\n else:\n model.eval()\n loss = 0\n net_loss = 0\n correct = 0\n for batch_idx, data in (tqdm(enumerate(loader)) if train else enumerate(loader)):\n target = data.y", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "documentation": {} + }, + { + "label": "test_eval", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "def test_eval(model, test_dataloader):\n preds = []\n labels = []\n acc = 0\n for data in test_dataloader:\n target = data.y\n labels.append(target.detach().cpu().numpy())\n output = model(data.x, data.edge_index.type(torch.int64), data.batch)\n preds.append(output.detach().cpu().numpy()) # Convert to numpy array\n # probs = Sigmoid()(output).detach().cpu().numpy() # Convert to numpy array", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "documentation": {} + }, + { + "label": "auc", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "def auc(model, test_dataloader):\n labels, preds = test_eval(model, test_dataloader)\n plot_auc(labels, preds)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "documentation": {} + }, + { + "label": "config", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "config = 
read_configurations('./config.json')\nbatch_size = config['BATCH SIZE']\n# TODO: Save best model and metrics\ndef train_model(model, optimizer, lossFn, epochs, lr, train_dataloader, val_dataloader):\n history = {'train_loss': [], 'val_loss': [],\n 'train_acc': [], 'val_acc': []}\n for epoch in range(epochs):\n train_loss, train_acc = run_model(\n model, epoch, train_dataloader, lossFn, optimizer)\n val_loss, val_acc = run_model(", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "documentation": {} + }, + { + "label": "batch_size", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "peekOfCode": "batch_size = config['BATCH SIZE']\n# TODO: Save best model and metrics\ndef train_model(model, optimizer, lossFn, epochs, lr, train_dataloader, val_dataloader):\n history = {'train_loss': [], 'val_loss': [],\n 'train_acc': [], 'val_acc': []}\n for epoch in range(epochs):\n train_loss, train_acc = run_model(\n model, epoch, train_dataloader, lossFn, optimizer)\n val_loss, val_acc = run_model(\n model, epoch, val_dataloader, lossFn, optimizer, train=False)", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.train", + "documentation": {} + }, + { + "label": "read_configurations", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "peekOfCode": "def read_configurations(config_file):\n with open(config_file, 'r') as f:\n config = json.load(f)\n return config\nconfig = read_configurations(\"../config.json\")\nepochs = config[\"EPOCHS\"]\ndef plot_loss(history, step=2):\n x = range(epochs)\n plt.plot(x, history['train_loss'], label='Train loss')\n plt.plot(x, history['val_loss'], label='Val loss')", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "documentation": {} 
+ }, + { + "label": "plot_loss", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "peekOfCode": "def plot_loss(history, step=2):\n x = range(epochs)\n plt.plot(x, history['train_loss'], label='Train loss')\n plt.plot(x, history['val_loss'], label='Val loss')\n plt.plot(x, history['train_acc'], label='Train acc')\n plt.plot(x, history['val_acc'], label='Val acc')\n plt.xlabel('Epochs')\n plt.ylabel('Loss')\n plt.xticks(range(0, epochs, step), range(1, epochs+1, step))\n plt.legend()", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "peekOfCode": "def plot_auc(labels, preds):\n auc = roc_auc_score(labels, preds)\n fpr, tpr, _ = roc_curve(labels, preds)\n plt.plot(fpr, tpr, label=\"AUC = {0}\".format(auc))\n plt.xlabel('False Positive Rate')\n plt.ylabel('True Positive Rate')\n plt.legend()\n plt.show()", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "documentation": {} + }, + { + "label": "config", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "peekOfCode": "config = read_configurations(\"../config.json\")\nepochs = config[\"EPOCHS\"]\ndef plot_loss(history, step=2):\n x = range(epochs)\n plt.plot(x, history['train_loss'], label='Train loss')\n plt.plot(x, history['val_loss'], label='Val loss')\n plt.plot(x, history['train_acc'], label='Train acc')\n plt.plot(x, history['val_acc'], label='Val acc')\n plt.xlabel('Epochs')\n plt.ylabel('Loss')", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "documentation": {} 
+ }, + { + "label": "epochs", + "kind": 5, + "importPath": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "description": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "peekOfCode": "epochs = config[\"EPOCHS\"]\ndef plot_loss(history, step=2):\n x = range(epochs)\n plt.plot(x, history['train_loss'], label='Train loss')\n plt.plot(x, history['val_loss'], label='Val loss')\n plt.plot(x, history['train_acc'], label='Train acc')\n plt.plot(x, history['val_acc'], label='Val acc')\n plt.xlabel('Epochs')\n plt.ylabel('Loss')\n plt.xticks(range(0, epochs, step), range(1, epochs+1, step))", + "detail": "Quantum_GNN_for_HEP_Haemanth_Velmurugan.code.training.utils", + "documentation": {} + }, + { + "label": "get_distance", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def get_distance(tuple1,tuple2):\n return np.sqrt((tuple1[0]-tuple2[0])**2+(tuple1[1]-tuple2[1])**2)\n'''\n------------------------------------------------------------------------------------------\n Using graphs with a general number of nodes\n------------------------------------------------------------------------------------------\n'''\ndef preprocess_general(x_data,y_data): #,masses):\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "preprocess_general", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def preprocess_general(x_data,y_data): #,masses):\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])\n x_data = torch.tensor(x_data)\n y_data = torch.tensor(y_data)\n print()\n print('--- Inserting Masses ---')\n masses = 
torch.zeros((x_data.shape[0],x_data.shape[1]))\n for i,particle in tqdm(enumerate(unique_particles)):\n if particle!=0:", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "preprocess_fixed_nodes", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def preprocess_fixed_nodes(x_data,y_data,nodes_per_graph=10): #,masses):\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])\n x_data = torch.tensor(x_data)\n y_data = torch.tensor(y_data)\n print()\n print('--- Inserting Masses ---')\n masses = torch.zeros((x_data.shape[0],x_data.shape[1]))\n for i,particle in tqdm(enumerate(unique_particles)):\n if particle!=0:", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "preprocess_fixed_nodes_quantum", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def preprocess_fixed_nodes_quantum(x_data,y_data,nodes_per_graph=10): #,masses):\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])\n x_data = torch.tensor(x_data)\n y_data = torch.tensor(y_data)\n print()\n print('--- Inserting Masses ---')\n masses = torch.zeros((x_data.shape[0],x_data.shape[1]))\n for i,particle in tqdm(enumerate(unique_particles)):\n if particle!=0:", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "create_hamiltonian_matrix_interaction", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def create_hamiltonian_matrix_interaction(nodes_per_graph,\n edge_indx_tensor, \n edge_attr_matrix,\n 
term_dictionary = {0: [np.identity(2),np.identity(2)], \n 1: [qml.matrix(qml.PauliZ)(0), qml.matrix(qml.PauliZ)(0)]} ):\n # n_qubits == number of nodes in graph\n # graph == Data frame\n # weights == edge_attr\n n_qubits = nodes_per_graph # graph.x.shape[0]\n full_matrix = np.zeros((2 ** n_qubits, 2 ** n_qubits))", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "create_hamiltonian_matrix_transverse", + "kind": 2, + "importPath": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "description": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "peekOfCode": "def create_hamiltonian_matrix_transverse(nodes_per_graph):\n n_qubits = nodes_per_graph #graph.x.shape[0]\n full_matrix = np.zeros((2 ** n_qubits, 2 ** n_qubits))\n # Creates the bias components of the matrix\n for i in range(n_qubits):\n x_term = 1\n for j in range(n_qubits):\n if j==i:\n x_term = np.kron(x_term, qml.matrix(qml.PauliX)(0))\n else:", + "detail": "Quantum_GNN_for_HEP_Roy_Forestano.utils.preprocess", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.n_qubits = self.L + self.P\n self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L, self.P = qcbm.L, qcbm.P\n self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights_qcbm, weights_label):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies=3, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n self.phase = nn.Parameter(torch.zeros(self.K))\n self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n def forward_batch(self, x01_vec):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):\n super().__init__()\n self.mixer = mixer\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.Nlabel, self.Npos = 2**self.L, 2**self.P\n self.wf = 
nn.Parameter(torch.tensor(float(w_init)))\n self.wq = nn.Parameter(torch.tensor(float(w_init)))\n self.qfour = QuantumBlock(fourier_k, seed=seed)\n def batch_forward(self, x_pos01, probs_flat):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QuKANLayerCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuKANLayerCfg:\n n_nodes: int = 4\n n_label_qubits: int = 2\n n_pos_qubits: int = 6 \n qcbm_depth: int = 3\n label_mixer_depth: int = 1\n fourier_k: int = 3\n mixers_trainable: bool = False \nclass QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, seed=0):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QuKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, seed=0):\n super().__init__()\n self.cfg = cfg\n self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n self._built=False\n self._train_mixers = cfg.mixers_trainable\n def build(self, input_dim, seed=0):\n for m in range(self.cfg.n_nodes):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "KANReadoutCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class KANReadoutCfg:\n n_classes:int; in_dim:int; fourier_k:int=3\nclass KANReadout(nn.Module):\n def 
__init__(self,cfg:KANReadoutCfg,seed=0):\n super().__init__()\n self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n for c in range(C) for m in range(M)])\n self.b=nn.Parameter(torch.zeros(C))\n def _idx(self,c,m): return c*self.cfg.in_dim+m", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "KANReadout", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class KANReadout(nn.Module):\n def __init__(self,cfg:KANReadoutCfg,seed=0):\n super().__init__()\n self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n for c in range(C) for m in range(M)])\n self.b=nn.Parameter(torch.zeros(C))\n def _idx(self,c,m): return c*self.cfg.in_dim+m\n def forward(self,H):\n H01=torch.sigmoid(H); logits=[]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QuKANNetCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuKANNetCfg:\n layer1:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n layer2:QuKANLayerCfg=field(default_factory=lambda: QuKANLayerCfg(n_pos_qubits=6))\n n_classes:int=10\nclass QuKANNet(nn.Module):\n def __init__(self,cfg,input_dim,seed=0):\n super().__init__()\n self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": 
"QuKANNet", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "class QuKANNet(nn.Module):\n def __init__(self,cfg,input_dim,seed=0):\n super().__init__()\n self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)\n def pretrain_qcbms(self,degree=2,epochs=80,lr=5e-2):\n print(\"\\n[Pretrain] Layer 1 QCBM\"); self.l1.pretrain_qcbm_on_splines(degree,epochs,lr)\n print(\"\\n[Pretrain] Layer 2 QCBM\"); self.l2.pretrain_qcbm_on_splines(degree,epochs,lr)\n def forward(self,X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "bspline_basis_matrix", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:\n interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n else:\n interior = np.array([], dtype=float)\n knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n def N(i, r, t):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "run_digits", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "peekOfCode": "def run_digits(seed=0):\n torch.manual_seed(seed); np.random.seed(seed)\n digits = load_digits()\n X, y = digits.data.astype(np.float32), 
digits.target.astype(np.int64)\n X, y = X[:1000], y[:1000]\n X = MinMaxScaler((0,1)).fit_transform(X).astype(np.float32)\n X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)\n X_tr, X_te = torch.tensor(X_tr), torch.tensor(X_te)\n y_tr, y_te = torch.tensor(y_tr), torch.tensor(y_te)\n model = QuKANNet(QuKANNetCfg(), input_dim=64, seed=seed)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_digits", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.n_qubits = self.L + self.P\n self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3).float())\n self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L, self.P = qcbm.L, qcbm.P\n self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3).float())\n self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights_qcbm, weights_label):", + 
"detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies=3, depth=1, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.log_omega = nn.Parameter(torch.randn(self.K).float() * 0.05)\n self.phase = nn.Parameter(torch.zeros(self.K).float())\n self.w_cos = nn.Parameter(torch.randn(self.K).float() * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K).float() * 0.1)\n self.dev = qml.device(\"default.qubit\", wires=self.K)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n def __init__(self, mixer: LabelMixer, n_label_qubits, n_pos_qubits, k=3):\n super().__init__()\n self.mixer = mixer\n self.Nlabel, self.Npos = 2 ** n_label_qubits, 2 ** n_pos_qubits\n self.wf = nn.Parameter(torch.tensor(0.5).float())\n self.wq = nn.Parameter(torch.tensor(0.5).float())\n self.qfour = QuantumBlock(k)\n def batch_forward(self, x_pos01, probs_flat):\n lp = probs_flat.view(self.Nlabel, self.Npos)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "QuKANRegressor", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "class QuKANRegressor(nn.Module):\n def 
__init__(self, input_dim=1, hidden_nodes=6, seed=0):\n super().__init__()\n self.qcbm = QCBMState(2, 5, depth=3, seed=seed)\n self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n for m in range(hidden_nodes):\n for j in range(input_dim):\n mixer = LabelMixer(self.qcbm, depth=1, seed=seed + 97 * m + j)\n edge = QuKANResidualEdge(mixer, 2, 5, k=3)\n self.mixers.append(mixer)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "bspline_basis_matrix", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:\n interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n else:\n interior = np.array([], dtype=float)\n knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n def N(i, r, t):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "f_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def f_func(x): return torch.tanh(10*x + 0.5 + F.relu(x**2) * 10)\ndef g_func(x): return torch.sin(x) + torch.cos(5*x) * torch.exp(-x**2) + F.relu(x - 0.5)\ndef h_func(x): return torch.sigmoid(3*x) + F.relu(torch.sin(2*x) + x**3)\ndef k_func(x): return torch.tanh(5*x - 2) + 3 * F.relu(torch.cos(x**2))\ndef m_func(x): return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\ndef n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,", + "detail": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "g_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def g_func(x): return torch.sin(x) + torch.cos(5*x) * torch.exp(-x**2) + F.relu(x - 0.5)\ndef h_func(x): return torch.sigmoid(3*x) + F.relu(torch.sin(2*x) + x**3)\ndef k_func(x): return torch.tanh(5*x - 2) + 3 * F.relu(torch.cos(x**2))\ndef m_func(x): return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\ndef n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "h_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def h_func(x): return torch.sigmoid(3*x) + F.relu(torch.sin(2*x) + x**3)\ndef k_func(x): return torch.tanh(5*x - 2) + 3 * F.relu(torch.cos(x**2))\ndef m_func(x): return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\ndef n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,\n \"m_func\": m_func,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "k_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def k_func(x): return torch.tanh(5*x - 2) + 3 * F.relu(torch.cos(x**2))\ndef m_func(x): 
return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\ndef n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,\n \"m_func\": m_func,\n \"n_func\": n_func,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "m_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def m_func(x): return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\ndef n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,\n \"m_func\": m_func,\n \"n_func\": n_func,\n}", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "n_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\nFUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,\n \"m_func\": m_func,\n \"n_func\": n_func,\n}\ndef train_one_function(name, func, epochs=100, batch=64, seed=0):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "train_one_function", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def train_one_function(name, func, epochs=100, batch=64, seed=0):\n x = torch.linspace(-1, 1, 
500).unsqueeze(1).float()\n y = func(x).float()\n X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)\n model = QuKANRegressor(input_dim=1, hidden_nodes=6, seed=seed)\n model.pretrain_qcbm()\n opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4)\n mse = nn.MSELoss()\n train_losses, test_losses = [], []\n for ep in range(epochs):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "def main():\n for name, fn in FUNCTION_MAP.items():\n print(f\"\\nTraining QuKAN Regressor on {name}\")\n train_one_function(name, fn, epochs=500, batch=64, seed=0)\nif __name__ == \"__main__\":\n main()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "FUNCTION_MAP", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "peekOfCode": "FUNCTION_MAP = {\n \"f_func\": f_func,\n \"g_func\": g_func,\n \"h_func\": h_func,\n \"k_func\": k_func,\n \"m_func\": m_func,\n \"n_func\": n_func,\n}\ndef train_one_function(name, func, epochs=100, batch=64, seed=0):\n x = torch.linspace(-1, 1, 500).unsqueeze(1).float()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_equations", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n 
super().__init__()\n torch.manual_seed(seed)\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.n_qubits = self.L + self.P\n self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth=2, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L, self.P = qcbm.L, qcbm.P\n self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights_qcbm, weights_label):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies=4, entangle_depth=1, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n self.phase = nn.Parameter(torch.zeros(self.K))\n self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n self.dev = qml.device(\"default.qubit\", wires=self.K)", + "detail": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n \"\"\"\n Edge output = w_f * (QCBM label⊗position probability at discretized x)\n + w_q * (QuantumFourierBlock(x))\n \"\"\"\n def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=4, fourier_depth=1, seed=0, w_init=0.5):\n super().__init__()\n self.mixer = mixer\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.Nlabel, self.Npos = 2 ** self.L, 2 ** self.P", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuKANLayerCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuKANLayerCfg:\n n_nodes: int = 5 \n n_label_qubits: int = 2 \n n_pos_qubits: int = 6 \n qcbm_depth: int = 3\n label_mixer_depth: int = 2\n fourier_k: int = 4\n fourier_depth: int = 1\nclass QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, seed=0):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, seed=0):\n super().__init__()\n self.cfg = cfg\n self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n def 
build(self, input_dim, seed=0):\n print(f\"[QuKANLayer] Building with {input_dim} inputs...\")\n for m in range(self.cfg.n_nodes):\n for j in range(input_dim):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "KANReadoutCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class KANReadoutCfg:\n n_classes: int\n in_dim: int\n fourier_k: int = 3\n fourier_depth: int = 1\nclass KANReadout(nn.Module):\n \"\"\"\n KAN-style readout: for each output class, sum QuantumFourier transforms of each hidden unit (no Linear).\n \"\"\"\n def __init__(self, cfg: KANReadoutCfg, seed=0):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "KANReadout", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class KANReadout(nn.Module):\n \"\"\"\n KAN-style readout: for each output class, sum QuantumFourier transforms of each hidden unit (no Linear).\n \"\"\"\n def __init__(self, cfg: KANReadoutCfg, seed=0):\n super().__init__()\n self.cfg = cfg\n C, M = cfg.n_classes, cfg.in_dim\n self.qfr = nn.ModuleList([\n QuantumBlock(cfg.fourier_k, cfg.fourier_depth, seed + 131 * c + m)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuKANNetCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuKANNetCfg:\n layer1: QuKANLayerCfg = field(default_factory=QuKANLayerCfg)\n layer2: QuKANLayerCfg = 
field(default_factory=QuKANLayerCfg)\n n_classes: int = 2 \nclass QuKANNet(nn.Module):\n def __init__(self, cfg, input_dim, seed=0):\n super().__init__()\n print(\"[QuKANNet] Initializing network...\")\n self.l1 = QuKANLayer(cfg.layer1, seed); self.l1.build(input_dim, seed)\n self.l2 = QuKANLayer(cfg.layer2, seed+1); self.l2.build(cfg.layer1.n_nodes, seed+1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QuKANNet", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "class QuKANNet(nn.Module):\n def __init__(self, cfg, input_dim, seed=0):\n super().__init__()\n print(\"[QuKANNet] Initializing network...\")\n self.l1 = QuKANLayer(cfg.layer1, seed); self.l1.build(input_dim, seed)\n self.l2 = QuKANLayer(cfg.layer2, seed+1); self.l2.build(cfg.layer1.n_nodes, seed+1)\n self.readout = KANReadout(KANReadoutCfg(cfg.n_classes, cfg.layer2.n_nodes), seed+123)\n print(\"[QuKANNet] Build complete.\")\n def forward(self, X):\n h1 = self.l1(X)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "load_higgs_csv_first_n", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "def load_higgs_csv_first_n(csv_path: str, n_samples: int):\n data = np.loadtxt(csv_path, delimiter=\",\", max_rows=n_samples)\n y = data[:, 0].astype(np.int64) \n X = data[:, 1:29].astype(np.float32) \n scaler = MinMaxScaler((0, 1))\n X = scaler.fit_transform(X).astype(np.float32)\n return X, y\ndef run_higgs(csv_path: str,\n n_samples: int = 20000,\n epochs: int = 20,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + 
"documentation": {} + }, + { + "label": "run_higgs", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "peekOfCode": "def run_higgs(csv_path: str,\n n_samples: int = 20000,\n epochs: int = 20,\n batch_size: int = 128,\n seed: int = 0):\n torch.manual_seed(seed); np.random.seed(seed)\n print(f\"Loading first {n_samples} rows from: {csv_path}\")\n X, y = load_higgs_csv_first_n(csv_path, n_samples)\n X_tr, X_te, y_tr, y_te = train_test_split(\n X, y, test_size=0.2, random_state=seed, stratify=y", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_higgs_boson", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.L = n_label_qubits\n self.P = n_pos_qubits\n self.n_qubits = self.L + self.P\n self.depth = depth\n init = 0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32)\n self.theta = nn.Parameter(init)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L = qcbm.L\n self.P = qcbm.P\n self.n_qubits = qcbm.n_qubits\n self.depth = depth\n init = 0.01 * torch.randn(depth, self.L, 3, 
dtype=torch.float32)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.depth = entangle_depth\n self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05) \n self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32))\n self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int,\n fourier_k: int = 4, fourier_depth: int = 1, seed: int = 0, w_init=0.5):\n super().__init__()\n self.mixer = mixer\n self.L = n_label_qubits\n self.P = n_pos_qubits\n self.Nlabel = 2 ** self.L\n self.Npos = 2 ** self.P\n self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) ", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuKANLayerCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuKANLayerCfg:\n n_nodes: int = 6\n 
n_label_qubits: int = 2 \n n_pos_qubits: int = 5 \n qcbm_depth: int = 3\n label_mixer_depth: int = 2\n fourier_k: int = 4\n fourier_depth: int = 1\nclass QuKANLayer(nn.Module):\n \"\"\"", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuKANLayer(nn.Module):\n \"\"\"\n KAN-style: node_m = sum_j f_edge_{m,j}(x_j)\n Quantum part is independent of x (probabilities over pos bins);\n x affects which position bin is read out and the Fourier phase.\n \"\"\"\n def __init__(self, cfg: QuKANLayerCfg, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "KANReadoutCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class KANReadoutCfg:\n n_classes: int\n in_dim: int\n fourier_k: int = 3\n fourier_depth: int = 1\nclass KANReadout(nn.Module):\n def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "KANReadout", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class KANReadout(nn.Module):\n def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n C, M = cfg.n_classes, cfg.in_dim\n self.qfr = nn.ModuleList([\n 
QuantumBlock(k_frequencies=cfg.fourier_k,\n entangle_depth=cfg.fourier_depth,\n seed=seed + 131 * c + m)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuKANNetCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuKANNetCfg:\n layer1: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n n_classes: int = 3\nclass QuKANNet(nn.Module):\n def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n self.l1 = QuKANLayer(cfg.layer1, seed=seed)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QuKANNet", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "class QuKANNet(nn.Module):\n def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n self.l1 = QuKANLayer(cfg.layer1, seed=seed)\n self.l1.build(input_dim=input_dim, seed=seed)\n self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)\n self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)\n self.readout = KANReadout(", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "bspline_basis_matrix", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n \"\"\"\n Open-uniform B-spline basis on [0,1].\n num_splines = n+1, degree = p. Knot vector length must be n+p+2 with p+1 repeats at each end,\n and exactly (n-p) interior knots.\n \"\"\"\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "run_iris", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "peekOfCode": "def run_iris(seed=0):\n torch.manual_seed(seed)\n np.random.seed(seed)\n iris = load_iris()\n X = iris.data.astype(np.float32)\n y = iris.target.astype(np.int64)\n scaler = MinMaxScaler(feature_range=(0.0, 1.0))\n X01 = scaler.fit_transform(X).astype(np.float32)\n X_tr, X_te, y_tr, y_te = train_test_split(\n X01, y, test_size=0.3, random_state=seed, stratify=y", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_iris", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.L = n_label_qubits\n self.P = n_pos_qubits\n self.n_qubits = self.L + self.P\n self.depth = depth\n init = 0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32)\n self.theta = nn.Parameter(init)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + 
"label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L = qcbm.L\n self.P = qcbm.P\n self.n_qubits = qcbm.n_qubits\n self.depth = depth\n init = 0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.depth = entangle_depth\n self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05) \n self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32))\n self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int,\n fourier_k: int = 4, fourier_depth: int = 1, 
seed: int = 0, w_init=0.5):\n super().__init__()\n self.mixer = mixer\n self.L = n_label_qubits\n self.P = n_pos_qubits\n self.Nlabel = 2 ** self.L\n self.Npos = 2 ** self.P\n self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) ", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuKANLayerCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuKANLayerCfg:\n n_nodes: int = 6\n n_label_qubits: int = 2 \n n_pos_qubits: int = 5 \n qcbm_depth: int = 3\n label_mixer_depth: int = 2\n fourier_k: int = 4\n fourier_depth: int = 1\nclass QuKANLayer(nn.Module):\n \"\"\"", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuKANLayer(nn.Module):\n \"\"\"\n KAN-style: node_m = sum_j f_edge_{m,j}(x_j)\n Quantum part is independent of x (probabilities over pos bins);\n x affects which position bin is read out and the Fourier phase.\n \"\"\"\n def __init__(self, cfg: QuKANLayerCfg, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "KANReadoutCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class KANReadoutCfg:\n n_classes: int\n in_dim: int\n fourier_k: 
int = 3\n fourier_depth: int = 1\nclass KANReadout(nn.Module):\n def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "KANReadout", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class KANReadout(nn.Module):\n def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n C, M = cfg.n_classes, cfg.in_dim\n self.qfr = nn.ModuleList([\n QuantumBlock(k_frequencies=cfg.fourier_k,\n entangle_depth=cfg.fourier_depth,\n seed=seed + 131 * c + m)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuKANNetCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuKANNetCfg:\n layer1: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n n_classes: int = 2 \nclass QuKANNet(nn.Module):\n def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n self.l1 = QuKANLayer(cfg.layer1, seed=seed)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuKANNet", + "kind": 6, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "class QuKANNet(nn.Module):\n def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.cfg = cfg\n self.l1 = QuKANLayer(cfg.layer1, seed=seed)\n self.l1.build(input_dim=input_dim, seed=seed)\n self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)\n self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)\n self.readout = KANReadout(", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "bspline_basis_matrix", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n \"\"\"\n Open-uniform B-spline basis on [0,1].\n num_splines = n+1, degree = p. 
Knot vector length must be n+p+2 with p+1 repeats at each end,\n and exactly (n-p) interior knots.\n \"\"\"\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "run_social", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "def run_social(seed=0):\n torch.manual_seed(seed)\n np.random.seed(seed)\n assert os.path.exists(CSV_PATH), f\"CSV not found: {CSV_PATH}\"\n df = pd.read_csv(CSV_PATH)\n cols = [c.lower() for c in df.columns]\n col_map = {c.lower(): c for c in df.columns}\n needed = [\"age\", \"estimatedsalary\", \"purchased\"]\n for k in needed:\n assert k in cols, f\"Column '{k}' not found in CSV. Found columns: {df.columns.tolist()}\"", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "CSV_PATH", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "peekOfCode": "CSV_PATH = \"C:\\\\Users\\\\riakh\\\\Downloads\\\\Social_Network_Ads.csv\"\ntorch.set_default_dtype(torch.float32)\ndef bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n \"\"\"\n Open-uniform B-spline basis on [0,1].\n num_splines = n+1, degree = p. 
Knot vector length must be n+p+2 with p+1 repeats at each end,\n and exactly (n-p) interior knots.\n \"\"\"\n assert num_splines >= degree + 1\n n = num_splines - 1", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QCBMState(nn.Module):\n def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.n_qubits = self.L + self.P\n self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class LabelMixer(nn.Module):\n def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L, self.P = qcbm.L, qcbm.P\n self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def qnode(weights_qcbm, weights_label):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuantumBlock", + "kind": 6, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuantumBlock(nn.Module):\n def __init__(self, k_frequencies=3, seed=0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies\n self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n self.phase = nn.Parameter(torch.zeros(self.K))\n self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n def forward_batch(self, x01_vec):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuKANResidualEdge", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuKANResidualEdge(nn.Module):\n def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):\n super().__init__()\n self.mixer = mixer\n self.L, self.P = n_label_qubits, n_pos_qubits\n self.Nlabel, self.Npos = 2**self.L, 2**self.P\n self.wf = nn.Parameter(torch.tensor(float(w_init)))\n self.wq = nn.Parameter(torch.tensor(float(w_init)))\n self.qfour = QuantumBlock(fourier_k, seed=seed)\n def batch_forward(self, x_pos01, probs_flat):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuKANLayerCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuKANLayerCfg:\n n_nodes: int = 6\n n_label_qubits: int = 2\n n_pos_qubits: int = 5 \n qcbm_depth: int = 3\n label_mixer_depth: int = 1\n fourier_k: int = 3\n mixers_trainable: bool = False\nclass QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, 
seed=0):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuKANLayer(nn.Module):\n def __init__(self, cfg: QuKANLayerCfg, seed=0):\n super().__init__()\n self.cfg = cfg\n self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n self._built=False\n self._train_mixers = cfg.mixers_trainable\n def build(self, input_dim, seed=0):\n for m in range(self.cfg.n_nodes):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "KANReadoutCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class KANReadoutCfg:\n n_classes:int; in_dim:int; fourier_k:int=3\nclass KANReadout(nn.Module):\n def __init__(self,cfg:KANReadoutCfg,seed=0):\n super().__init__()\n self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n for c in range(C) for m in range(M)])\n self.b=nn.Parameter(torch.zeros(C))\n def _idx(self,c,m): return c*self.cfg.in_dim+m", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "KANReadout", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class KANReadout(nn.Module):\n def __init__(self,cfg:KANReadoutCfg,seed=0):\n super().__init__()\n self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n 
self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n for c in range(C) for m in range(M)])\n self.b=nn.Parameter(torch.zeros(C))\n def _idx(self,c,m): return c*self.cfg.in_dim+m\n def forward(self,H):\n H01=torch.sigmoid(H); logits=[]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuKANNetCfg", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuKANNetCfg:\n layer1:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n layer2:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n n_classes:int=2\nclass QuKANNet(nn.Module):\n def __init__(self,cfg,input_dim,seed=0):\n super().__init__()\n self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuKANNet", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "class QuKANNet(nn.Module):\n def __init__(self,cfg,input_dim,seed=0):\n super().__init__()\n self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)\n def pretrain_qcbms(self,degree=2,epochs=80,lr=5e-2):\n print(\"\\n[Pretrain] Layer 1 QCBM\"); self.l1.pretrain_qcbm_on_splines(degree,epochs,lr)\n print(\"\\n[Pretrain] Layer 2 QCBM\"); self.l2.pretrain_qcbm_on_splines(degree,epochs,lr)\n def forward(self,X):", + 
"detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "bspline_basis_matrix", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:\n interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n else:\n interior = np.array([], dtype=float)\n knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n def N(i, r, t):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "load_titanic_features", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "def load_titanic_features(csv_path: str):\n assert os.path.exists(csv_path), f\"CSV not found: {csv_path}\"\n df = pd.read_csv(csv_path)\n cols_map = {c.lower(): c for c in df.columns}\n survived = _first_present(cols_map, [\"survived\"])\n pclass = _first_present(cols_map, [\"pclass\",\"p class\",\"p_class\"])\n sex = _first_present(cols_map, [\"sex\",\"gender\"])\n age = _first_present(cols_map, [\"age\"])\n sibsp = _first_present(cols_map, [\"sibsp\",\"siblings/spouses aboard\",\"siblingsaboard\",\"siblings_spouses_aboard\"])\n parch = _first_present(cols_map, [\"parch\",\"parents/children aboard\",\"parentschildrenaboard\",\"parents_children_aboard\"])", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "run_titanic", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "def run_titanic(seed=0):\n torch.manual_seed(seed); np.random.seed(seed)\n X, y = load_titanic_features(CSV_PATH)\n X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)\n X_tr, X_te = torch.tensor(X_tr, dtype=torch.float32), torch.tensor(X_te, dtype=torch.float32)\n y_tr, y_te = torch.tensor(y_tr, dtype=torch.long), torch.tensor(y_te, dtype=torch.long)\n input_dim = X_tr.shape[1]\n print(f\"Input dim: {input_dim}\")\n cfg = QuKANNetCfg(\n layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, mixers_trainable=False),", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "CSV_PATH", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "peekOfCode": "CSV_PATH = \"C:\\Users\\riakh\\Downloads\\archive\\Titanic-Dataset.csv\"\ndef bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n assert num_splines >= degree + 1\n n = num_splines - 1\n p = degree\n if n - p > 0:\n interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n else:\n interior = np.array([], dtype=float)\n knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.fully_qkan_titanic", + "documentation": {} + }, + { + "label": "QuantumKANClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "class QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n 
self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)\n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "digits", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "digits = load_digits()\nX = digits.data[:1000] # 1000 samples\ny = digits.target[:1000]\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "X", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "X = digits.data[:1000] # 1000 samples\ny = digits.target[:1000]\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "y", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "y = digits.target[:1000]\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "scaler", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "scaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):\n super().__init__()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "X_scaled", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "X_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):\n super().__init__()\n self.num_features = 
num_features", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "X_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "X_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):\n super().__init__()\n self.num_features = num_features\n self.degree = degree", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "y_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "y_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(\n X_tensor, y_tensor, test_size=0.2, random_state=42\n)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=64, degree=4, num_classes=10):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "model = QuantumKANClassifier()\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else 
\"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "criterion", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "criterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "optimizer", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "optimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "model = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "X_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "X_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n 
model.train()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "y_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "X_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "X_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "y_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs 
= model(X_train)\n loss = criterion(outputs, y_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "num_epochs", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "peekOfCode": "num_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)\n loss.backward()\n optimizer.step()\n with torch.no_grad():\n pred_train = outputs.argmax(dim=1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_digits", + "documentation": {} + }, + { + "label": "QuantumKANRegressor", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "class QuantumKANRegressor(nn.Module):\n def __init__(self, num_features, degree=5):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=1)\n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "bump", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def bump(x): return torch.exp(-10 * (x - 0.2)**2) + torch.exp(-50 * (x + 0.5)**2)\ndef runge(x): return 1 / (1 + 25 * x**2)\ndef exp_sin(x): return torch.exp(-x**2) * torch.sin(5 * x)\ndef 
noisy_step(x): return torch.heaviside(x, torch.tensor(0.0)) + 0.1 * torch.sin(20 * x)\ndef sigmoid_bumps(x): return torch.sigmoid(8 * (x - 0.5)) + torch.sigmoid(-10 * (x + 0.3)) - 1\ndef sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "runge", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def runge(x): return 1 / (1 + 25 * x**2)\ndef exp_sin(x): return torch.exp(-x**2) * torch.sin(5 * x)\ndef noisy_step(x): return torch.heaviside(x, torch.tensor(0.0)) + 0.1 * torch.sin(20 * x)\ndef sigmoid_bumps(x): return torch.sigmoid(8 * (x - 0.5)) + torch.sigmoid(-10 * (x + 0.3)) - 1\ndef sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "exp_sin", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def exp_sin(x): return torch.exp(-x**2) * torch.sin(5 * x)\ndef noisy_step(x): return torch.heaviside(x, torch.tensor(0.0)) + 0.1 * torch.sin(20 * x)\ndef sigmoid_bumps(x): return torch.sigmoid(8 * (x - 0.5)) + torch.sigmoid(-10 * (x + 0.3)) - 1\ndef sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n 
\"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "noisy_step", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def noisy_step(x): return torch.heaviside(x, torch.tensor(0.0)) + 0.1 * torch.sin(20 * x)\ndef sigmoid_bumps(x): return torch.sigmoid(8 * (x - 0.5)) + torch.sigmoid(-10 * (x + 0.3)) - 1\ndef sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,\n \"sigmoid_bumps\": sigmoid_bumps,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "sigmoid_bumps", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def sigmoid_bumps(x): return torch.sigmoid(8 * (x - 0.5)) + torch.sigmoid(-10 * (x + 0.3)) - 1\ndef sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,\n \"sigmoid_bumps\": sigmoid_bumps,\n \"sawtooth\": sawtooth,", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "sawtooth", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def 
sawtooth(x): return 2 * (x - torch.floor(x + 0.5))\ndef default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,\n \"sigmoid_bumps\": sigmoid_bumps,\n \"sawtooth\": sawtooth,\n \"default\": lambda x: torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "default_func", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "def default_func(x): return torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\nFUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,\n \"sigmoid_bumps\": sigmoid_bumps,\n \"sawtooth\": sawtooth,\n \"default\": lambda x: torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\n}", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "FUNCTION_MAP", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "FUNCTION_MAP = {\n \"bump\": bump,\n \"runge\": runge,\n \"exp_sin\": exp_sin,\n \"noisy_step\": noisy_step,\n \"sigmoid_bumps\": sigmoid_bumps,\n \"sawtooth\": sawtooth,\n \"default\": lambda x: torch.tanh(10 * x + 0.5 + torch.clamp(x**2, min=0) * 10)\n}\nclass QuantumKANRegressor(nn.Module):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "model = QuantumKANRegressor()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.01)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\ncriterion = nn.MSELoss()\ntrain_rmse, test_rmse = [], []\nnum_epochs = 1000\nfor name, func in FUNCTION_MAP.items():\n print(f\"\\nTraining on Function: {name}\")\n x = torch.linspace(-1, 1, 500).unsqueeze(1)\n y = func(x)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "optimizer", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "optimizer = torch.optim.Adam(model.parameters(), lr=0.01)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\ncriterion = nn.MSELoss()\ntrain_rmse, test_rmse = [], []\nnum_epochs = 1000\nfor name, func in FUNCTION_MAP.items():\n print(f\"\\nTraining on Function: {name}\")\n x = torch.linspace(-1, 1, 500).unsqueeze(1)\n y = func(x)\n X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\ncriterion = nn.MSELoss()\ntrain_rmse, test_rmse = [], []\nnum_epochs = 1000\nfor name, func in FUNCTION_MAP.items():\n print(f\"\\nTraining on Function: {name}\")\n x = torch.linspace(-1, 1, 500).unsqueeze(1)\n y = func(x)\n X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, 
random_state=42)\n train_losses, test_losses = [], []", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "criterion", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "criterion = nn.MSELoss()\ntrain_rmse, test_rmse = [], []\nnum_epochs = 1000\nfor name, func in FUNCTION_MAP.items():\n print(f\"\\nTraining on Function: {name}\")\n x = torch.linspace(-1, 1, 500).unsqueeze(1)\n y = func(x)\n X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)\n train_losses, test_losses = [], []\n for epoch in range(num_epochs):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "num_epochs", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "peekOfCode": "num_epochs = 1000\nfor name, func in FUNCTION_MAP.items():\n print(f\"\\nTraining on Function: {name}\")\n x = torch.linspace(-1, 1, 500).unsqueeze(1)\n y = func(x)\n X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)\n train_losses, test_losses = [], []\n for epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_equations", + "documentation": {} + }, + { + "label": "QuantumKANClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "class QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = 
num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes) # e.g., binary => 2\n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "chebyshev_polynomials", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "chebyshev_polynomials = [\n [0, 1],\n [-1, 0, 2],\n [0, -3, 0, 4],\n [1, 0, -8, 0, 8]\n]\niris = load_iris()\nX = iris.data\ny = iris.target\nscaler = MinMaxScaler(feature_range=(-1, 1))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "iris", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "iris = load_iris()\nX = iris.data\ny = iris.target\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "X", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "X = 
iris.data\ny = iris.target\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "y", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "y = iris.target\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = num_features", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "scaler", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "scaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = num_features\n 
self.degree = degree", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "X_scaled", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "X_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "X_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "X_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "y_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": 
"y_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=5, num_classes=2):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "model = QuantumKANClassifier()\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "criterion", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "criterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", 
X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "optimizer", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "optimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "model = model.to(device)\nX_train = 
X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "X_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "X_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "y_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "X_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "X_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "y_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "num_epochs", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "peekOfCode": "num_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)\n loss.backward()\n optimizer.step()\n with torch.no_grad():\n pred_train = outputs.argmax(dim=1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_iris", + "documentation": {} + }, + { + "label": "QSVTLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "class QSVTLayer(nn.Module):\n def __init__(self, chunk_size, coeffs):\n super().__init__()\n self.coeffs = coeffs\n self.theta = nn.Parameter(torch.randn(1))\n self.dev = qml.device(\"default.qubit\", wires=1)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def circuit(x_scalar, theta):\n x_scalar = torch.clamp(x_scalar, -1.0, 1.0)\n qml.RY(x_scalar, wires=0)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "QuantumSummation", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "class QuantumSummation(nn.Module):\n def __init__(self, chunk_size):\n super().__init__()\n self.weights = nn.Parameter(torch.rand(chunk_size))\n self.num_index_qubits = math.ceil(math.log2(chunk_size))\n self.total_wires = self.num_index_qubits + 2\n self.dev = qml.device(\"default.qubit\", wires=self.total_wires)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def circuit(phi_vals, wts):\n index_wires = list(range(self.num_index_qubits))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "SineKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "class SineKANLayer(nn.Module):\n def __init__(self, input_dim, output_dim, device='cpu', grid_size=5, is_first=False, add_bias=True, norm_freq=True):\n super(SineKANLayer, self).__init__()\n self.grid_size = grid_size\n self.device = device\n self.input_dim = input_dim\n self.output_dim = output_dim\n self.is_first = is_first\n self.add_bias 
= add_bias\n # Hyperparameters for phase evolution (used in forward_step)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "SineKAN", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "class SineKAN(nn.Module):\n def __init__(self, input_dim, hidden=64, output_dim=2):\n super().__init__()\n self.l1 = SineKANLayer(input_dim, hidden, is_first=True)\n self.l2 = SineKANLayer(hidden, output_dim)\n def forward(self, x):\n return self.l2(self.l1(x))\nclass HybridQKAN(nn.Module):\n def __init__(self, input_dim, cheb_coeffs, chunk_size=8):\n super().__init__()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "HybridQKAN", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "class HybridQKAN(nn.Module):\n def __init__(self, input_dim, cheb_coeffs, chunk_size=8):\n super().__init__()\n self.chunk_size = chunk_size\n self.num_chunks = input_dim // chunk_size\n self.qsvt_layers = nn.ModuleList([\n QSVTLayer(chunk_size, coeffs=cheb_coeffs[i % len(cheb_coeffs)])\n for i in range(self.num_chunks)\n ])\n self.lcu_weights = nn.Parameter(torch.randn(self.num_chunks, chunk_size))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "load_and_preprocess", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "def load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n with 
h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cx, cy):\n x, y = img.shape[1:3]\n sx, sy = x // 2 - cx // 2, y // 2 - cy // 2\n return img[:, sx:sx + cx, sy:sy + cy, :]\n cropped = crop_center(X_jets, crop_size, crop_size)\n ch0 = cropped[..., 0]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "make_lcu_layer", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "def make_lcu_layer(chunk_size):\n dev = qml.device(\"default.qubit\", wires=3)\n @qml.qnode(dev, interface=\"torch\", diff_method=\"backprop\")\n def lcu_circuit(vals, weights):\n qml.RY(vals[0], wires=0)\n qml.RY(weights[0], wires=1)\n qml.CNOT(wires=[0, 2])\n angle = torch.clamp(vals[0] * weights[0], -math.pi, math.pi)\n qml.ctrl(qml.RZ, control=0)(angle, wires=2)\n return qml.expval(qml.PauliZ(2))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "forward_step", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "def forward_step(i_n, grid_size, A, K, C):\n ratio = A * grid_size**(-K) + C\n return ratio * i_n\nclass SineKANLayer(nn.Module):\n def __init__(self, input_dim, output_dim, device='cpu', grid_size=5, is_first=False, add_bias=True, norm_freq=True):\n super(SineKANLayer, self).__init__()\n self.grid_size = grid_size\n self.device = device\n self.input_dim = input_dim\n self.output_dim = output_dim", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "train_qkan_model", + "kind": 2, + 
"importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "peekOfCode": "def train_qkan_model(filepath):\n X_train, X_test, y_train, y_test = load_and_preprocess(filepath, Nsamples=1000, crop_size=8)\n input_dim = X_train.shape[1]\n cheb_coeffs = [[0, 1], [-1, 0, 2], [0, -3, 0, 4]]\n model = HybridQKAN(input_dim=input_dim, cheb_coeffs=cheb_coeffs, chunk_size=8)\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n model.to(device)\n optimizer = optim.Adam(model.parameters(), lr=1e-3)\n criterion = nn.CrossEntropyLoss()\n for epoch in range(50):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_quark_gluon", + "documentation": {} + }, + { + "label": "QuantumKANClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "class QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)\n def forward(self, X):\n B = X.size(0)\n features = []", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "df", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "df = pd.read_csv(\"C://Users//riakh//Downloads//archive//Social_Network_Ads.csv\")\nX = df[['Age', 
'EstimatedSalary']].values\ny = df['Purchased'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "X", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "X = df[['Age', 'EstimatedSalary']].values\ny = df['Purchased'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "y", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "y = df['Purchased'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass 
QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "scaler", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "scaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "X_scaled", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "X_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in 
range(num_features)])", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "X_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "X_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "y_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "y_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=2, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)\n def forward(self, X):", + "detail": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "model = QuantumKANClassifier()\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "criterion", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "criterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "optimizer", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "optimizer = torch.optim.Adam(model.parameters(), 
lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "model = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):", + "detail": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "X_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "X_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "y_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "X_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "X_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots 
dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "y_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "num_epochs", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "peekOfCode": "num_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)\n loss.backward()\n optimizer.step()\n with torch.no_grad():\n pred_train = outputs.argmax(dim=1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_social_networks_ad", + "documentation": {} + }, + { + "label": "QuantumKANClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "class QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = 
QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)\n def forward(self, X):\n B = X.size(0)\n features = []", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "df", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "df = pd.read_csv(\"C://Users//riakh//Downloads//archive//Titanic-Dataset.csv\")\ndf = df[['Pclass', 'Sex', 'Age', 'Fare', 'Survived']].dropna()\ndf['Sex'] = df['Sex'].map({'male': 0, 'female': 1})\nX = df[['Pclass', 'Sex', 'Age', 'Fare']].values\ny = df['Survived'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "df", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "df = df[['Pclass', 'Sex', 'Age', 'Fare', 'Survived']].dropna()\ndf['Sex'] = df['Sex'].map({'male': 0, 'female': 1})\nX = df[['Pclass', 'Sex', 'Age', 'Fare']].values\ny = df['Survived'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, 
y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "df['Sex']", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "df['Sex'] = df['Sex'].map({'male': 0, 'female': 1})\nX = df[['Pclass', 'Sex', 'Age', 'Fare']].values\ny = df['Survived'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "X", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "X = df[['Pclass', 'Sex', 'Age', 'Fare']].values\ny = df['Survived'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "y", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + 
"description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "y = df['Survived'].values\nscaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "scaler", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "scaler = MinMaxScaler(feature_range=(-1, 1))\nX_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "X_scaled", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "X_scaled = scaler.fit_transform(X)\nX_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, 
y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "X_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "X_tensor = torch.tensor(X_scaled, dtype=torch.float32)\ny_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "y_tensor", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "y_tensor = torch.tensor(y, dtype=torch.long)\nX_train, X_test, y_train, y_test = train_test_split(X_tensor, y_tensor, test_size=0.2, random_state=42)\nclass QuantumKANClassifier(nn.Module):\n def __init__(self, num_features=4, degree=3, num_classes=2):\n super().__init__()\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n 
self.lcu_weights = nn.Parameter(torch.rand(num_features, degree))\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes)\n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "model = QuantumKANClassifier(num_features=4, degree=3, num_classes=2)\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "criterion", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "criterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "optimizer", + "kind": 5, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "optimizer = torch.optim.Adam(model.parameters(), lr=0.5)\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nmodel = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "model", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "model = model.to(device)\nX_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", 
model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "X_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "X_train = X_train.float().to(device)\ny_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "y_train = y_train.long().to(device)\nX_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "X_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "X_test = X_test.float().to(device)\ny_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights 
dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "y_test = y_test.long().to(device)\nprint(\"X_train dtype:\", X_train.dtype)\nprint(\"weights dtype:\", model.kan_layer.weights.dtype)\nprint(\"knots dtype:\", model.kan_layer.knots.dtype)\nnum_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "num_epochs", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "peekOfCode": "num_epochs = 1000\nfor epoch in range(num_epochs):\n model.train()\n optimizer.zero_grad()\n outputs = model(X_train)\n loss = criterion(outputs, y_train)\n loss.backward()\n optimizer.step()\n with torch.no_grad():\n pred_train = outputs.argmax(dim=1)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.experiments.hybrid_qkan_titanic", + "documentation": {} + }, + { + "label": "MLPClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "peekOfCode": "class MLPClassifier(nn.Module):\n def __init__(self, input_dim, hidden_dim=128, output_dim=2):\n super().__init__()\n 
self.model = nn.Sequential(\n nn.Linear(input_dim, hidden_dim),\n nn.ReLU(),\n nn.Linear(hidden_dim, hidden_dim),\n nn.ReLU(),\n nn.Linear(hidden_dim, output_dim)\n )", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "documentation": {} + }, + { + "label": "load_and_preprocess", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "peekOfCode": "def load_and_preprocess(file_path, Nsamples=1000, crop_size=12):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cx, cy):\n x, y = img.shape[1:3]\n sx, sy = x // 2 - cx // 2, y // 2 - cy // 2\n return img[:, sx:sx+cx, sy:sy+cy, :]\n cropped = crop_center(X_jets, crop_size, crop_size)\n ch0 = cropped[..., 1]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "documentation": {} + }, + { + "label": "train_mlp_model", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "peekOfCode": "def train_mlp_model(file_path, epochs=50, batch_size=32, lr=0.001):\n X_train, X_test, y_train, y_test = load_and_preprocess(file_path, Nsamples=1000, crop_size=12)\n train_dataset = torch.utils.data.TensorDataset(X_train, y_train)\n test_dataset = torch.utils.data.TensorDataset(X_test, y_test)\n train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True)\n test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size)\n input_dim = X_train.shape[1]\n model = MLPClassifier(input_dim=input_dim, hidden_dim=128, output_dim=2)\n device = torch.device(\"cuda\" if 
torch.cuda.is_available() else \"cpu\")\n model.to(device)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "documentation": {} + }, + { + "label": "SEED", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "peekOfCode": "SEED = 10\ntorch.manual_seed(SEED)\nnp.random.seed(SEED)\ndef load_and_preprocess(file_path, Nsamples=1000, crop_size=12):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cx, cy):\n x, y = img.shape[1:3]\n sx, sy = x // 2 - cx // 2, y // 2 - cy // 2", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.MLP_quarkgluon", + "documentation": {} + }, + { + "label": "SineKANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "class SineKANLayer(nn.Module):\n def __init__(self, input_dim, output_dim, device='cpu', grid_size=5, is_first=False, add_bias=True, norm_freq=True):\n super().__init__()\n self.grid_size = grid_size\n self.device = device\n self.input_dim = input_dim\n self.output_dim = output_dim\n self.is_first = is_first\n self.add_bias = add_bias\n self.A, self.K, self.C = 0.9724, 0.9884, 0.9994", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "SineKAN", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": 
"class SineKAN(nn.Module):\n def __init__(self, layers_hidden: List[int], grid_size=8, device='cpu'):\n super().__init__()\n self.layers = nn.ModuleList([\n SineKANLayer(in_dim, out_dim, device, grid_size=grid_size, is_first=(i == 0))\n for i, (in_dim, out_dim) in enumerate(zip(layers_hidden[:-1], layers_hidden[1:]))\n ])\n def forward(self, x):\n for layer in self.layers:\n x = layer(x)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "load_and_preprocess", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "def load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cropx, cropy):\n x, y = img.shape[1:3]\n startx = x // 2 - (cropx // 2)\n starty = y // 2 - (cropy // 2)\n return img[:, startx:startx + cropx, starty:starty + cropy, :]\n cropped = crop_center(X_jets, crop_size, crop_size)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "forward_step", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "def forward_step(i_n, grid_size, A, K, C):\n ratio = A * grid_size**(-K) + C\n return ratio * i_n\nclass SineKANLayer(nn.Module):\n def __init__(self, input_dim, output_dim, device='cpu', grid_size=5, is_first=False, add_bias=True, norm_freq=True):\n super().__init__()\n self.grid_size = grid_size\n self.device = device\n self.input_dim = input_dim\n 
self.output_dim = output_dim", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "train_sinekan_model", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "def train_sinekan_model(X_train, X_test, y_train, y_test, input_dim=128, output_dim=2):\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n # model = SineKAN([input_dim, 64, output_dim], grid_size=8, device=device).to(device)\n model = SineKAN([input_dim, 64, 32, output_dim], grid_size=8, device=device).to(device)\n optimizer = optim.AdamW(model.parameters(), lr=1e-2)\n scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.9)\n criterion = nn.CrossEntropyLoss()\n best_acc = 0.0\n best_state = None\n for epoch in range(300):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "SEED", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "SEED = 10\ntorch.manual_seed(SEED)\nnp.random.seed(SEED)\ncrop_size = 8\nNsamples = 1000\nfile_path = \"C:\\\\Users\\\\riakh\\\\Downloads\\\\quark-gluon_train-set_n793900-001.hdf5\"\ndef load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "crop_size", + "kind": 5, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "crop_size = 8\nNsamples = 1000\nfile_path = \"C:\\\\Users\\\\riakh\\\\Downloads\\\\quark-gluon_train-set_n793900-001.hdf5\"\ndef load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cropx, cropy):\n x, y = img.shape[1:3]\n startx = x // 2 - (cropx // 2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "Nsamples", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "Nsamples = 1000\nfile_path = \"C:\\\\Users\\\\riakh\\\\Downloads\\\\quark-gluon_train-set_n793900-001.hdf5\"\ndef load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cropx, cropy):\n x, y = img.shape[1:3]\n startx = x // 2 - (cropx // 2)\n starty = y // 2 - (cropy // 2)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "file_path", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "peekOfCode": "file_path = \"C:\\\\Users\\\\riakh\\\\Downloads\\\\quark-gluon_train-set_n793900-001.hdf5\"\ndef load_and_preprocess(file_path, Nsamples=1000, crop_size=8):\n 
with h5py.File(file_path, \"r\") as f:\n X_jets = f[\"X_jets\"][:Nsamples]\n y = f[\"y\"][:Nsamples]\n def crop_center(img, cropx, cropy):\n x, y = img.shape[1:3]\n startx = x // 2 - (cropx // 2)\n starty = y // 2 - (cropy // 2)\n return img[:, startx:startx + cropx, starty:starty + cropy, :]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.ClassicalModelsforBenchmarking.SineKAN_quarkgluon", + "documentation": {} + }, + { + "label": "LabelMixer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.LabelMixer", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.LabelMixer", + "peekOfCode": "class LabelMixer(nn.Module):\n \"\"\"\n Applies an extra entangling block on label qubits,\n after the QCBM has been prepared.\n \"\"\"\n def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.qcbm = qcbm\n self.L = qcbm.L", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.LabelMixer", + "documentation": {} + }, + { + "label": "QCBMState", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QCBM", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QCBM", + "peekOfCode": "class QCBMState(nn.Module):\n \"\"\"\n Quantum Circuit Born Machine (QCBM) state preparation.\n Produces probability distribution over 2^(L+P) outcomes.\n \"\"\"\n def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.L = n_label_qubits\n self.P = n_pos_qubits", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QCBM", + "documentation": {} + }, + { + "label": "QuantumFourierBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QuantumBlock", + "description": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QuantumBlock", + "peekOfCode": "class QuantumFourierBlock(nn.Module):\n \"\"\"\n Quantum Fourier Residual block.\n Maps input scalars in [0,1] into Fourier-like features\n using quantum rotations and entangling layers.\n \"\"\"\n def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0):\n super().__init__()\n torch.manual_seed(seed)\n self.K = k_frequencies", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.FullyQKAN_model_components.QuantumBlock", + "documentation": {} + }, + { + "label": "QuantumKANClassifier", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanclassifier", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanclassifier", + "peekOfCode": "class QuantumKANClassifier(nn.Module):\n def __init__(self, num_features, degree=3, num_classes=2):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=num_classes) \n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanclassifier", + "documentation": {} + }, + { + "label": "QuantumKANRegressor", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanregressor", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanregressor", + "peekOfCode": "class QuantumKANRegressor(nn.Module):\n def __init__(self, num_features, degree=3):\n super().__init__()\n self.num_features = num_features\n self.degree = degree\n self.qsvt = QSVT(wires=1, degree=degree, 
depth=2)\n self.lcu_weights = nn.Parameter(torch.rand(num_features, degree)) # (F, P)\n self.sum_blocks = nn.ModuleList([QuantumSumBlock(degree) for _ in range(num_features)])\n self.kan = KANLayer(in_features=num_features, out_features=1)\n def forward(self, X):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.hybridqkanregressor", + "documentation": {} + }, + { + "label": "quantum_lcu_block", + "kind": 2, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.LCU", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.LCU", + "peekOfCode": "def quantum_lcu_block(qsvt_vals, weight_vals):\n \"\"\"\n qsvt_vals: tensor of shape (P,)\n weight_vals: tensor of shape (P,)\n Output: tensor scalar (Z expectation after quantum weighting)\n \"\"\"\n P = len(qsvt_vals)\n n_ctrl = math.ceil(math.log2(P))\n wires = list(range(n_ctrl + 1)) # control + 1 target\n dev = qml.device(\"default.qubit\", wires=len(wires))", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.LCU", + "documentation": {} + }, + { + "label": "QSVT", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "peekOfCode": "class QSVT(nn.Module):\n def __init__(self, degree=5, wires=4):\n super().__init__()\n self.degree = degree\n self.wires = wires\n self.target_polys = target_polys\n self.dev = qml.device(\"default.qubit\", wires=wires)\n @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n def circuit(x, poly):\n for i in range(wires):", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "documentation": {} + }, + { + "label": "target_polys", + "kind": 5, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "peekOfCode": "target_polys = [\n [0, 1],\n [-1, 0, 2],\n [0, -3, 0, 4],\n [1, 0, -8, 0, 8]\n]\nclass QSVT(nn.Module):\n def __init__(self, degree=5, wires=4):\n super().__init__()\n self.degree = degree", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_chebyshevpoly", + "documentation": {} + }, + { + "label": "QSVT", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_sinepoly", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_sinepoly", + "peekOfCode": "class QSVT(nn.Module):\n def __init__(self, wires=4, degree=5):\n super().__init__()\n self.wires = wires\n self.dev = qml.device(\"default.qubit\", wires=wires)\n xs = np.linspace(-1, 1, 200)\n fx = sum(np.sin(k * np.pi * xs) for k in [1, 3, 5])\n poly = np.polyfit(xs, fx, deg=degree)\n poly /= np.max(np.abs(np.polyval(poly, xs))) + 1e-6\n odd_poly = [c if i % 2 == 1 else 0 for i, c in enumerate(poly[::-1])]", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.qsvt_sinepoly", + "documentation": {} + }, + { + "label": "QuantumSumBlock", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.quantum_summation", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.quantum_summation", + "peekOfCode": "class QuantumSumBlock(nn.Module):\n def __init__(self, num_polynomials):\n \"\"\"\n Quantum summation over N polynomial outputs using Hadamard test.\n Supports arbitrary N (must be <= 2^num_index_qubits).\n Args:\n phi_vals (tensor): shape (N,), tensor of real-valued polynomial outputs.\n Returns:\n torch.Tensor: scalar value (quantum sum approximation via Hadamard test)\n \"\"\"", + 
"detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.quantum_summation", + "documentation": {} + }, + { + "label": "KANLayer", + "kind": 6, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.SplineKANlayer", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.SplineKANlayer", + "peekOfCode": "class KANLayer(nn.Module):\n def __init__(self, in_features, out_features, num_basis=30, order=3):\n super().__init__()\n self.in_features = in_features\n self.out_features = out_features\n self.knots = nn.Parameter(torch.tensor([np.linspace(-2, 2, num_basis+order+1) for _ in range(in_features)], dtype=torch.float32))\n self.weights = nn.Parameter(torch.randn(out_features, in_features, num_basis))\n self.bias = nn.Parameter(torch.randn(out_features))\n def forward(self, x):\n x = x.detach()", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.SplineKANlayer", + "documentation": {} + }, + { + "label": "X_flat", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "X_flat = X.reshape(X.shape[0], -1) \nquark_flat = X_flat[y == 0]\ngluon_flat = X_flat[y == 1]\nt_vals, p_vals = ttest_ind(quark_flat, gluon_flat, axis=0, equal_var=False)\ntop_indices = np.argsort(p_vals)[:192] # top 8x8x3 = 192 pixel indices\nX_selected = X_flat[:, top_indices] # shape: (1000, 192)\nfrom sklearn.model_selection import train_test_split\nimport torch\nX_train, X_test, y_train, y_test = train_test_split(\n X_selected, y, test_size=0.2, random_state=42, stratify=y", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "quark_flat", + "kind": 5, + "importPath": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "quark_flat = X_flat[y == 0]\ngluon_flat = X_flat[y == 1]\nt_vals, p_vals = ttest_ind(quark_flat, gluon_flat, axis=0, equal_var=False)\ntop_indices = np.argsort(p_vals)[:192] # top 8x8x3 = 192 pixel indices\nX_selected = X_flat[:, top_indices] # shape: (1000, 192)\nfrom sklearn.model_selection import train_test_split\nimport torch\nX_train, X_test, y_train, y_test = train_test_split(\n X_selected, y, test_size=0.2, random_state=42, stratify=y\n)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "gluon_flat", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "gluon_flat = X_flat[y == 1]\nt_vals, p_vals = ttest_ind(quark_flat, gluon_flat, axis=0, equal_var=False)\ntop_indices = np.argsort(p_vals)[:192] # top 8x8x3 = 192 pixel indices\nX_selected = X_flat[:, top_indices] # shape: (1000, 192)\nfrom sklearn.model_selection import train_test_split\nimport torch\nX_train, X_test, y_train, y_test = train_test_split(\n X_selected, y, test_size=0.2, random_state=42, stratify=y\n)\nX_train = torch.tensor(X_train, dtype=torch.float32)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "top_indices", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": 
"top_indices = np.argsort(p_vals)[:192] # top 8x8x3 = 192 pixel indices\nX_selected = X_flat[:, top_indices] # shape: (1000, 192)\nfrom sklearn.model_selection import train_test_split\nimport torch\nX_train, X_test, y_train, y_test = train_test_split(\n X_selected, y, test_size=0.2, random_state=42, stratify=y\n)\nX_train = torch.tensor(X_train, dtype=torch.float32)\nX_test = torch.tensor(X_test, dtype=torch.float32)\ny_train = torch.tensor(y_train, dtype=torch.long)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "X_selected", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "X_selected = X_flat[:, top_indices] # shape: (1000, 192)\nfrom sklearn.model_selection import train_test_split\nimport torch\nX_train, X_test, y_train, y_test = train_test_split(\n X_selected, y, test_size=0.2, random_state=42, stratify=y\n)\nX_train = torch.tensor(X_train, dtype=torch.float32)\nX_test = torch.tensor(X_test, dtype=torch.float32)\ny_train = torch.tensor(y_train, dtype=torch.long)\ny_test = torch.tensor(y_test, dtype=torch.long)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "X_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "X_train = torch.tensor(X_train, dtype=torch.float32)\nX_test = torch.tensor(X_test, dtype=torch.float32)\ny_train = torch.tensor(y_train, dtype=torch.long)\ny_test = torch.tensor(y_test, dtype=torch.long)", + "detail": 
"Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "X_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "X_test = torch.tensor(X_test, dtype=torch.float32)\ny_train = torch.tensor(y_train, dtype=torch.long)\ny_test = torch.tensor(y_test, dtype=torch.long)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "y_train = torch.tensor(y_train, dtype=torch.long)\ny_test = torch.tensor(y_test, dtype=torch.long)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "description": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "peekOfCode": "y_test = torch.tensor(y_test, dtype=torch.long)", + "detail": "Quantum_KAN_for_HEP_Ria_Khatoniar.models.HybridQKAN_model_components.t-testbasedpreprocessing", + "documentation": {} + }, + { + "label": "QuarkGluonGraphDataset", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class QuarkGluonGraphDataset(dgl.data.dgl_dataset.DGLDataset):\n def __init__(self, dataset_name, raw_dir, save_dir, 
data_folder_name, datafile_name, labelsfile_name, datatype='particles', dataset_size=12500,\n nodes_per_graph = 5, spectral_augmentation=False, irc_safety_aug=False, url=None, hash_key=..., force_reload=False, verbose=False, transform=None,\n device='cpu'):\n self.data_folder = data_folder_name\n self.datafile_name = datafile_name\n self.labelsfile_name = labelsfile_name\n self.datatype = datatype\n self.nodes_per_graph = nodes_per_graph\n self.spectral_augmentation = spectral_augmentation", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "GNN_imp_estimator", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class GNN_imp_estimator(torch.nn.Module):\n \"\"\"\n Args:\n num_layer (int): the number of GNN layers\n emb_dim (int): dimensionality of embeddings\n JK (str): last, concat, max or sum.\n max_pool_layer (int): the layer from which we use max pool rather than add pool for neighbor aggregation\n drop_ratio (float): dropout rate\n gnn_type: gin, gcn, graphsage, gat\n Output:", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "GNN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class GNN(torch.nn.Module):\n \"\"\"\n Args:\n num_layer (int): the number of GNN layers\n emb_dim (int): dimensionality of embeddings\n JK (str): last, concat, max or sum.\n max_pool_layer (int): the layer from which we use max pool rather than add pool for neighbor aggregation\n drop_ratio (float): dropout rate\n gnn_type: gin, gcn, graphsage, gat\n Output:", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "BetterBetterTorchLayer", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + 
"peekOfCode": "class BetterBetterTorchLayer(torch.nn.Module):\n def __init__(self, nodes_per_graph, num_layers, input_dim, device):\n super(BetterBetterTorchLayer, self).__init__()\n self.device = device\n self.num_qubits = nodes_per_graph\n inputs = []\n self.node_attr_count = 0\n for q in range(self.num_qubits):\n for d in range(input_dim):\n inputs.append({'input_idx':self.node_attr_count, 'func':'ry', 'wires':[q]})", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "QGNN_node_estimator", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class QGNN_node_estimator(torch.nn.Module):\n def __init__(self, nodes_per_graph, num_layers, input_dim=None, device='cpu'):\n super(QGNN_node_estimator, self).__init__()\n self.device = device\n self.nodes_per_graph = nodes_per_graph\n self.num_layers = num_layers\n self.input_dim = input_dim\n # self.quantum_nn = BetterTorchLayer(self.nodes_per_graph, num_layers)\n self.quantum_nn = BetterBetterTorchLayer(self.nodes_per_graph, self.num_layers, self.input_dim, self.device)\n def edge_attr_relevant(self, edge_attr, edge_attr_r):", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "EdgeConvBlock", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class EdgeConvBlock(nn.Module):\n r\"\"\"EdgeConv layer.\n Introduced in \"`Dynamic Graph CNN for Learning on Point Clouds\n `__\". Can be described as follows:\n .. 
math::\n x_i^{(l+1)} = \\max_{j \\in \\mathcal{N}(i)} \\mathrm{ReLU}(\n \\Theta \\cdot (x_j^{(l)} - x_i^{(l)}) + \\Phi \\cdot x_i^{(l)})\n where :math:`\\mathcal{N}(i)` is the neighbor of :math:`i`.\n Parameters\n ----------", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "ParticleNet", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class ParticleNet(nn.Module):\n def __init__(self,\n input_dims,\n num_classes,\n conv_params=[(7, (32, 32, 32)), (7, (64, 64, 64))],\n fc_params=[(128, 0.1)],\n use_fusion=True,\n use_fts_bn=True,\n use_counts=True,\n for_inference=False,", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "FeatureConv", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class FeatureConv(nn.Module):\n def __init__(self, in_chn, out_chn, **kwargs):\n super(FeatureConv, self).__init__(**kwargs)\n self.conv = nn.Sequential(\n nn.BatchNorm1d(in_chn),\n nn.Conv1d(in_chn, out_chn, kernel_size=1, bias=False),\n nn.BatchNorm1d(out_chn),\n nn.ReLU()\n )\n def forward(self, x):", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "ParticleNetTagger1Path", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class ParticleNetTagger1Path(nn.Module):\n def __init__(self,\n pf_features_dims,\n num_classes,\n conv_params= [(6, (32, 32, 32)), (6, (64, 64, 64)), (6, (128, 128, 128))], #[(7, (32, 32, 32)), (7, (64, 64, 64))], # \n fc_params=[(128, 0.1)],\n use_fusion=False,\n use_fts_bn=False,\n use_counts=True,\n pf_input_dropout=None,", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "graphcl", + "kind": 6, + 
"importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class graphcl(nn.Module):\n def __init__(self, gnn, node_imp_estimator, emb_dim, out_dim):\n super(graphcl, self).__init__()\n self.gnn = gnn\n self.node_imp_estimator = node_imp_estimator\n self.pool = global_mean_pool\n self.projection_head = nn.Sequential(nn.Linear(emb_dim, out_dim), nn.ReLU(inplace=True), nn.Linear(out_dim, out_dim))\n def prepare_variables_rg(self, gr_n, g_n, n_i): \n g_n[n_i] = gr_n\n return g_n", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "MyDistributedDataParallel", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "class MyDistributedDataParallel(nn.parallel.DistributedDataParallel):\n def __getattr__(self, name):\n try:\n return super().__getattr__(name)\n except AttributeError:\n return getattr(self.module, name)\ndef run(rank, world_size, epochs, model_type, npg_original):\n setup(rank, world_size)\n main_dir = ''\n epochs=epochs", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "preprocess_fixed_nodes", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def preprocess_fixed_nodes(x_data,y_data,nodes_per_graph=10): #,masses):\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])\n x_data = torch.tensor(x_data)\n y_data = torch.tensor(y_data)\n print()\n print('--- Inserting Masses ---')\n masses = torch.zeros((x_data.shape[0],x_data.shape[1]))\n for i,particle in tqdm(enumerate(unique_particles)):\n if particle!=0:", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "distort_jets", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + 
"description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def distort_jets( batch, strength=0.1, pT_clip_min=0.1 ):\n '''\n Input: batch of jets, shape (batchsize, 3, n_constit)\n dim 1 ordering: (pT, eta, phi)\n Output: batch of jets with each constituents position shifted independently, shifts drawn from normal with mean 0, std strength/pT, same shape as input\n '''\n pT = batch[:,0] # (batchsize, n_constit)\n shift_eta = np.nan_to_num( strength * np.random.randn(batch.shape[0], batch.shape[2]) / pT.clip(min=pT_clip_min), posinf = 0.0, neginf = 0.0 )# * mask\n shift_phi = np.nan_to_num( strength * np.random.randn(batch.shape[0], batch.shape[2]) / pT.clip(min=pT_clip_min), posinf = 0.0, neginf = 0.0 )# * mask\n shift = np.stack( [ np.zeros( (batch.shape[0], batch.shape[2]) ), shift_eta, shift_phi ], 1)", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "collinear_fill_jets", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def collinear_fill_jets( batch ):\n '''\n Input: batch of jets, shape (batchsize, 3, n_constit)\n dim 1 ordering: (pT, eta, phi)\n Output: batch of jets with collinear splittings, the function attempts to fill as many of the zero-padded args.nconstit\n entries with collinear splittings of the constituents by splitting each constituent at most once, same shape as input\n '''\n batchb = batch.copy()\n nc = batch.shape[2]\n nzs = np.array( [ np.where( batch[:,0,:][i]>0.0)[0].shape[0] for i in range(len(batch)) ] )", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "knn", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def knn(x, k):\n inner = -2 * torch.matmul(x.transpose(2, 1), x)\n xx = torch.sum(x ** 2, dim=1, keepdim=True)\n pairwise_distance = -xx - inner 
- xx.transpose(2, 1)\n idx = pairwise_distance.topk(k=k + 1, dim=-1)[1][:, :, 1:] # (batch_size, num_points, k)\n return idx\n# v1 is faster on GPU\ndef get_graph_feature_v1(x, k, idx):\n batch_size, num_dims, num_points = x.size()\n idx_base = torch.arange(0, batch_size, device=x.device).view(-1, 1, 1) * num_points", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "get_graph_feature_v1", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def get_graph_feature_v1(x, k, idx):\n batch_size, num_dims, num_points = x.size()\n idx_base = torch.arange(0, batch_size, device=x.device).view(-1, 1, 1) * num_points\n idx = idx + idx_base\n idx = idx.view(-1)\n fts = x.transpose(2, 1).reshape(-1, num_dims) # -> (batch_size, num_points, num_dims) -> (batch_size*num_points, num_dims)\n fts = fts[idx, :].view(batch_size, num_points, k, num_dims) # neighbors: -> (batch_size*num_points*k, num_dims) -> ...\n fts = fts.permute(0, 3, 1, 2).contiguous() # (batch_size, num_dims, num_points, k)\n x = x.view(batch_size, num_dims, num_points, 1).repeat(1, 1, 1, k)\n fts = torch.cat((x, fts - x), dim=1) # ->(batch_size, 2*num_dims, num_points, k)", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "get_graph_feature_v2", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def get_graph_feature_v2(x, k, idx):\n batch_size, num_dims, num_points = x.size()\n idx_base = torch.arange(0, batch_size, device=x.device).view(-1, 1, 1) * num_points\n idx = idx + idx_base\n idx = idx.view(-1)\n fts = x.transpose(0, 1).reshape(num_dims, -1) # -> (num_dims, batch_size, num_points) -> (num_dims, batch_size*num_points)\n fts = fts[:, idx].view(num_dims, batch_size, num_points, k) # neighbors: -> (num_dims, batch_size*num_points*k) -> 
...\n fts = fts.transpose(1, 0).contiguous() # (batch_size, num_dims, num_points, k)\n x = x.view(batch_size, num_dims, num_points, 1).repeat(1, 1, 1, k)\n fts = torch.cat((x, fts - x), dim=1) # ->(batch_size, 2*num_dims, num_points, k)", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "get_model", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def get_model(data_config, **kwargs):\n # conv_params = [\n # (16, (64, 64, 64)),\n # (16, (128, 128, 128)),\n # (16, (256, 256, 256)),\n # ]\n ec_k = kwargs.get('ec_k', 16)\n ec_c1 = kwargs.get('ec_c1', 64)\n ec_c2 = kwargs.get('ec_c2', 128)\n ec_c3 = kwargs.get('ec_c3', 256)", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "drop_nodes_prob_batch", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def drop_nodes_prob_batch(graph, batch_size, aug_ratio, device):\n node_num = graph.num_nodes()\n edge_num = graph.num_edges()\n node_num_sin = int(graph.num_nodes()/batch_size)\n edge_num_sin = int(graph.num_edges()/batch_size)\n drop_num_sin = int(node_num_sin * aug_ratio)\n drop_num = batch_size*drop_num_sin\n node_score = graph.ndata['node_score'].reshape(batch_size, -1)\n node_prob = node_score.float()\n node_prob += 0.001", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "drop_nodes_cp_batch", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def drop_nodes_cp_batch(graph, batch_size, aug_ratio, device):\n node_num = graph.num_nodes()\n edge_num = graph.num_edges()\n node_num_sin = int(graph.num_nodes()/batch_size)\n edge_num_sin = int(graph.num_edges()/batch_size)\n drop_num_sin = int(node_num_sin * 
aug_ratio)\n drop_num = batch_size*drop_num_sin\n node_score = graph.ndata['node_score'].reshape(batch_size, -1)\n node_prob = torch.sub(torch.max(node_score, dim=1).values.reshape(-1,1), node_score)\n node_prob += 0.001", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "train", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def train(epoch, model, device, dataset, optimizer, batch_size, nodes_per_graph, aug_ratio, loss_temp, lamda, irc_safety, q_edge_attr=False, loader=None, node_est='classical'):\n torch.autograd.set_detect_anomaly(True)\n dataset.aug = \"none\"\n imp_batch_size = batch_size\n loader = GraphDataLoader(dataset, batch_size=imp_batch_size, shuffle=False, drop_last=False)\n model.eval()\n torch.set_grad_enabled(False)\n node_imp_l = []\n # loader.set_epoch(epoch)\n for step, (g_batch, _) in enumerate(loader):", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "setup", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def setup(rank, world_size):\n os.environ['MASTER_ADDR'] = 'localhost'\n os.environ['MASTER_PORT'] = '12355'\n torch.cuda.set_device(rank)\n dist.init_process_group(\"nccl\", rank=rank, world_size=world_size)\ndef cleanup():\n dist.destroy_process_group()\nclass MyDistributedDataParallel(nn.parallel.DistributedDataParallel):\n def __getattr__(self, name):\n try:", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "cleanup", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def cleanup():\n dist.destroy_process_group()\nclass MyDistributedDataParallel(nn.parallel.DistributedDataParallel):\n def __getattr__(self, name):\n try:\n 
return super().__getattr__(name)\n except AttributeError:\n return getattr(self.module, name)\ndef run(rank, world_size, epochs, model_type, npg_original):\n setup(rank, world_size)", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "run", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "description": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "peekOfCode": "def run(rank, world_size, epochs, model_type, npg_original):\n setup(rank, world_size)\n main_dir = ''\n epochs=epochs\n nodes_per_graph_original = npg_original\n num_layer_gnn = 2\n num_layer_gnn_est = 2\n qnn_layers = 3\n emb_dim = 128\n in_dim = 8", + "detail": "Quantum_SSL_for_HEP_Amey_Bhatuse.run", + "documentation": {} + }, + { + "label": "QG_Images", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class QG_Images(pyg_data.InMemoryDataset):\n def __init__(self, images, labels, channel=None, root='../data/QG_Images', transform=None, pre_transform=None, force_reload=True):\n self.images = images\n self.labels = labels\n self.channel = channel if channel else \"x\"\n super().__init__(root, transform, pre_transform, force_reload=force_reload)\n self.load(self.processed_paths[0])\n @property\n def raw_file_names(self):\n return []", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "Custom_GCN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class Custom_GCN(pyg_nn.MessagePassing):\n def __init__(self, out_channels, in_channels=16):\n super().__init__(aggr='add')\n self.mlp = nn.Sequential(\n nn.Linear(in_channels, out_channels),\n nn.ReLU(),\n nn.Linear(out_channels, out_channels),\n )\n self.pixel_embedding = nn.Sequential(\n 
nn.Linear(1, 4),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "Custom_GCN2", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class Custom_GCN2(pyg_nn.MessagePassing):\n def __init__(self, out_channels, in_channels=24):\n super().__init__(aggr='add')\n self.mlp = nn.Sequential(\n nn.Linear(in_channels, out_channels),\n nn.ReLU(),\n nn.Linear(out_channels, out_channels),\n )\n self.pixel_embedding = nn.Sequential(\n nn.Linear(8, 8),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "GCN_Encoder", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class GCN_Encoder(nn.Module):\n def __init__(self, hidden_dim=8):\n super().__init__()\n self.conv1 = Custom_GCN(hidden_dim)\n self.conv2 = Custom_GCN2(hidden_dim*2)\n self.output_dim = 8\n # self.classifier = pyg_nn.MLP([hidden_dim, hidden_dim, output_dim], bias=[False, True])\n self.readout = pyg_nn.MLP([hidden_dim*2, self.output_dim, self.output_dim], bias=[False, True])\n def forward(self, data):\n x, pos, edge_index, batch = data.x, data.pos, data.edge_index, data.batch", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "ModelPL_Classify", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class ModelPL_Classify(pl.LightningModule):\n def __init__(self, model, learning_rate=0.001):\n super().__init__()\n self.model = model\n self.classifier = pyg_nn.MLP([model.output_dim, 16, 2], bias=[False, True])\n self.learning_rate = learning_rate\n self.criterion = 
torch.nn.CrossEntropyLoss()\n from torchmetrics import AUROC, Accuracy \n self.train_auc = AUROC(task='binary')\n self.val_auc = AUROC(task='binary')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "TopKIntensity", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class TopKIntensity(T.BaseTransform):\n def __init__(self, k: int):\n self.k = k\n def __call__(self, data):\n if data.num_nodes > self.k:\n _, top_k_indices = torch.topk(data.x[:, 0], self.k)\n data.x = data.x[top_k_indices]\n data.pos = data.pos[top_k_indices]\n data.num_nodes = self.k\n return data", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "EdgesToTopK", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class EdgesToTopK(T.BaseTransform):\n def __init__(self, k: int):\n self.k = k\n def __call__(self, data):\n if self.k > len(data.x[:, 0]):\n raise ValueError(f\"Requested top-k ({self.k}) is larger than available data ({data.x}:{len(data.x[:, 0])})\")\n _, top_k_indices = torch.topk(data.x[:, 0], self.k)\n # Create edges from all nodes to the top k nodes\n edges = []\n for i in range(data.num_nodes):", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "KNNGroup", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class KNNGroup(T.BaseTransform):\n def __init__(self, k: int, attr_name: str):\n self.k = k\n self.attr_name = attr_name\n def __call__(self, data, self_loop=True):\n if hasattr(data, self.attr_name):\n attr = getattr(data, 
self.attr_name)\n edge_index = pyg_nn.knn_graph(attr, k=self.k)\n data.edge_index = edge_index\n if self_loop:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "ModelPL_Contrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class ModelPL_Contrastive(pl.LightningModule):\n def __init__(self, model, learning_rate=0.01):\n super().__init__()\n self.model = model\n self.learning_rate = learning_rate\n # self.criterion = losses.ContrastiveLoss(pos_margin=0.1, neg_margin=1.0)\n # try:\n self.criterion_alt = losses.NTXentLoss(temperature=0.5)\n # except:\n from qml_ssl.losses import NTXentLoss", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "LinearProbePL", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "class LinearProbePL(pl.LightningModule):\n def __init__(self, pretrained_model, num_classes, learning_rate=0.001):\n super().__init__()\n self.pretrained_model = pretrained_model\n # self.classifier = pyg_nn.MLP([pretrained_model.output_dim, 16, num_classes], bias=[False, True])\n self.classifier = nn.Sequential(\n nn.Linear(pretrained_model.output_dim, 16),\n nn.ReLU(),\n nn.Linear(16, num_classes),\n )", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "generate_embeddings", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "def generate_embeddings(model, data_loader):\n \"\"\"\n Generate embeddings for the given data using the provided model.\n Args:\n model (nn.Module): Trained model.\n 
data_loader (DataLoader): Data loader for the dataset.\n Returns:\n tuple: Embeddings and labels as numpy arrays.\n \"\"\"\n model.eval()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "device", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\nprint(\"Device:\", device)\nbatch_size = 64\nfrom pytorch_metric_learning import losses\nimport torchmetrics\nclass ModelPL_Classify(pl.LightningModule):\n def __init__(self, model, learning_rate=0.001):\n super().__init__()\n self.model = model\n self.classifier = pyg_nn.MLP([model.output_dim, 16, 2], bias=[False, True])", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "batch_size", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "peekOfCode": "batch_size = 64\nfrom pytorch_metric_learning import losses\nimport torchmetrics\nclass ModelPL_Classify(pl.LightningModule):\n def __init__(self, model, learning_rate=0.001):\n super().__init__()\n self.model = model\n self.classifier = pyg_nn.MLP([model.output_dim, 16, 2], bias=[False, True])\n self.learning_rate = learning_rate\n self.criterion = torch.nn.CrossEntropyLoss()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.models", + "documentation": {} + }, + { + "label": "plot_metrics_from_csv", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.utils", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.utils", + "peekOfCode": "def plot_metrics_from_csv(metrics_file, metrics={'valid_loss', 'valid_acc', 'valid_auc'}):\n df = pd.read_csv(metrics_file)\n required_columns = metrics\n 
if not required_columns.issubset(df.columns):\n raise ValueError(\"The CSV file does not contain the required metrics.\")\n df = df.sort_values('epoch')\n df = df.fillna(method='ffill')\n epochs = df['epoch']\n valid_loss = df['valid_loss']\n valid_acc = df['valid_acc']", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.demos.supervised.utils", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_classical", + "peekOfCode": "def main():\n \"\"\"\n Main function to train the model and generate embeddings.\n \"\"\"\n classes = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)\n reduced_dim = 10\n dataset_size = (4000, 1000)\n mnist_data = load_mnist_img(classes=classes, reduced_dim = reduced_dim, dataset_size=dataset_size, data_dir=\"../data/\")\n def create_data_loader(data, labels, batch_size=64, shuffle=True, num_workers=4):\n dataset = TensorDataset(data, labels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_classical", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_hybrid", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_hybrid", + "peekOfCode": "def main():\n \"\"\"\n Main function to train the model and generate embeddings.\n \"\"\"\n classes = (3, 6, 9)\n reduced_dim = 10\n dataset_size = (1000, 300)\n mnist_data = load_mnist_img(classes=classes, reduced_dim = reduced_dim, dataset_size=dataset_size, data_dir=\"../data/\")\n def create_data_loader(data, labels, batch_size=64, shuffle=True, num_workers=4):\n dataset = TensorDataset(data, labels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_hybrid", + "documentation": {} + }, + { + "label": "main", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_quantum", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_quantum", + "peekOfCode": 
"def main():\n \"\"\"\n Main function to train the model and generate embeddings.\n \"\"\"\n classes = (3, 6, 9)\n reduced_dim = 10\n dataset_size = (1000, 300)\n mnist_data = load_mnist_img(classes=classes, reduced_dim = reduced_dim, dataset_size=dataset_size, data_dir=\"../data/\")\n def create_data_loader(data, labels, batch_size=64, shuffle=True, num_workers=4):\n dataset = TensorDataset(data, labels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.scripts.runner_quantum", + "documentation": {} + }, + { + "label": "QG_Jets", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "peekOfCode": "class QG_Jets(pyg_data.InMemoryDataset):\n def __init__(self, root,\n transform=None, pre_transform=None, pre_filter=None, force_reload=True):\n super().__init__(root, transform, pre_transform)\n self.root = root\n self.load(self.processed_paths[0])\n @property\n def raw_file_names(self):\n files = glob.glob(self.root + '/raw/*.npz') \n return [file.split('/')[-1] for file in files]", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "documentation": {} + }, + { + "label": "QG_Jets_old", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "peekOfCode": "class QG_Jets_old(pyg_data.InMemoryDataset):\n def __init__(self, root,\n transform=None, pre_transform=None, pre_filter=None, force_reload=True):\n super().__init__(root, transform, pre_transform)\n self.root = root\n self.load(self.processed_paths[0])\n @property\n def raw_file_names(self):\n files = glob.glob(self.root + '/raw/*.npz') \n return [file.split('/')[-1] for file in files]", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_syn_qg", + "documentation": {} + }, + { + "label": "TopKMomentum", + "kind": 6, + "importPath": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "peekOfCode": "class TopKMomentum(T.BaseTransform):\n def __init__(self, k: int):\n self.k = k\n def __call__(self, data):\n if data.num_nodes > self.k:\n _, top_k_indices = torch.topk(data.h[:, 0], self.k)\n data.h = data.h[top_k_indices]\n data.particleid = data.particleid[top_k_indices]\n data.num_nodes = self.k\n return data", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "documentation": {} + }, + { + "label": "ToTopMomentum", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "peekOfCode": "class ToTopMomentum(T.BaseTransform):\n def __call__(data):\n top_index = torch.argemax(data.h[:, 0])\n edges = []\n for i in range(data.num_nodes):\n if i != top_index:\n edges.append([i, top_index])\n return data\n# Transform: Edge Creation\nclass KNNGroup(T.BaseTransform):", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "documentation": {} + }, + { + "label": "KNNGroup", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "peekOfCode": "class KNNGroup(T.BaseTransform):\n def __init__(self, k: int, attr_name: str):\n self.k = k\n self.attr_name = attr_name\n def __call__(self, data, self_loop=True):\n if hasattr(data, self.attr_name):\n attr = getattr(data, self.attr_name)\n edge_index = pyg_nn.knn_graph(attr, k=self.k)\n data.edge_index = edge_index\n if self_loop:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.graph_transform", + "documentation": {} + }, + { + "label": "load_mnist_img", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + 
"description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + "peekOfCode": "def load_mnist_img(classes=None, reduced_dim=None, dataset_size=None, data_dir=\"../../data\"):\n \"\"\"\n Load and preprocess MNIST data.\n Args:\n classes (tuple): Tuple of classes to filter (default is (3, 6)).\n reduced_dim (int): Size to resize the images to (default is None).\n dataset_size (tuple): Custom dataset size (train_size, test_size) (default is None).\n Returns:\n dict: A dictionary with the preprocessed training and test datasets.\n \"\"\"", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + "documentation": {} + }, + { + "label": "visualize_data", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + "peekOfCode": "def visualize_data(data, labels, classes, title=\"\"):\n \"\"\"\n Visualize the dataset.\n Args:\n data (torch.Tensor): Dataset images.\n labels (torch.Tensor): Corresponding labels.\n classes (tuple): Tuple of class labels to visualize.\n title (str): Title of the plot (default is \"\").\n \"\"\"\n fig, axs = plt.subplots(1, len(classes), figsize=(8, 4))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_mnist", + "documentation": {} + }, + { + "label": "PE_IMG_Dataset", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "peekOfCode": "class PE_IMG_Dataset(Dataset):\n def __init__(self, data, labels, transform=None):\n self.data = data\n self.labels = labels\n self.transform = transform\n def __len__(self):\n return len(self.data)\n def __getitem__(self, idx):\n sample = self.data[idx]\n label = self.labels[idx]", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "documentation": {} + }, + { + "label": "inspect_h5py_file", + "kind": 2, + "importPath": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "peekOfCode": "def inspect_h5py_file(file_path):\n \"\"\"\n Inspect the contents and dimensions of an HDF5 file.\n Args:\n file_path (str): Path to the HDF5 file.\n \"\"\"\n with h5py.File(file_path, 'r') as f:\n print(f\"Inspecting HDF5 file: {file_path}\")\n def print_attrs(name, obj):\n print(f\"Name: {name}\")", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "documentation": {} + }, + { + "label": "load_pe_img", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "peekOfCode": "def load_pe_img(electron_file, photon_file, reduced_dim=None, dataset_size=-1, channel=None):\n \"\"\"\n Load and preprocess electron and photon data.\n Args:\n electron_file (str): Path to the electron data file.\n photon_file (str): Path to the photon data file.\n reduced_dim (int): Size to resize the images to (default is None).\n dataset_size (int): Custom dataset size.\n Returns:\n dict: A dictionary with the preprocessed training, validation, and test datasets.", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "documentation": {} + }, + { + "label": "visualize_data", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "peekOfCode": "def visualize_data(data, labels, classes=(\"P\", \"E\"), title=\"\"):\n \"\"\"\n Visualize the dataset.\n Args:\n data (torch.Tensor): Dataset images.\n labels (torch.Tensor): Corresponding labels.\n classes (tuple): Tuple of class labels to visualize.\n title (str): Title of the plot (default is \"\").\n \"\"\"\n fig, axs = plt.subplots(1, len(classes), figsize=(8, 4))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_pe", + "documentation": {} 
+ }, + { + "label": "QG_Images", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "class QG_Images(pyg_data.InMemoryDataset):\n def __init__(self, images, labels, channel=None, root='../data/QG_Images', transform=None, pre_transform=None, force_reload=True):\n self.images = images\n self.labels = labels\n self.channel = channel if channel else \"x\"\n super().__init__(root, transform, pre_transform, force_reload=force_reload)\n self.load(self.processed_paths[0])\n @property\n def raw_file_names(self):\n return []", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "load_qg_img", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def load_qg_img(x, y, reduced_dim=None, dataset_size=-1):\n \"\"\"\n Load and preprocess electron and photon data.\n Args:\n x (np.array): Image data with 3 channels.\n y (np.array): Labels.\n reduced_dim (int): Size to resize the images to (default is None).\n dataset_size (tuple): Custom dataset size.\n Returns:\n dict: A dictionary with the preprocessed training, validation, and test datasets.", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "reduce_resolution", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def reduce_resolution(image):\n image = np.array(image)\n assert image.shape[0] % 5 == 0 and image.shape[1] % 5 == 0, \"Image dimensions should be divisible by 5\"\n # Reshape the image into 25x25x5x5\n reduced_image = image.reshape(25, 5, 25, 5).mean(axis=(1, 3))\n return reduced_image\ndef reduce_resolution_batch(images):\n return 
np.array([reduce_resolution(img) for img in images])\ndef visualize_image(image, label):\n fig, axs = plt.subplots(1, 3, figsize=(15, 5))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "reduce_resolution_batch", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def reduce_resolution_batch(images):\n return np.array([reduce_resolution(img) for img in images])\ndef visualize_image(image, label):\n fig, axs = plt.subplots(1, 3, figsize=(15, 5))\n im = axs[0].imshow(image[:, :, 0], cmap='binary')\n axs[0].title.set_text(f'Class {label} - Tracks')\n im = axs[1].imshow(image[:, :, 1], cmap='binary')\n axs[1].title.set_text(f'Class {label} - ECAL')\n im = axs[2].imshow(image[:, :, 2], cmap='binary')\n axs[2].title.set_text(f'Class {label} - HCAL')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "visualize_image", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def visualize_image(image, label):\n fig, axs = plt.subplots(1, 3, figsize=(15, 5))\n im = axs[0].imshow(image[:, :, 0], cmap='binary')\n axs[0].title.set_text(f'Class {label} - Tracks')\n im = axs[1].imshow(image[:, :, 1], cmap='binary')\n axs[1].title.set_text(f'Class {label} - ECAL')\n im = axs[2].imshow(image[:, :, 2], cmap='binary')\n axs[2].title.set_text(f'Class {label} - HCAL')\ndef visualize_average_images(x_data, y_data, num=-1, use_lognorm=False):\n fig, axs = plt.subplots(2, 3, figsize=(15, 10))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "visualize_average_images", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + 
"description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def visualize_average_images(x_data, y_data, num=-1, use_lognorm=False):\n fig, axs = plt.subplots(2, 3, figsize=(15, 10))\n norm = LogNorm() if use_lognorm else None\n # Calculate average images for each class and channel\n avg_images = {}\n for class_label in [0, 1]:\n avg_images[class_label] = []\n class_data = x_data[y_data == class_label]\n for channel in range(3):\n # print(len(class_data))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "visualize_diff_average_images", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "peekOfCode": "def visualize_diff_average_images(x_data, y_data, num=-1):\n fig, axs = plt.subplots(1, 3, figsize=(15, 6))\n # Calculate average images for each class and channel\n avg_images = {}\n for class_label in [0, 1]:\n avg_images[class_label] = []\n class_data = x_data[y_data == class_label]\n for channel in range(3):\n # print(len(class_data))\n avg_image = np.average(class_data[:num, :, :, channel], 0)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.data.img_qg", + "documentation": {} + }, + { + "label": "ConvUnit", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class ConvUnit(nn.Module):\n \"\"\"\n A convolutional unit consisting of a convolutional layer, batch normalization, activation, and pooling.\n \"\"\"\n def __init__(self, input_channels, output_channels, kernel_size, stride, padding, \n pool_type, pool_kernel_size, pool_stride, activ_type):\n super().__init__()\n self.conv = nn.Conv2d(input_channels, output_channels, kernel_size, stride, padding)\n self.bn = nn.BatchNorm2d(output_channels)\n self.activ = 
_ACTIVATIONS[activ_type]()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "ConvEncoder", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class ConvEncoder(nn.Module):\n \"\"\"\n A convolutional encoder for MNIST images using nn.ModuleList.\n \"\"\"\n def __init__(self, activ_type, pool_type, layer_num, hidden_channel_num, input_channel_num, out_channel_num):\n super().__init__()\n self.layers = nn.ModuleList()\n # First convolutional layer\n self.layers.append(ConvUnit(input_channel_num, hidden_channel_num, 3, 1, 1, pool_type, 2, 2, activ_type))\n # Intermediate layers", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "Conv_UnSupContrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class Conv_UnSupContrastive(pl.LightningModule):\n \"\"\"\n A PyTorch Lightning module for unsupervised contrastive learning using pytorch-metric-learning.\n \"\"\"\n def __init__(self, lr, activ_type=\"relu\", pool_type=\"max\", layer_num=2, hidden_channel_num=8, input_channel_num=1, out_channel_num=8, proj_dim=2, augmentations=None, loss=\"NTXentLoss\", loss_kwargs={}):\n super().__init__()\n self.save_hyperparameters()\n self.lr = lr\n self.encoder = ConvEncoder(activ_type, pool_type, layer_num, hidden_channel_num, input_channel_num, out_channel_num)\n self.proj = nn.Linear(out_channel_num, proj_dim)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "Conv_SupContrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + 
"description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class Conv_SupContrastive(pl.LightningModule):\n \"\"\"\n A PyTorch Lightning module for supervised contrastive learning.\n \"\"\"\n def __init__(self, lr, activ_type=\"relu\", pool_type=\"max\", layer_num=2, hidden_channel_num=8, input_channel_num=1, out_channel_num=8, proj_dim=2, preprocess=None, loss=\"NTXentLoss\", loss_kwargs={}):\n super().__init__()\n self.save_hyperparameters()\n self.lr = lr\n if preprocess is not None:\n self.preprocessing = get_mnist_augmentations()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "LinearProbe", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class LinearProbe(pl.LightningModule):\n def __init__(self, pretrained_model, classes, lr=0.001, num_layers=1, hidden_dim=16, lr_scheduler_metric=None):\n super().__init__()\n self.save_hyperparameters(ignore=\"pretrained_model\")\n self.pretrained_model = pretrained_model\n layers = []\n input_dim = pretrained_model.output_dim\n for _ in range(num_layers - 1):\n layers.append(nn.Linear(input_dim, hidden_dim))\n layers.append(nn.ReLU())", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "Conv_Classifier", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class Conv_Classifier(pl.LightningModule):\n \"\"\"\n A PyTorch Lightning module for supervised classification.\n \"\"\"\n def __init__(self, classes, lr, activ_type=\"relu\", pool_type=\"max\", layer_num=2, hidden_channel_num=8, input_channel_num=1, out_channel_num=8, proj_dim=2, preprocess=None, 
lr_scheduler_metric=None):\n super().__init__()\n self.save_hyperparameters()\n if preprocess is not None:\n self.preprocessing = get_mnist_augmentations()\n else: ", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "Custom_GCN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class Custom_GCN(pyg_nn.MessagePassing):\n def __init__(self, out_channels, in_channels=16):\n super().__init__(aggr='add')\n self.mlp = nn.Sequential(\n nn.Linear(in_channels, out_channels),\n nn.ReLU(),\n nn.Linear(out_channels, out_channels),\n )\n self.pixel_embedding = nn.Sequential(\n nn.Linear(1, 4),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "Custom_GCN2", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class Custom_GCN2(pyg_nn.MessagePassing):\n def __init__(self, out_channels, in_channels=24):\n super().__init__(aggr='add')\n self.mlp = nn.Sequential(\n nn.Linear(in_channels, out_channels),\n nn.ReLU(),\n nn.Linear(out_channels, out_channels),\n )\n self.pixel_embedding = nn.Sequential(\n nn.Linear(8, 8),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "GCN_Encoder", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class GCN_Encoder(nn.Module):\n def __init__(self, hidden_dim=8):\n super().__init__()\n self.conv1 = Custom_GCN(hidden_dim)\n self.conv2 = Custom_GCN2(hidden_dim*2)\n self.output_dim = 8\n # self.classifier = 
pyg_nn.MLP([hidden_dim, hidden_dim, output_dim], bias=[False, True])\n self.readout = pyg_nn.MLP([hidden_dim*2, self.output_dim, self.output_dim], bias=[False, True])\n def forward(self, data):\n x, pos, edge_index, batch = data.x, data.pos, data.edge_index, data.batch", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "ModelPL_Classify", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class ModelPL_Classify(pl.LightningModule):\n def __init__(self, model, learning_rate=0.001):\n super().__init__()\n self.model = model\n self.classifier = pyg_nn.MLP([model.output_dim, 16, 2], bias=[False, True])\n self.learning_rate = learning_rate\n self.criterion = torch.nn.CrossEntropyLoss()\n from torchmetrics import AUROC, Accuracy \n self.train_auc = AUROC(task='binary')\n self.val_auc = AUROC(task='binary')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "ModelPL_Contrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "class ModelPL_Contrastive(pl.LightningModule):\n def __init__(self, model, learning_rate=0.01):\n super().__init__()\n self.model = model\n self.learning_rate = learning_rate\n # self.criterion = losses.ContrastiveLoss(pos_margin=0.1, neg_margin=1.0)\n # try:\n self.criterion_alt = losses.NTXentLoss(temperature=0.5)\n # except:\n from qml_ssl.losses import NTXentLoss", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "_ACTIVATIONS", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "_ACTIVATIONS = {\"relu\": nn.ReLU, \"gelu\": nn.GELU, \"swish\": lambda: nn.SiLU(), \"leaky_relu\": nn.LeakyReLU}\n_POOLING = {\"max\": nn.MaxPool2d, \"avg\": nn.AvgPool2d}\nclass ConvUnit(nn.Module):\n \"\"\"\n A convolutional unit consisting of a convolutional layer, batch normalization, activation, and pooling.\n \"\"\"\n def __init__(self, input_channels, output_channels, kernel_size, stride, padding, \n pool_type, pool_kernel_size, pool_stride, activ_type):\n super().__init__()\n self.conv = nn.Conv2d(input_channels, output_channels, kernel_size, stride, padding)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "_POOLING", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "peekOfCode": "_POOLING = {\"max\": nn.MaxPool2d, \"avg\": nn.AvgPool2d}\nclass ConvUnit(nn.Module):\n \"\"\"\n A convolutional unit consisting of a convolutional layer, batch normalization, activation, and pooling.\n \"\"\"\n def __init__(self, input_channels, output_channels, kernel_size, stride, padding, \n pool_type, pool_kernel_size, pool_stride, activ_type):\n super().__init__()\n self.conv = nn.Conv2d(input_channels, output_channels, kernel_size, stride, padding)\n self.bn = nn.BatchNorm2d(output_channels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_classical", + "documentation": {} + }, + { + "label": "QuantumHead", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "peekOfCode": "class QuantumHead(nn.Module):\n def __init__(self, in_features, n_qubits, out_features, n_qlayers):\n super().__init__()\n self.in_features = in_features\n self.out_features = 
out_features\n self.n_qubits = n_qubits\n self.n_qlayers = n_qlayers\n self.proj = nn.Linear(in_features, n_qubits)\n self.device = qml.device('default.qubit', wires=self.n_qubits)\n @qml.qnode(self.device, interface='torch')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "documentation": {} + }, + { + "label": "Hybrid_Contrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "peekOfCode": "class Hybrid_Contrastive(pl.LightningModule):\n \"\"\"\n A PyTorch Lightning module for supervised contrastive learning on the MNIST dataset.\n \"\"\"\n def __init__(self, activ_type, pool_type, head_output, n_qubits, n_qlayers, lr, pos_margin=0.25, neg_margin=1.5, preprocess=None):\n super().__init__()\n self.save_hyperparameters()\n self.preprocessing = get_preprocessing(preprocess)\n self.encoder = ConvEncoder(activ_type, pool_type)\n self.head = QuantumHead(ConvEncoder.backbone_output_size, n_qubits, head_output, n_qlayers)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_hybrid", + "documentation": {} + }, + { + "label": "ConvSQAE", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qae", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qae", + "peekOfCode": "class ConvSQAE(pl.LightningModule):\n \"\"\"SQAE with a convolutional-like path encoding of the data.\"\"\"\n def __init__(self, data_qbits, latent_qbits, device, img_dim, kernel_size, stride, DRCs, diff_method=\"best\", learning_rate=0.01):\n \"\"\"Create basic convolutional-like SQAE\n Args:\n data_qbits (int): Number of qubits to upload data and use as encoder\n latent_qbits (int): Number of latent qubits\n device (pennylane.Device): Pennylane device to use for circuit evaluation\n img_dim (int): Dimension of the images (width)\n kernel_size (int): Size of the kernel to use when 
uploading the data", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qae", + "documentation": {} + }, + { + "label": "QuantumHead", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "class QuantumHead(torch.nn.Module):\n def __init__(self, input_size, n_qubits, device):\n super(QuantumHead, self).__init__()\n self.input_size = input_size\n self.n_qubits = n_qubits\n self.drc_layers = int(self.input_size / self.n_qubits)\n self.params_size = qml.StronglyEntanglingLayers.shape(n_layers=self.drc_layers, n_wires=self.n_qubits)\n self.device = device\n # Define parameters for the circuit\n # self.input_params = torch.nn.Parameter(torch.randn(self.drc_layers, self.n_qubits))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "QuantumConvolution", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "class QuantumConvolution(torch.nn.Module):\n def __init__(self, input_size, in_filters, out_filters, kernel_size, stride, num_qubits, device):\n super(QuantumConvolution, self).__init__()\n self.in_filters = in_filters\n self.out_filters = out_filters\n self.kernel_size = kernel_size\n self.stride = stride\n self.num_qubits = num_qubits\n self.iter = int(np.ceil(1 + (input_size[0] - self.kernel_size[0]) / self.stride))\n self.drc_layers = int((kernel_size[0] * kernel_size[1]) / num_qubits)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "QuantumModel", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "class 
QuantumModel(torch.nn.Module):\n def __init__(self):\n super(QuantumModel, self).__init__()\n # Define the layers in the model using the refactored QuantumConvolution and QuantumHead classes\n self.layer_3 = QuantumConvolution(input_size=(18+1, 18+1), in_filters=1, out_filters=3, kernel_size=(3, 3), stride=2, num_qubits=num_qubits, device=dev)\n self.layer_4 = QuantumConvolution(input_size=(9, 9), in_filters=3, out_filters=1, kernel_size=(3, 3), stride=2, num_qubits=num_qubits, device=dev)\n self.layer_7 = torch.nn.Flatten()\n self.layer_8 = QuantumHead(input_size=16, n_qubits=head_circuit_qubits, device=devh)\n def forward(self, x):\n x = torch.nn.functional.pad(x, (0, 1, 0, 1), 'constant', value=0)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "num_qubits", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "num_qubits = 3\nhead_circuit_qubits = 1\ndev = qml.device(\"default.qubit\", wires=num_qubits)\ndevh = qml.device(\"default.qubit\", wires=head_circuit_qubits)\nclass QuantumHead(torch.nn.Module):\n def __init__(self, input_size, n_qubits, device):\n super(QuantumHead, self).__init__()\n self.input_size = input_size\n self.n_qubits = n_qubits\n self.drc_layers = int(self.input_size / self.n_qubits)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "head_circuit_qubits", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "head_circuit_qubits = 1\ndev = qml.device(\"default.qubit\", wires=num_qubits)\ndevh = qml.device(\"default.qubit\", wires=head_circuit_qubits)\nclass QuantumHead(torch.nn.Module):\n def __init__(self, input_size, n_qubits, device):\n 
super(QuantumHead, self).__init__()\n self.input_size = input_size\n self.n_qubits = n_qubits\n self.drc_layers = int(self.input_size / self.n_qubits)\n self.params_size = qml.StronglyEntanglingLayers.shape(n_layers=self.drc_layers, n_wires=self.n_qubits)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "dev = qml.device(\"default.qubit\", wires=num_qubits)\ndevh = qml.device(\"default.qubit\", wires=head_circuit_qubits)\nclass QuantumHead(torch.nn.Module):\n def __init__(self, input_size, n_qubits, device):\n super(QuantumHead, self).__init__()\n self.input_size = input_size\n self.n_qubits = n_qubits\n self.drc_layers = int(self.input_size / self.n_qubits)\n self.params_size = qml.StronglyEntanglingLayers.shape(n_layers=self.drc_layers, n_wires=self.n_qubits)\n self.device = device", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "devh", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "peekOfCode": "devh = qml.device(\"default.qubit\", wires=head_circuit_qubits)\nclass QuantumHead(torch.nn.Module):\n def __init__(self, input_size, n_qubits, device):\n super(QuantumHead, self).__init__()\n self.input_size = input_size\n self.n_qubits = n_qubits\n self.drc_layers = int(self.input_size / self.n_qubits)\n self.params_size = qml.StronglyEntanglingLayers.shape(n_layers=self.drc_layers, n_wires=self.n_qubits)\n self.device = device\n # Define parameters for the circuit", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn", + "documentation": {} + }, + { + "label": "ContrastiveConvEncoderCircuit", + "kind": 6, 
+ "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "peekOfCode": "class ContrastiveConvEncoderCircuit:\n def __init__(self, input_qbits, latent_qbits, aux_qbits, device, img_dim, kernel_size, stride, DRCs, diff_method=\"best\"):\n \"\"\"Create basic SQAE\n Args:\n input_qbits (int): number of qbits to upload input and use as encoder\n latent_qbits (int): number of latent qbits\n aux_qbits (int): number of latent qbits\n device (pennylane device): pennylane device to use for circuit evaluation\n img_dim (int): dimension of the images (width)\n kernel_size (int): size of the kernel to use when uploading the input", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "documentation": {} + }, + { + "label": "CustomTorchLayer", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "peekOfCode": "class CustomTorchLayer(qml.qnn.TorchLayer):\n def __init__(\n self,\n qnode,\n weight_shapes,\n ):\n super().__init__(qnode, weight_shapes)\n def forward(self, inputs): # pylint: disable=arguments-differ\n \"\"\"Evaluates a forward pass through the QNode based upon input data and the initialized\n weights.", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "documentation": {} + }, + { + "label": "QuantumContrastive", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "peekOfCode": "class QuantumContrastive(pl.LightningModule):\n def __init__(self, input_qbits, latent_qbits, aux_qbits, device, img_dim, kernel_size, stride, DRCs, lr=0.01):\n super(QuantumContrastive, self).__init__()\n self.quantum_circuit = 
ContrastiveConvEncoderCircuit(input_qbits, latent_qbits, aux_qbits, device, img_dim, kernel_size, stride, DRCs, diff_method=\"adjoint\")\n quantum_layer = CustomTorchLayer(self.quantum_circuit.circuit_node, {\"params\": self.quantum_circuit.parameters_shape})\n # self.params = torch.nn.Parameter(torch.rand(self.quantum_circuit.parameters_shape, requires_grad=True))\n # self.register_parameter('params', self.params)\n self.lr = lr\n self.quantum_layer = torch.nn.Sequential(\n quantum_layer", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.img_qcnn_siamese", + "documentation": {} + }, + { + "label": "get_mnist_augmentations", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.mods", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.mods", + "peekOfCode": "def get_mnist_augmentations():\n return T.Compose([\n # Apply rotation (small angles to avoid too much distortion)\n T.RandomRotation(degrees=10), \n # Apply translation (shift up/down, left/right)\n T.RandomAffine(degrees=0, translate=(0.1, 0.1)), \n # Apply shear (mimic slanted writing)\n T.RandomAffine(degrees=0, shear=10),\n # Apply scaling (zoom in/out)\n T.RandomAffine(degrees=0, scale=(0.8, 1.2)),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.models.mods", + "documentation": {} + }, + { + "label": "QM7b_aug", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.aug", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.aug", + "peekOfCode": "class QM7b_aug(InMemoryDataset):\n url = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm7.mat'\n def __init__(\n self,\n root: str,\n transform: Optional[Callable] = None,\n pre_transform: Optional[Callable] = None,\n pre_filter: Optional[Callable] = None,\n aug=None, \n rho=0.9,", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.aug", + "documentation": {} + }, + { + "label": "LogReg", + "kind": 6, + "importPath": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "class LogReg(nn.Module):\n def __init__(self, ft_in, nb_classes):\n super(LogReg, self).__init__()\n self.fc = nn.Linear(ft_in, nb_classes)\n for m in self.modules():\n self.weights_init(m)\n def weights_init(self, m):\n if isinstance(m, nn.Linear):\n torch.nn.init.xavier_uniform_(m.weight.data)\n if m.bias is not None:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "draw_plot", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def draw_plot(datadir, DS, embeddings, fname, max_nodes=None):\n return\n graphs = read_graphfile(datadir, DS, max_nodes=max_nodes)\n labels = [graph.graph['label'] for graph in graphs]\n labels = preprocessing.LabelEncoder().fit_transform(labels)\n x, y = np.array(embeddings), np.array(labels)\n print('fitting TSNE ...')\n x = TSNE(n_components=2).fit_transform(x)\n plt.close()\n df = pd.DataFrame(columns=['x0', 'x1', 'Y'])", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "logistic_classify", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def logistic_classify(x, y):\n nb_classes = np.unique(y).shape[0]\n xent = nn.CrossEntropyLoss()\n hid_units = x.shape[1]\n accs = []\n accs_val = []\n kf = KFold(n_splits=10, shuffle=True, random_state=None)\n for train_index, test_index in kf.split(x, y):\n # test\n train_embs, test_embs = x[train_index], x[test_index]", + "detail": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "svc_classify", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def svc_classify(x, y, search):\n n_splits = min(10, len(x))\n print(len(x), n_splits, 'fold cross validation')\n kf = KFold(n_splits=n_splits, shuffle=True, random_state=42)\n mae = []\n mae_val = []\n for train_index, test_index in kf.split(x, y):\n # test\n x_train, x_test = x[train_index], x[test_index]\n y_train, y_test = y[train_index], y[test_index]", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "randomforest_classify", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def randomforest_classify(x, y, search):\n kf = KFold(n_splits=10, shuffle=True, random_state=None)\n accuracies = []\n accuracies_val = []\n for train_index, test_index in kf.split(x, y):\n # test\n x_train, x_test = x[train_index], x[test_index]\n y_train, y_test = y[train_index], y[test_index]\n if search:\n params = {'n_estimators': [100, 200, 500, 1000]}", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "linearsvc_classify", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def linearsvc_classify(x, y, search):\n kf = StratifiedKFold(n_splits=10, shuffle=True, random_state=None)\n accuracies = []\n accuracies_val = []\n for train_index, test_index in kf.split(x, y):\n # test\n x_train, x_test = x[train_index], 
x[test_index]\n y_train, y_test = y[train_index], y[test_index]\n if search:\n params = {'C':[0.001, 0.01,0.1,1,10,100,1000]}", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "evaluate_embedding", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "peekOfCode": "def evaluate_embedding(embeddings, labels, search=True):\n labels = preprocessing.LabelEncoder().fit_transform(labels)\n x, y = np.array(embeddings), np.array(labels)\n acc = 0\n acc_val = 0\n '''\n _acc_val, _acc = logistic_classify(x, y)\n if _acc_val > acc_val:\n acc_val = _acc_val\n acc = _acc", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.evaluate_embedding", + "documentation": {} + }, + { + "label": "log_sum_exp", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "peekOfCode": "def log_sum_exp(x, axis=None):\n \"\"\"Log sum exp function\n Args:\n x: Input.\n axis: Axis over which to perform sum.\n Returns:\n torch.Tensor: log sum exp\n \"\"\"\n x_max = torch.max(x, axis)[0]\n y = torch.log((torch.exp(x - x_max)).sum(axis)) + x_max", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "documentation": {} + }, + { + "label": "raise_measure_error", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "peekOfCode": "def raise_measure_error(measure):\n supported_measures = ['GAN', 'JSD', 'X2', 'KL', 'RKL', 'DV', 'H2', 'W1']\n raise NotImplementedError(\n 'Measure `{}` not supported. 
Supported: {}'.format(measure,\n supported_measures))\ndef get_positive_expectation(p_samples, measure, average=True):\n \"\"\"Computes the positive part of a divergence / difference.\n Args:\n p_samples: Positive samples.\n measure: Measure to compute for.", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "documentation": {} + }, + { + "label": "get_positive_expectation", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "peekOfCode": "def get_positive_expectation(p_samples, measure, average=True):\n \"\"\"Computes the positive part of a divergence / difference.\n Args:\n p_samples: Positive samples.\n measure: Measure to compute for.\n average: Average the result over samples.\n Returns:\n torch.Tensor\n \"\"\"\n log_2 = math.log(2.)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "documentation": {} + }, + { + "label": "get_negative_expectation", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "peekOfCode": "def get_negative_expectation(q_samples, measure, average=True):\n \"\"\"Computes the negative part of a divergence / difference.\n Args:\n q_samples: Negative samples.\n measure: Measure to compute for.\n average: Average the result over samples.\n Returns:\n torch.Tensor\n \"\"\"\n log_2 = math.log(2.)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gan_losses", + "documentation": {} + }, + { + "label": "Explainer", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "peekOfCode": "class Explainer(torch.nn.Module):\n def __init__(self, num_features, dim, num_gc_layers):\n super(Explainer, self).__init__()\n self.num_gc_layers = num_gc_layers\n 
self.convs = torch.nn.ModuleList()\n self.bns = torch.nn.ModuleList()\n for i in range(num_gc_layers):\n if i and i != num_gc_layers - 1:\n nn = Sequential(Linear(dim, dim), ReLU(), Linear(dim, dim))\n bn = torch.nn.BatchNorm1d(dim)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "documentation": {} + }, + { + "label": "Encoder", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "peekOfCode": "class Encoder(torch.nn.Module):\n def __init__(self, num_features, dim, num_gc_layers, pooling):\n super(Encoder, self).__init__()\n self.num_gc_layers = num_gc_layers\n self.pooling = pooling\n self.convs = torch.nn.ModuleList()\n self.bns = torch.nn.ModuleList()\n self.dim = dim\n for i in range(num_gc_layers):\n if i:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "documentation": {} + }, + { + "label": "Net", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "peekOfCode": "class Net(torch.nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n try:\n num_features = dataset.num_features\n except:\n num_features = 1\n dim = 32\n self.encoder = Encoder(num_features, dim)\n self.fc1 = Linear(dim*5, dim)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "documentation": {} + }, + { + "label": "train", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "peekOfCode": "def train(epoch):\n model.train()\n if epoch == 51:\n for param_group in optimizer.param_groups:\n param_group['lr'] = 0.5 * param_group['lr']\n loss_all = 0\n for data in train_loader:\n data = data.to(device)\n optimizer.zero_grad()\n output = model(data.x, data.edge_index, data.batch)", + "detail": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "documentation": {} + }, + { + "label": "test", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "peekOfCode": "def test(loader):\n model.eval()\n correct = 0\n for data in loader:\n data = data.to(device)\n output = model(data.x, data.edge_index, data.batch)\n pred = output.max(dim=1)[1]\n correct += pred.eq(data.y).sum().item()\n return correct / len(loader.dataset)\nif __name__ == '__main__':", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.gin", + "documentation": {} + }, + { + "label": "local_global_loss_", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "peekOfCode": "def local_global_loss_(l_enc, g_enc, edge_index, batch, measure):\n '''\n Args:\n l: Local feature map.\n g: Global features.\n measure: Type of f-divergence. For use with mode `fd`\n mode: Loss mode. 
Fenchel-dual `fd`, NCE `nce`, or Donsker-Vadadhan `dv`.\n Returns:\n torch.Tensor: Loss.\n '''", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "documentation": {} + }, + { + "label": "adj_loss_", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "peekOfCode": "def adj_loss_(l_enc, g_enc, edge_index, batch):\n num_graphs = g_enc.shape[0]\n num_nodes = l_enc.shape[0]\n adj = torch.zeros((num_nodes, num_nodes)).cuda()\n mask = torch.eye(num_nodes).cuda()\n for node1, node2 in zip(edge_index[0], edge_index[1]):\n adj[node1.item()][node2.item()] = 1.\n adj[node2.item()][node1.item()] = 1.\n res = torch.sigmoid((torch.mm(l_enc, l_enc.t())))\n res = (1-mask) * res", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.losses", + "documentation": {} + }, + { + "label": "Permute", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "peekOfCode": "class Permute(torch.nn.Module):\n \"\"\"Module for permuting axes.\n \"\"\"\n def __init__(self, *perm):\n \"\"\"\n Args:\n *perm: Permute axes.\n \"\"\"\n super().__init__()\n self.perm = perm", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "documentation": {} + }, + { + "label": "MIFCNet", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "peekOfCode": "class MIFCNet(nn.Module):\n \"\"\"Simple custom network for computing MI.\n \"\"\"\n def __init__(self, n_input, n_units):\n \"\"\"\n Args:\n n_input: Number of input units.\n n_units: Number of output units.\n \"\"\"\n super().__init__()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "documentation": {} + }, + { + "label": "MI1x1ConvNet", + 
"kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "peekOfCode": "class MI1x1ConvNet(nn.Module):\n \"\"\"Simple custorm 1x1 convnet.\n \"\"\"\n def __init__(self, n_input, n_units):\n \"\"\"\n Args:\n n_input: Number of input units.\n n_units: Number of output units.\n \"\"\"\n super().__init__()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.mi_networks", + "documentation": {} + }, + { + "label": "GlobalDiscriminator", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "peekOfCode": "class GlobalDiscriminator(nn.Module):\n def __init__(self, args, input_dim):\n super().__init__()\n self.l0 = nn.Linear(16, 16)\n self.l1 = nn.Linear(16, 16)\n self.l2 = nn.Linear(128, 1)\n def forward(self, y, M, data):\n adj = Variable(data['adj'].float(), requires_grad=False).cuda()\n # h0 = Variable(data['feats'].float()).cuda()\n batch_num_nodes = data['num_nodes'].int().numpy()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "documentation": {} + }, + { + "label": "PriorDiscriminator", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "peekOfCode": "class PriorDiscriminator(nn.Module):\n def __init__(self, input_dim):\n super().__init__()\n self.l0 = nn.Linear(input_dim, input_dim)\n self.l1 = nn.Linear(input_dim, input_dim)\n self.l2 = nn.Linear(input_dim, 1)\n def forward(self, x):\n h = F.relu(self.l0(x))\n h = F.relu(self.l1(h))\n return torch.sigmoid(self.l2(h))", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "documentation": {} + }, + { + "label": "FF", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "description": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "peekOfCode": "class FF(nn.Module):\n def __init__(self, input_dim):\n super().__init__()\n # self.c0 = nn.Conv1d(input_dim, 512, kernel_size=1)\n # self.c1 = nn.Conv1d(512, 512, kernel_size=1)\n # self.c2 = nn.Conv1d(512, 1, kernel_size=1)\n self.block = nn.Sequential(\n nn.Linear(input_dim, input_dim),\n nn.ReLU(),\n nn.Linear(input_dim, input_dim),", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.model", + "documentation": {} + }, + { + "label": "simclr", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "peekOfCode": "class simclr(nn.Module):\n def __init__(self, hidden_dim, num_gc_layers, pooling, dataset_num_features, alpha=0.5, beta=1., gamma=.1):\n super(simclr, self).__init__()\n self.alpha = alpha\n self.beta = beta\n self.gamma = gamma\n self.prior = args.prior\n if pooling == 'last':\n self.embedding_dim = hidden_dim\n else:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "documentation": {} + }, + { + "label": "arg_parse", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "peekOfCode": "def arg_parse():\n parser = argparse.ArgumentParser(description='RD-GCL')\n parser.add_argument('--DS', dest='DS', type=str, default='qm7', help='Dataset')\n parser.add_argument('--local', dest='local', action='store_const', const=True, default=False)\n parser.add_argument('--glob', dest='glob', action='store_const', const=True, default=False)\n parser.add_argument('--prior', dest='prior', action='store_const', const=True, default=False)\n parser.add_argument('--lr', dest='lr', type=float, default=0.01, help='Learning rate.')\n parser.add_argument('--num-gc-layers', dest='num_gc_layers', type=int, default=3,\n help='Number of graph convolution layers before each 
pooling')\n parser.add_argument('--hidden-dim', dest='hidden_dim', type=int, default=32, help='hidden dimension')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "documentation": {} + }, + { + "label": "setup_seed", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "peekOfCode": "def setup_seed(seed):\n torch.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n torch.backends.cudnn.deterministic = True\n np.random.seed(seed)\n random.seed(seed)\nclass simclr(nn.Module):\n def __init__(self, hidden_dim, num_gc_layers, pooling, dataset_num_features, alpha=0.5, beta=1., gamma=.1):\n super(simclr, self).__init__()\n self.alpha = alpha", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.rgcl.rgcl", + "documentation": {} + }, + { + "label": "pca_proj", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def pca_proj(embeddings, labels, seed=1):\n \"\"\"\n Perform PCA projection and plot the results.\n Args:\n embeddings (np.ndarray): Embedding vectors.\n labels (np.ndarray): Corresponding labels.\n seed (int): Random seed for reproducibility.\n \"\"\"\n proj = PCA(n_components=2, random_state=seed).fit_transform(embeddings)\n sns.scatterplot(x=proj[:, 0], y=proj[:, 1], hue=labels, palette=sns.color_palette(\"tab10\")).set(title=\"PCA\")", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "tsne_proj", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def tsne_proj(embeddings, labels, seed=1):\n \"\"\"\n Perform t-SNE projection and plot the results.\n Args:\n embeddings (np.ndarray): Embedding vectors.\n labels 
(np.ndarray): Corresponding labels.\n seed (int): Random seed for reproducibility.\n \"\"\"\n proj = TSNE(n_components=2, random_state=seed).fit_transform(embeddings)\n sns.scatterplot(x=proj[:, 0], y=proj[:, 1], hue=labels, palette=sns.color_palette(\"tab10\")).set(title=\"T-SNE\")", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "gaussian_kde_2d", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def gaussian_kde_2d(embeddings, labels):\n \"\"\"\n Plot Gaussian KDE for embeddings in R2.\n Args:\n embeddings (np.ndarray): Embedding vectors.\n labels (np.ndarray): Corresponding labels.\n \"\"\"\n sns.kdeplot(x=embeddings[:, 0], y=embeddings[:, 1], hue=labels, fill=True, palette=sns.color_palette(\"tab10\"))\n plt.title(\"Gaussian KDE in R2\")\n plt.show()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "vmf_kde_angles", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def vmf_kde_angles(embeddings, labels, bins=100):\n \"\"\"\n Plot von Mises-Fisher KDE for angles.\n Args:\n embeddings (np.ndarray): Embedding vectors.\n labels (np.ndarray): Corresponding labels.\n bins (int): Number of bins for histogram.\n \"\"\"\n angles = np.arctan2(embeddings[:, 1], embeddings[:, 0])\n unique_labels = np.unique(labels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "vmf_kde_on_circle", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def vmf_kde_on_circle(embeddings, labels):\n 
\"\"\"\n Plot embeddings as a scatter plot on a circle.\n Args:\n embeddings (np.ndarray): Embedding vectors.\n labels (np.ndarray): Corresponding labels.\n \"\"\"\n angles = np.arctan2(embeddings[:, 1], embeddings[:, 0])\n radii = np.ones_like(angles) # Set radius to 1 for all points\n unique_labels = np.unique(labels)", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "plot_training", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def plot_training(logdir):\n # Plot training and validation loss\n metrics_df = pd.read_csv(f\"{logdir}/metrics.csv\")\n train_loss_epoch = metrics_df['train_loss'].dropna().reset_index(drop=True)\n val_loss_epoch = metrics_df['valid_loss'].dropna().reset_index(drop=True)\n min_length = min(len(train_loss_epoch), len(val_loss_epoch))\n train_loss_epoch = train_loss_epoch[:min_length]\n val_loss_epoch = val_loss_epoch[:min_length]\n plt.plot(train_loss_epoch, label='Train Loss')\n plt.plot(val_loss_epoch, label='Validation Loss')", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "generate_embeddings", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "peekOfCode": "def generate_embeddings(model, data_loader):\n \"\"\"\n Generate embeddings for the given data using the provided model.\n Args:\n model (nn.Module): Trained model.\n data_loader (DataLoader): Data loader for the dataset.\n Returns:\n tuple: Embeddings and labels as numpy arrays.\n \"\"\"\n model.eval()", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.plotting", + "documentation": {} + }, + { + "label": "get_trainer", + "kind": 2, + "importPath": 
"Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "peekOfCode": "def get_trainer(\n experiment_name: str,\n project_name: str = \"Quantum Contrastive Representation Learning\",\n max_epochs: int = 20,\n patience: int = 20,\n monitor_metric: str = \"valid_loss\",\n mode: str = \"min\",\n save_dir: str = \"logs/\",\n model_summary_depth: int = 8,\n) -> pl.Trainer:", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "documentation": {} + }, + { + "label": "API_KEY", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "peekOfCode": "API_KEY = None\nAPI_KEY = \"qagcCboPOVUgd06fVzpoc5rly\"\ndef get_trainer(\n experiment_name: str,\n project_name: str = \"Quantum Contrastive Representation Learning\",\n max_epochs: int = 20,\n patience: int = 20,\n monitor_metric: str = \"valid_loss\",\n mode: str = \"min\",\n save_dir: str = \"logs/\",", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "documentation": {} + }, + { + "label": "API_KEY", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "description": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "peekOfCode": "API_KEY = \"qagcCboPOVUgd06fVzpoc5rly\"\ndef get_trainer(\n experiment_name: str,\n project_name: str = \"Quantum Contrastive Representation Learning\",\n max_epochs: int = 20,\n patience: int = 20,\n monitor_metric: str = \"valid_loss\",\n mode: str = \"min\",\n save_dir: str = \"logs/\",\n model_summary_depth: int = 8,", + "detail": "Quantum_SSL_for_HEP_Duy_Do_Le.src.qml_ssl.utils.training", + "documentation": {} + }, + { + "label": "preprocess_data", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": 
"Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "def preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = images.astype('float32') / 255.0 \n return images, labels\ndef crop(images, size):\n x = np.argmax(np.mean(images[:, :, :, 0], axis=0))\n center = [int(x/size), x%size]\n img_size = 8", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "crop", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "def crop(images, size):\n x = np.argmax(np.mean(images[:, :, :, 0], axis=0))\n center = [int(x/size), x%size]\n img_size = 8\n images = images[:, (center[0]-int(img_size/2)):(center[0]+int(img_size/2)), (center[1]-int(img_size/2)):(center[1]+int(img_size/2))]\n return images\nx_train = x_train[:,:,:,0]\nx_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "create_pairs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "def create_pairs(images, labels):\n pairs = []\n pair_labels = []\n num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]\n # 
print(digit_indices)\n for idx1 in range(len(images)):\n x1, label1 = images[idx1], labels[idx1]\n idx2 = np.random.choice(digit_indices[int(label1)])\n x2 = images[idx2]", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "data", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "data = np.load('../../data/electron-photon-large.npz', allow_pickle=True)\nx_train = data[\"x_train\"]\ny_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_train = data[\"x_train\"]\ny_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)", + "detail": 
"Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "y_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = images.astype('float32') / 255.0 ", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": 
"Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "y_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = images.astype('float32') / 255.0 \n return images, labels", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_train = x_train[:,:,:,0]\nx_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)\nx_train = crop(x_train, 32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": 
"Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)\nx_train = crop(x_train, 32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_train = crop(x_train, 32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []\n pair_labels = []\n num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "peekOfCode": "x_test = 
crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []\n pair_labels = []\n num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]\n # print(digit_indices)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.data_preprocessing_and_pair_creation", + "documentation": {} + }, + { + "label": "evaluate_precision_recall_accuracy", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "peekOfCode": "def evaluate_precision_recall_accuracy(y_true, y_pred, threshold=0.5):\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n precision = true_positives / (true_positives + false_positives) if true_positives + false_positives > 0 else 0\n recall = true_positives / (true_positives + false_negatives) if true_positives + false_negatives > 0 else 0\n accuracy = (true_positives + true_negatives) / len(y_true)\n return precision, recall, accuracy", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "documentation": {} + }, + { + "label": "confusion_matrix", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "peekOfCode": "def confusion_matrix(y_true, 
y_pred, threshold=0.5):\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n return np.array([[true_negatives, false_positives],\n [false_negatives, true_positives]])\ndef make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n cm = cmatrix(y_true,tf.round(y_pred))", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "documentation": {} + }, + { + "label": "make_cm", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "peekOfCode": "def make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n cm = cmatrix(y_true,tf.round(y_pred))\n cm_norm = cm.astype(\"float\")/cm.sum(axis=1)[:,np.newaxis] # normalise confusion matrix\n n_class = cm.shape[0]\n fig, ax = plt.subplots(figsize=figsize)\n cax = ax.matshow(cm,cmap=plt.cm.Blues)\n fig.colorbar(cax)\n if classes:\n labels=classes\n else:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "peekOfCode": "def plot_auc(y_true, y_pred):\n fpr, tpr, _ = roc_curve(y_true, y_pred)\n roc_auc = auc(fpr, tpr)\n plt.figure()\n plt.plot(fpr, tpr, color='darkorange', lw=2, label=f'ROC curve (area = {roc_auc:.2f})')\n plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')\n plt.xlim([0.0, 1.0])\n plt.ylim([0.0, 1.05])\n plt.xlabel('False Positive Rate')\n 
plt.ylabel('True Positive Rate')", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_electron_photon.helpers", + "documentation": {} + }, + { + "label": "evaluate_precision_recall_accuracy", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "peekOfCode": "def evaluate_precision_recall_accuracy(y_true, y_pred, threshold=0.5):\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n precision = true_positives / (true_positives + false_positives) if true_positives + false_positives > 0 else 0\n recall = true_positives / (true_positives + false_negatives) if true_positives + false_negatives > 0 else 0\n accuracy = (true_positives + true_negatives) / len(y_true)\n return precision, recall, accuracy", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "documentation": {} + }, + { + "label": "confusion_matrix", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "peekOfCode": "def confusion_matrix(y_true, y_pred, threshold=0.5):\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n return np.array([[true_negatives, false_positives],\n [false_negatives, true_positives]])\ndef 
make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n cm = cmatrix(y_true,tf.round(y_pred))", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "documentation": {} + }, + { + "label": "make_cm", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "peekOfCode": "def make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n cm = cmatrix(y_true,tf.round(y_pred))\n cm_norm = cm.astype(\"float\")/cm.sum(axis=1)[:,np.newaxis] # normalise confusion matrix\n n_class = cm.shape[0]\n fig, ax = plt.subplots(figsize=figsize)\n cax = ax.matshow(cm,cmap=plt.cm.Blues)\n fig.colorbar(cax)\n if classes:\n labels=classes\n else:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "peekOfCode": "def plot_auc(y_true, y_pred):\n fpr, tpr, _ = roc_curve(y_true, y_pred)\n roc_auc = auc(fpr, tpr)\n plt.figure()\n plt.plot(fpr, tpr, color='darkorange', lw=2, label=f'ROC curve (area = {roc_auc:.2f})')\n plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')\n plt.xlim([0.0, 1.0])\n plt.ylim([0.0, 1.05])\n plt.xlabel('False Positive Rate')\n plt.ylabel('True Positive Rate')", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.helpers", + "documentation": {} + }, + { + "label": "residual_block", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "peekOfCode": "def residual_block(x, filters, kernel_size=3, stride=1, 
use_batch_norm=True):\n shortcut = x\n x = layers.Conv2D(filters, kernel_size, strides=stride, padding='same')(x)\n if use_batch_norm:\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n x = layers.Conv2D(filters, kernel_size, strides=1, padding='same')(x)\n if use_batch_norm:\n x = layers.BatchNormalization()(x)\n if stride != 1 or x.shape[-1] != shortcut.shape[-1]:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "documentation": {} + }, + { + "label": "resnet18", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "peekOfCode": "def resnet18(input_shape=(224, 224, 3), return_embeddings=True):\n inputs = layers.Input(shape=input_shape)\n x = layers.Conv2D(64, kernel_size=7, strides=2, padding='same')(inputs)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n x = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(x)\n x = residual_block(x, 64, stride=1)\n x = residual_block(x, 64, stride=1)\n x = residual_block(x, 128, stride=2)\n x = residual_block(x, 128, stride=1)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.notebooks.Experiment_quark_gluon.resnet", + "documentation": {} + }, + { + "label": "DataLoader", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "peekOfCode": "class DataLoader:\n '''Loads the training and test pairs'''\n def __init__(self, file_path):\n data = np.load(file_path, allow_pickle=True)\n self.pairs_train = np.expand_dims(data[\"pairs_train\"], -1)\n self.labels_train = data[\"labels_train\"]\n self.pairs_test = np.expand_dims(data[\"pairs_test\"], -1)\n self.labels_test = data[\"labels_test\"]\n def get_train_data(self):\n return self.pairs_train, self.labels_train", + "detail": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "documentation": {} + }, + { + "label": "LabeledContrastiveDatasetQG", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "peekOfCode": "class LabeledContrastiveDatasetQG():\n \"\"\"\n Dataset class to load images from .npz files, convert them to PyTorch tensors, and return x1 and x2.\n \"\"\"\n def __init__(self, file, transforms=None):\n #self.files = [os.path.join(folder, file) for file in os.listdir(folder) if file.endswith('.npz')]\n self.file=file\n self.transform = transforms\n def __len__(self):\n return len(self.file)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "documentation": {} + }, + { + "label": "preprocess_fixed_nodes", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "peekOfCode": "def preprocess_fixed_nodes(x_data,y_data,nodes_per_graph=10): \n '''Preprocesses graph dataset (courtesy to Roy's open-source code)'''\n print('--- Finding All Unique Particles ---')\n unique_particles = np.unique(x_data[:,:,3])\n x_data = torch.tensor(x_data)\n y_data = torch.tensor(y_data)\n print()\n print('--- Inserting Masses ---')\n masses = torch.zeros((x_data.shape[0],x_data.shape[1]))\n for i,particle in tqdm(enumerate(unique_particles)):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "documentation": {} + }, + { + "label": "create_contrastive_graph_pairs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "peekOfCode": "def create_contrastive_graph_pairs(dataset):\n pairs = []\n labels = []\n # Group graphs by their labels (0 or 1)\n label_to_graphs = defaultdict(list)\n for data in dataset:\n 
label_to_graphs[data.y.item()].append(data)\n # Create pairs for positive class\n for data in label_to_graphs[1]: # For each positive graph\n data_aug = graph_augment(data) # Create an augmented version", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_loader", + "documentation": {} + }, + { + "label": "preprocess_data", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "def preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = images.astype('float32') / 255.0 \n return images, labels\ndef crop(images, size):\n x = np.argmax(np.mean(images[:, :, :, 0], axis=0))\n center = [int(x/size), x%size]\n img_size = 8", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "crop", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "def crop(images, size):\n x = np.argmax(np.mean(images[:, :, :, 0], axis=0))\n center = [int(x/size), x%size]\n img_size = 8\n images = images[:, (center[0]-int(img_size/2)):(center[0]+int(img_size/2)), (center[1]-int(img_size/2)):(center[1]+int(img_size/2))]\n return images\nx_train = x_train[:,:,:,0]\nx_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "create_pairs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "def create_pairs(images, labels):\n pairs = []\n pair_labels = []\n 
num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]\n # print(digit_indices)\n for idx1 in range(len(images)):\n x1, label1 = images[idx1], labels[idx1]\n idx2 = np.random.choice(digit_indices[int(label1)])\n x2 = images[idx2]", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "data", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "data = np.load('../../data/electron-photon-large.npz', allow_pickle=True)\nx_train = data[\"x_train\"]\ny_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_train = data[\"x_train\"]\ny_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "y_train", + "kind": 5, + "importPath": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "y_train = data[\"y_train\"]\nx_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_test = data[\"x_test\"]\ny_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = images.astype('float32') / 255.0 ", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "y_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "y_test = data[\"y_test\"]\nprint(f\"Data Loading: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"Data Loading: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Preprocess the dataset\ndef preprocess_data(images, labels):\n # Add a dimenison for channel\n images = np.expand_dims(images, -1)\n # Normalize\n images = 
images.astype('float32') / 255.0 \n return images, labels", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_train = x_train[:,:,:,0]\nx_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)\nx_train = crop(x_train, 32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_test = x_test[:,:,:,0]\nx_train, y_train = preprocess_data(x_train, y_train)\nx_test, y_test = preprocess_data(x_test, y_test)\nx_train = crop(x_train, 32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_train", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_train = crop(x_train, 
32)\nx_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []\n pair_labels = []\n num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "x_test", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "peekOfCode": "x_test = crop(x_test, 32)\nprint(f\"After Data Preprocessing: x_train shape {x_train.shape}, x_test shape: {x_test.shape}\")\nprint(f\"After Data Preprocessing: y_train shape {y_train.shape}, y_test shape: {y_test.shape}\")\n# Create pairs of images and labels\ndef create_pairs(images, labels):\n pairs = []\n pair_labels = []\n num_classes = len(np.unique(labels)) # 2\n digit_indices = [np.where(labels == i)[0] for i in range(num_classes)]\n # print(digit_indices)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_pair_creation", + "documentation": {} + }, + { + "label": "add_fourth_channel", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def add_fourth_channel(images):\n '''4th channel from overlay of the 3 channels'''\n images_with_four_channels = []\n for image in images:\n superimposed_channel = np.mean(image, axis=2, keepdims=True)\n image_with_four_channels = np.concatenate((image, superimposed_channel), axis=2)\n images_with_four_channels.append(image_with_four_channels)\n return np.array(images_with_four_channels)\n# Soft 
Transformations\ndef rotate_image(image, angle):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "rotate_image", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def rotate_image(image, angle):\n (h, w) = image.shape[:2]\n center = (w // 2, h // 2)\n M = cv2.getRotationMatrix2D(center, angle, 1.0)\n rotated = cv2.warpAffine(image, M, (w, h))\n return rotated\ndef translate_image(image, x, y):\n M = np.float32([[1, 0, x], [0, 1, y]])\n shifted = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))\n return shifted", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "translate_image", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def translate_image(image, x, y):\n M = np.float32([[1, 0, x], [0, 1, y]])\n shifted = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))\n return shifted\ndef scale_image(image, scale_factor):\n h, w = image.shape[:2]\n resized = cv2.resize(image, (int(w * scale_factor), int(h * scale_factor)))\n return resized\ndef shear_image(image, shear_factor):\n M = np.float32([[1, shear_factor, 0], [0, 1, 0]])", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "scale_image", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def scale_image(image, scale_factor):\n h, w = image.shape[:2]\n resized = 
cv2.resize(image, (int(w * scale_factor), int(h * scale_factor)))\n return resized\ndef shear_image(image, shear_factor):\n M = np.float32([[1, shear_factor, 0], [0, 1, 0]])\n sheared = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))\n return sheared\ndef adjust_brightness(image, brightness_factor):\n hsv = cv2.cvtColor(image[:, :, :3], cv2.COLOR_BGR2HSV)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "shear_image", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def shear_image(image, shear_factor):\n M = np.float32([[1, shear_factor, 0], [0, 1, 0]])\n sheared = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))\n return sheared\ndef adjust_brightness(image, brightness_factor):\n hsv = cv2.cvtColor(image[:, :, :3], cv2.COLOR_BGR2HSV)\n h, s, v = cv2.split(hsv)\n v = cv2.add(v, brightness_factor)\n v[v > 255] = 255\n v[v < 0] = 0", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "adjust_brightness", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def adjust_brightness(image, brightness_factor):\n hsv = cv2.cvtColor(image[:, :, :3], cv2.COLOR_BGR2HSV)\n h, s, v = cv2.split(hsv)\n v = cv2.add(v, brightness_factor)\n v[v > 255] = 255\n v[v < 0] = 0\n final_hsv = cv2.merge((h, s, v))\n image_bright = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR)\n image[:, :, :3] = image_bright\n return image", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "add_noise", + "kind": 2, + 
"importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def add_noise(image, noise_factor):\n row, col, ch = image.shape\n mean = 0\n sigma = noise_factor ** 0.5\n gauss = np.random.normal(mean, sigma, (row, col, ch))\n gauss = gauss.reshape(row, col, ch)\n noisy = image + gauss\n return noisy\n# collinear transformations\ndef affine_transform(image, pts1, pts2):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "affine_transform", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def affine_transform(image, pts1, pts2):\n M = cv2.getAffineTransform(pts1, pts2)\n rows, cols, ch = image.shape\n affine_transformed = cv2.warpAffine(image, M, (cols, rows))\n return affine_transformed\ndef perspective_transform(image, pts1, pts2):\n M = cv2.getPerspectiveTransform(pts1, pts2)\n rows, cols, ch = image.shape\n perspective_transformed = cv2.warpPerspective(image, M, (cols, rows))\n return perspective_transformed", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "perspective_transform", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def perspective_transform(image, pts1, pts2):\n M = cv2.getPerspectiveTransform(pts1, pts2)\n rows, cols, ch = image.shape\n perspective_transformed = cv2.warpPerspective(image, M, (cols, rows))\n return perspective_transformed\n# Normalisation\ndef z_score():\n if len(image.shape) == 3 and image.shape[2] == 
3:\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n mean = np.mean(image)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "z_score", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def z_score():\n if len(image.shape) == 3 and image.shape[2] == 3:\n image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n mean = np.mean(image)\n std = np.std(image)\n z_score_transformed = (image - mean) / std\n normalized_transformed = cv2.normalize(z_score_transformed, None, 0, 255, cv2.NORM_MINMAX)\n normalized_transformed = normalized_transformed.astype(np.uint8)\n return normalized_transformed\n# Heatmaps", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "compute_relative_difference", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def compute_relative_difference(images):\n avgs = np.mean(images, axis=0)\n relative_diffs = np.zeros_like(avgs)\n for img in images:\n relative_diffs += np.abs(img - avgs) / (avgs + 1e-10) # Adding a small constant to avoid division by zero\n relative_diffs /= len(images)\n return relative_diffs\ndef plot_heatmaps(relative_diffs):\n h, w, c = relative_diffs.shape\n for i in range(c):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "plot_heatmaps", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def 
plot_heatmaps(relative_diffs):\n h, w, c = relative_diffs.shape\n for i in range(c):\n plt.figure(figsize=(10, 8))\n sns.heatmap(relative_diffs[:, :, i], cmap=\"viridis\")\n plt.title(f'Channel {i+1} Sensitivity Heatmap')\n plt.show()\n# Log transformation of the 4th channel\ndef preprocess_4th_channel(image): \n if image.shape[-1] < 3:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "preprocess_4th_channel", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def preprocess_4th_channel(image): \n if image.shape[-1] < 3:\n raise ValueError(\"Image must have at least 4 channels.\")\n fourth_channel = image[:, :, 3]\n transformed_channel = np.log(np.abs(fourth_channel) + 1e-6)\n normalized_channel = (transformed_channel - transformed_channel.min()) / (transformed_channel.max() - transformed_channel.min() + 1e-6)\n return normalized_channel\ndef preprocess_all_images(images):\n if images.shape[-1] < 4:\n raise ValueError(\"Each image must have at least 4 channels.\")", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "preprocess_all_images", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def preprocess_all_images(images):\n if images.shape[-1] < 4:\n raise ValueError(\"Each image must have at least 4 channels.\")\n # Extract the 4th channels from all images\n fourth_channels = images[:, :, :, 3]\n # Apply logarithmic transformation and normalization to the 4th channels\n transformed_channels = np.log(np.abs(fourth_channels) + 1e-6)\n min_vals = 
transformed_channels.min(axis=(1, 2), keepdims=True)\n max_vals = transformed_channels.max(axis=(1, 2), keepdims=True)\n normalized_channels = (transformed_channels - min_vals) / (max_vals - min_vals + 1e-6)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "drop_nodes", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def drop_nodes(data, drop_prob=0.2):\n node_mask = torch.rand(data.x.size(0)) > drop_prob\n data.x = data.x[node_mask]\n data.edge_index, _ = subgraph(node_mask, data.edge_index, relabel_nodes=True)\n return data\n# Function to randomly drop edges in a graph\ndef drop_edges(data, drop_prob=0.2):\n edge_mask = torch.rand(data.edge_index.size(1)) > drop_prob\n data.edge_index = data.edge_index[:, edge_mask]\n return data", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "drop_edges", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def drop_edges(data, drop_prob=0.2):\n edge_mask = torch.rand(data.edge_index.size(1)) > drop_prob\n data.edge_index = data.edge_index[:, edge_mask]\n return data\n# Function to randomly mask node features in a graph\ndef mask_features(data, mask_prob=0.2):\n feature_mask = torch.rand(data.x.size()) > mask_prob\n data.x = data.x * feature_mask.float()\n return data\n# Define the augmentation function", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "mask_features", + "kind": 2, + "importPath": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def mask_features(data, mask_prob=0.2):\n feature_mask = torch.rand(data.x.size()) > mask_prob\n data.x = data.x * feature_mask.float()\n return data\n# Define the augmentation function\ndef graph_augment(data):\n data_aug = data.clone()\n data_aug = drop_nodes(data_aug, drop_prob=0.2)\n data_aug = drop_edges(data_aug, drop_prob=0.1)\n data_aug = mask_features(data_aug, mask_prob=0.2)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "graph_augment", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "peekOfCode": "def graph_augment(data):\n data_aug = data.clone()\n data_aug = drop_nodes(data_aug, drop_prob=0.2)\n data_aug = drop_edges(data_aug, drop_prob=0.1)\n data_aug = mask_features(data_aug, mask_prob=0.2)\n return data_aug", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_preprocessing_augmentation", + "documentation": {} + }, + { + "label": "plot", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "peekOfCode": "def plot(channel=0):\n '''Plots specified channel of quark-gluon dataset'''\n fig2 = plt.figure(figsize=(10,10))\n r = 1\n c = 2\n index = [np.where(data['y_train'] == 0)[0], np.where(data['y_train'] == 1)[0]]\n for i in range(2):\n fig2.add_subplot(r,c,i+1)\n plt.imshow(np.log(np.mean(data['x_train'][index[i],:,:,channel], axis=0)))\n plt.title('Quark' if i == 0 else 'Gluon')", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "documentation": {} + }, + { + "label": 
"plot_image_grid_superimposed", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "peekOfCode": "def plot_image_grid_superimposed(data, label, channel=0, rows=5, cols=5):\n '''Plots image grid of the 4th channel'''\n fig, axes = plt.subplots(rows, cols, figsize=(10, 10))\n for i in range(rows):\n for j in range(cols):\n # axes[i, j].imshow(data[i * cols + j, :, :, channel])\n axes[i, j].imshow(data[i * cols + j, :, :, 3])\n axes[i, j].axis('off')\n plt.suptitle(f'{label}')\n plt.show()", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "documentation": {} + }, + { + "label": "plot_image_grid", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "peekOfCode": "def plot_image_grid(data, label, channel=0, rows=5, cols=5):\n '''Plots image grid of specified channel'''\n fig, axes = plt.subplots(rows, cols, figsize=(10, 10))\n for i in range(rows):\n for j in range(cols):\n axes[i, j].imshow(data[i * cols + j, :, :, channel])\n axes[i, j].axis('off')\n plt.suptitle(f'{label}')\n plt.show()\ndef plot_sample_pairs(pairs, labels, shape, num_samples=5):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "documentation": {} + }, + { + "label": "plot_sample_pairs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "peekOfCode": "def plot_sample_pairs(pairs, labels, shape, num_samples=5):\n '''Plots sample pairs'''\n plt.figure(figsize=(15, num_samples * 2))\n for i in range(num_samples):\n ax = plt.subplot(num_samples, 2, 2 * i + 1)\n plt.imshow(pairs[i, 0].reshape(shape, shape))\n ax.axis('off')\n ax = plt.subplot(num_samples, 2, 2 * i + 2)\n 
plt.imshow(pairs[i, 1].reshape(shape, shape))\n ax.axis('off')", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.data.data_visualisation", + "documentation": {} + }, + { + "label": "run_model_lct", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def run_model_lct(model, epoch, dataloader, lossFn, optimizer=None, train=True, return_embeddings=False):\n if train:\n model.train()\n else:\n model.eval()\n total_loss = 0\n correct = 0\n num_samples = 0\n all_embeddings = []\n all_labels = []", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "evaluate_precision_recall_accuracy", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def evaluate_precision_recall_accuracy(y_true, y_pred, threshold=0.5):\n '''Returns Precision, Recall and Accuracy'''\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n precision = true_positives / (true_positives + false_positives) if true_positives + false_positives > 0 else 0\n recall = true_positives / (true_positives + false_negatives) if true_positives + false_negatives > 0 else 0\n accuracy = (true_positives + true_negatives) / len(y_true)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "confusion_matrix", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": 
"def confusion_matrix(y_true, y_pred, threshold=0.5):\n '''Creates confusion matrix'''\n y_pred_binary = (y_pred >= threshold).astype(int)\n true_positives = np.sum((y_true == 1) & (y_pred_binary == 1))\n false_positives = np.sum((y_true == 0) & (y_pred_binary == 1))\n false_negatives = np.sum((y_true == 1) & (y_pred_binary == 0))\n true_negatives = np.sum((y_true == 0) & (y_pred_binary == 0))\n return np.array([[true_negatives, false_positives],\n [false_negatives, true_positives]])\ndef make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "make_cm", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n '''Creates a pretty confusion matrix'''\n cm = cmatrix(y_true,tf.round(y_pred))\n cm_norm = cm.astype(\"float\")/cm.sum(axis=1)[:,np.newaxis] # normalise confusion matrix\n n_class = cm.shape[0]\n fig, ax = plt.subplots(figsize=figsize)\n cax = ax.matshow(cm,cmap=plt.cm.Blues)\n fig.colorbar(cax)\n if classes:\n labels=classes", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def plot_auc(y_true, y_pred):\n '''Plots AUC-ROC curve'''\n fpr, tpr, _ = roc_curve(y_true, y_pred)\n roc_auc = auc(fpr, tpr)\n plt.figure()\n plt.plot(fpr, tpr, color='darkorange', lw=2, label=f'ROC curve (area = {roc_auc:.2f})')\n plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')\n plt.xlim([0.0, 1.0])\n plt.ylim([0.0, 1.05])\n plt.xlabel('False Positive Rate')", + "detail": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def plot_auc(labels, preds):\n auc = roc_auc_score(labels, preds)\n fpr, tpr, _ = roc_curve(labels, preds)\n plt.plot(fpr, tpr, label=\"AUC = {0}\".format(auc))\n plt.xlabel('False Positive Rate')\n plt.ylabel('True Positive Rate')\n plt.legend()\n wandb.log({\"Confusion Matrix\": wandb.Image(plt)})\n plt.show()\ndef make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "make_cm", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "peekOfCode": "def make_cm(y_true,y_pred,classes=None,figsize=(10,10),text_size=15):\n cm = cmatrix(y_true,y_pred)\n cm_norm = cm.astype(\"float\")/cm.sum(axis=1)[:,np.newaxis] # normalise confusion matrix\n n_class = cm.shape[0]\n fig, ax = plt.subplots(figsize=figsize)\n cax = ax.matshow(cm,cmap=plt.cm.Blues)\n fig.colorbar(cax)\n if classes:\n labels=classes\n else:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.evaluation.evaluate", + "documentation": {} + }, + { + "label": "NTXent", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "peekOfCode": "class NTXent(torch.nn.Module):\n '''\n Modified from: https://zablo.net/blog/post/understanding-implementing-simclr-guide-eli5-pytorch/\n '''\n def __init__(self, \n batch_size, \n temperature=0.5,\n device='cuda'):\n super().__init__()\n self.batch_size = batch_size", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "documentation": {} + 
}, + { + "label": "ContrastiveLoss", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "peekOfCode": "class ContrastiveLoss(torch.nn.Module):\n \"\"\"\n Contrastive loss function.\n Based on: http://yann.lecun.com/exdb/publis/pdf/hadsell-chopra-lecun-06.pdf\n \"\"\"\n def __init__(self,\n distance = lambda x,y: torch.pow(x-y, 2).sum(1),\n margin=1.0,\n mode='pairs',\n batch_size=None,", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "documentation": {} + }, + { + "label": "Losses", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "peekOfCode": "class Losses:\n @staticmethod\n def quantum_fidelity_loss(y_true, y_pred):\n return tf.reduce_mean(tf.square(y_true - y_pred))\n @staticmethod\n def contrastive_pair_loss(margin=Config.MARGIN):\n def loss(y_true, dist):\n y_true = tf.cast(y_true, tf.float32)\n square_dist = tf.square(dist)\n margin_square = tf.square(tf.maximum(margin - dist, 0))", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "documentation": {} + }, + { + "label": "InfoNCELoss", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "peekOfCode": "class InfoNCELoss(keras.losses.Loss):\n def __init__(self, n_qubits, n_ancillas, q_depth, q_params, temperature=0.1, epsilon=1e-4, negative_mode='unpaired'):\n super(InfoNCELoss, self).__init__()\n self.temperature = temperature\n self.epsilon = epsilon\n self.negative_mode = negative_mode\n self.q_params = q_params\n self.q_depth = q_depth\n self.n_qubits = n_qubits\n self.n_ancillas = n_ancillas", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "documentation": {} + }, + { + "label": "form_pairs", + "kind": 2, + "importPath": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "peekOfCode": "def form_pairs(inA, inB):\n '''\n Form pairs from two tensors of embeddings. It is assumed that the embeddings at corresponding batch positions are similar\n and all other batch positions are dissimilar \n '''\n b, emb_size = inA.shape\n perms = b**2\n labels = [0]*perms; sim_idxs = [(0 + i*b) + i for i in range(b)]\n for idx in sim_idxs:\n labels[idx] = 1", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.loss.losses", + "documentation": {} + }, + { + "label": "quantum_circuit_angle_entangle_weights", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "peekOfCode": "def quantum_circuit_angle_entangle_weights(inputs, weights):\n # Explicit AngleEmbedding gates (RY rotations)\n for i in range(n_qubits):\n qml.RY(inputs[i], wires=i)\n # Explicit BasicEntanglerLayer gates\n for layer in range(len(weights)):\n for i in range(n_qubits):\n qml.RX(weights[layer][i], wires=i)\n for i in range(n_qubits - 1):\n qml.CNOT(wires=[i, i+1]) # Chain entanglement", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "documentation": {} + }, + { + "label": "quantum_circuit_angle_entangle_inputs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "peekOfCode": "def quantum_circuit_angle_entangle_inputs(inputs):\n for i in range(n_qubits):\n qml.RY(inputs[i], wires=i)\n for i in range(n_qubits):\n qml.RX(inputs[i], wires=i) \n for i in range(n_qubits - 1):\n qml.CNOT(wires=[i, i + 1]) \n return [qml.expval(qml.PauliZ(wires=i)) for i in range(n_qubits)]\n@qml.qnode(dev, interface='torch')\ndef quantum_circuit_amplitude(data):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "documentation": {} + }, + { + "label": 
"quantum_circuit_amplitude", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "peekOfCode": "def quantum_circuit_amplitude(data):\n # Amplitude embedding (data should be of size 2^n_qubits)\n qml.AmplitudeEmbedding(features=data, wires=range(n_qubits), normalize=True)\n # Apply rotations using weights for parameterization\n for i in range(n_qubits):\n qml.RY(data[i], wires=i) # Rotation using the provided weights\n # Apply CNOT gates for strong entanglement\n for i in range(n_qubits - 1):\n qml.CNOT(wires=[i, i + 1])\n # Add final CNOT between the last and first qubit for full entanglement", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "documentation": {} + }, + { + "label": "n_qubits", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "peekOfCode": "n_qubits = 6\ndev = qml.device('default.qubit', wires=n_qubits)\n@qml.qnode(dev, interface='torch')\ndef quantum_circuit_angle_entangle_weights(inputs, weights):\n # Explicit AngleEmbedding gates (RY rotations)\n for i in range(n_qubits):\n qml.RY(inputs[i], wires=i)\n # Explicit BasicEntanglerLayer gates\n for layer in range(len(weights)):\n for i in range(n_qubits):", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "documentation": {} + }, + { + "label": "dev", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "peekOfCode": "dev = qml.device('default.qubit', wires=n_qubits)\n@qml.qnode(dev, interface='torch')\ndef quantum_circuit_angle_entangle_weights(inputs, weights):\n # Explicit AngleEmbedding gates (RY rotations)\n for i in range(n_qubits):\n qml.RY(inputs[i], wires=i)\n # Explicit BasicEntanglerLayer gates\n for layer in range(len(weights)):\n for i in range(n_qubits):\n 
qml.RX(weights[layer][i], wires=i)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qc", + "documentation": {} + }, + { + "label": "QuantumCircuit", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "peekOfCode": "class QuantumCircuit:\n def __init__(self, n_qubits=Config.N_QUBITS, n_layers=Config.N_LAYERS):\n self.n_qubits = n_qubits\n self.n_layers = n_layers\n self.dev = qml.device(\"default.qubit\", wires=n_qubits)\n self.weight_shapes = {\"weights\": (n_layers, n_qubits)}\n self.qnode = qml.QNode(quantum_circuit, self.dev)\n def get_quantum_layer(self):\n return qml.qnn.KerasLayer(self.qnode, self.weight_shapes, output_dim=self.n_qubits)\n# CNN Model with Quantum Layer", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "documentation": {} + }, + { + "label": "QuantumCNN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "peekOfCode": "class QuantumCNN:\n def __init__(self, input_shape, quantum_layer, n_qubits=Config.N_QUBITS):\n self.input_shape = input_shape\n self.quantum_layer = quantum_layer\n self.n_qubits = n_qubits\n def create_model(self, return_embeddings=False):\n model = models.Sequential()\n model.add(layers.Input(shape=self.input_shape))\n model.add(layers.Conv2D(32, (3, 3), activation='relu')) # Conv layer 1\n model.add(layers.MaxPooling2D((2, 2)))", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "documentation": {} + }, + { + "label": "SiameseNetwork", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "peekOfCode": "class SiameseNetwork:\n def __init__(self, input_shape, quantum_cnn):\n self.input_shape = input_shape\n self.quantum_cnn = quantum_cnn\n def create_network(self):\n base_model = 
self.quantum_cnn.create_model()\n input_0 = layers.Input(shape=self.input_shape)\n input_1 = layers.Input(shape=self.input_shape)\n processed_0 = base_model(input_0)\n processed_1 = base_model(input_1)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "documentation": {} + }, + { + "label": "quantum_circuit", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "peekOfCode": "def quantum_circuit(inputs, weights):\n qml.templates.AngleEmbedding(inputs, wires=range(n_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(n_qubits))\n return [qml.expval(qml.PauliZ(wires=i)) for i in range(n_qubits)]\n# Quantum Circuit Class\nclass QuantumCircuit:\n def __init__(self, n_qubits=Config.N_QUBITS, n_layers=Config.N_LAYERS):\n self.n_qubits = n_qubits\n self.n_layers = n_layers\n self.dev = qml.device(\"default.qubit\", wires=n_qubits)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "documentation": {} + }, + { + "label": "n_qubits", + "kind": 5, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "peekOfCode": "n_qubits = Config.N_QUBITS\n# Define Quantum Circuit Function\ndef quantum_circuit(inputs, weights):\n qml.templates.AngleEmbedding(inputs, wires=range(n_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(n_qubits))\n return [qml.expval(qml.PauliZ(wires=i)) for i in range(n_qubits)]\n# Quantum Circuit Class\nclass QuantumCircuit:\n def __init__(self, n_qubits=Config.N_QUBITS, n_layers=Config.N_LAYERS):\n self.n_qubits = n_qubits", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qcnn", + "documentation": {} + }, + { + "label": "HybridQuantumGNN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "peekOfCode": 
"class HybridQuantumGNN(torch.nn.Module):\n def __init__(self, input_dims, hidden_dims, output_dims, activ_fn, QuantumLayer, n_qubits):\n super().__init__()\n layers = []\n self.norm_layers = []\n self.residual_projections = [] # To project residuals if needed\n # Initial GAT Layer\n layers.append(GATConv(input_dims, hidden_dims[0]))\n self.norm_layers.append(BatchNorm1d(hidden_dims[0]))\n if input_dims != hidden_dims[0]:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "documentation": {} + }, + { + "label": "GNN", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "peekOfCode": "class GNN(torch.nn.Module):\n def __init__(self, input_dims, hidden_dims, output_dims, activ_fn):\n super().__init__()\n layers = []\n self.norm_layers = []\n self.residual_projections = [] # To project residuals if needed\n # Initial GAT Layer\n layers.append(GATConv(input_dims, hidden_dims[0]))\n self.norm_layers.append(BatchNorm(hidden_dims[0]))\n if input_dims != hidden_dims[0]:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.qgnn", + "documentation": {} + }, + { + "label": "residual_block", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "peekOfCode": "def residual_block(x, filters, kernel_size=3, stride=1, use_batch_norm=True):\n shortcut = x\n x = layers.Conv2D(filters, kernel_size, strides=stride, padding='same')(x)\n if use_batch_norm:\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n x = layers.Conv2D(filters, kernel_size, strides=1, padding='same')(x)\n if use_batch_norm:\n x = layers.BatchNormalization()(x)\n if stride != 1 or x.shape[-1] != shortcut.shape[-1]:", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "documentation": {} + }, + { + "label": "resnet18", + "kind": 2, + "importPath": 
"Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "peekOfCode": "def resnet18(input_shape=(224, 224, 3), return_embeddings=True):\n inputs = layers.Input(shape=input_shape)\n x = layers.Conv2D(64, kernel_size=7, strides=2, padding='same')(inputs)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n x = layers.MaxPooling2D(pool_size=3, strides=2, padding='same')(x)\n x = residual_block(x, 64, stride=1)\n x = residual_block(x, 64, stride=1)\n x = residual_block(x, 128, stride=2)\n x = residual_block(x, 128, stride=1)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.models.resnet18", + "documentation": {} + }, + { + "label": "Trainer", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "peekOfCode": "class Trainer:\n def __init__(self, siamese_network, pairs_train, labels_train, pairs_test, labels_test):\n self.siamese_network = siamese_network\n self.pairs_train = pairs_train\n self.labels_train = labels_train\n self.pairs_test = pairs_test\n self.labels_test = labels_test\n def train(self, epochs=Config.EPOCHS, batch_size=Config.BATCH_SIZE, learning_rate=Config.LEARNING_RATE):\n tf.get_logger().setLevel('ERROR')\n self.siamese_network.compile(", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "documentation": {} + }, + { + "label": "run_model", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "peekOfCode": "def run_model(model, epoch, dataloader, lossFn, optimizer=None, train=True):\n if train:\n model.train()\n else:\n model.eval()\n total_loss = 0\n correct = 0\n num_samples = 0\n for batch in dataloader:\n optimizer.zero_grad() if train else None", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + 
"documentation": {} + }, + { + "label": "run_qmodel", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "peekOfCode": "def run_qmodel(model, epoch, dataloader, lossFn, optimizer=None, train=True):\n if train:\n model.train()\n else:\n model.eval()\n total_loss = 0\n correct = 0\n num_samples = 0\n for batch in dataloader:\n optimizer.zero_grad() if train else None", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "documentation": {} + }, + { + "label": "train_model", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "peekOfCode": "def train_model(model, optimizer, lossFn, epochs, lr, train_dataloader, val_dataloader):\n history = {'train_loss': [], 'val_loss': []}\n for epoch in range(epochs):\n # Run training\n train_loss = run_model(model, epoch, train_dataloader, lossFn, optimizer)\n # Run validation\n val_loss = run_model(model, epoch, val_dataloader, lossFn, optimizer=None, train=False)\n print(f\"Epoch {epoch + 1}, Train Loss: {train_loss:.4f}, Val Loss: {val_loss:.4f}\")\n # Store loss for this epoch\n history['train_loss'].append(train_loss)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "documentation": {} + }, + { + "label": "train_qmodel", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "peekOfCode": "def train_qmodel(model, optimizer, lossFn, epochs, lr, train_dataloader, val_dataloader):\n history = {'train_loss': [], 'val_loss': []}\n for epoch in range(epochs):\n # Run training\n train_loss = run_qmodel(model, epoch, train_dataloader, lossFn, optimizer)\n # Run validation\n val_loss = run_qmodel(model, epoch, val_dataloader, lossFn, optimizer=None, train=False)\n 
print(f\"Epoch {epoch + 1}, Train Loss: {train_loss:.4f}, Val Loss: {val_loss:.4f}\")\n # Store loss for this epoch\n history['train_loss'].append(train_loss)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainer", + "documentation": {} + }, + { + "label": "Trainer", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "peekOfCode": "class Trainer:\n ##########################\n # #\n # Initialization #\n # #\n ##########################\n def __init__(self,\n model,\n dataloader,\n optimizer = None,", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "documentation": {} + }, + { + "label": "Tester", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "peekOfCode": "class Tester:\n ##########################\n # #\n # Initialization #\n # #\n ##########################\n def __init__(self,\n model,\n dataloader,\n metric=None,", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.training.trainers", + "documentation": {} + }, + { + "label": "filter_labels", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.helpers", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.helpers", + "peekOfCode": "def filter_labels(images, labels, label):\n filter_mask = np.isin(labels, [label])\n return images[filter_mask]", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.helpers", + "documentation": {} + }, + { + "label": "extract_embeddings", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def extract_embeddings(test_dataloader, model, N, reduce_to_dimension=2, device='cuda'):\n '''\n Use a test dataloader and torch model to extract N embeddings, reduce them to \n 
reduce_to_dimension dimensions, then organize them into a dataframe with the GT labels\n '''\n model.eval()\n m = model\n # m = model.to(device)\n embs = None\n labs = None", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_embeddings", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_embeddings(emb_df):\n '''\n Plot the DataFrame from extract_embeddings() in 2 dimensions \n and color by label\n '''\n embs = list(emb_df['Emb'])\n assert embs[0].shape[0] == 2, \"Embeddings must be reduced to dimension 2, use reduce_to_dimension param in extract_embeddings\"\n embs_x = [e[0] for e in embs]; embs_y = [e[1] for e in embs]\n labs = list(emb_df['Label'])\n import seaborn as sns", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_auc(labels, preds):\n '''Plotas auc and logs to wandb'''\n auc = roc_auc_score(labels, preds)\n fpr, tpr, _ = roc_curve(labels, preds)\n plt.plot(fpr, tpr, label=\"AUC = {0}\".format(auc))\n plt.xlabel('False Positive Rate')\n plt.ylabel('True Positive Rate')\n plt.legend()\n wandb.log({\"AUC\": wandb.Image(plt)})\n plt.show()", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "visualize_graph_pairs", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def visualize_graph_pairs(pairs, labels, num_pairs=3):\n '''visualise graph views'''\n plt.figure(figsize=(10, 5 * num_pairs))\n for i, (data1, data2) in enumerate(pairs[:num_pairs]):\n # Create a 
subplot for each pair\n plt.subplot(num_pairs, 2, 2 * i + 1)\n # Convert both graphs to NetworkX format for visualization\n G1 = to_networkx(data1, to_undirected=True)\n node_labels_1 = {j: f\"pt: {data1.x[j][0]:.2f}, y: {data1.x[j][1]:.2f}, phi: {data1.x[j][2]:.2f}\" \n for j in range(data1.x.size(0))}", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "visualize_graph_pairs_01", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def visualize_graph_pairs_01(pairs, labels):\n '''Visualise graph positive and negative view'''\n plt.figure(figsize=(10, 10))\n pos_pair_found = False\n neg_pair_found = False\n for i, (data1, data2) in enumerate(pairs):\n if labels[i] == 1 and not pos_pair_found:\n # Plot the positive pair\n plt.subplot(2, 2, 1)\n G1 = to_networkx(data1, to_undirected=True)", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_and_save_loss", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_and_save_loss(history):\n '''Learning History Plot'''\n plt.figure(figsize=(10, 6))\n plt.plot(history[\"train_loss\"], label=\"Train Loss\", color='blue')\n plt.plot(history[\"val_loss\"], label=\"Validation Loss\", color='orange')\n plt.xlabel(\"Epochs\")\n plt.ylabel(\"Loss\")\n plt.title(\"Training and Validation Loss\")\n plt.legend()\n plt.savefig(\"loss_plot.png\")", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_auc", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_auc(model, dataloader):\n '''Plots AUC'''\n 
model.eval() # Set the model to evaluation mode\n y_true = []\n y_scores = []\n with torch.no_grad():\n for data1, data2, labels in dataloader:\n emb1 = model(data1.x, data1.edge_index, data1.batch)\n emb2 = model(data2.x, data2.edge_index, data2.batch)\n distances = torch.sqrt(torch.sum((emb1 - emb2) ** 2, dim=1)).cpu().numpy() # L2 distance", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_confusion_matrix", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_confusion_matrix(model, dataloader):\n '''Plots confusion matrix'''\n model.eval() # Set the model to evaluation mode\n y_true = []\n y_pred = []\n with torch.no_grad():\n for data1, data2, labels in dataloader:\n emb1 = model(data1.x, data1.edge_index, data1.batch)\n emb2 = model(data2.x, data2.edge_index, data2.batch)\n # distances = torch.sqrt(torch.sum((emb1 - emb2) ** 2, dim=1)) # L2 distance", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "plot_embeddings", + "kind": 2, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "peekOfCode": "def plot_embeddings(model, dataloader):\n '''Plots embeddings'''\n model.eval() # Set the model to evaluation mode\n embeddings = []\n labels = []\n with torch.no_grad():\n for data1, data2, label in dataloader:\n emb1 = model(data1.x, data1.edge_index, data1.batch)\n emb2 = model(data2.x, data2.edge_index, data2.batch)\n embeddings.extend(emb1.cpu().numpy())", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.utils.utils", + "documentation": {} + }, + { + "label": "Config", + "kind": 6, + "importPath": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.config", + "description": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.config", + "peekOfCode": "class 
Config:\n DATA_PATH = \"../data\"\n MODEL_PATH = \"../saved_models\"\n BATCH_SIZE = 128\n LEARNING_RATE = 1e-3\n EPOCHS = 10\n MARGIN = 1.0\n N_QUBITS = 8\n N_LAYERS = 3", + "detail": "Quantum_SSL_for_HEP_Sanya_Nanda.qssl.config", + "documentation": {} + }, + { + "label": "RBSGate", + "kind": 6, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "class RBSGate(cirq.Gate):\n def __init__(self, theta):\n super(RBSGate, self).__init__()\n self.theta = theta\n def _num_qubits_(self):\n return 2\n def _decompose_(self, qubits):\n q0, q1 = qubits\n yield cirq.H(q0), cirq.H(q1)\n yield cirq.CZ(q0, q1)", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "convert_array", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def convert_array(X):\n X = tf.cast(X, dtype=tf.float32) \n alphas = tf.zeros(X.shape[:-1] + (X.shape[-1]-1,), dtype=X.dtype)\n X_normd = X / (tf.sqrt(tf.reduce_sum(X**2, axis=-1, keepdims=True))+ 1e-10) # 1e-10 is added to prevent divisions by 0\n for i in range(X.shape[-1]-1):\n if i == 0:\n alphas = tf.tensor_scatter_nd_update(alphas, [[i]], [tf.acos(X_normd[..., i])])\n elif i < (X.shape[-1]-2):\n prod_sin_alphas = tf.reduce_prod(tf.sin(alphas[..., :i]), axis=-1)\n updated_value = tf.acos(X_normd[..., i] / (prod_sin_alphas + 1e-10)) # 1e-10 is added to prevent divisions by 0", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "convert_matrix", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": 
"Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def convert_matrix(X):\n mag_alphas = convert_array(tf.sqrt(tf.reduce_sum(X**2, axis=1)))\n alphas = tf.TensorArray(dtype=X.dtype, size=X.shape[0])\n for i in range(X.shape[0]):\n alphas = alphas.write(i, convert_array(X[i]))\n alphas = alphas.stack()\n return mag_alphas, alphas\ndef vector_loader(circuit, alphas, wires=None, is_x=True, is_conjugate=False):\n if wires is None:\n wires = [i for i in range(len(alphas) + 1)]", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "vector_loader", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def vector_loader(circuit, alphas, wires=None, is_x=True, is_conjugate=False):\n if wires is None:\n wires = [i for i in range(len(alphas) + 1)]\n if is_x and not is_conjugate:\n circuit.append(cirq.X(cirq.LineQubit(wires[0])))\n if is_conjugate:\n for i in range(len(wires) - 1):\n # Ensure alphas[i] is a float before using it\n alpha_value = alphas[i].numpy() if hasattr(alphas[i], 'numpy') else alphas[i]\n rbs_gate = RBSGate(-alpha_value)", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "matrix_loader", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def matrix_loader(circuit, mag_alphas, alphas, mag_wires, wires, is_conjugate=False):\n if not is_conjugate:\n vector_loader(circuit, mag_alphas, wires=mag_wires, is_x=False)\n for i in range(len(mag_wires)):\n circuit.append(cirq.CNOT(cirq.LineQubit(mag_wires[i]), cirq.LineQubit(wires[0])))\n 
vector_loader(circuit, alphas[i], wires=wires, is_x=False)\n if i != len(mag_alphas):\n vector_loader(circuit, alphas[i+1], wires=wires, is_x=False, is_conjugate=True)\n else:\n for i in reversed(range(len(mag_wires))):", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "pyramid_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def pyramid_circuit(circuit, parameters, wires=None):\n # If wires is None, use all qubits in the circuit\n if wires is None:\n wires = list(circuit.all_qubits())\n length = len(wires)\n else:\n # If wires is not None, ensure it's a list of qubits\n length = len(wires)\n k = 0 \n for i in range(2 * length - 2):", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "x_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def x_circuit(circuit, parameters, wires=None):\n # If wires is None, use all qubits in the circuit\n if wires is None:\n wires = list(circuit.all_qubits())\n length = len(wires)\n else:\n # If wires is not None, ensure it's a list of qubits\n length = len(wires)\n k = 0 \n for i in range(len(wires) - 1):", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "butterfly_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def butterfly_circuit(circuit, parameters, wires=None):\n # If 
wires is None, use all qubits in the circuit\n if wires is None:\n wires = list(circuit.all_qubits())\n length = len(wires)\n else:\n # If wires is not None, ensure it's a list of qubits\n length = len(wires)\n if length > 1:\n n=length//2", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "orthogonal_patch_wise_NN_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "description": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "peekOfCode": "def orthogonal_patch_wise_NN_circuit(circuit, patch, parameters, wires=None):\n if wires==None:\n wires = list(circuit.all_qubits())[:len(patch)]\n # Load the vector (patch) onto the circuit\n alphas = convert_array(patch)\n vector_loader(circuit, alphas, wires=[qubit.x for qubit in wires])\n # Apply the butterfly circuit to create the orthogonal layer\n butterfly_circuit(circuit, parameters, wires=wires)", + "detail": "Quantum_Transformers_Alessandro_Tesi.Tensorflow Quantum.Quantum_circuits", + "documentation": {} + }, + { + "label": "train", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "peekOfCode": "def train(config) -> None:\n # Perform imports here to avoid warning messages when running only --help\n import tensorflow as tf\n tf.config.set_visible_devices([], device_type='GPU') # Ensure TF does not see GPU and grab all GPU memory\n from quantum_transformers import datasets\n from quantum_transformers.quantum_layer import get_circuit\n from quantum_transformers.transformers import Transformer, VisionTransformer\n from quantum_transformers.training import train_and_evaluate\n c = config # Shorter alias for config\n tf.random.set_seed(c['seed']) # For reproducible data loading", + "detail": 
"Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "documentation": {} + }, + { + "label": "vision_datasets", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "peekOfCode": "vision_datasets = ['mnist', 'electron-photon', 'quark-gluon'] # TODO: add medmnist\ntext_datasets = ['imdb']\ndef train(config) -> None:\n # Perform imports here to avoid warning messages when running only --help\n import tensorflow as tf\n tf.config.set_visible_devices([], device_type='GPU') # Ensure TF does not see GPU and grab all GPU memory\n from quantum_transformers import datasets\n from quantum_transformers.quantum_layer import get_circuit\n from quantum_transformers.transformers import Transformer, VisionTransformer\n from quantum_transformers.training import train_and_evaluate", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "documentation": {} + }, + { + "label": "text_datasets", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "peekOfCode": "text_datasets = ['imdb']\ndef train(config) -> None:\n # Perform imports here to avoid warning messages when running only --help\n import tensorflow as tf\n tf.config.set_visible_devices([], device_type='GPU') # Ensure TF does not see GPU and grab all GPU memory\n from quantum_transformers import datasets\n from quantum_transformers.quantum_layer import get_circuit\n from quantum_transformers.transformers import Transformer, VisionTransformer\n from quantum_transformers.training import train_and_evaluate\n c = config # Shorter alias for config", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.hpopt.hpopt", + "documentation": {} + }, + { + "label": "MLP", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.mlp", 
+ "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.mlp", + "peekOfCode": "class MLP(flax.linen.Module):\n hidden_size: int\n @flax.linen.compact\n def __call__(self, x, **_):\n x = flax.linen.Dense(self.hidden_size)(x)\n x = flax.linen.relu(x)\n x = flax.linen.Dense(self.hidden_size)(x)\n x = flax.linen.relu(x)\n x = flax.linen.Dense(1)(x)\n return x", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.mlp", + "documentation": {} + }, + { + "label": "MultiHeadSelfAttention", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "peekOfCode": "class MultiHeadSelfAttention(nn.Module):\n embed_dim: int\n num_heads: int\n dropout: float = 0.0\n @nn.compact\n def __call__(self, x, deterministic):\n batch_size, seq_len, embed_dim = x.shape\n # x.shape = (batch_size, seq_len, embed_dim)\n assert embed_dim == self.embed_dim, f\"Input embedding dimension ({embed_dim}) should match layer embedding dimension ({self.embed_dim})\"\n assert embed_dim % self.num_heads == 0, f\"Input embedding dimension ({embed_dim}) should be divisible by number of heads ({self.num_heads})\"", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "documentation": {} + }, + { + "label": "FeedForward", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "peekOfCode": "class FeedForward(nn.Module):\n hidden_size: int\n mlp_hidden_size: int\n dropout: float = 0.0\n @nn.compact\n def __call__(self, x, deterministic):\n 
x = nn.Dense(features=self.mlp_hidden_size)(x)\n x = nn.Dropout(rate=self.dropout)(x, deterministic=deterministic)\n x = nn.gelu(x)\n x = nn.Dense(features=self.hidden_size)(x)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "documentation": {} + }, + { + "label": "TransformerBlock", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "peekOfCode": "class TransformerBlock(nn.Module):\n hidden_size: int\n num_heads: int\n mlp_hidden_size: int\n dropout: float = 0.0\n @nn.compact\n def __call__(self, x, deterministic):\n attn_output = nn.LayerNorm()(x)\n attn_output = MultiHeadSelfAttention(embed_dim=self.hidden_size, num_heads=self.num_heads,\n dropout=self.dropout)(attn_output, deterministic=deterministic)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "documentation": {} + }, + { + "label": "VisionTransformer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "peekOfCode": "class VisionTransformer(nn.Module):\n num_classes: int\n patch_size: int\n hidden_size: int\n num_heads: int\n num_transformer_blocks: int\n mlp_hidden_size: int\n dropout: float = 0.1\n channels_last: bool = True\n @nn.compact", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.classical.vit", + "documentation": {} + }, + { + "label": "MLP", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.mlp", + "description": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.mlp", + "peekOfCode": "def MLP(hidden_size: int, qml_backend=\"pennylane\", qdevice=\"default.qubit\", qdiff_method=\"best\", use_catalyst=False):\n if qml_backend == \"pennylane\":\n from quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend import QuantumLayer, get_circuit\n elif qml_backend == \"tensorcircuit\":\n from quantum_transformers.quantum_layer import QuantumLayer, get_circuit\n else:\n raise ValueError(f\"Unknown qml_backend: {qml_backend}\")\n circuit = get_circuit(num_qubits=hidden_size, qdevice=qdevice, diff_method=qdiff_method, use_catalyst=use_catalyst)\n class MLP(flax.linen.Module):\n hidden_size: int", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.mlp", + "documentation": {} + }, + { + "label": "QuantumLayer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "peekOfCode": "class QuantumLayer(nn.Module):\n circuit: Callable\n num_qubits: int\n num_layers: int = 1\n @nn.compact\n def __call__(self, x):\n shape = x.shape\n x = jnp.reshape(x, (-1, shape[-1]))\n weights = self.param('w', nn.initializers.xavier_normal(), (self.num_layers, self.num_qubits))\n x = jax.vmap(self.circuit, in_axes=(0, None))(x, weights)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "documentation": {} + }, + { + "label": "get_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "description": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "peekOfCode": "def get_circuit(num_qubits, qdevice=\"default.qubit.jax\", diff_method=\"best\", use_catalyst=False):\n dev = qml.device(qdevice, wires=num_qubits)\n @qml.qnode(dev, interface=\"jax\", diff_method=diff_method)\n def circuit(inputs, weights):\n qml.templates.AngleEmbedding(inputs, wires=range(num_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(num_qubits))\n return [qml.expval(qml.PauliZ(wires=i)) for i in range(num_qubits)]\n if not use_catalyst:\n jitted_circuit = jax.jit(circuit)\n else:", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.pennylane_backend", + "documentation": {} + }, + { + "label": "VisionTransformer", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.vit", + "peekOfCode": "def VisionTransformer(num_classes: int,\n patch_size: int,\n hidden_size: int,\n num_heads: int,\n num_transformer_blocks: int,\n mlp_hidden_size: int,\n dropout: float = 0.1,\n channels_last: bool = True,\n qml_backend: str = \"pennylane\",\n qdevice=\"default.qubit\", use_catalyst=False):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.quantum.vit", + "documentation": {} + }, + { + "label": "datasets_to_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "peekOfCode": "def datasets_to_dataloaders(train_dataset, valid_dataset, batch_size):\n # Shuffle train dataset\n train_dataset = 
train_dataset.shuffle(train_dataset.cardinality(), reshuffle_each_iteration=True)\n # Batch and prefetch\n train_dataloader = train_dataset.batch(batch_size, drop_remainder=True).prefetch(tf.data.experimental.AUTOTUNE)\n valid_dataloader = valid_dataset.batch(batch_size, drop_remainder=True).prefetch(tf.data.experimental.AUTOTUNE)\n # Convert to numpy\n return tfds.as_numpy(train_dataloader), tfds.as_numpy(valid_dataloader)\ndef get_swiss_roll_dataloaders(plot: bool = False, dataset_size: int = 500, train_frac: float = 0.8, batch_size: int = 10):\n train_inputs, train_labels, valid_inputs, valid_labels = make_swiss_roll_dataset(n_points=dataset_size, train_frac=train_frac, plot=plot)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "documentation": {} + }, + { + "label": "get_swiss_roll_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "peekOfCode": "def get_swiss_roll_dataloaders(plot: bool = False, dataset_size: int = 500, train_frac: float = 0.8, batch_size: int = 10):\n train_inputs, train_labels, valid_inputs, valid_labels = make_swiss_roll_dataset(n_points=dataset_size, train_frac=train_frac, plot=plot)\n # Convert the splits to TensorFlow datasets\n train_dataset = tf.data.Dataset.from_tensor_slices((train_inputs, train_labels))\n valid_dataset = tf.data.Dataset.from_tensor_slices((valid_inputs, valid_labels))\n return datasets_to_dataloaders(train_dataset, valid_dataset, batch_size)\ndef get_mnist_dataloaders(root: str = '~/data', plot: bool = False, batch_size: int = 10):\n def normalize_image(image, label):\n image = tf.cast(image, tf.float32) / 255.0\n return (image - 0.1307) / 0.3081, label", + "detail": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "documentation": {} + }, + { + "label": "get_mnist_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "peekOfCode": "def get_mnist_dataloaders(root: str = '~/data', plot: bool = False, batch_size: int = 10):\n def normalize_image(image, label):\n image = tf.cast(image, tf.float32) / 255.0\n return (image - 0.1307) / 0.3081, label\n # Load datasets\n train_dataset = tfds.load(name='mnist', split='train', as_supervised=True, data_dir=root, shuffle_files=True)\n valid_dataset = tfds.load(name='mnist', split='test', as_supervised=True, data_dir=root)\n if plot:\n import matplotlib.pyplot as plt\n from mpl_toolkits.axes_grid1 import ImageGrid", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.data", + "documentation": {} + }, + { + "label": "TrainState", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "peekOfCode": "class TrainState(flax.training.train_state.TrainState):\n # See https://flax.readthedocs.io/en/latest/guides/dropout.html.\n key: jax.random.KeyArray\n@jax.jit\ndef train_step(state: TrainState, batch, dropout_key):\n dropout_train_key = jax.random.fold_in(key=dropout_key, data=state.step)\n def loss_fn(params):\n logits = state.apply_fn(\n {'params': params},\n x=batch['input'],", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "documentation": {} + }, + { + "label": "train_step", + "kind": 2, + "importPath": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "peekOfCode": "def train_step(state: TrainState, batch, dropout_key):\n dropout_train_key = jax.random.fold_in(key=dropout_key, data=state.step)\n def loss_fn(params):\n logits = state.apply_fn(\n {'params': params},\n x=batch['input'],\n train=True,\n rngs={'dropout': dropout_train_key}\n )\n if logits.shape[1] <= 2:", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "documentation": {} + }, + { + "label": "eval_step", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "peekOfCode": "def eval_step(state: TrainState, batch):\n logits = state.apply_fn(\n {'params': state.params},\n x=batch['input'],\n train=False,\n rngs={'dropout': state.key}\n )\n if logits.shape[1] <= 2:\n if logits.shape[1] == 2:\n logits = logits[:, 1]", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "documentation": {} + }, + { + "label": "train_and_evaluate", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "peekOfCode": "def train_and_evaluate(model: flax.linen.Module, train_dataloader, val_dataloader, num_classes: int,\n num_epochs: int, learning_rate: float = 1e-3, seed: int = 42, verbose: bool = False) -> None:\n \"\"\"Trains the given model on the given dataloaders for the given parameters\"\"\"\n root_key = jax.random.PRNGKey(seed=seed)\n root_key, params_key, 
dropout_key = jax.random.split(key=root_key, num=3)\n dummy_batch = next(iter(train_dataloader))[0]\n input_shape = dummy_batch[0].shape\n input_dtype = dummy_batch[0].dtype\n batch_size = len(dummy_batch)\n x = jnp.zeros(shape=(batch_size,) + tuple(input_shape), dtype=input_dtype) # Dummy input", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.jax_backend.training", + "documentation": {} + }, + { + "label": "MLP", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.mlp", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.mlp", + "peekOfCode": "class MLP(torch.nn.Module):\n def __init__(self, hidden_size):\n super().__init__()\n self.fc1 = torch.nn.Sequential(torch.nn.LazyLinear(hidden_size), torch.nn.ReLU())\n self.fc2 = torch.nn.Sequential(torch.nn.Linear(hidden_size, hidden_size), torch.nn.ReLU())\n self.fc3 = torch.nn.Linear(hidden_size, 1)\n def forward(self, x):\n x = self.fc1(x)\n x = self.fc2(x)\n x = self.fc3(x)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.mlp", + "documentation": {} + }, + { + "label": "MultiheadSelfAttention", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "peekOfCode": "class MultiheadSelfAttention(nn.Module):\n def __init__(self, embed_dim, num_heads, dropout=0.0):\n super().__init__()\n assert embed_dim % num_heads == 0, f\"Embedding dimension ({embed_dim}) should be divisible by number of heads ({num_heads})\"\n self.embed_dim = embed_dim\n self.num_heads = num_heads\n self.head_dim = embed_dim // num_heads\n self.q_proj = nn.Linear(embed_dim, embed_dim)\n self.k_proj = 
nn.Linear(embed_dim, embed_dim)\n self.v_proj = nn.Linear(embed_dim, embed_dim)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "documentation": {} + }, + { + "label": "FeedForward", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "peekOfCode": "class FeedForward(nn.Module):\n def __init__(self, hidden_size, mlp_hidden_size, dropout=0.0):\n super().__init__()\n self.fc1 = nn.Linear(hidden_size, mlp_hidden_size)\n self.dropout = nn.Dropout(dropout)\n self.gelu = nn.GELU()\n self.fc2 = nn.Linear(mlp_hidden_size, hidden_size)\n def forward(self, x):\n x = self.fc1(x)\n x = self.dropout(x)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "documentation": {} + }, + { + "label": "TransformerBlock", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "peekOfCode": "class TransformerBlock(nn.Module):\n def __init__(self, hidden_size, num_heads, mlp_hidden_size, dropout=0.0):\n super().__init__()\n self.attn_norm = nn.LayerNorm(hidden_size)\n self.attn = MultiheadSelfAttention(hidden_size, num_heads, dropout)\n self.attn_dropout = nn.Dropout(dropout)\n self.mlp_norm = nn.LayerNorm(hidden_size)\n self.mlp = FeedForward(hidden_size, mlp_hidden_size, dropout)\n self.mlp_dropout = nn.Dropout(dropout)\n def forward(self, x):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "documentation": {} + }, + { + "label": "VisionTransformer", + "kind": 6, + 
"importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "peekOfCode": "class VisionTransformer(nn.Module):\n def __init__(self, img_size, num_channels, num_classes, patch_size, hidden_size, num_heads, num_transformer_blocks, mlp_hidden_size,\n dropout=0.1, channels_last=False):\n super().__init__()\n self.channels_last = channels_last\n # Splitting an image into patches and linearly projecting these flattened patches can be\n # simplified as a single convolution operation, where both the kernel size and the stride size\n # are set to the patch size.\n self.patch_embedding = nn.Conv2d(\n in_channels=num_channels,", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.classical.vit", + "documentation": {} + }, + { + "label": "MLP", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.mlp", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.mlp", + "peekOfCode": "def MLP(hidden_size: int, qml_backend: str = \"pennylane\", qdevice: str = \"default.qubit\", qdiff_method: str = \"best\"):\n if qml_backend == \"pennylane\":\n from quantum_transformers.qmlperfcomp.torch_backend.quantum.pennylane_backend import QuantumLayer\n elif qml_backend == \"tensorcircuit\":\n from quantum_transformers.qmlperfcomp.torch_backend.quantum.tensorcircuit_backend import QuantumLayer\n else:\n raise ValueError(f\"Unknown qml_backend: {qml_backend}\")\n class MLP(torch.nn.Module):\n def __init__(self, hidden_size, qdevice=\"default.qubit\", qdiff_method=\"best\"):\n super().__init__()", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.mlp", + "documentation": {} + 
}, + { + "label": "QuantumLayer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.pennylane_backend", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.pennylane_backend", + "peekOfCode": "class QuantumLayer(torch.nn.Module):\n def __init__(self, num_qubits, num_qlayers=1, qdevice=\"default.qubit\", diff_method=\"best\"):\n super().__init__()\n def circuit(inputs, weights):\n qml.templates.AngleEmbedding(inputs, wires=range(num_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(num_qubits))\n return [qml.expval(qml.PauliZ(wires=i)) for i in range(num_qubits)]\n dev = qml.device(qdevice, wires=num_qubits)\n qlayer = qml.QNode(circuit, dev, interface=\"torch\", diff_method=diff_method)\n self.linear = qml.qnn.TorchLayer(qlayer, {\"weights\": (num_qlayers, num_qubits)})", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.pennylane_backend", + "documentation": {} + }, + { + "label": "QuantumLayer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.tensorcircuit_backend", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.tensorcircuit_backend", + "peekOfCode": "class QuantumLayer(torch.nn.Module):\n def __init__(self, num_qubits, num_qlayers=1, **_):\n super().__init__()\n self.weights = torch.nn.Parameter(torch.nn.init.xavier_normal_(torch.empty(num_qlayers, num_qubits)))\n self.linear = get_circuit(torch_interface=True)\n def forward(self, x):\n shape = x.shape\n x = x.reshape(-1, shape[-1])\n x = self.linear(x, self.weights)\n x = x.reshape(shape)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.tensorcircuit_backend", + "documentation": {} + }, + { 
+ "label": "VisionTransformer", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.vit", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.vit", + "peekOfCode": "def VisionTransformer(img_size, num_channels, num_classes, patch_size, hidden_size, num_heads, num_transformer_blocks, mlp_hidden_size,\n dropout=0.1, channels_last=False, qml_backend: str = \"pennylane\", qdevice: str = \"default.qubit\"):\n if qml_backend == \"pennylane\":\n from quantum_transformers.qmlperfcomp.torch_backend.quantum.pennylane_backend import QuantumLayer\n elif qml_backend == \"tensorcircuit\":\n from quantum_transformers.qmlperfcomp.torch_backend.quantum.tensorcircuit_backend import QuantumLayer\n else:\n raise ValueError(f\"Unknown qml_backend: {qml_backend}\")\n class MultiheadSelfAttention(nn.Module):\n def __init__(self, embed_dim, num_heads, dropout=0.0):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.quantum.vit", + "documentation": {} + }, + { + "label": "datasets_to_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "peekOfCode": "def datasets_to_dataloaders(train_dataset: torch.utils.data.Dataset, valid_dataset: torch.utils.data.Dataset, **dataloader_kwargs) \\\n -> tuple[torch.utils.data.DataLoader, torch.utils.data.DataLoader]:\n \"\"\"Returns dataloaders for the given datasets\"\"\"\n train_dataloader = torch.utils.data.DataLoader(train_dataset, shuffle=True, **dataloader_kwargs)\n valid_dataloader = torch.utils.data.DataLoader(valid_dataset, **dataloader_kwargs)\n return train_dataloader, valid_dataloader\ndef get_swiss_roll_dataloaders(dataset_size: int = 500, train_frac: float 
= 0.8, **dataloader_kwargs):\n \"\"\"Returns dataloaders for the Swiss roll dataset (3 features, binary classification)\"\"\"\n train_inputs, train_labels, eval_inputs, eval_labels = make_swiss_roll_dataset(n_points=dataset_size, train_frac=train_frac)\n train_dataset = torch.utils.data.TensorDataset(torch.from_numpy(train_inputs), torch.from_numpy(train_labels))", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "documentation": {} + }, + { + "label": "get_swiss_roll_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "peekOfCode": "def get_swiss_roll_dataloaders(dataset_size: int = 500, train_frac: float = 0.8, **dataloader_kwargs):\n \"\"\"Returns dataloaders for the Swiss roll dataset (3 features, binary classification)\"\"\"\n train_inputs, train_labels, eval_inputs, eval_labels = make_swiss_roll_dataset(n_points=dataset_size, train_frac=train_frac)\n train_dataset = torch.utils.data.TensorDataset(torch.from_numpy(train_inputs), torch.from_numpy(train_labels))\n valid_dataset = torch.utils.data.TensorDataset(torch.from_numpy(eval_inputs), torch.from_numpy(eval_labels))\n return datasets_to_dataloaders(train_dataset, valid_dataset, **dataloader_kwargs)\ndef get_mnist_dataloaders(root: str = '~/data', **dataloader_kwargs) \\\n -> tuple[torch.utils.data.DataLoader, torch.utils.data.DataLoader]:\n \"\"\"Returns dataloaders for the MNIST digits dataset (computer vision, 10-class classification)\"\"\"\n root = os.path.expanduser(root)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "documentation": {} + }, + { + "label": "get_mnist_dataloaders", + "kind": 2, + "importPath": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "peekOfCode": "def get_mnist_dataloaders(root: str = '~/data', **dataloader_kwargs) \\\n -> tuple[torch.utils.data.DataLoader, torch.utils.data.DataLoader]:\n \"\"\"Returns dataloaders for the MNIST digits dataset (computer vision, 10-class classification)\"\"\"\n root = os.path.expanduser(root)\n transform = torchvision.transforms.Compose([\n torchvision.transforms.ToTensor(),\n torchvision.transforms.Normalize((0.1307,), (0.3081,))\n ])\n train_dataset = torchvision.datasets.MNIST(root, train=True, download=True, transform=transform)\n valid_dataset = torchvision.datasets.MNIST(root, train=False, download=True, transform=transform)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.data", + "documentation": {} + }, + { + "label": "train_and_evaluate", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.training", + "peekOfCode": "def train_and_evaluate(model: torch.nn.Module, train_dataloader: torch.utils.data.DataLoader, valid_dataloader: torch.utils.data.DataLoader, num_classes: int,\n num_epochs: int, device: torch.device, learning_rate: float = 1e-3, verbose: bool = False) -> None:\n \"\"\"Trains the given model on the given dataloaders for the given parameters\"\"\"\n model.to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n if num_classes == 2:\n criterion = torch.nn.BCEWithLogitsLoss()\n else:\n criterion = torch.nn.CrossEntropyLoss()\n best_val_auc, best_epoch = 0.0, 0", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.torch_backend.training", + 
"documentation": {} + }, + { + "label": "make_swiss_roll_dataset", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.swiss_roll", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.swiss_roll", + "peekOfCode": "def make_swiss_roll_dataset(n_points: int = 500, train_frac: float = 0.8, plot=False):\n inputs, t = make_swiss_roll(n_samples=n_points, noise=0.1, random_state=0)\n inputs = inputs.astype(np.float32)\n labels = np.expand_dims(np.where(t < np.mean(t), 0, 1), axis=1)\n if plot:\n ax = plt.axes(projection='3d')\n ax.scatter(inputs[:, 0], inputs[:, 1], inputs[:, 2], c=labels, cmap='RdYlGn');\n ax.view_init(azim=-75, elev=3)\n ax.set_title('Swiss roll dataset')\n plt.show()", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.qmlperfcomp.swiss_roll", + "documentation": {} + }, + { + "label": "NumPyFolderDataset", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "class NumPyFolderDataset(tfds.core.GeneratorBasedBuilder):\n \"\"\"\n A dataset consisting of NumPy arrays stored in folders (one folder per class).\n \"\"\"\n VERSION = tfds.core.Version('1.0.0') # to avoid ValueError\n def __init__(self, name, img_shape, num_classes, extracted_data_path=None, gdrive_id=None, **kwargs):\n \"\"\"Creates a NumPyFolderDataset.\"\"\"\n self.name = name\n self.img_shape = img_shape\n self.num_classes = num_classes", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "datasets_to_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def 
datasets_to_dataloaders(train_dataset, val_dataset, test_dataset, batch_size, drop_remainder=True, transform=None):\n # Shuffle train dataset\n train_dataset = train_dataset.shuffle(10_000, reshuffle_each_iteration=True)\n # Batch\n train_dataset = train_dataset.batch(batch_size, drop_remainder=drop_remainder)\n val_dataset = val_dataset.batch(batch_size, drop_remainder=drop_remainder)\n test_dataset = test_dataset.batch(batch_size, drop_remainder=drop_remainder)\n # Transform\n if transform is not None:\n train_dataset = train_dataset.map(transform, num_parallel_calls=tf.data.AUTOTUNE)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "get_mnist_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def get_mnist_dataloaders(data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True):\n \"\"\"\n Returns dataloaders for the MNIST dataset (computer vision, multi-class classification)\n Information about the dataset: https://www.tensorflow.org/datasets/catalog/mnist\n \"\"\"\n data_dir = os.path.expanduser(data_dir)\n # Load datasets\n train_dataset, val_dataset, test_dataset = tfds.load(name='mnist',\n split=['train[:90%]', 'train[90%:]', 'test'], as_supervised=True, data_dir=data_dir, shuffle_files=True)\n train_dataset, val_dataset, test_dataset = train_dataset.with_options(options), val_dataset.with_options(options), test_dataset.with_options(options)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "get_electron_photon_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def get_electron_photon_dataloaders(data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True):\n \"\"\"\n Returns dataloaders for the electron-photon dataset (computer vision - particle physics, binary classification)\n Information about the dataset: https://arxiv.org/abs/1807.11916\n \"\"\"\n data_dir = os.path.expanduser(data_dir)\n # Load datasets\n electron_photon_builder = NumPyFolderDataset(data_dir=data_dir, name=\"electron-photon\", gdrive_id=\"1VAqGQaMS5jSWV8gTXw39Opz-fNMsDZ8e\",\n img_shape=(32, 32, 2), num_classes=2)\n electron_photon_builder.download_and_prepare(download_dir=data_dir)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "get_quark_gluon_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def get_quark_gluon_dataloaders(data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True):\n \"\"\"\n Returns dataloaders for the quark-gluon dataset (computer vision - particle physics, binary classification)\n Information about the dataset: https://arxiv.org/abs/1902.08276\n \"\"\"\n data_dir = os.path.expanduser(data_dir)\n # Load datasets\n quark_gluon_builder = NumPyFolderDataset(data_dir=data_dir, name=\"quark-gluon\", gdrive_id=\"1PL2YEr5V__zUZVuUfGdUvFTkE9ULHayz\",\n img_shape=(125, 125, 3), num_classes=2)\n quark_gluon_builder.download_and_prepare(download_dir=data_dir)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "get_medmnist_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def get_medmnist_dataloaders(dataset: str, data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True):\n \"\"\"\n Returns dataloaders for a MedMNIST dataset\n Information about the dataset: https://medmnist.com/\n \"\"\"\n raise NotImplementedError\ndef get_imdb_dataloaders(data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True,\n max_vocab_size: int = 20_000, max_seq_len: int = 512):\n \"\"\"\n Returns dataloaders for the IMDB sentiment analysis dataset (natural language processing, binary classification),", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "get_imdb_dataloaders", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "def get_imdb_dataloaders(data_dir: str = '~/data', batch_size: int = 1, drop_remainder: bool = True,\n max_vocab_size: int = 20_000, max_seq_len: int = 512):\n \"\"\"\n Returns dataloaders for the IMDB sentiment analysis dataset (natural language processing, binary classification),\n as well as the vocabulary and tokenizer.\n Information about the dataset: https://www.tensorflow.org/datasets/catalog/imdb_reviews\n \"\"\"\n import tensorflow_text as tf_text\n from tensorflow_text.tools.wordpiece_vocab.bert_vocab_from_dataset import bert_vocab_from_dataset\n data_dir = os.path.expanduser(data_dir)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "options", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "options = 
tf.data.Options()\noptions.deterministic = True\nclass NumPyFolderDataset(tfds.core.GeneratorBasedBuilder):\n \"\"\"\n A dataset consisting of NumPy arrays stored in folders (one folder per class).\n \"\"\"\n VERSION = tfds.core.Version('1.0.0') # to avoid ValueError\n def __init__(self, name, img_shape, num_classes, extracted_data_path=None, gdrive_id=None, **kwargs):\n \"\"\"Creates a NumPyFolderDataset.\"\"\"\n self.name = name", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "options.deterministic", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "peekOfCode": "options.deterministic = True\nclass NumPyFolderDataset(tfds.core.GeneratorBasedBuilder):\n \"\"\"\n A dataset consisting of NumPy arrays stored in folders (one folder per class).\n \"\"\"\n VERSION = tfds.core.Version('1.0.0') # to avoid ValueError\n def __init__(self, name, img_shape, num_classes, extracted_data_path=None, gdrive_id=None, **kwargs):\n \"\"\"Creates a NumPyFolderDataset.\"\"\"\n self.name = name\n self.img_shape = img_shape", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.datasets", + "documentation": {} + }, + { + "label": "QuantumLayer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "class QuantumLayer(flax.linen.Module):\n circuit: Callable\n num_qubits: int\n w_shape: tuple = (1,)\n @flax.linen.compact\n def __call__(self, x):\n shape = x.shape\n x = jnp.reshape(x, (-1, shape[-1]))\n w = self.param('w', flax.linen.initializers.xavier_normal(), self.w_shape + (self.num_qubits,))\n x = self.circuit(x, w)", + "detail": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "angle_embedding", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "def angle_embedding(c: tc.Circuit, inputs):\n num_qubits = inputs.shape[-1]\n for j in range(num_qubits):\n c.rx(j, theta=inputs[j])\ndef basic_vqc(c: tc.Circuit, inputs, weights):\n num_qubits = inputs.shape[-1]\n num_qlayers = weights.shape[-2]\n for i in range(num_qlayers):\n for j in range(num_qubits):\n c.rx(j, theta=weights[i, j])", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "basic_vqc", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "def basic_vqc(c: tc.Circuit, inputs, weights):\n num_qubits = inputs.shape[-1]\n num_qlayers = weights.shape[-2]\n for i in range(num_qlayers):\n for j in range(num_qubits):\n c.rx(j, theta=weights[i, j])\n if num_qubits == 2:\n c.cnot(0, 1)\n elif num_qubits > 2:\n for j in range(num_qubits):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "get_quantum_layer_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "def get_quantum_layer_circuit(inputs, weights,\n embedding: Callable = angle_embedding, vqc: Callable = basic_vqc):\n \"\"\"\n Equivalent to the following PennyLane circuit:\n def circuit(inputs, weights):\n 
qml.templates.AngleEmbedding(inputs, wires=range(num_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(num_qubits))\n \"\"\"\n num_qubits = inputs.shape[-1]\n c = tc.Circuit(num_qubits)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "get_circuit", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "def get_circuit(embedding: Callable = angle_embedding, vqc: Callable = basic_vqc,\n torch_interface: bool = False):\n def qpred(inputs, weights):\n c = get_quantum_layer_circuit(inputs, weights, embedding, vqc)\n return K.real(jnp.array([c.expectation_ps(z=[i]) for i in range(weights.shape[1])]))\n qpred_batch = K.vmap(qpred, vectorized_argnums=0)\n if torch_interface:\n qpred_batch = tc.interfaces.torch_interface(qpred_batch, jit=True)\n return qpred_batch\nclass QuantumLayer(flax.linen.Module):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "K", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "peekOfCode": "K = tc.set_backend(\"jax\")\ndef angle_embedding(c: tc.Circuit, inputs):\n num_qubits = inputs.shape[-1]\n for j in range(num_qubits):\n c.rx(j, theta=inputs[j])\ndef basic_vqc(c: tc.Circuit, inputs, weights):\n num_qubits = inputs.shape[-1]\n num_qlayers = weights.shape[-2]\n for i in range(num_qlayers):\n for j in range(num_qubits):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.quantum_layer", + "documentation": {} + }, + { + "label": "TrainState", + "kind": 6, + "importPath": 
"Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "class TrainState(flax.training.train_state.TrainState):\n # See https://flax.readthedocs.io/en/latest/guides/dropout.html.\n key: jax.random.KeyArray # type: ignore\n@jax.jit\ndef train_step(state: TrainState, inputs: jax.Array, labels: jax.Array, key: jax.random.KeyArray) -> TrainState:\n \"\"\"\n Performs a single training step on the given batch of inputs and labels.\n Args:\n state: The current training state.\n inputs: The batch of inputs.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "train_step", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "def train_step(state: TrainState, inputs: jax.Array, labels: jax.Array, key: jax.random.KeyArray) -> TrainState:\n \"\"\"\n Performs a single training step on the given batch of inputs and labels.\n Args:\n state: The current training state.\n inputs: The batch of inputs.\n labels: The batch of labels.\n key: The random key to use.\n Returns:\n The updated training state.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "eval_step", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "def eval_step(state: TrainState, inputs: jax.Array, labels: jax.Array) -> tuple[float, jax.Array]:\n \"\"\"\n Performs a single evaluation step on the given batch of inputs and labels.\n Args:\n state: The current training state.\n inputs: The batch of inputs.\n labels: The 
batch of labels.\n Returns:\n loss: The loss on the given batch.\n logits: The logits on the given batch.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "evaluate", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "def evaluate(state: TrainState, eval_dataloader, num_classes: int,\n tqdm_desc: Optional[str] = None, debug: bool = False) -> tuple[float, float, npt.ArrayLike, npt.ArrayLike]:\n \"\"\"\n Evaluates the model given the current training state on the given dataloader.\n Args:\n state: The current training state.\n eval_dataloader: The dataloader to evaluate on.\n num_classes: The number of classes.\n tqdm_desc: The description to use for the tqdm progress bar. If None, no progress bar is shown.\n debug: Whether to print extra information for debugging.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "train_and_evaluate", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "def train_and_evaluate(model: flax.linen.Module, train_dataloader, val_dataloader, test_dataloader, num_classes: int,\n num_epochs: int, lrs_peak_value: float = 1e-3, lrs_warmup_steps: int = 5_000, lrs_decay_steps: int = 50_000,\n seed: int = 42, use_ray: bool = False, debug: bool = False) -> tuple[float, float, npt.ArrayLike, npt.ArrayLike]:\n \"\"\"\n Trains the given model on the given dataloaders for the given hyperparameters.\n The progress and evaluation results are printed to stdout.\n Args:\n model: The model to train.\n train_dataloader: The dataloader for the training set.\n val_dataloader: The 
dataloader for the validation set.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "TQDM_BAR_FORMAT", + "kind": 5, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "peekOfCode": "TQDM_BAR_FORMAT = '{l_bar}{bar:10}{r_bar}{bar:-10b}'\nclass TrainState(flax.training.train_state.TrainState):\n # See https://flax.readthedocs.io/en/latest/guides/dropout.html.\n key: jax.random.KeyArray # type: ignore\n@jax.jit\ndef train_step(state: TrainState, inputs: jax.Array, labels: jax.Array, key: jax.random.KeyArray) -> TrainState:\n \"\"\"\n Performs a single training step on the given batch of inputs and labels.\n Args:\n state: The current training state.", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.training", + "documentation": {} + }, + { + "label": "MultiHeadSelfAttention", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "class MultiHeadSelfAttention(nn.Module):\n hidden_size: int\n num_heads: int\n dropout: float = 0.0\n quantum_w_shape: tuple = (1,)\n quantum_circuit: Optional[Callable] = None\n @nn.compact\n def __call__(self, x, deterministic):\n batch_size, seq_len, hidden_size = x.shape\n # x.shape = (batch_size, seq_len, hidden_size)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "FeedForward", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "class FeedForward(nn.Module):\n hidden_size: int\n 
mlp_hidden_size: int\n dropout: float = 0.0\n quantum_w_shape: tuple = (1,)\n quantum_circuit: Optional[Callable] = None\n @nn.compact\n def __call__(self, x, deterministic):\n x = nn.Dense(features=self.mlp_hidden_size)(x)\n if self.quantum_circuit is not None:", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "TransformerBlock", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "class TransformerBlock(nn.Module):\n hidden_size: int\n num_heads: int\n mlp_hidden_size: int\n dropout: float = 0.0\n quantum_w_shape: tuple = (1,)\n quantum_attn_circuit: Optional[Callable] = None\n quantum_mlp_circuit: Optional[Callable] = None\n @nn.compact\n def __call__(self, x, deterministic):", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "Transformer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "class Transformer(nn.Module):\n num_tokens: int\n max_seq_len: int\n num_classes: int\n hidden_size: int\n num_heads: int\n num_transformer_blocks: int\n mlp_hidden_size: int\n dropout: float = 0.0\n quantum_w_shape: tuple = (1,)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "VisionTransformer", + "kind": 6, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "class VisionTransformer(nn.Module):\n num_classes: int\n patch_size: int\n 
hidden_size: int\n num_heads: int\n num_transformer_blocks: int\n mlp_hidden_size: int\n dropout: float = 0.1\n pos_embedding: Literal['none', 'learn', 'sincos'] = 'learn'\n classifier: Literal['token', 'gap'] = 'gap'", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "posemb_sincos_2d", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "peekOfCode": "def posemb_sincos_2d(sqrt_num_steps, hidden_size, temperature=10_000., dtype=jnp.float32):\n \"\"\"2D sin-cos position embedding. Follows the MoCo v3 logic.\"\"\"\n # Code adapted from https://github.com/google-research/big_vision/blob/184d1201eb34abe7da84fc69f84fd89a06ad43c4/big_vision/models/vit.py#L33.\n y, x = jnp.mgrid[:sqrt_num_steps, :sqrt_num_steps]\n assert hidden_size % 4 == 0, f\"Hidden size ({hidden_size}) must be divisible by 4 for 2D sin-cos position embedding\"\n omega = jnp.arange(hidden_size // 4) / (hidden_size // 4 - 1)\n omega = 1. 
/ (temperature**omega)\n y = jnp.einsum(\"m,d->md\", y.flatten(), omega)\n x = jnp.einsum(\"m,d->md\", x.flatten(), omega)\n pe = jnp.concatenate([jnp.sin(x), jnp.cos(x), jnp.sin(y), jnp.cos(y)], axis=1)", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.transformers", + "documentation": {} + }, + { + "label": "plot_image", + "kind": 2, + "importPath": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.utils", + "description": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.utils", + "peekOfCode": "def plot_image(image, abs_log=False):\n \"\"\"Plots an image with one subplot per channel\"\"\"\n num_channels = image.shape[2]\n fig, axs = plt.subplots(1, num_channels, figsize=(num_channels * 3, 3))\n if num_channels == 1:\n axs = [axs]\n for i, ax in enumerate(axs):\n if abs_log:\n ax.imshow(np.log(np.abs(image[:, :, i]) + 1e-6))\n else:", + "detail": "Quantum_Transformers_Marçal_Comajoan_Cara.quantum_transformers.utils", + "documentation": {} + } +] \ No newline at end of file diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/example_notebooks/Fully_QuantumKAN.ipynb b/Quantum_KAN_for_HEP_Ria_Khatoniar/example_notebooks/Fully_QuantumKAN.ipynb new file mode 100644 index 0000000..63b6224 --- /dev/null +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/example_notebooks/Fully_QuantumKAN.ipynb @@ -0,0 +1,2832 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 16, + "id": "937c7772-5c57-4fa5-83bb-83554cb4f68a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[HybridQuKANLayer] built edges: nodes=6, in_dim=4, total_edges=24\n", + "[HybridQuKANLayer] built edges: nodes=6, in_dim=6, total_edges=36\n", + "[Pretrain] Layer 1 QCBM on degree-2 B-splines\n", + "[QCBM pretrain] epoch 001 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] epoch 050 | MSE=0.000061 | TV=0.377527\n", + "[QCBM pretrain] epoch 100 | MSE=0.000037 | TV=0.308189\n", + "[QCBM pretrain] epoch 150 | 
MSE=0.000030 | TV=0.276578\n", + "[QCBM pretrain] epoch 200 | MSE=0.000028 | TV=0.261863\n", + "[Pretrain] Layer 2 QCBM on degree-2 B-splines\n", + "[QCBM pretrain] epoch 001 | MSE=0.007276 | TV=0.955730\n", + "[QCBM pretrain] epoch 050 | MSE=0.000068 | TV=0.398482\n", + "[QCBM pretrain] epoch 100 | MSE=0.000037 | TV=0.306850\n", + "[QCBM pretrain] epoch 150 | MSE=0.000029 | TV=0.265133\n", + "[QCBM pretrain] epoch 200 | MSE=0.000026 | TV=0.248326\n", + "\n", + "=== Training QuKAN (Hybrid, 2 layers, Quantum Fourier residual + KAN readout) on Iris ===\n", + "Epoch 001 | Loss=1.1007 | Train Acc=42.86% | Val Acc=66.67%\n", + "Epoch 003 | Loss=0.9679 | Train Acc=70.48% | Val Acc=66.67%\n", + "Epoch 005 | Loss=0.7352 | Train Acc=82.86% | Val Acc=80.00%\n", + "Epoch 007 | Loss=0.5402 | Train Acc=89.52% | Val Acc=86.67%\n", + "Epoch 009 | Loss=0.4031 | Train Acc=93.33% | Val Acc=97.78%\n", + "Epoch 011 | Loss=0.3149 | Train Acc=95.24% | Val Acc=97.78%\n", + "Epoch 013 | Loss=0.2653 | Train Acc=96.19% | Val Acc=97.78%\n", + "Epoch 015 | Loss=0.2464 | Train Acc=94.29% | Val Acc=97.78%\n", + "Epoch 017 | Loss=0.2205 | Train Acc=96.19% | Val Acc=97.78%\n", + "Epoch 019 | Loss=0.2093 | Train Acc=96.19% | Val Acc=97.78%\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[16], line 473\u001b[0m\n\u001b[0;32m 470\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDone.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 472\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;18m__name__\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__main__\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m--> 473\u001b[0m 
\u001b[43mrun_iris\u001b[49m\u001b[43m(\u001b[49m\u001b[43mseed\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\n", + "Cell \u001b[1;32mIn[16], line 448\u001b[0m, in \u001b[0;36mrun_iris\u001b[1;34m(seed)\u001b[0m\n\u001b[0;32m 446\u001b[0m yb \u001b[38;5;241m=\u001b[39m yb_all[i:i\u001b[38;5;241m+\u001b[39mB]\n\u001b[0;32m 447\u001b[0m opt\u001b[38;5;241m.\u001b[39mzero_grad(set_to_none\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m--> 448\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mxb\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 449\u001b[0m loss \u001b[38;5;241m=\u001b[39m ce(logits, yb)\n\u001b[0;32m 450\u001b[0m loss\u001b[38;5;241m.\u001b[39mbackward()\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\nn\\modules\\module.py:1518\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1516\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1517\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\nn\\modules\\module.py:1527\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1522\u001b[0m 
\u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1523\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1524\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1525\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1526\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1527\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1529\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 1530\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "Cell \u001b[1;32mIn[16], line 395\u001b[0m, in \u001b[0;36mQuKANNet.forward\u001b[1;34m(self, X01)\u001b[0m\n\u001b[0;32m 393\u001b[0m X01 \u001b[38;5;241m=\u001b[39m X01\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32)\n\u001b[0;32m 394\u001b[0m h1 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39ml1(X01, 
input_is_01\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) \u001b[38;5;66;03m# (B, M1)\u001b[39;00m\n\u001b[1;32m--> 395\u001b[0m h2 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43ml2\u001b[49m\u001b[43m(\u001b[49m\u001b[43mh1\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_is_01\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) \u001b[38;5;66;03m# (B, M2)\u001b[39;00m\n\u001b[0;32m 396\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mreadout(h2)\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\nn\\modules\\module.py:1518\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1516\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[0;32m 1517\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 1518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\nn\\modules\\module.py:1527\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1522\u001b[0m 
\u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[0;32m 1523\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[0;32m 1524\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[0;32m 1525\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[0;32m 1526\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[1;32m-> 1527\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1529\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 1530\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "Cell \u001b[1;32mIn[16], line 302\u001b[0m, in \u001b[0;36mHybridQuKANLayer.forward\u001b[1;34m(self, X_in, input_is_01)\u001b[0m\n\u001b[0;32m 300\u001b[0m edge \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39medges[eidx]\n\u001b[0;32m 301\u001b[0m x_pos \u001b[38;5;241m=\u001b[39m X01_pos[:, j]\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) \u001b[38;5;66;03m# (B,)\u001b[39;00m\n\u001b[1;32m--> 302\u001b[0m out_j 
\u001b[38;5;241m=\u001b[39m \u001b[43medge\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_forward\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx_pos\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mx_pos\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mprobs_flat\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) \u001b[38;5;66;03m# (B,)\u001b[39;00m\n\u001b[0;32m 303\u001b[0m acc \u001b[38;5;241m=\u001b[39m acc \u001b[38;5;241m+\u001b[39m out_j\n\u001b[0;32m 304\u001b[0m eidx \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n", + "Cell \u001b[1;32mIn[16], line 202\u001b[0m, in \u001b[0;36mQuKANResidualEdge.batch_forward\u001b[1;34m(self, x_raw, x_pos01, probs_flat)\u001b[0m\n\u001b[0;32m 199\u001b[0m idx \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mclamp(idx, \u001b[38;5;241m0\u001b[39m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mNpos \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m 201\u001b[0m p_vals \u001b[38;5;241m=\u001b[39m lp[:, idx]\u001b[38;5;241m.\u001b[39msum(dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) \u001b[38;5;66;03m# (B,)\u001b[39;00m\n\u001b[1;32m--> 202\u001b[0m qfr_vals \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mqfour\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward_batch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx_pos01\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# (B,)\u001b[39;00m\n\u001b[0;32m 204\u001b[0m out \u001b[38;5;241m=\u001b[39m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mwf \u001b[38;5;241m*\u001b[39m p_vals \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mwq \u001b[38;5;241m*\u001b[39m qfr_vals)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32) 
\u001b[38;5;66;03m# (B,)\u001b[39;00m\n\u001b[0;32m 205\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n", + "Cell \u001b[1;32mIn[16], line 170\u001b[0m, in \u001b[0;36mQuantumFourierBlock.forward_batch\u001b[1;34m(self, x01_vec)\u001b[0m\n\u001b[0;32m 168\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward_batch\u001b[39m(\u001b[38;5;28mself\u001b[39m, x01_vec: torch\u001b[38;5;241m.\u001b[39mTensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m torch\u001b[38;5;241m.\u001b[39mTensor:\n\u001b[0;32m 169\u001b[0m x01_vec \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mclamp(x01_vec\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32), \u001b[38;5;241m0.0\u001b[39m, \u001b[38;5;241m1.0\u001b[39m)\n\u001b[1;32m--> 170\u001b[0m vals \u001b[38;5;241m=\u001b[39m \u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward_scalar\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx01_vec\u001b[49m\u001b[43m[\u001b[49m\u001b[43mi\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mi\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mrange\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mx01_vec\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshape\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\n\u001b[0;32m 171\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mstack(vals, dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32)\n", + "Cell \u001b[1;32mIn[16], line 170\u001b[0m, in \u001b[0;36m\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 168\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward_batch\u001b[39m(\u001b[38;5;28mself\u001b[39m, x01_vec: 
torch\u001b[38;5;241m.\u001b[39mTensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m torch\u001b[38;5;241m.\u001b[39mTensor:\n\u001b[0;32m 169\u001b[0m x01_vec \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mclamp(x01_vec\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32), \u001b[38;5;241m0.0\u001b[39m, \u001b[38;5;241m1.0\u001b[39m)\n\u001b[1;32m--> 170\u001b[0m vals \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward_scalar\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx01_vec\u001b[49m\u001b[43m[\u001b[49m\u001b[43mi\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(x01_vec\u001b[38;5;241m.\u001b[39mshape[\u001b[38;5;241m0\u001b[39m])]\n\u001b[0;32m 171\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mstack(vals, dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32)\n", + "Cell \u001b[1;32mIn[16], line 161\u001b[0m, in \u001b[0;36mQuantumFourierBlock.forward_scalar\u001b[1;34m(self, x01_scalar)\u001b[0m\n\u001b[0;32m 159\u001b[0m omega \u001b[38;5;241m=\u001b[39m F\u001b[38;5;241m.\u001b[39msoftplus(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlog_omega) \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m1e-4\u001b[39m \u001b[38;5;66;03m# positive\u001b[39;00m\n\u001b[0;32m 160\u001b[0m alpha \u001b[38;5;241m=\u001b[39m omega \u001b[38;5;241m*\u001b[39m (\u001b[38;5;241m2.0\u001b[39m \u001b[38;5;241m*\u001b[39m math\u001b[38;5;241m.\u001b[39mpi \u001b[38;5;241m*\u001b[39m x01) \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mphase \u001b[38;5;66;03m# (K,)\u001b[39;00m\n\u001b[1;32m--> 161\u001b[0m outs \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_qnode\u001b[49m\u001b[43m(\u001b[49m\u001b[43malpha\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfloat32\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 162\u001b[0m outs \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mstack([o \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(o, torch\u001b[38;5;241m.\u001b[39mTensor) \u001b[38;5;28;01melse\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mas_tensor(o, dtype\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mfloat32)\n\u001b[0;32m 163\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m o \u001b[38;5;129;01min\u001b[39;00m outs], dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(torch\u001b[38;5;241m.\u001b[39mfloat32)\n\u001b[0;32m 164\u001b[0m z \u001b[38;5;241m=\u001b[39m outs[: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mK]\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\workflow\\qnode.py:922\u001b[0m, in \u001b[0;36mQNode.__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 919\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_capture_qnode\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m capture_qnode \u001b[38;5;66;03m# pylint: disable=import-outside-toplevel\u001b[39;00m\n\u001b[0;32m 921\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m capture_qnode(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m--> 922\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_impl_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\workflow\\qnode.py:895\u001b[0m, in \u001b[0;36mQNode._impl_call\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 892\u001b[0m \u001b[38;5;66;03m# Calculate the classical jacobians if necessary\u001b[39;00m\n\u001b[0;32m 893\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_transform_program\u001b[38;5;241m.\u001b[39mset_classical_component(\u001b[38;5;28mself\u001b[39m, args, kwargs)\n\u001b[1;32m--> 895\u001b[0m res \u001b[38;5;241m=\u001b[39m \u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 896\u001b[0m \u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mtape\u001b[49m\u001b[43m,\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 897\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 898\u001b[0m \u001b[43m \u001b[49m\u001b[43mdiff_method\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdiff_method\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 899\u001b[0m \u001b[43m \u001b[49m\u001b[43minterface\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minterface\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 900\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mtransform_program\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_transform_program\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 901\u001b[0m \u001b[43m \u001b[49m\u001b[43mgradient_kwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgradient_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 902\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 903\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 904\u001b[0m res \u001b[38;5;241m=\u001b[39m res[\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m 906\u001b[0m \u001b[38;5;66;03m# convert result to the interface in case the qfunc has no parameters\u001b[39;00m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\workflow\\execution.py:233\u001b[0m, in \u001b[0;36mexecute\u001b[1;34m(tapes, device, diff_method, interface, grad_on_execution, cache, cachesize, max_diff, device_vjp, postselect_mode, mcm_method, gradient_kwargs, transform_program, executor_backend)\u001b[0m\n\u001b[0;32m 229\u001b[0m tapes, outer_post_processing \u001b[38;5;241m=\u001b[39m outer_transform(tapes)\n\u001b[0;32m 231\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m outer_transform\u001b[38;5;241m.\u001b[39mis_informative, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mshould only contain device preprocessing\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m--> 233\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtapes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minner_transform\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 234\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m user_post_processing(outer_post_processing(results))\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\workflow\\run.py:291\u001b[0m, in \u001b[0;36mrun\u001b[1;34m(tapes, device, config, inner_transform_program)\u001b[0m\n\u001b[0;32m 287\u001b[0m no_interface_boundary_required \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m 288\u001b[0m config\u001b[38;5;241m.\u001b[39minterface \u001b[38;5;241m==\u001b[39m Interface\u001b[38;5;241m.\u001b[39mNUMPY \u001b[38;5;129;01mor\u001b[39;00m config\u001b[38;5;241m.\u001b[39mgradient_method \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbackprop\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 289\u001b[0m )\n\u001b[0;32m 290\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m no_interface_boundary_required:\n\u001b[1;32m--> 291\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[43minner_execute\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtapes\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 292\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m results\n\u001b[0;32m 294\u001b[0m \u001b[38;5;66;03m# TODO: Prune once support for tf-autograph is dropped\u001b[39;00m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\workflow\\run.py:256\u001b[0m, in \u001b[0;36m_make_inner_execute..inner_execute\u001b[1;34m(tapes)\u001b[0m\n\u001b[0;32m 253\u001b[0m transformed_tapes, transform_post_processing \u001b[38;5;241m=\u001b[39m inner_transform(tapes)\n\u001b[0;32m 255\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m transformed_tapes:\n\u001b[1;32m--> 256\u001b[0m results \u001b[38;5;241m=\u001b[39m 
\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtransformed_tapes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecution_config\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mexecution_config\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 258\u001b[0m results \u001b[38;5;241m=\u001b[39m ()\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\modifiers\\simulator_tracking.py:28\u001b[0m, in \u001b[0;36m_track_execute..execute\u001b[1;34m(self, circuits, execution_config)\u001b[0m\n\u001b[0;32m 26\u001b[0m \u001b[38;5;129m@wraps\u001b[39m(untracked_execute)\n\u001b[0;32m 27\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mexecute\u001b[39m(\u001b[38;5;28mself\u001b[39m, circuits, execution_config\u001b[38;5;241m=\u001b[39mDefaultExecutionConfig):\n\u001b[1;32m---> 28\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[43muntracked_execute\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcircuits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecution_config\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 29\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(circuits, QuantumScript):\n\u001b[0;32m 30\u001b[0m batch \u001b[38;5;241m=\u001b[39m (circuits,)\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\modifiers\\single_tape_support.py:30\u001b[0m, in \u001b[0;36m_make_execute..execute\u001b[1;34m(self, circuits, execution_config)\u001b[0m\n\u001b[0;32m 28\u001b[0m is_single_circuit \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m 29\u001b[0m circuits \u001b[38;5;241m=\u001b[39m (circuits,)\n\u001b[1;32m---> 30\u001b[0m results \u001b[38;5;241m=\u001b[39m 
\u001b[43mbatch_execute\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcircuits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecution_config\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 31\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m results[\u001b[38;5;241m0\u001b[39m] \u001b[38;5;28;01mif\u001b[39;00m is_single_circuit \u001b[38;5;28;01melse\u001b[39;00m results\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\logging\\decorators.py:61\u001b[0m, in \u001b[0;36mlog_string_debug_func..wrapper_entry\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 54\u001b[0m s_caller \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m::L\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\n\u001b[0;32m 55\u001b[0m [\u001b[38;5;28mstr\u001b[39m(i) \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m inspect\u001b[38;5;241m.\u001b[39mgetouterframes(inspect\u001b[38;5;241m.\u001b[39mcurrentframe(), \u001b[38;5;241m2\u001b[39m)[\u001b[38;5;241m1\u001b[39m][\u001b[38;5;241m1\u001b[39m:\u001b[38;5;241m3\u001b[39m]]\n\u001b[0;32m 56\u001b[0m )\n\u001b[0;32m 57\u001b[0m lgr\u001b[38;5;241m.\u001b[39mdebug(\n\u001b[0;32m 58\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCalling \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mf_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00ms_caller\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m 59\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m_debug_log_kwargs,\n\u001b[0;32m 60\u001b[0m )\n\u001b[1;32m---> 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\default_qubit.py:707\u001b[0m, in \u001b[0;36mDefaultQubit.execute\u001b[1;34m(self, circuits, execution_config)\u001b[0m\n\u001b[0;32m 697\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[0;32m 698\u001b[0m (\n\u001b[0;32m 699\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mJitting executions with many circuits may have substantial classical overhead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 702\u001b[0m \u001b[38;5;167;01mUserWarning\u001b[39;00m,\n\u001b[0;32m 703\u001b[0m )\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m max_workers \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 707\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m(\n\u001b[0;32m 708\u001b[0m _simulate_wrapper(\n\u001b[0;32m 709\u001b[0m c,\n\u001b[0;32m 710\u001b[0m {\n\u001b[0;32m 711\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrng\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_rng,\n\u001b[0;32m 712\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdebugger\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_debugger,\n\u001b[0;32m 713\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minterface\u001b[39m\u001b[38;5;124m\"\u001b[39m: interface,\n\u001b[0;32m 714\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstate_cache\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_state_cache,\n\u001b[0;32m 715\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mprng_key\u001b[39m\u001b[38;5;124m\"\u001b[39m: _key,\n\u001b[0;32m 716\u001b[0m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmcm_method\u001b[39m\u001b[38;5;124m\"\u001b[39m: execution_config\u001b[38;5;241m.\u001b[39mmcm_config\u001b[38;5;241m.\u001b[39mmcm_method,\n\u001b[0;32m 717\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpostselect_mode\u001b[39m\u001b[38;5;124m\"\u001b[39m: execution_config\u001b[38;5;241m.\u001b[39mmcm_config\u001b[38;5;241m.\u001b[39mpostselect_mode,\n\u001b[0;32m 718\u001b[0m },\n\u001b[0;32m 719\u001b[0m )\n\u001b[0;32m 720\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m c, _key \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(circuits, prng_keys)\n\u001b[0;32m 721\u001b[0m )\n\u001b[0;32m 723\u001b[0m vanilla_circuits \u001b[38;5;241m=\u001b[39m convert_to_numpy_parameters(circuits)[\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m 724\u001b[0m seeds \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_rng\u001b[38;5;241m.\u001b[39mintegers(\u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m31\u001b[39m \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m, size\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mlen\u001b[39m(vanilla_circuits))\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\default_qubit.py:708\u001b[0m, in \u001b[0;36m\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m 697\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[0;32m 698\u001b[0m (\n\u001b[0;32m 699\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mJitting executions with many circuits may have substantial classical overhead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 702\u001b[0m \u001b[38;5;167;01mUserWarning\u001b[39;00m,\n\u001b[0;32m 703\u001b[0m )\n\u001b[0;32m 705\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m max_workers \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 707\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28mtuple\u001b[39m(\n\u001b[1;32m--> 708\u001b[0m \u001b[43m_simulate_wrapper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 709\u001b[0m \u001b[43m \u001b[49m\u001b[43mc\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 710\u001b[0m \u001b[43m \u001b[49m\u001b[43m{\u001b[49m\n\u001b[0;32m 711\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrng\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_rng\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 712\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdebugger\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_debugger\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 713\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43minterface\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43minterface\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 714\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstate_cache\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_state_cache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 715\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mprng_key\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m_key\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 716\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmcm_method\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mexecution_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmcm_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmcm_method\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 717\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpostselect_mode\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecution_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmcm_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpostselect_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 718\u001b[0m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 719\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 720\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m c, _key \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(circuits, prng_keys)\n\u001b[0;32m 721\u001b[0m )\n\u001b[0;32m 723\u001b[0m vanilla_circuits \u001b[38;5;241m=\u001b[39m convert_to_numpy_parameters(circuits)[\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m 724\u001b[0m seeds \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_rng\u001b[38;5;241m.\u001b[39mintegers(\u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m31\u001b[39m \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m, size\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mlen\u001b[39m(vanilla_circuits))\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\default_qubit.py:1055\u001b[0m, in \u001b[0;36m_simulate_wrapper\u001b[1;34m(circuit, kwargs)\u001b[0m\n\u001b[0;32m 1054\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_simulate_wrapper\u001b[39m(circuit, kwargs):\n\u001b[1;32m-> 1055\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43msimulate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcircuit\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\logging\\decorators.py:61\u001b[0m, in \u001b[0;36mlog_string_debug_func..wrapper_entry\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 54\u001b[0m s_caller \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m::L\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\n\u001b[0;32m 55\u001b[0m [\u001b[38;5;28mstr\u001b[39m(i) \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m inspect\u001b[38;5;241m.\u001b[39mgetouterframes(inspect\u001b[38;5;241m.\u001b[39mcurrentframe(), \u001b[38;5;241m2\u001b[39m)[\u001b[38;5;241m1\u001b[39m][\u001b[38;5;241m1\u001b[39m:\u001b[38;5;241m3\u001b[39m]]\n\u001b[0;32m 56\u001b[0m )\n\u001b[0;32m 57\u001b[0m lgr\u001b[38;5;241m.\u001b[39mdebug(\n\u001b[0;32m 58\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCalling \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mf_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00ms_caller\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m 59\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m_debug_log_kwargs,\n\u001b[0;32m 60\u001b[0m )\n\u001b[1;32m---> 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\qubit\\simulate.py:359\u001b[0m, in \u001b[0;36msimulate\u001b[1;34m(circuit, debugger, state_cache, **execution_kwargs)\u001b[0m\n\u001b[0;32m 356\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m(results)\n\u001b[0;32m 358\u001b[0m ops_key, meas_key \u001b[38;5;241m=\u001b[39m jax_random_split(prng_key)\n\u001b[1;32m--> 359\u001b[0m state, is_state_batched \u001b[38;5;241m=\u001b[39m \u001b[43mget_final_state\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 360\u001b[0m \u001b[43m \u001b[49m\u001b[43mcircuit\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdebugger\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdebugger\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mprng_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mops_key\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mexecution_kwargs\u001b[49m\n\u001b[0;32m 361\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 362\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m state_cache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 363\u001b[0m state_cache[circuit\u001b[38;5;241m.\u001b[39mhash] \u001b[38;5;241m=\u001b[39m state\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\logging\\decorators.py:61\u001b[0m, in \u001b[0;36mlog_string_debug_func..wrapper_entry\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 54\u001b[0m s_caller \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m::L\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\n\u001b[0;32m 55\u001b[0m [\u001b[38;5;28mstr\u001b[39m(i) \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m inspect\u001b[38;5;241m.\u001b[39mgetouterframes(inspect\u001b[38;5;241m.\u001b[39mcurrentframe(), \u001b[38;5;241m2\u001b[39m)[\u001b[38;5;241m1\u001b[39m][\u001b[38;5;241m1\u001b[39m:\u001b[38;5;241m3\u001b[39m]]\n\u001b[0;32m 56\u001b[0m )\n\u001b[0;32m 57\u001b[0m 
lgr\u001b[38;5;241m.\u001b[39mdebug(\n\u001b[0;32m 58\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCalling \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mf_string\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m from \u001b[39m\u001b[38;5;132;01m{\u001b[39;00ms_caller\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m 59\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m_debug_log_kwargs,\n\u001b[0;32m 60\u001b[0m )\n\u001b[1;32m---> 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\qubit\\simulate.py:192\u001b[0m, in \u001b[0;36mget_final_state\u001b[1;34m(circuit, debugger, **execution_kwargs)\u001b[0m\n\u001b[0;32m 190\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(op, MidMeasureMP):\n\u001b[0;32m 191\u001b[0m prng_key, key \u001b[38;5;241m=\u001b[39m jax_random_split(prng_key)\n\u001b[1;32m--> 192\u001b[0m state \u001b[38;5;241m=\u001b[39m \u001b[43mapply_operation\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 193\u001b[0m \u001b[43m \u001b[49m\u001b[43mop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 194\u001b[0m \u001b[43m \u001b[49m\u001b[43mstate\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 195\u001b[0m \u001b[43m \u001b[49m\u001b[43mis_state_batched\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mis_state_batched\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 196\u001b[0m \u001b[43m \u001b[49m\u001b[43mdebugger\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdebugger\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 197\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mprng_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkey\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 198\u001b[0m \u001b[43m \u001b[49m\u001b[43mtape_shots\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcircuit\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshots\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 199\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mexecution_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 200\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 201\u001b[0m \u001b[38;5;66;03m# Handle postselection on mid-circuit measurements\u001b[39;00m\n\u001b[0;32m 202\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(op, qml\u001b[38;5;241m.\u001b[39mProjector):\n", + "File \u001b[1;32m~\\anaconda3\\Lib\\functools.py:909\u001b[0m, in \u001b[0;36msingledispatch..wrapper\u001b[1;34m(*args, **kw)\u001b[0m\n\u001b[0;32m 905\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m args:\n\u001b[0;32m 906\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfuncname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m requires at least \u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m 907\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m1 positional argument\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m--> 909\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mdispatch\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__class__\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkw\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\devices\\qubit\\apply_operation.py:521\u001b[0m, in \u001b[0;36mapply_cnot\u001b[1;34m(op, state, is_state_batched, debugger, **_)\u001b[0m\n\u001b[0;32m 518\u001b[0m sl_1 \u001b[38;5;241m=\u001b[39m _get_slice(\u001b[38;5;241m1\u001b[39m, control_axes, n_dim)\n\u001b[0;32m 520\u001b[0m state_x \u001b[38;5;241m=\u001b[39m math\u001b[38;5;241m.\u001b[39mroll(state[sl_1], \u001b[38;5;241m1\u001b[39m, target_axes)\n\u001b[1;32m--> 521\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mmath\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstack\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[43mstate\u001b[49m\u001b[43m[\u001b[49m\u001b[43msl_0\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstate_x\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43maxis\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcontrol_axes\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\math\\multi_dispatch.py:153\u001b[0m, in \u001b[0;36mmulti_dispatch..decorator..wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 150\u001b[0m interface \u001b[38;5;241m=\u001b[39m interface \u001b[38;5;129;01mor\u001b[39;00m get_interface(\u001b[38;5;241m*\u001b[39mdispatch_args)\n\u001b[0;32m 151\u001b[0m kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlike\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m interface\n\u001b[1;32m--> 153\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\pennylane\\math\\multi_dispatch.py:518\u001b[0m, in \u001b[0;36mstack\u001b[1;34m(values, axis, like)\u001b[0m\n\u001b[0;32m 489\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Stack a sequence of tensors along the specified axis.\u001b[39;00m\n\u001b[0;32m 490\u001b[0m \n\u001b[0;32m 491\u001b[0m \u001b[38;5;124;03m.. warning::\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 515\u001b[0m \u001b[38;5;124;03m [5.00e+00, 8.00e+00, 1.01e+02]], dtype=float32)>\u001b[39;00m\n\u001b[0;32m 516\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 517\u001b[0m values \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39mcoerce(values, like\u001b[38;5;241m=\u001b[39mlike)\n\u001b[1;32m--> 518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mnp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstack\u001b[49m\u001b[43m(\u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43maxis\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maxis\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlike\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlike\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\anaconda3\\Lib\\site-packages\\autoray\\autoray.py:81\u001b[0m, in \u001b[0;36mdo\u001b[1;34m(fn, like, *args, **kwargs)\u001b[0m\n\u001b[0;32m 79\u001b[0m backend \u001b[38;5;241m=\u001b[39m _choose_backend(fn, args, kwargs, like\u001b[38;5;241m=\u001b[39mlike)\n\u001b[0;32m 80\u001b[0m func \u001b[38;5;241m=\u001b[39m get_lib_fn(backend, fn)\n\u001b[1;32m---> 81\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\anaconda3\\Lib\\site-packages\\autoray\\autoray.py:1331\u001b[0m, in \u001b[0;36mtranslate_wrapper..translated_function\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 1328\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m key, value \u001b[38;5;129;01min\u001b[39;00m translation\u001b[38;5;241m.\u001b[39mitems():\n\u001b[0;32m 1329\u001b[0m new_kwargs[value[\u001b[38;5;241m0\u001b[39m]] \u001b[38;5;241m=\u001b[39m value[\u001b[38;5;241m1\u001b[39m]\n\u001b[1;32m-> 1331\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mnew_kwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "import math\n", + "import numpy as np\n", + "from dataclasses import dataclass, field\n", + "\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import pennylane as qml\n", + "\n", + "from sklearn.datasets import load_iris\n", + "from sklearn.model_selection import train_test_split\n", + "from sklearn.preprocessing import MinMaxScaler\n", + "\n", + "torch.set_default_dtype(torch.float32)\n", + "\n", + "\n", + "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n", + " assert num_splines >= degree + 1\n", + " n = num_splines - 1\n", + " p = degree\n", + " if n - p > 0:\n", + " interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n", + " else:\n", + " interior = np.array([], dtype=float)\n", + " knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n", + "\n", + " def N(i, r, t):\n", + " if r == 0:\n", + " left = knots[i]\n", + " right = knots[i + 1]\n", + " return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)\n", + " left_den = 
knots[i + r] - knots[i]\n", + " right_den = knots[i + r + 1] - knots[i + 1]\n", + " left_term = 0.0\n", + " right_term = 0.0\n", + " if left_den > 0:\n", + " left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t)\n", + " if right_den > 0:\n", + " right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t)\n", + " return left_term + right_term\n", + "\n", + " tgrid = np.asarray(grid, dtype=float)\n", + " B = np.vstack([N(i, p, tgrid) for i in range(num_splines)])\n", + " return np.maximum(B, 0.0)\n", + "\n", + "\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L = n_label_qubits\n", + " self.P = n_pos_qubits\n", + " self.n_qubits = self.L + self.P\n", + " self.depth = depth\n", + " init = 0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32)\n", + " self.theta = nn.Parameter(init)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " qml.templates.StronglyEntanglingLayers(weights, wires=list(range(self.n_qubits)))\n", + " return qml.probs(wires=list(range(self.n_qubits)))\n", + "\n", + " self._qprobs = qnode\n", + "\n", + " def forward(self):\n", + " return self._qprobs(self.theta).to(torch.float32)\n", + "\n", + " @torch.no_grad()\n", + " def freeze(self):\n", + " self.theta.requires_grad_(False)\n", + "\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L = qcbm.L\n", + " self.P = qcbm.P\n", + " self.n_qubits = qcbm.n_qubits\n", + " self.depth = depth\n", + " init = 0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32)\n", + " self.phi = nn.Parameter(init)\n", + " self.dev = 
qml.device(\"default.qubit\", wires=self.n_qubits)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=list(range(self.n_qubits)))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=list(range(self.L)))\n", + " return qml.probs(wires=list(range(self.n_qubits)))\n", + "\n", + " self._qprobs = qnode\n", + "\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)\n", + "\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.depth = entangle_depth\n", + " self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32))\n", + " self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.K)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(alpha_vec):\n", + " for k in range(self.K):\n", + " qml.RY(alpha_vec[k], wires=k)\n", + " for _ in range(self.depth):\n", + " for k in range(self.K):\n", + " qml.CNOT(wires=[k, (k + 1) % self.K])\n", + " z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)]\n", + " x = [qml.expval(qml.PauliX(k)) for k in range(self.K)]\n", + " return z + x\n", + "\n", + " self._qnode = qnode\n", + "\n", + " def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor:\n", + " x01 = torch.clamp(x01_scalar.reshape(()), 0.0, 1.0)\n", + " omega = F.softplus(self.log_omega) + 1e-4\n", + " alpha = omega * (2.0 * math.pi * x01) + self.phase\n", + " 
outs = self._qnode(alpha.to(torch.float32))\n", + " outs = torch.stack([o if isinstance(o, torch.Tensor) else torch.as_tensor(o, dtype=torch.float32) for o in outs], dim=0).to(torch.float32)\n", + " z = outs[: self.K]\n", + " x = outs[self.K:]\n", + " return (self.w_cos * z).sum() + (self.w_sin * x).sum()\n", + "\n", + " def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor:\n", + " x01_vec = torch.clamp(x01_vec.to(torch.float32), 0.0, 1.0)\n", + " vals = [self.forward_scalar(x01_vec[i]) for i in range(x01_vec.shape[0])]\n", + " return torch.stack(vals, dim=0).to(torch.float32)\n", + "\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int,\n", + " fourier_k: int = 4, fourier_depth: int = 1, seed: int = 0, w_init=0.5):\n", + " super().__init__()\n", + " self.mixer = mixer\n", + " self.L = n_label_qubits\n", + " self.P = n_pos_qubits\n", + " self.Nlabel = 2 ** self.L\n", + " self.Npos = 2 ** self.P\n", + " self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32))\n", + " self.wq = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32))\n", + " self.qfour = QuantumBlock(k_frequencies=fourier_k, entangle_depth=fourier_depth, seed=seed)\n", + "\n", + " def batch_forward(self, x_raw: torch.Tensor, x_pos01: torch.Tensor, probs_flat: torch.Tensor) -> torch.Tensor:\n", + " x_pos01 = x_pos01.to(torch.float32)\n", + " probs_flat = probs_flat.to(torch.float32)\n", + " B = x_pos01.shape[0]\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01, 0.0, 1.0) * (self.Npos - 1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos - 1)\n", + " p_vals = lp[:, idx].sum(dim=0).to(torch.float32)\n", + " qfr_vals = self.qfour.forward_batch(x_pos01)\n", + " out = (self.wf * p_vals + self.wq * qfr_vals).to(torch.float32)\n", + " return out\n", + "\n", + "\n", + "@dataclass\n", + "class QuKANLayerCfg:\n", + " n_nodes: int = 6\n", + " 
n_label_qubits: int = 2\n", + " n_pos_qubits: int = 5\n", + " qcbm_depth: int = 3\n", + " label_mixer_depth: int = 2\n", + " fourier_k: int = 4\n", + " fourier_depth: int = 1\n", + "\n", + "\n", + "class QuKANLayer(nn.Module):\n", + " def __init__(self, cfg: QuKANLayerCfg, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " self.L = cfg.n_label_qubits\n", + " self.P = cfg.n_pos_qubits\n", + " self.qcbm = QCBMState(self.L, self.P, depth=cfg.qcbm_depth, seed=seed)\n", + " self.mixers = nn.ModuleList()\n", + " self.edges = nn.ModuleList()\n", + " self._built = False\n", + "\n", + " def build(self, input_dim: int, seed: int = 0):\n", + " self.input_dim = input_dim\n", + " torch.manual_seed(seed)\n", + " for m in range(self.cfg.n_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, depth=self.cfg.label_mixer_depth, seed=seed + 97 * m + j)\n", + " edge = QuKANResidualEdge(\n", + " mixer,\n", + " self.L, self.P,\n", + " fourier_k=self.cfg.fourier_k,\n", + " fourier_depth=self.cfg.fourier_depth,\n", + " seed=seed + 991 * m + 13 * j,\n", + " w_init=0.5\n", + " )\n", + " self.mixers.append(mixer)\n", + " self.edges.append(edge)\n", + " self._built = True\n", + " print(f\"[QuKANLayer] built edges: nodes={self.cfg.n_nodes}, in_dim={input_dim}, total_edges={len(self.edges)}\")\n", + "\n", + " def pretrain_qcbm_on_splines(self, degree=2, epochs=200, lr=5e-2, verbose=True):\n", + " num_splines = 2 ** self.L\n", + " Npos = 2 ** self.P\n", + " grid = np.linspace(0.0, 1.0, Npos, dtype=float)\n", + " B = bspline_basis_matrix(num_splines, degree, grid)\n", + " B = B + 1e-8\n", + " B = B / B.sum(axis=1, keepdims=True)\n", + " target = torch.tensor((B / num_splines).reshape(-1), dtype=torch.float32)\n", + " opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr)\n", + " for ep in range(1, epochs + 1):\n", + " opt.zero_grad()\n", + " probs = self.qcbm().to(torch.float32)\n", + " loss = F.mse_loss(probs, target)\n", 
+ " loss.backward()\n", + " opt.step()\n", + " if verbose and (ep % 50 == 0 or ep == 1):\n", + " with torch.no_grad():\n", + " tv = 0.5 * torch.sum(torch.abs(probs - target)).item()\n", + " print(f\"[QCBM pretrain] epoch {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}\")\n", + " self.qcbm.freeze()\n", + "\n", + " def forward(self, X_in: torch.Tensor, input_is_01: bool) -> torch.Tensor:\n", + " assert self._built, \"Call build(input_dim) first.\"\n", + " X_in = X_in.to(torch.float32)\n", + " B, D = X_in.shape\n", + " M = self.cfg.n_nodes\n", + " edge_probs = [mix().to(torch.float32) for mix in self.mixers]\n", + " X01_pos = (X_in if input_is_01 else torch.sigmoid(X_in)).to(torch.float32)\n", + " nodes = []\n", + " eidx = 0\n", + " for m in range(M):\n", + " acc = torch.zeros(B, dtype=torch.float32, device=X_in.device)\n", + " for j in range(D):\n", + " probs_flat = edge_probs[eidx]\n", + " edge = self.edges[eidx]\n", + " x_pos = X01_pos[:, j].to(torch.float32)\n", + " out_j = edge.batch_forward(x_pos, x_pos, probs_flat).to(torch.float32)\n", + " acc = acc + out_j\n", + " eidx += 1\n", + " nodes.append(acc)\n", + " nodes = torch.stack(nodes, dim=1).to(torch.float32)\n", + " return nodes\n", + "\n", + "\n", + "@dataclass\n", + "class KANReadoutCfg:\n", + " n_classes: int\n", + " in_dim: int\n", + " fourier_k: int = 3\n", + " fourier_depth: int = 1\n", + "\n", + "\n", + "class KANReadout(nn.Module):\n", + " def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " C, M = cfg.n_classes, cfg.in_dim\n", + " self.qfr = nn.ModuleList([\n", + " QuantumBlock(k_frequencies=cfg.fourier_k,\n", + " entangle_depth=cfg.fourier_depth,\n", + " seed=seed + 131 * c + m)\n", + " for c in range(C) for m in range(M)\n", + " ])\n", + " self.b = nn.Parameter(torch.zeros(C, dtype=torch.float32))\n", + "\n", + " def _edge_idx(self, c: int, m: int) -> int:\n", + " return c * self.cfg.in_dim + m\n", + "\n", + 
" def forward(self, H: torch.Tensor) -> torch.Tensor:\n", + " H = H.to(torch.float32)\n", + " B, M = H.shape\n", + " C = self.cfg.n_classes\n", + " H01 = torch.sigmoid(H)\n", + " logits = []\n", + " for c in range(C):\n", + " acc_c = torch.zeros(B, dtype=torch.float32, device=H.device)\n", + " for m in range(M):\n", + " qfr = self.qfr[self._edge_idx(c, m)]\n", + " acc_c = acc_c + qfr.forward_batch(H01[:, m])\n", + " logits.append(acc_c + self.b[c])\n", + " return torch.stack(logits, dim=1).to(torch.float32)\n", + "\n", + "\n", + "@dataclass\n", + "class QuKANNetCfg:\n", + " layer1: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n", + " layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n", + " n_classes: int = 3\n", + "\n", + "\n", + "class QuKANNet(nn.Module):\n", + " def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " self.l1 = QuKANLayer(cfg.layer1, seed=seed)\n", + " self.l1.build(input_dim=input_dim, seed=seed)\n", + " self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)\n", + " self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)\n", + " self.readout = KANReadout(\n", + " KANReadoutCfg(\n", + " n_classes=cfg.n_classes,\n", + " in_dim=cfg.layer2.n_nodes,\n", + " fourier_k=3,\n", + " fourier_depth=1\n", + " ),\n", + " seed=seed + 1234\n", + " )\n", + "\n", + " def pretrain_qcbms(self, degree=2, epochs=200, lr=5e-2, verbose=True):\n", + " print(\"[Pretrain] Layer 1 QCBM on degree-2 B-splines\")\n", + " self.l1.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)\n", + " print(\"[Pretrain] Layer 2 QCBM on degree-2 B-splines\")\n", + " self.l2.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)\n", + "\n", + " def forward(self, X01: 
torch.Tensor) -> torch.Tensor:\n", + " X01 = X01.to(torch.float32)\n", + " h1 = self.l1(X01, input_is_01=True).to(torch.float32)\n", + " h2 = self.l2(h1, input_is_01=False).to(torch.float32)\n", + " return self.readout(h2)\n", + "\n", + "\n", + "def run_iris(seed=0):\n", + " torch.manual_seed(seed)\n", + " np.random.seed(seed)\n", + " iris = load_iris()\n", + " X = iris.data.astype(np.float32)\n", + " y = iris.target.astype(np.int64)\n", + " scaler = MinMaxScaler(feature_range=(0.0, 1.0))\n", + " X01 = scaler.fit_transform(X).astype(np.float32)\n", + " X_tr, X_te, y_tr, y_te = train_test_split(\n", + " X01, y, test_size=0.3, random_state=seed, stratify=y\n", + " )\n", + " X_tr = torch.tensor(X_tr, dtype=torch.float32)\n", + " X_te = torch.tensor(X_te, dtype=torch.float32)\n", + " y_tr = torch.tensor(y_tr, dtype=torch.long)\n", + " y_te = torch.tensor(y_te, dtype=torch.long)\n", + " cfg = QuKANNetCfg(\n", + " layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),\n", + " layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),\n", + " n_classes=3,\n", + " )\n", + " model = QuKANNet(cfg, input_dim=4, seed=seed)\n", + " model.pretrain_qcbms(degree=2, epochs=200, lr=5e-2, verbose=True)\n", + " opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4)\n", + " sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=60)\n", + " ce = nn.CrossEntropyLoss(label_smoothing=0.03)\n", + " print(\"\\nTraining QuKAN on Iris\")\n", + " epochs = 60\n", + " B = 16\n", + " for ep in range(1, epochs + 1):\n", + " model.train()\n", + " perm = torch.randperm(X_tr.shape[0])\n", + " Xb_all, yb_all = X_tr[perm], y_tr[perm]\n", + " tot, corr, loss_sum = 0, 0, 0.0\n", + " for i in range(0, Xb_all.shape[0], B):\n", + " xb = Xb_all[i:i+B]\n", + " yb = yb_all[i:i+B]\n", + " opt.zero_grad(set_to_none=True)\n", + " logits = 
model(xb)\n", + " loss = ce(logits, yb)\n", + " loss.backward()\n", + " torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)\n", + " opt.step()\n", + " loss_sum += float(loss.item()) * xb.size(0)\n", + " tot += xb.size(0)\n", + " corr += (logits.argmax(1) == yb).sum().item()\n", + " sched.step()\n", + " train_acc = 100.0 * corr / tot\n", + " train_loss = loss_sum / tot\n", + " model.eval()\n", + " with torch.no_grad():\n", + " logits_te = model(X_te)\n", + " val_acc = 100.0 * (logits_te.argmax(1) == y_te).float().mean().item()\n", + " if ep % 2 == 1 or ep >= epochs - 10:\n", + " print(f\"Epoch {ep:03d} | Loss={train_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%\")\n", + " print(\"Done.\")\n", + "\n", + "\n", + "if __name__ == \"__main__\":\n", + " run_iris(seed=0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "73b22516-cd5e-41c1-b746-e4ee69436a46", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[HybridQuKANLayer] built edges: nodes=6, in_dim=2, total_edges=12\n", + "[HybridQuKANLayer] built edges: nodes=6, in_dim=6, total_edges=36\n", + "[Pretrain] Layer 1 QCBM on degree-2 B-splines\n", + "[QCBM pretrain] epoch 001 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] epoch 050 | MSE=0.000061 | TV=0.377527\n", + "[QCBM pretrain] epoch 100 | MSE=0.000037 | TV=0.308189\n", + "[QCBM pretrain] epoch 150 | MSE=0.000030 | TV=0.276578\n", + "[QCBM pretrain] epoch 200 | MSE=0.000028 | TV=0.261863\n", + "[Pretrain] Layer 2 QCBM on degree-2 B-splines\n", + "[QCBM pretrain] epoch 001 | MSE=0.007276 | TV=0.955730\n", + "[QCBM pretrain] epoch 050 | MSE=0.000068 | TV=0.398482\n", + "[QCBM pretrain] epoch 100 | MSE=0.000037 | TV=0.306850\n", + "[QCBM pretrain] epoch 150 | MSE=0.000029 | TV=0.265133\n", + "[QCBM pretrain] epoch 200 | MSE=0.000026 | TV=0.248326\n", + "\n", + "=== Training QuKAN (Hybrid, 2 layers, QFR + KAN readout) on Social_Network_Ads ===\n", + "Epoch 001 | 
Loss=0.6937 | Train Acc=52.50% | Val Acc=64.17%\n", + "Epoch 003 | Loss=0.6446 | Train Acc=64.29% | Val Acc=64.17%\n", + "Epoch 005 | Loss=0.5781 | Train Acc=70.36% | Val Acc=71.67%\n", + "Epoch 007 | Loss=0.4162 | Train Acc=84.64% | Val Acc=88.33%\n", + "Epoch 009 | Loss=0.3143 | Train Acc=91.79% | Val Acc=89.17%\n", + "Epoch 011 | Loss=0.2784 | Train Acc=93.21% | Val Acc=90.00%\n", + "Epoch 013 | Loss=0.2688 | Train Acc=92.50% | Val Acc=89.17%\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[17], line 500\u001b[0m\n\u001b[0;32m 497\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124mFinal Test Accuracy: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00macc\u001b[38;5;132;01m:\u001b[39;00m\u001b[38;5;124m.2f\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 499\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;18m__name__\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__main__\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m--> 500\u001b[0m \u001b[43mrun_social\u001b[49m\u001b[43m(\u001b[49m\u001b[43mseed\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\n", + "Cell \u001b[1;32mIn[17], line 471\u001b[0m, in \u001b[0;36mrun_social\u001b[1;34m(seed)\u001b[0m\n\u001b[0;32m 469\u001b[0m logits \u001b[38;5;241m=\u001b[39m model(xb)\n\u001b[0;32m 470\u001b[0m loss \u001b[38;5;241m=\u001b[39m ce(logits, yb)\n\u001b[1;32m--> 471\u001b[0m \u001b[43mloss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 
472\u001b[0m torch\u001b[38;5;241m.\u001b[39mnn\u001b[38;5;241m.\u001b[39mutils\u001b[38;5;241m.\u001b[39mclip_grad_norm_(model\u001b[38;5;241m.\u001b[39mparameters(), \u001b[38;5;241m1.0\u001b[39m)\n\u001b[0;32m 473\u001b[0m opt\u001b[38;5;241m.\u001b[39mstep()\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\_tensor.py:492\u001b[0m, in \u001b[0;36mTensor.backward\u001b[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[0;32m 482\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m 483\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[0;32m 484\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[0;32m 485\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 490\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs,\n\u001b[0;32m 491\u001b[0m )\n\u001b[1;32m--> 492\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 493\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\n\u001b[0;32m 494\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python311\\site-packages\\torch\\autograd\\__init__.py:251\u001b[0m, in \u001b[0;36mbackward\u001b[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[0;32m 246\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[0;32m 248\u001b[0m 
\u001b[38;5;66;03m# The reason we repeat the same comment below is that\u001b[39;00m\n\u001b[0;32m 249\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[0;32m 250\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[1;32m--> 251\u001b[0m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[0;32m 252\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 253\u001b[0m \u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 254\u001b[0m \u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 256\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 257\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 258\u001b[0m \u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 259\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "CSV_PATH = r\"C:\\Users\\riakh\\Downloads\\Social_Network_Ads.csv\"\n", + "torch.set_default_dtype(torch.float32)\n", + "\n", + "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n", + " assert num_splines >= degree + 1\n", + " n = num_splines - 1\n", + " p = degree\n", + " if n - p > 0:\n", + 
" interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n", + " else:\n", + " interior = np.array([], dtype=float)\n", + " knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n", + " def N(i, r, t):\n", + " if r == 0:\n", + " left = knots[i]\n", + " right = knots[i + 1]\n", + " return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)\n", + " left_den = knots[i + r] - knots[i]\n", + " right_den = knots[i + r + 1] - knots[i + 1]\n", + " left_term = 0.0\n", + " right_term = 0.0\n", + " if left_den > 0:\n", + " left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t)\n", + " if right_den > 0:\n", + " right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t)\n", + " return left_term + right_term\n", + " tgrid = np.asarray(grid, dtype=float)\n", + " B = np.vstack([N(i, p, tgrid) for i in range(num_splines)])\n", + " return np.maximum(B, 0.0)\n", + "\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L = n_label_qubits\n", + " self.P = n_pos_qubits\n", + " self.n_qubits = self.L + self.P\n", + " self.depth = depth\n", + " init = 0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32)\n", + " self.theta = nn.Parameter(init)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " qml.templates.StronglyEntanglingLayers(weights, wires=list(range(self.n_qubits)))\n", + " return qml.probs(wires=list(range(self.n_qubits)))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.theta).to(torch.float32)\n", + " @torch.no_grad()\n", + " def freeze(self):\n", + " self.theta.requires_grad_(False)\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth: int = 
2, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L = qcbm.L\n", + " self.P = qcbm.P\n", + " self.n_qubits = qcbm.n_qubits\n", + " self.depth = depth\n", + " init = 0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32)\n", + " self.phi = nn.Parameter(init)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=list(range(self.n_qubits)))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=list(range(self.L)))\n", + " return qml.probs(wires=list(range(self.n_qubits)))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.depth = entangle_depth\n", + " self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32))\n", + " self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.K)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(alpha_vec):\n", + " for k in range(self.K):\n", + " qml.RY(alpha_vec[k], wires=k)\n", + " for _ in range(self.depth):\n", + " for k in range(self.K):\n", + " qml.CNOT(wires=[k, (k + 1) % self.K])\n", + " z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)]\n", + " x = [qml.expval(qml.PauliX(k)) for k in range(self.K)]\n", + " 
return z + x\n", + " self._qnode = qnode\n", + " def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor:\n", + " x01 = torch.clamp(x01_scalar.reshape(()), 0.0, 1.0)\n", + " omega = F.softplus(self.log_omega) + 1e-4\n", + " alpha = omega * (2.0 * math.pi * x01) + self.phase\n", + " outs = self._qnode(alpha.to(torch.float32))\n", + " outs = torch.stack([o if isinstance(o, torch.Tensor) else torch.as_tensor(o, dtype=torch.float32)\n", + " for o in outs], dim=0).to(torch.float32)\n", + " z = outs[: self.K]\n", + " x = outs[self.K:]\n", + " return (self.w_cos * z).sum() + (self.w_sin * x).sum()\n", + " def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor:\n", + " x01_vec = torch.clamp(x01_vec.to(torch.float32), 0.0, 1.0)\n", + " vals = [self.forward_scalar(x01_vec[i]) for i in range(x01_vec.shape[0])]\n", + " return torch.stack(vals, dim=0).to(torch.float32)\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int,\n", + " fourier_k: int = 4, fourier_depth: int = 1, seed: int = 0, w_init=0.5):\n", + " super().__init__()\n", + " self.mixer = mixer\n", + " self.L = n_label_qubits\n", + " self.P = n_pos_qubits\n", + " self.Nlabel = 2 ** self.L\n", + " self.Npos = 2 ** self.P\n", + " self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32))\n", + " self.wq = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32))\n", + " self.qfour = QuantumBlock(k_frequencies=fourier_k, entangle_depth=fourier_depth, seed=seed)\n", + " def batch_forward(self, x_pos01: torch.Tensor, probs_flat: torch.Tensor) -> torch.Tensor:\n", + " x_pos01 = x_pos01.to(torch.float32)\n", + " probs_flat = probs_flat.to(torch.float32)\n", + " B = x_pos01.shape[0]\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01, 0.0, 1.0) * (self.Npos - 1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos - 1)\n", + " p_vals = lp[:, 
idx].sum(dim=0).to(torch.float32)\n", + " qfr_vals = self.qfour.forward_batch(x_pos01)\n", + " out = (self.wf * p_vals + self.wq * qfr_vals).to(torch.float32)\n", + " return out\n", + "\n", + "@dataclass\n", + "class QuKANLayerCfg:\n", + " n_nodes: int = 6\n", + " n_label_qubits: int = 2\n", + " n_pos_qubits: int = 5\n", + " qcbm_depth: int = 3\n", + " label_mixer_depth: int = 2\n", + " fourier_k: int = 4\n", + " fourier_depth: int = 1\n", + "\n", + "class QuKANLayer(nn.Module):\n", + " def __init__(self, cfg: QuKANLayerCfg, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " self.L = cfg.n_label_qubits\n", + " self.P = cfg.n_pos_qubits\n", + " self.qcbm = QCBMState(self.L, self.P, depth=cfg.qcbm_depth, seed=seed)\n", + " self.mixers = nn.ModuleList()\n", + " self.edges = nn.ModuleList()\n", + " self._built = False\n", + " def build(self, input_dim: int, seed: int = 0):\n", + " self.input_dim = input_dim\n", + " torch.manual_seed(seed)\n", + " for m in range(self.cfg.n_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, depth=self.cfg.label_mixer_depth, seed=seed + 97 * m + j)\n", + " edge = QuKANResidualEdge(\n", + " mixer,\n", + " self.L, self.P,\n", + " fourier_k=self.cfg.fourier_k,\n", + " fourier_depth=self.cfg.fourier_depth,\n", + " seed=seed + 991 * m + 13 * j,\n", + " w_init=0.5\n", + " )\n", + " self.mixers.append(mixer)\n", + " self.edges.append(edge)\n", + " self._built = True\n", + " print(f\"[QuKANLayer] built edges: nodes={self.cfg.n_nodes}, in_dim={input_dim}, total_edges={len(self.edges)}\")\n", + " def pretrain_qcbm_on_splines(self, degree=2, epochs=200, lr=5e-2, verbose=True):\n", + " num_splines = 2 ** self.L\n", + " Npos = 2 ** self.P\n", + " grid = np.linspace(0.0, 1.0, Npos, dtype=float)\n", + " B = bspline_basis_matrix(num_splines, degree, grid)\n", + " B = B + 1e-8\n", + " B = B / B.sum(axis=1, keepdims=True)\n", + " target = torch.tensor((B / 
num_splines).reshape(-1), dtype=torch.float32)\n", + " opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr)\n", + " for ep in range(1, epochs + 1):\n", + " opt.zero_grad()\n", + " probs = self.qcbm().to(torch.float32)\n", + " loss = F.mse_loss(probs, target)\n", + " loss.backward()\n", + " opt.step()\n", + " if verbose and (ep % 50 == 0 or ep == 1):\n", + " with torch.no_grad():\n", + " tv = 0.5 * torch.sum(torch.abs(probs - target)).item()\n", + " print(f\"[QCBM pretrain] epoch {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}\")\n", + " self.qcbm.freeze()\n", + " def forward(self, X_in: torch.Tensor, input_is_01: bool) -> torch.Tensor:\n", + " assert self._built, \"Call build(input_dim) first.\"\n", + " X_in = X_in.to(torch.float32)\n", + " B, D = X_in.shape\n", + " M = self.cfg.n_nodes\n", + " edge_probs = [mix().to(torch.float32) for mix in self.mixers]\n", + " X01_pos = (X_in if input_is_01 else torch.sigmoid(X_in)).to(torch.float32)\n", + " nodes = []\n", + " eidx = 0\n", + " for m in range(M):\n", + " acc = torch.zeros(B, dtype=torch.float32, device=X_in.device)\n", + " for j in range(D):\n", + " probs_flat = edge_probs[eidx]\n", + " edge = self.edges[eidx]\n", + " x_pos = X01_pos[:, j].to(torch.float32)\n", + " out_j = edge.batch_forward(x_pos, probs_flat).to(torch.float32)\n", + " acc = acc + out_j\n", + " eidx += 1\n", + " nodes.append(acc)\n", + " nodes = torch.stack(nodes, dim=1).to(torch.float32)\n", + " return nodes\n", + "\n", + "@dataclass\n", + "class KANReadoutCfg:\n", + " n_classes: int\n", + " in_dim: int\n", + " fourier_k: int = 3\n", + " fourier_depth: int = 1\n", + "\n", + "class KANReadout(nn.Module):\n", + " def __init__(self, cfg: KANReadoutCfg, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " C, M = cfg.n_classes, cfg.in_dim\n", + " self.qfr = nn.ModuleList([\n", + " QuantumBlock(k_frequencies=cfg.fourier_k,\n", + " entangle_depth=cfg.fourier_depth,\n", + " seed=seed + 131 * c + 
m)\n", + " for c in range(C) for m in range(M)\n", + " ])\n", + " self.b = nn.Parameter(torch.zeros(C, dtype=torch.float32))\n", + " def _edge_idx(self, c: int, m: int) -> int:\n", + " return c * self.cfg.in_dim + m\n", + " def forward(self, H: torch.Tensor) -> torch.Tensor:\n", + " H = H.to(torch.float32)\n", + " B, M = H.shape\n", + " C = self.cfg.n_classes\n", + " H01 = torch.sigmoid(H)\n", + " logits = []\n", + " for c in range(C):\n", + " acc_c = torch.zeros(B, dtype=torch.float32, device=H.device)\n", + " for m in range(M):\n", + " qfr = self.qfr[self._edge_idx(c, m)]\n", + " acc_c = acc_c + qfr.forward_batch(H01[:, m])\n", + " logits.append(acc_c + self.b[c])\n", + " return torch.stack(logits, dim=1).to(torch.float32)\n", + "\n", + "@dataclass\n", + "class QuKANNetCfg:\n", + " layer1: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n", + " layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))\n", + " n_classes: int = 2\n", + "\n", + "class QuKANNet(nn.Module):\n", + " def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.cfg = cfg\n", + " self.l1 = QuKANLayer(cfg.layer1, seed=seed)\n", + " self.l1.build(input_dim=input_dim, seed=seed)\n", + " self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)\n", + " self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)\n", + " self.readout = KANReadout(\n", + " KANReadoutCfg(\n", + " n_classes=cfg.n_classes,\n", + " in_dim=cfg.layer2.n_nodes,\n", + " fourier_k=3,\n", + " fourier_depth=1\n", + " ),\n", + " seed=seed + 1234\n", + " )\n", + " def pretrain_qcbms(self, degree=2, epochs=200, lr=5e-2, verbose=True):\n", + " print(\"[Pretrain] Layer 1 QCBM on degree-2 B-splines\")\n", + " self.l1.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, 
verbose=verbose)\n", + " print(\"[Pretrain] Layer 2 QCBM on degree-2 B-splines\")\n", + " self.l2.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)\n", + " def forward(self, X01: torch.Tensor) -> torch.Tensor:\n", + " X01 = X01.to(torch.float32)\n", + " h1 = self.l1(X01, input_is_01=True).to(torch.float32)\n", + " h2 = self.l2(h1, input_is_01=False).to(torch.float32)\n", + " return self.readout(h2)\n", + "\n", + "def run_social(seed=0):\n", + " torch.manual_seed(seed)\n", + " np.random.seed(seed)\n", + " assert os.path.exists(CSV_PATH), f\"CSV not found: {CSV_PATH}\"\n", + " df = pd.read_csv(CSV_PATH)\n", + " cols = [c.lower() for c in df.columns]\n", + " col_map = {c.lower(): c for c in df.columns}\n", + " needed = [\"age\", \"estimatedsalary\", \"purchased\"]\n", + " for k in needed:\n", + " assert k in cols, f\"Column '{k}' not found in CSV. Found columns: {df.columns.tolist()}\"\n", + " X_np = df[[col_map[\"age\"], col_map[\"estimatedsalary\"]]].values.astype(np.float32)\n", + " y_np = df[col_map[\"purchased\"]].values.astype(np.int64)\n", + " scaler = MinMaxScaler(feature_range=(0.0, 1.0))\n", + " X01 = scaler.fit_transform(X_np).astype(np.float32)\n", + " X_tr, X_te, y_tr, y_te = train_test_split(\n", + " X01, y_np, test_size=0.3, random_state=seed, stratify=y_np\n", + " )\n", + " X_tr = torch.tensor(X_tr, dtype=torch.float32)\n", + " X_te = torch.tensor(X_te, dtype=torch.float32)\n", + " y_tr = torch.tensor(y_tr, dtype=torch.long)\n", + " y_te = torch.tensor(y_te, dtype=torch.long)\n", + " cfg = QuKANNetCfg(\n", + " layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5,\n", + " qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),\n", + " layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5,\n", + " qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),\n", + " n_classes=2,\n", + " )\n", + " model = QuKANNet(cfg, input_dim=2, seed=seed)\n", + " model.pretrain_qcbms(degree=2, 
epochs=200, lr=5e-2, verbose=True)\n", + " opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4)\n", + " sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=60)\n", + " ce = nn.CrossEntropyLoss(label_smoothing=0.03)\n", + " print(\"\\nTraining QuKAN on Social_Network_Ads\")\n", + " epochs = 60\n", + " B = 32\n", + " for ep in range(1, epochs + 1):\n", + " model.train()\n", + " perm = torch.randperm(X_tr.shape[0])\n", + " Xb_all, yb_all = X_tr[perm], y_tr[perm]\n", + " tot, corr, loss_sum = 0, 0, 0.0\n", + " for i in range(0, Xb_all.shape[0], B):\n", + " xb = Xb_all[i:i+B]\n", + " yb = yb_all[i:i+B]\n", + " opt.zero_grad(set_to_none=True)\n", + " logits = model(xb)\n", + " loss = ce(logits, yb)\n", + " loss.backward()\n", + " torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)\n", + " opt.step()\n", + " loss_sum += float(loss.item()) * xb.size(0)\n", + " tot += xb.size(0)\n", + " corr += (logits.argmax(1) == yb).sum().item()\n", + " sched.step()\n", + " train_acc = 100.0 * corr / tot\n", + " train_loss = loss_sum / tot\n", + " model.eval()\n", + " with torch.no_grad():\n", + " logits_te = model(X_te)\n", + " val_acc = 100.0 * (logits_te.argmax(1) == y_te).float().mean().item()\n", + " if ep % 2 == 1 or ep >= epochs - 10:\n", + " print(f\"Epoch {ep:03d} | Loss={train_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%\")\n", + " print(\"Done.\")\n", + " with torch.no_grad():\n", + " pred = model(X_te).argmax(1).cpu().numpy()\n", + " acc = (pred == y_te.cpu().numpy()).mean() * 100\n", + " print(f\"\\nFinal Test Accuracy: {acc:.2f}%\")\n", + "\n", + "if __name__ == \"__main__\":\n", + " run_social(seed=0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5236d506-557c-4ed2-ba97-f6e80122eb7b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Input dim: 11\n", + "[HybridQuKANLayer] built edges: 6 nodes × 11 inputs = 66 edges\n", + "[HybridQuKANLayer] built 
edges: 6 nodes × 6 inputs = 36 edges\n", + "\n", + "[Pretrain] Layer 1 QCBM\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 060 | MSE=0.000052 | TV=0.350588\n", + "[QCBM pretrain] 079 | MSE=0.000043 | TV=0.327553\n", + ">> QCBM frozen.\n", + "\n", + "[Pretrain] Layer 2 QCBM\n", + "[QCBM pretrain] 000 | MSE=0.007276 | TV=0.955730\n", + "[QCBM pretrain] 020 | MSE=0.000119 | TV=0.506432\n", + "[QCBM pretrain] 040 | MSE=0.000074 | TV=0.419522\n", + "[QCBM pretrain] 060 | MSE=0.000059 | TV=0.372766\n", + "[QCBM pretrain] 079 | MSE=0.000047 | TV=0.344233\n", + ">> QCBM frozen.\n", + "\n", + "=== Training QuKAN on Titanic ===\n", + "Epoch 001 | Train Acc=61.64% | Val Acc=61.57%\n", + "Epoch 002 | Train Acc=61.64% | Val Acc=61.57%\n", + "Epoch 003 | Train Acc=62.92% | Val Acc=67.91%\n", + "Epoch 004 | Train Acc=66.93% | Val Acc=73.13%\n", + "Epoch 005 | Train Acc=75.92% | Val Acc=79.10%\n", + "Epoch 006 | Train Acc=77.21% | Val Acc=78.36%\n", + "Epoch 007 | Train Acc=77.53% | Val Acc=81.34%\n", + "Epoch 008 | Train Acc=78.97% | Val Acc=82.46%\n", + "Epoch 009 | Train Acc=78.49% | Val Acc=80.22%\n", + "Epoch 010 | Train Acc=79.45% | Val Acc=82.09%\n", + "Epoch 011 | Train Acc=78.81% | Val Acc=81.34%\n", + "Epoch 012 | Train Acc=80.42% | Val Acc=82.09%\n", + "Epoch 013 | Train Acc=79.45% | Val Acc=81.34%\n", + "Epoch 014 | Train Acc=80.26% | Val Acc=83.21%\n", + "Epoch 015 | Train Acc=80.58% | Val Acc=82.09%\n", + "Epoch 016 | Train Acc=80.90% | Val Acc=82.84%\n", + "Epoch 017 | Train Acc=81.38% | Val Acc=82.46%\n", + "Epoch 018 | Train Acc=80.74% | Val Acc=82.09%\n", + "Epoch 019 | Train Acc=81.38% | Val Acc=83.21%\n", + "Epoch 020 | Train Acc=81.54% | Val Acc=82.46%\n", + "Epoch 021 | Train Acc=81.38% | Val Acc=82.46%\n", + "Epoch 022 | Train Acc=81.38% | Val Acc=82.09%\n", + "Epoch 023 | Train Acc=81.70% | Val Acc=82.09%\n", + 
"Epoch 024 | Train Acc=81.86% | Val Acc=82.09%\n", + "Epoch 025 | Train Acc=82.83% | Val Acc=83.21%\n", + "Epoch 026 | Train Acc=82.18% | Val Acc=82.09%\n", + "Epoch 027 | Train Acc=82.02% | Val Acc=82.46%\n", + "Epoch 028 | Train Acc=82.18% | Val Acc=82.46%\n", + "Epoch 029 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 030 | Train Acc=82.02% | Val Acc=82.46%\n", + "Epoch 031 | Train Acc=82.18% | Val Acc=82.84%\n", + "Epoch 032 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 033 | Train Acc=82.50% | Val Acc=82.84%\n", + "Epoch 034 | Train Acc=82.18% | Val Acc=82.46%\n", + "Epoch 035 | Train Acc=82.50% | Val Acc=82.46%\n", + "Epoch 036 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 037 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 038 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 039 | Train Acc=82.34% | Val Acc=82.46%\n", + "Epoch 040 | Train Acc=82.34% | Val Acc=82.46%\n" + ] + } + ], + "source": [ + "torch.set_default_dtype(torch.float32)\n", + "CSV_PATH = r\"C:\\Users\\riakh\\Downloads\\archive\\Titanic-Dataset.csv\"\n", + "\n", + "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n", + " assert num_splines >= degree + 1\n", + " n = num_splines - 1\n", + " p = degree\n", + " if n - p > 0:\n", + " interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n", + " else:\n", + " interior = np.array([], dtype=float)\n", + " knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n", + " def N(i, r, t):\n", + " if r == 0:\n", + " left, right = knots[i], knots[i + 1]\n", + " return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)\n", + " left_den = knots[i + r] - knots[i]\n", + " right_den = knots[i + r + 1] - knots[i + 1]\n", + " left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0\n", + " right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0\n", + " return left_term + right_term\n", + " tgrid = 
np.asarray(grid, dtype=float)\n", + " return np.vstack([N(i, p, tgrid) for i in range(num_splines)])\n", + "\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.n_qubits = self.L + self.P\n", + " self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))\n", + " return qml.probs(wires=range(self.n_qubits))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.theta).to(torch.float32)\n", + " def freeze(self):\n", + " self.theta.requires_grad_(False)\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L, self.P = qcbm.L, qcbm.P\n", + " self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))\n", + " return qml.probs(wires=range(self.L + self.P))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies=3, seed=0):\n", + " super().__init__()\n", + " 
torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K))\n", + " self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " def forward_batch(self, x01_vec):\n", + " x01_vec = torch.clamp(x01_vec, 0, 1)\n", + " omega = F.softplus(self.log_omega) + 1e-4\n", + " vals = []\n", + " for val in x01_vec:\n", + " alpha = omega * (2*math.pi*val) + self.phase\n", + " z = torch.cos(alpha)\n", + " x = torch.sin(alpha)\n", + " vals.append((self.w_cos*z).sum() + (self.w_sin*x).sum())\n", + " return torch.stack(vals)\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):\n", + " super().__init__()\n", + " self.mixer = mixer\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.Nlabel, self.Npos = 2**self.L, 2**self.P\n", + " self.wf = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.wq = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.qfour = QuantumBlock(fourier_k, seed=seed)\n", + " def batch_forward(self, x_pos01, probs_flat):\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01,0,1)*(self.Npos-1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos-1)\n", + " p_vals = lp[:,idx].sum(0)\n", + " qfr_vals = self.qfour.forward_batch(x_pos01)\n", + " return self.wf*p_vals + self.wq*qfr_vals\n", + "\n", + "@dataclass\n", + "class QuKANLayerCfg:\n", + " n_nodes: int = 6\n", + " n_label_qubits: int = 2\n", + " n_pos_qubits: int = 5\n", + " qcbm_depth: int = 3\n", + " label_mixer_depth: int = 1\n", + " fourier_k: int = 3\n", + " mixers_trainable: bool = False\n", + "\n", + "class QuKANLayer(nn.Module):\n", + " def __init__(self, cfg: QuKANLayerCfg, seed=0):\n", + " super().__init__()\n", + " self.cfg = cfg\n", + " self.qcbm = 
QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n", + " self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n", + " self._built=False\n", + " self._train_mixers = cfg.mixers_trainable\n", + " def build(self, input_dim, seed=0):\n", + " for m in range(self.cfg.n_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, self.cfg.label_mixer_depth, seed+97*m+j)\n", + " edge = QuKANResidualEdge(mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits,\n", + " self.cfg.fourier_k, seed=seed+991*m+13*j)\n", + " self.mixers.append(mixer); self.edges.append(edge)\n", + " self._built=True\n", + " print(f\"[QuKANLayer] built edges: {self.cfg.n_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges\")\n", + " def pretrain_qcbm_on_splines(self, degree=2, epochs=80, lr=5e-2, verbose=True):\n", + " num_spl, Npos = 2**self.cfg.n_label_qubits, 2**self.cfg.n_pos_qubits\n", + " grid = np.linspace(0,1,Npos)\n", + " B = bspline_basis_matrix(num_spl, degree, grid)\n", + " B = (B+1e-8)/B.sum(1,keepdims=True)\n", + " target = torch.tensor((B/num_spl).reshape(-1), dtype=torch.float32)\n", + " opt=torch.optim.Adam(self.qcbm.parameters(), lr=lr)\n", + " for ep in range(epochs):\n", + " opt.zero_grad(); probs=self.qcbm()\n", + " loss=F.mse_loss(probs, target); loss.backward(); opt.step()\n", + " if verbose and (ep%20==0 or ep==epochs-1):\n", + " tv=0.5*torch.sum(torch.abs(probs-target)).item()\n", + " print(f\"[QCBM pretrain] {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}\")\n", + " self.qcbm.freeze()\n", + " print(\">> QCBM frozen.\")\n", + " def forward(self,X, input_is_01=True):\n", + " X01 = (X if input_is_01 else torch.sigmoid(X))\n", + " if self._train_mixers:\n", + " edge_probs=[mix() for mix in self.mixers]\n", + " else:\n", + " with torch.no_grad():\n", + " edge_probs=[mix() for mix in self.mixers]\n", + " nodes=[]; eidx=0\n", + " for m in range(self.cfg.n_nodes):\n", + " acc=torch.zeros(X.shape[0], dtype=torch.float32)\n", + " 
for j in range(X.shape[1]):\n", + " out=self.edges[eidx].batch_forward(X01[:,j], edge_probs[eidx])\n", + " acc=acc+out; eidx+=1\n", + " nodes.append(acc)\n", + " return torch.stack(nodes,1)\n", + "\n", + "@dataclass\n", + "class KANReadoutCfg:\n", + " n_classes:int; in_dim:int; fourier_k:int=3\n", + "\n", + "class KANReadout(nn.Module):\n", + " def __init__(self,cfg:KANReadoutCfg,seed=0):\n", + " super().__init__()\n", + " self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n", + " self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n", + " for c in range(C) for m in range(M)])\n", + " self.b=nn.Parameter(torch.zeros(C))\n", + " def _idx(self,c,m): return c*self.cfg.in_dim+m\n", + " def forward(self,H):\n", + " H01=torch.sigmoid(H); logits=[]\n", + " for c in range(self.cfg.n_classes):\n", + " acc=torch.zeros(H.shape[0], dtype=torch.float32)\n", + " for m in range(H.shape[1]):\n", + " acc=acc+self.qfr[self._idx(c,m)].forward_batch(H01[:,m])\n", + " logits.append(acc+self.b[c])\n", + " return torch.stack(logits,1)\n", + "\n", + "@dataclass\n", + "class QuKANNetCfg:\n", + " layer1:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n", + " layer2:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n", + " n_classes:int=2\n", + "\n", + "class QuKANNet(nn.Module):\n", + " def __init__(self,cfg,input_dim,seed=0):\n", + " super().__init__()\n", + " self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n", + " self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n", + " self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)\n", + " def pretrain_qcbms(self,degree=2,epochs=80,lr=5e-2):\n", + " print(\"\\n[Pretrain] Layer 1 QCBM\"); self.l1.pretrain_qcbm_on_splines(degree,epochs,lr)\n", + " print(\"\\n[Pretrain] Layer 2 QCBM\"); self.l2.pretrain_qcbm_on_splines(degree,epochs,lr)\n", + " def forward(self,X):\n", + " h1=self.l1(X,True); h2=self.l2(h1,False); return self.readout(h2)\n", + "\n", + "def 
_first_present(cols_map, names):\n", + " for n in names:\n", + " if n in cols_map:\n", + " return cols_map[n]\n", + " return None\n", + "\n", + "def load_titanic_features(csv_path: str):\n", + " assert os.path.exists(csv_path), f\"CSV not found: {csv_path}\"\n", + " df = pd.read_csv(csv_path)\n", + " cols_map = {c.lower(): c for c in df.columns}\n", + " survived = _first_present(cols_map, [\"survived\"])\n", + " pclass = _first_present(cols_map, [\"pclass\",\"p class\",\"p_class\"])\n", + " sex = _first_present(cols_map, [\"sex\",\"gender\"])\n", + " age = _first_present(cols_map, [\"age\"])\n", + " sibsp = _first_present(cols_map, [\"sibsp\",\"siblings/spouses aboard\",\"siblingsaboard\",\"siblings_spouses_aboard\"])\n", + " parch = _first_present(cols_map, [\"parch\",\"parents/children aboard\",\"parentschildrenaboard\",\"parents_children_aboard\"])\n", + " fare = _first_present(cols_map, [\"fare\"])\n", + " embarked = _first_present(cols_map, [\"embarked\",\"port of embarkation\",\"emb\"])\n", + " for k,v in {\"survived\":survived,\"pclass\":pclass,\"sex\":sex,\"age\":age,\n", + " \"sibsp\":sibsp,\"parch\":parch,\"fare\":fare,\"embarked\":embarked}.items():\n", + " if v is None:\n", + " raise ValueError(f\"Could not find required column alias for '{k}'. 
Found columns: {list(df.columns)}\")\n", + " sub = df[[survived, pclass, sex, age, sibsp, parch, fare, embarked]].copy()\n", + " sub.columns = [c.lower() for c in sub.columns]\n", + " sub[\"sex\"] = sub[\"sex\"].astype(str).str.lower().map({\"male\": 0, \"m\": 0, \"female\": 1, \"f\": 1})\n", + " sub[\"embarked\"] = sub[\"embarked\"].astype(str).str.upper().map({\"S\": \"S\", \"C\": \"C\", \"Q\": \"Q\"})\n", + " sub[\"age\"] = pd.to_numeric(sub[\"age\"], errors=\"coerce\")\n", + " sub[\"fare\"] = pd.to_numeric(sub[\"fare\"], errors=\"coerce\")\n", + " sub[\"age\"] = sub[\"age\"].fillna(sub[\"age\"].median())\n", + " sub[\"fare\"] = sub[\"fare\"].fillna(sub[\"fare\"].median())\n", + " sub[\"sex\"] = sub[\"sex\"].fillna(0)\n", + " sub[\"embarked\"] = sub[\"embarked\"].fillna(\"S\")\n", + " one_hot_emb = pd.get_dummies(sub[\"embarked\"], prefix=\"emb\", drop_first=False)\n", + " one_hot_cls = pd.get_dummies(sub[\"pclass\"].astype(int), prefix=\"pcls\", drop_first=False)\n", + " X = pd.concat(\n", + " [one_hot_cls,\n", + " sub[[\"sex\", \"age\", \"sibsp\", \"parch\", \"fare\"]].reset_index(drop=True),\n", + " one_hot_emb.reset_index(drop=True)],axis=1).astype(np.float32)\n", + " y = sub[\"survived\"].astype(np.int64).to_numpy()\n", + " scaler = MinMaxScaler((0.0, 1.0))\n", + " X01 = scaler.fit_transform(X.to_numpy().astype(np.float32)).astype(np.float32)\n", + " return X01, y\n", + "\n", + "def run_titanic(seed=0):\n", + " torch.manual_seed(seed); np.random.seed(seed)\n", + " X, y = load_titanic_features(CSV_PATH)\n", + " X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)\n", + " X_tr, X_te = torch.tensor(X_tr, dtype=torch.float32), torch.tensor(X_te, dtype=torch.float32)\n", + " y_tr, y_te = torch.tensor(y_tr, dtype=torch.long), torch.tensor(y_te, dtype=torch.long)\n", + " input_dim = X_tr.shape[1]\n", + " print(f\"Input dim: {input_dim}\")\n", + " cfg = QuKANNetCfg(\n", + " layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, 
n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, mixers_trainable=False),\n", + " layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, mixers_trainable=False),\n", + " n_classes=2\n", + " )\n", + " model = QuKANNet(cfg, input_dim=input_dim, seed=seed)\n", + " model.pretrain_qcbms()\n", + " opt = torch.optim.AdamW(model.parameters(), lr=1.5e-3, weight_decay=8e-4)\n", + " sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=40)\n", + " ce = nn.CrossEntropyLoss(label_smoothing=0.03)\n", + " print(\"\\nTraining QuKAN on Titanic\")\n", + " for ep in range(1, 41):\n", + " model.train()\n", + " perm = torch.randperm(X_tr.shape[0])\n", + " Xb_all, yb_all = X_tr[perm], y_tr[perm]\n", + " loss_sum, tot, corr = 0.0, 0, 0\n", + " for i in range(0, Xb_all.shape[0], 64):\n", + " xb, yb = Xb_all[i:i+64], yb_all[i:i+64]\n", + " opt.zero_grad(set_to_none=True)\n", + " logits = model(xb)\n", + " loss = ce(logits, yb)\n", + " loss.backward()\n", + " opt.step()\n", + " loss_sum += float(loss.item()) * xb.size(0)\n", + " tot += xb.size(0)\n", + " corr += (logits.argmax(1) == yb).sum().item()\n", + " sched.step()\n", + " train_acc = 100.0 * corr / tot\n", + " val_acc = (model(X_te).argmax(1) == y_te).float().mean().item() * 100.0\n", + " print(f\"Epoch {ep:03d} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%\")\n", + "\n", + "if __name__ == \"__main__\":\n", + " run_titanic(0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "87cf74a9-551d-4070-a0a1-e6623c1b2f64", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[HybridQuKANLayer] built edges: 4 nodes × 64 inputs = 256 edges\n", + "[HybridQuKANLayer] built edges: 4 nodes × 4 inputs = 16 edges\n", + "\n", + "[Pretrain] Layer 1 QCBM\n", + "[QCBM pretrain] 000 | MSE=0.003764 | TV=0.976948\n", + "[QCBM pretrain] 020 | MSE=0.000040 | TV=0.549158\n", + "[QCBM pretrain] 040 | MSE=0.000027 | TV=0.492827\n", + 
"[QCBM pretrain] 060 | MSE=0.000024 | TV=0.469451\n", + "[QCBM pretrain] 079 | MSE=0.000021 | TV=0.438543\n", + ">> QCBM frozen.\n", + "\n", + "[Pretrain] Layer 2 QCBM\n", + "[QCBM pretrain] 000 | MSE=0.003764 | TV=0.976846\n", + "[QCBM pretrain] 020 | MSE=0.000039 | TV=0.556429\n", + "[QCBM pretrain] 040 | MSE=0.000029 | TV=0.503609\n", + "[QCBM pretrain] 060 | MSE=0.000023 | TV=0.457479\n", + "[QCBM pretrain] 079 | MSE=0.000020 | TV=0.429788\n", + ">> QCBM frozen.\n", + "\n", + "=== Training QuKAN on Digits Dataset (1000 samples) ===\n", + "Epoch 001 | Train Acc=9.86% | Val Acc=10.00%\n", + "Epoch 002 | Train Acc=11.71% | Val Acc=13.67%\n", + "Epoch 003 | Train Acc=23.29% | Val Acc=29.33%\n", + "Epoch 004 | Train Acc=31.29% | Val Acc=31.67%\n", + "Epoch 005 | Train Acc=34.57% | Val Acc=35.00%\n", + "Epoch 006 | Train Acc=36.86% | Val Acc=36.33%\n", + "Epoch 007 | Train Acc=41.29% | Val Acc=42.00%\n", + "Epoch 008 | Train Acc=47.29% | Val Acc=45.00%\n", + "Epoch 009 | Train Acc=49.86% | Val Acc=49.67%\n", + "Epoch 010 | Train Acc=56.29% | Val Acc=55.33%\n", + "Epoch 011 | Train Acc=60.14% | Val Acc=55.67%\n", + "Epoch 012 | Train Acc=64.29% | Val Acc=58.67%\n", + "Epoch 013 | Train Acc=66.43% | Val Acc=59.33%\n", + "Epoch 014 | Train Acc=67.57% | Val Acc=61.33%\n", + "Epoch 015 | Train Acc=68.57% | Val Acc=61.67%\n", + "Epoch 016 | Train Acc=68.71% | Val Acc=62.00%\n", + "Epoch 017 | Train Acc=70.14% | Val Acc=64.67%\n", + "Epoch 018 | Train Acc=71.43% | Val Acc=64.33%\n", + "Epoch 019 | Train Acc=72.86% | Val Acc=66.33%\n", + "Epoch 020 | Train Acc=74.14% | Val Acc=68.00%\n", + "Epoch 021 | Train Acc=74.86% | Val Acc=68.67%\n", + "Epoch 022 | Train Acc=76.71% | Val Acc=70.00%\n", + "Epoch 023 | Train Acc=77.43% | Val Acc=70.67%\n", + "Epoch 024 | Train Acc=78.86% | Val Acc=71.33%\n", + "Epoch 025 | Train Acc=79.86% | Val Acc=72.00%\n", + "Epoch 026 | Train Acc=80.86% | Val Acc=73.00%\n", + "Epoch 027 | Train Acc=80.57% | Val Acc=73.33%\n", + "Epoch 028 | Train 
Acc=80.71% | Val Acc=73.33%\n", + "Epoch 029 | Train Acc=80.86% | Val Acc=73.33%\n", + "Epoch 030 | Train Acc=80.86% | Val Acc=73.33%\n", + "Epoch 031 | Train Acc=81.14% | Val Acc=73.33%\n", + "Epoch 032 | Train Acc=81.29% | Val Acc=73.67%\n", + "Epoch 033 | Train Acc=81.57% | Val Acc=73.67%\n", + "Epoch 034 | Train Acc=81.86% | Val Acc=73.33%\n", + "Epoch 035 | Train Acc=82.00% | Val Acc=73.33%\n", + "Epoch 036 | Train Acc=82.00% | Val Acc=73.33%\n", + "Epoch 037 | Train Acc=82.14% | Val Acc=73.33%\n", + "Epoch 038 | Train Acc=82.14% | Val Acc=73.33%\n", + "Epoch 039 | Train Acc=82.14% | Val Acc=73.33%\n", + "Epoch 040 | Train Acc=82.14% | Val Acc=73.33%\n" + ] + } + ], + "source": [ + "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n", + " assert num_splines >= degree + 1\n", + " n = num_splines - 1\n", + " p = degree\n", + " if n - p > 0:\n", + " interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n", + " else:\n", + " interior = np.array([], dtype=float)\n", + " knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n", + " def N(i, r, t):\n", + " if r == 0:\n", + " left, right = knots[i], knots[i + 1]\n", + " return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)\n", + " left_den = knots[i + r] - knots[i]\n", + " right_den = knots[i + r + 1] - knots[i + 1]\n", + " left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0\n", + " right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0\n", + " return left_term + right_term\n", + " tgrid = np.asarray(grid, dtype=float)\n", + " return np.vstack([N(i, p, tgrid) for i in range(num_splines)])\n", + "\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.n_qubits = self.L + 
self.P\n", + " self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))\n", + " return qml.probs(wires=range(self.n_qubits))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.theta).to(torch.float32)\n", + " def freeze(self):\n", + " self.theta.requires_grad_(False)\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L, self.P = qcbm.L, qcbm.P\n", + " self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))\n", + " return qml.probs(wires=range(self.L + self.P))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies=3, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K))\n", + " self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " def forward_batch(self, x01_vec):\n", + " x01_vec = 
torch.clamp(x01_vec, 0, 1)\n", + " omega = F.softplus(self.log_omega) + 1e-4\n", + " vals = []\n", + " for val in x01_vec:\n", + " alpha = omega * (2*math.pi*val) + self.phase\n", + " z = torch.cos(alpha)\n", + " x = torch.sin(alpha)\n", + " vals.append((self.w_cos*z).sum() + (self.w_sin*x).sum())\n", + " return torch.stack(vals)\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):\n", + " super().__init__()\n", + " self.mixer = mixer\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.Nlabel, self.Npos = 2**self.L, 2**self.P\n", + " self.wf = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.wq = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.qfour = QuantumBlock(fourier_k, seed=seed)\n", + " def batch_forward(self, x_pos01, probs_flat):\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01,0,1)*(self.Npos-1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos-1)\n", + " p_vals = lp[:,idx].sum(0)\n", + " qfr_vals = self.qfour.forward_batch(x_pos01)\n", + " return self.wf*p_vals + self.wq*qfr_vals\n", + "\n", + "@dataclass\n", + "class QuKANLayerCfg:\n", + " n_nodes: int = 4\n", + " n_label_qubits: int = 2\n", + " n_pos_qubits: int = 6\n", + " qcbm_depth: int = 3\n", + " label_mixer_depth: int = 1\n", + " fourier_k: int = 3\n", + " mixers_trainable: bool = False\n", + "\n", + "class QuKANLayer(nn.Module):\n", + " def __init__(self, cfg: QuKANLayerCfg, seed=0):\n", + " super().__init__()\n", + " self.cfg = cfg\n", + " self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n", + " self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n", + " self._built=False\n", + " self._train_mixers = cfg.mixers_trainable\n", + " def build(self, input_dim, seed=0):\n", + " for m in range(self.cfg.n_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, 
self.cfg.label_mixer_depth, seed+97*m+j)\n", + " edge = QuKANResidualEdge(mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits,\n", + " self.cfg.fourier_k, seed=seed+991*m+13*j)\n", + " self.mixers.append(mixer); self.edges.append(edge)\n", + " self._built=True\n", + " print(f\"[QuKANLayer] built edges: {self.cfg.n_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges\")\n", + " def pretrain_qcbm_on_splines(self, degree=2, epochs=80, lr=5e-2, verbose=True):\n", + " num_spl, Npos = 2**self.cfg.n_label_qubits, 2**self.cfg.n_pos_qubits\n", + " grid = np.linspace(0,1,Npos)\n", + " B = bspline_basis_matrix(num_spl, degree, grid)\n", + " B = (B+1e-8)/B.sum(1,keepdims=True)\n", + " target = torch.tensor((B/num_spl).reshape(-1), dtype=torch.float32)\n", + " opt=torch.optim.Adam(self.qcbm.parameters(), lr=lr)\n", + " for ep in range(epochs):\n", + " opt.zero_grad(); probs=self.qcbm()\n", + " loss=F.mse_loss(probs, target); loss.backward(); opt.step()\n", + " if verbose and (ep%20==0 or ep==epochs-1):\n", + " tv=0.5*torch.sum(torch.abs(probs-target)).item()\n", + " print(f\"[QCBM pretrain] {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}\")\n", + " self.qcbm.freeze()\n", + " print(\"QCBM frozen.\")\n", + " def forward(self,X, input_is_01=True):\n", + " X01 = (X if input_is_01 else torch.sigmoid(X))\n", + " if self._train_mixers:\n", + " edge_probs=[mix() for mix in self.mixers]\n", + " else:\n", + " with torch.no_grad():\n", + " edge_probs=[mix() for mix in self.mixers]\n", + " nodes=[]; eidx=0\n", + " for m in range(self.cfg.n_nodes):\n", + " acc=torch.zeros(X.shape[0], dtype=torch.float32)\n", + " for j in range(X.shape[1]):\n", + " out=self.edges[eidx].batch_forward(X01[:,j], edge_probs[eidx])\n", + " acc=acc+out; eidx+=1\n", + " nodes.append(acc)\n", + " return torch.stack(nodes,1)\n", + "\n", + "@dataclass\n", + "class KANReadoutCfg:\n", + " n_classes:int; in_dim:int; fourier_k:int=3\n", + "\n", + "class KANReadout(nn.Module):\n", + " def 
__init__(self,cfg:KANReadoutCfg,seed=0):\n", + " super().__init__()\n", + " self.cfg=cfg; C,M=cfg.n_classes,cfg.in_dim\n", + " self.qfr=nn.ModuleList([QuantumBlock(cfg.fourier_k,seed+131*c+m)\n", + " for c in range(C) for m in range(M)])\n", + " self.b=nn.Parameter(torch.zeros(C))\n", + " def _idx(self,c,m): return c*self.cfg.in_dim+m\n", + " def forward(self,H):\n", + " H01=torch.sigmoid(H); logits=[]\n", + " for c in range(self.cfg.n_classes):\n", + " acc=torch.zeros(H.shape[0], dtype=torch.float32)\n", + " for m in range(H.shape[1]):\n", + " acc=acc+self.qfr[self._idx(c,m)].forward_batch(H01[:,m])\n", + " logits.append(acc+self.b[c])\n", + " return torch.stack(logits,1)\n", + "\n", + "@dataclass\n", + "class QuKANNetCfg:\n", + " layer1:QuKANLayerCfg=field(default_factory=QuKANLayerCfg)\n", + " layer2:QuKANLayerCfg=field(default_factory=lambda: QuKANLayerCfg(n_pos_qubits=6))\n", + " n_classes:int=10\n", + "\n", + "class QuKANNet(nn.Module):\n", + " def __init__(self,cfg,input_dim,seed=0):\n", + " super().__init__()\n", + " self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed)\n", + " self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1)\n", + " self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123)\n", + " def pretrain_qcbms(self,degree=2,epochs=80,lr=5e-2):\n", + " print(\"\\n[Pretrain] Layer 1 QCBM\"); self.l1.pretrain_qcbm_on_splines(degree,epochs,lr)\n", + " print(\"\\n[Pretrain] Layer 2 QCBM\"); self.l2.pretrain_qcbm_on_splines(degree,epochs,lr)\n", + " def forward(self,X):\n", + " h1=self.l1(X,True); h2=self.l2(h1,False); return self.readout(h2)\n", + "\n", + "def run_digits(seed=0):\n", + " torch.manual_seed(seed); np.random.seed(seed)\n", + " digits = load_digits()\n", + " X, y = digits.data.astype(np.float32), digits.target.astype(np.int64)\n", + " X, y = X[:1000], y[:1000]\n", + " X = MinMaxScaler((0,1)).fit_transform(X).astype(np.float32)\n", + " X_tr, X_te, y_tr, y_te = 
train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)\n", + " X_tr, X_te = torch.tensor(X_tr), torch.tensor(X_te)\n", + " y_tr, y_te = torch.tensor(y_tr), torch.tensor(y_te)\n", + " model = QuKANNet(QuKANNetCfg(), input_dim=64, seed=seed)\n", + " model.pretrain_qcbms()\n", + " opt = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=8e-4)\n", + " sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=40)\n", + " ce = nn.CrossEntropyLoss(label_smoothing=0.05)\n", + " print(\"\\nTraining QuKAN on Digits Dataset (1000 samples)\")\n", + " for ep in range(1, 41):\n", + " model.train()\n", + " perm = torch.randperm(X_tr.shape[0])\n", + " Xb_all, yb_all = X_tr[perm], y_tr[perm]\n", + " loss_sum, tot, corr = 0.0, 0, 0\n", + " for i in range(0, Xb_all.shape[0], 64):\n", + " xb, yb = Xb_all[i:i+64], yb_all[i:i+64]\n", + " opt.zero_grad(set_to_none=True)\n", + " logits = model(xb)\n", + " loss = ce(logits, yb)\n", + " loss.backward()\n", + " opt.step()\n", + " loss_sum += float(loss.item()) * xb.size(0)\n", + " tot += xb.size(0)\n", + " corr += (logits.argmax(1) == yb).sum().item()\n", + " sched.step()\n", + " train_acc = 100.0 * corr / tot\n", + " val_acc = (model(X_te).argmax(1) == y_te).float().mean().item() * 100.0\n", + " print(f\"Epoch {ep:03d} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%\")\n", + "\n", + "if __name__ == \"__main__\":\n", + " run_digits(0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ea38ffad-6d35-48e0-b00c-85ebbcca4c4c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Data] Loading first 2000 rows from: C:\\Users\\riakh\\Downloads\\archive (26)\\HIGGS.csv\n", + "[QuKANNet] Initializing network...\n", + "[HybridQuKANLayer] Building with 28 inputs...\n", + "[HybridQuKANLayer] Built edges: 5 nodes × 28 inputs = 140 edges\n", + "[HybridQuKANLayer] Building with 5 inputs...\n", + "[HybridQuKANLayer] Built edges: 5 nodes × 5 inputs = 25 
edges\n", + "[QuKANNet] Build complete.\n", + "\n", + "=== Training QuKAN on HIGGS (28 features) ===\n", + "Epoch 001 | Loss=0.6926 | Train Acc=53.75% | Val Acc=53.75%\n", + "Epoch 002 | Loss=0.6905 | Train Acc=53.75% | Val Acc=53.75%\n", + "Epoch 003 | Loss=0.6895 | Train Acc=53.75% | Val Acc=53.75%\n", + "Epoch 004 | Loss=0.6897 | Train Acc=53.94% | Val Acc=53.75%\n", + "Epoch 005 | Loss=0.6877 | Train Acc=53.75% | Val Acc=54.00%\n", + "Epoch 006 | Loss=0.6871 | Train Acc=57.62% | Val Acc=53.75%\n", + "Epoch 007 | Loss=0.6850 | Train Acc=54.75% | Val Acc=55.50%\n", + "Epoch 008 | Loss=0.6819 | Train Acc=56.94% | Val Acc=55.50%\n", + "Epoch 009 | Loss=0.6759 | Train Acc=58.25% | Val Acc=56.75%\n", + "Epoch 010 | Loss=0.6719 | Train Acc=59.62% | Val Acc=56.00%\n", + "Epoch 011 | Loss=0.6715 | Train Acc=59.81% | Val Acc=57.00%\n", + "Epoch 012 | Loss=0.6679 | Train Acc=60.44% | Val Acc=54.75%\n", + "Epoch 013 | Loss=0.6630 | Train Acc=60.44% | Val Acc=58.00%\n", + "Epoch 014 | Loss=0.6581 | Train Acc=61.56% | Val Acc=55.75%\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import pennylane as qml\n", + "import math\n", + "from dataclasses import dataclass, field\n", + "from sklearn.model_selection import train_test_split\n", + "from sklearn.preprocessing import MinMaxScaler\n", + "\n", + "torch.set_default_dtype(torch.float32)\n", + "\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.n_qubits = self.L + self.P\n", + " self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " 
qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))\n", + " return qml.probs(wires=range(self.n_qubits))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.theta).to(torch.float32)\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth=2, seed=0):\n", + " super().__init__() \n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L, self.P = qcbm.L, qcbm.P\n", + " self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))\n", + " self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))\n", + " return qml.probs(wires=range(self.L + self.P))\n", + " self._qprobs = qnode\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies=4, entangle_depth=1, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K))\n", + " self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)\n", + " self.dev = qml.device(\"default.qubit\", wires=self.K)\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(alpha_vec):\n", + " for k in range(self.K):\n", + " qml.RY(alpha_vec[k], wires=k)\n", + " for _ in range(entangle_depth):\n", + " for k in range(self.K):\n", + " qml.CNOT([k, (k + 1) % self.K])\n", + " z = [qml.expval(qml.PauliZ(k)) for k in 
range(self.K)]\n", + " x = [qml.expval(qml.PauliX(k)) for k in range(self.K)]\n", + " return z + x\n", + " self._qnode = qnode\n", + " def forward_batch(self, x01_vec: torch.Tensor):\n", + " x01_vec = torch.clamp(x01_vec, 0, 1)\n", + " omega = F.softplus(self.log_omega) + 1e-4\n", + " vals = []\n", + " for val in x01_vec:\n", + " alpha = omega * (2 * math.pi * val) + self.phase\n", + " outs = self._qnode(alpha.to(torch.float32))\n", + " outs = torch.stack([torch.as_tensor(o, dtype=torch.float32) for o in outs])\n", + " vals.append((self.w_cos * outs[:self.K]).sum() + (self.w_sin * outs[self.K:]).sum())\n", + " return torch.stack(vals)\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=4, fourier_depth=1, seed=0, w_init=0.5):\n", + " super().__init__()\n", + " self.mixer = mixer\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.Nlabel, self.Npos = 2 ** self.L, 2 ** self.P\n", + " self.wf = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.wq = nn.Parameter(torch.tensor(float(w_init)))\n", + " self.qfour = QuantumBlock(fourier_k, fourier_depth, seed=seed)\n", + " def batch_forward(self, x_pos01: torch.Tensor, probs_flat: torch.Tensor):\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01, 0, 1) * (self.Npos - 1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos - 1)\n", + " p_vals = lp[:, idx].sum(0)\n", + " qfr_vals = self.qfour.forward_batch(x_pos01)\n", + " return self.wf * p_vals + self.wq * qfr_vals\n", + "\n", + "@dataclass\n", + "class QuKANLayerCfg:\n", + " n_nodes: int = 5\n", + " n_label_qubits: int = 2\n", + " n_pos_qubits: int = 6\n", + " qcbm_depth: int = 3\n", + " label_mixer_depth: int = 2\n", + " fourier_k: int = 4\n", + " fourier_depth: int = 1\n", + "\n", + "class QuKANLayer(nn.Module):\n", + " def __init__(self, cfg: QuKANLayerCfg, seed=0):\n", + " super().__init__()\n", + " self.cfg = cfg\n", + " self.qcbm 
= QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)\n", + " self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n", + " def build(self, input_dim, seed=0):\n", + " for m in range(self.cfg.n_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, self.cfg.label_mixer_depth, seed + 97 * m + j)\n", + " edge = QuKANResidualEdge(\n", + " mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits,\n", + " self.cfg.fourier_k, self.cfg.fourier_depth, seed + 991 * m + 13 * j\n", + " )\n", + " self.mixers.append(mixer)\n", + " self.edges.append(edge)\n", + " def forward(self, X):\n", + " X01 = torch.sigmoid(X)\n", + " edge_probs = [mix() for mix in self.mixers]\n", + " nodes = []\n", + " eidx = 0\n", + " for m in range(self.cfg.n_nodes):\n", + " acc = torch.zeros(X.shape[0], dtype=torch.float32, device=X.device)\n", + " for j in range(X.shape[1]):\n", + " out = self.edges[eidx].batch_forward(X01[:, j], edge_probs[eidx])\n", + " acc = acc + out\n", + " eidx += 1\n", + " nodes.append(acc)\n", + " return torch.stack(nodes, 1)\n", + "\n", + "@dataclass\n", + "class KANReadoutCfg:\n", + " n_classes: int\n", + " in_dim: int\n", + " fourier_k: int = 3\n", + " fourier_depth: int = 1\n", + "\n", + "class KANReadout(nn.Module):\n", + " def __init__(self, cfg: KANReadoutCfg, seed=0):\n", + " super().__init__()\n", + " self.cfg = cfg\n", + " C, M = cfg.n_classes, cfg.in_dim\n", + " self.qfr = nn.ModuleList([\n", + " QuantumBlock(cfg.fourier_k, cfg.fourier_depth, seed + 131 * c + m)\n", + " for c in range(C) for m in range(M)\n", + " ])\n", + " self.b = nn.Parameter(torch.zeros(C))\n", + " def _idx(self, c, m):\n", + " return c * self.cfg.in_dim + m\n", + " def forward(self, H):\n", + " H01 = torch.sigmoid(H)\n", + " logits = []\n", + " for c in range(self.cfg.n_classes):\n", + " acc = torch.zeros(H.shape[0], dtype=torch.float32, device=H.device)\n", + " for m in range(H.shape[1]):\n", + " acc = acc + self.qfr[self._idx(c, 
m)].forward_batch(H01[:, m])\n", + " logits.append(acc + self.b[c])\n", + " return torch.stack(logits, 1)\n", + "\n", + "@dataclass\n", + "class QuKANNetCfg:\n", + " layer1: QuKANLayerCfg = field(default_factory=QuKANLayerCfg)\n", + " layer2: QuKANLayerCfg = field(default_factory=QuKANLayerCfg)\n", + " n_classes: int = 2 \n", + "\n", + "class QuKANNet(nn.Module):\n", + " def __init__(self, cfg, input_dim, seed=0):\n", + " super().__init__()\n", + " self.l1 = QuKANLayer(cfg.layer1, seed); self.l1.build(input_dim, seed)\n", + " self.l2 = QuKANLayer(cfg.layer2, seed+1); self.l2.build(cfg.layer1.n_nodes, seed+1)\n", + " self.readout = KANReadout(KANReadoutCfg(cfg.n_classes, cfg.layer2.n_nodes), seed+123)\n", + " def forward(self, X):\n", + " h1 = self.l1(X)\n", + " h2 = self.l2(h1)\n", + " return self.readout(h2)\n", + "\n", + "def load_higgs_csv_first_n(csv_path: str, n_samples: int):\n", + " data = np.loadtxt(csv_path, delimiter=\",\", max_rows=n_samples)\n", + " y = data[:, 0].astype(np.int64)\n", + " X = data[:, 1:29].astype(np.float32)\n", + " scaler = MinMaxScaler((0, 1))\n", + " X = scaler.fit_transform(X).astype(np.float32)\n", + " return X, y\n", + "\n", + "def run_higgs(csv_path: str, n_samples: int = 20000, epochs: int = 20, batch_size: int = 128, seed: int = 0):\n", + " torch.manual_seed(seed); np.random.seed(seed)\n", + " X, y = load_higgs_csv_first_n(csv_path, n_samples)\n", + " X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=seed, stratify=y)\n", + " X_tr = torch.tensor(X_tr, dtype=torch.float32)\n", + " X_te = torch.tensor(X_te, dtype=torch.float32)\n", + " y_tr = torch.tensor(y_tr, dtype=torch.long)\n", + " y_te = torch.tensor(y_te, dtype=torch.long)\n", + " input_dim = X_tr.shape[1] \n", + " model = QuKANNet(QuKANNetCfg(), input_dim=input_dim, seed=seed)\n", + " opt = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=1e-4)\n", + " ce = nn.CrossEntropyLoss(label_smoothing=0.05)\n", + " for ep in range(1, epochs + 
1):\n", + " model.train()\n", + " perm = torch.randperm(X_tr.shape[0])\n", + " xb_all, yb_all = X_tr[perm], y_tr[perm]\n", + " tot, corr = 0, 0\n", + " epoch_loss = 0.0\n", + " for i in range(0, xb_all.shape[0], batch_size):\n", + " xb = xb_all[i:i+batch_size]\n", + " yb = yb_all[i:i+batch_size]\n", + " opt.zero_grad(set_to_none=True)\n", + " logits = model(xb)\n", + " loss = ce(logits, yb)\n", + " loss.backward()\n", + " opt.step()\n", + " epoch_loss += loss.item() * xb.size(0)\n", + " tot += xb.size(0)\n", + " corr += (logits.argmax(1) == yb).sum().item()\n", + " train_acc = 100.0 * corr / tot\n", + " with torch.no_grad():\n", + " val_logits = model(X_te)\n", + " val_acc = (val_logits.argmax(1) == y_te).float().mean().item() * 100.0\n", + " avg_loss = epoch_loss / tot\n", + " print(f\"Epoch {ep:03d} | Loss={avg_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%\")\n", + "\n", + "if __name__ == \"__main__\":\n", + " CSV_PATH = r\"C:\\Users\\riakh\\Downloads\\archive (26)\\HIGGS.csv\"\n", + " run_higgs(CSV_PATH, n_samples=2000, epochs=20, batch_size=128, seed=0)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "ebc631d9-a560-4936-8286-c255c2dc08f1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "=== Training QuKAN Regressor on f_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[f_func] Epoch 5 | Train Loss=0.80472 | Test Loss=0.87720\n", + "[f_func] Epoch 10 | Train Loss=0.79135 | Test Loss=0.85996\n", + "[f_func] Epoch 15 | Train Loss=0.78032 | Test Loss=0.84498\n", + "[f_func] Epoch 20 
| Train Loss=0.77089 | Test Loss=0.83136\n", + "[f_func] Epoch 25 | Train Loss=0.76133 | Test Loss=0.81692\n", + "[f_func] Epoch 30 | Train Loss=0.74736 | Test Loss=0.79742\n", + "[f_func] Epoch 35 | Train Loss=0.72494 | Test Loss=0.76910\n", + "[f_func] Epoch 40 | Train Loss=0.69029 | Test Loss=0.72731\n", + "[f_func] Epoch 45 | Train Loss=0.64046 | Test Loss=0.67059\n", + "[f_func] Epoch 50 | Train Loss=0.57680 | Test Loss=0.60159\n", + "[f_func] Epoch 55 | Train Loss=0.50780 | Test Loss=0.53089\n", + "[f_func] Epoch 60 | Train Loss=0.44433 | Test Loss=0.46902\n", + "[f_func] Epoch 65 | Train Loss=0.39039 | Test Loss=0.41829\n", + "[f_func] Epoch 70 | Train Loss=0.34429 | Test Loss=0.37587\n", + "[f_func] Epoch 75 | Train Loss=0.30453 | Test Loss=0.33857\n", + "[f_func] Epoch 80 | Train Loss=0.26969 | Test Loss=0.30449\n", + "[f_func] Epoch 85 | Train Loss=0.23809 | Test Loss=0.27260\n", + "[f_func] Epoch 90 | Train Loss=0.20964 | Test Loss=0.24350\n", + "[f_func] Epoch 95 | Train Loss=0.18416 | Test Loss=0.21738\n", + "[f_func] Epoch 100 | Train Loss=0.16161 | Test Loss=0.19414\n", + "[f_func] Epoch 105 | Train Loss=0.14186 | Test Loss=0.17345\n", + "[f_func] Epoch 110 | Train Loss=0.12480 | Test Loss=0.15547\n", + "[f_func] Epoch 115 | Train Loss=0.11017 | Test Loss=0.13997\n", + "[f_func] Epoch 120 | Train Loss=0.09767 | Test Loss=0.12663\n", + "[f_func] Epoch 125 | Train Loss=0.08697 | Test Loss=0.11517\n", + "[f_func] Epoch 130 | Train Loss=0.07780 | Test Loss=0.10527\n", + "[f_func] Epoch 135 | Train Loss=0.06998 | Test Loss=0.09679\n", + "[f_func] Epoch 140 | Train Loss=0.06334 | Test Loss=0.08956\n", + "[f_func] Epoch 145 | Train Loss=0.05772 | Test Loss=0.08337\n", + "[f_func] Epoch 150 | Train Loss=0.05298 | Test Loss=0.07811\n", + "[f_func] Epoch 155 | Train Loss=0.04899 | Test Loss=0.07364\n", + "[f_func] Epoch 160 | Train Loss=0.04563 | Test Loss=0.06983\n", + "[f_func] Epoch 165 | Train Loss=0.04281 | Test Loss=0.06660\n", + "[f_func] Epoch 170 | 
Train Loss=0.04044 | Test Loss=0.06385\n", + "[f_func] Epoch 175 | Train Loss=0.03845 | Test Loss=0.06152\n", + "[f_func] Epoch 180 | Train Loss=0.03678 | Test Loss=0.05954\n", + "[f_func] Epoch 185 | Train Loss=0.03537 | Test Loss=0.05785\n", + "[f_func] Epoch 190 | Train Loss=0.03418 | Test Loss=0.05640\n", + "[f_func] Epoch 195 | Train Loss=0.03317 | Test Loss=0.05516\n", + "[f_func] Epoch 200 | Train Loss=0.03231 | Test Loss=0.05408\n", + "[f_func] Epoch 205 | Train Loss=0.03157 | Test Loss=0.05313\n", + "[f_func] Epoch 210 | Train Loss=0.03092 | Test Loss=0.05230\n", + "[f_func] Epoch 215 | Train Loss=0.03037 | Test Loss=0.05157\n", + "[f_func] Epoch 220 | Train Loss=0.02987 | Test Loss=0.05091\n", + "[f_func] Epoch 225 | Train Loss=0.02944 | Test Loss=0.05030\n", + "[f_func] Epoch 230 | Train Loss=0.02904 | Test Loss=0.04975\n", + "[f_func] Epoch 235 | Train Loss=0.02868 | Test Loss=0.04923\n", + "[f_func] Epoch 240 | Train Loss=0.02835 | Test Loss=0.04874\n", + "[f_func] Epoch 245 | Train Loss=0.02804 | Test Loss=0.04827\n", + "[f_func] Epoch 250 | Train Loss=0.02775 | Test Loss=0.04782\n", + "[f_func] Epoch 255 | Train Loss=0.02748 | Test Loss=0.04738\n", + "[f_func] Epoch 260 | Train Loss=0.02721 | Test Loss=0.04694\n", + "[f_func] Epoch 265 | Train Loss=0.02695 | Test Loss=0.04650\n", + "[f_func] Epoch 270 | Train Loss=0.02669 | Test Loss=0.04606\n", + "[f_func] Epoch 275 | Train Loss=0.02643 | Test Loss=0.04562\n", + "[f_func] Epoch 280 | Train Loss=0.02618 | Test Loss=0.04516\n", + "[f_func] Epoch 285 | Train Loss=0.02592 | Test Loss=0.04470\n", + "[f_func] Epoch 290 | Train Loss=0.02567 | Test Loss=0.04422\n", + "[f_func] Epoch 295 | Train Loss=0.02541 | Test Loss=0.04373\n", + "[f_func] Epoch 300 | Train Loss=0.02515 | Test Loss=0.04323\n", + "[f_func] Epoch 305 | Train Loss=0.02488 | Test Loss=0.04271\n", + "[f_func] Epoch 310 | Train Loss=0.02462 | Test Loss=0.04219\n", + "[f_func] Epoch 315 | Train Loss=0.02436 | Test Loss=0.04165\n", + "[f_func] 
Epoch 320 | Train Loss=0.02409 | Test Loss=0.04111\n", + "[f_func] Epoch 325 | Train Loss=0.02384 | Test Loss=0.04056\n", + "[f_func] Epoch 330 | Train Loss=0.02358 | Test Loss=0.04001\n", + "[f_func] Epoch 335 | Train Loss=0.02333 | Test Loss=0.03945\n", + "[f_func] Epoch 340 | Train Loss=0.02309 | Test Loss=0.03891\n", + "[f_func] Epoch 345 | Train Loss=0.02286 | Test Loss=0.03837\n", + "[f_func] Epoch 350 | Train Loss=0.02264 | Test Loss=0.03784\n", + "[f_func] Epoch 355 | Train Loss=0.02243 | Test Loss=0.03732\n", + "[f_func] Epoch 360 | Train Loss=0.02223 | Test Loss=0.03681\n", + "[f_func] Epoch 365 | Train Loss=0.02204 | Test Loss=0.03633\n", + "[f_func] Epoch 370 | Train Loss=0.02186 | Test Loss=0.03585\n", + "[f_func] Epoch 375 | Train Loss=0.02169 | Test Loss=0.03540\n", + "[f_func] Epoch 380 | Train Loss=0.02153 | Test Loss=0.03497\n", + "[f_func] Epoch 385 | Train Loss=0.02138 | Test Loss=0.03455\n", + "[f_func] Epoch 390 | Train Loss=0.02124 | Test Loss=0.03415\n", + "[f_func] Epoch 395 | Train Loss=0.02111 | Test Loss=0.03377\n", + "[f_func] Epoch 400 | Train Loss=0.02098 | Test Loss=0.03341\n", + "[f_func] Epoch 405 | Train Loss=0.02087 | Test Loss=0.03306\n", + "[f_func] Epoch 410 | Train Loss=0.02076 | Test Loss=0.03273\n", + "[f_func] Epoch 415 | Train Loss=0.02066 | Test Loss=0.03242\n", + "[f_func] Epoch 420 | Train Loss=0.02056 | Test Loss=0.03212\n", + "[f_func] Epoch 425 | Train Loss=0.02047 | Test Loss=0.03184\n", + "[f_func] Epoch 430 | Train Loss=0.02038 | Test Loss=0.03157\n", + "[f_func] Epoch 435 | Train Loss=0.02030 | Test Loss=0.03131\n", + "[f_func] Epoch 440 | Train Loss=0.02022 | Test Loss=0.03107\n", + "[f_func] Epoch 445 | Train Loss=0.02014 | Test Loss=0.03083\n", + "[f_func] Epoch 450 | Train Loss=0.02007 | Test Loss=0.03061\n", + "[f_func] Epoch 455 | Train Loss=0.02000 | Test Loss=0.03040\n", + "[f_func] Epoch 460 | Train Loss=0.01993 | Test Loss=0.03020\n", + "[f_func] Epoch 465 | Train Loss=0.01987 | Test Loss=0.03000\n", + 
"[f_func] Epoch 470 | Train Loss=0.01980 | Test Loss=0.02982\n", + "[f_func] Epoch 475 | Train Loss=0.01974 | Test Loss=0.02964\n", + "[f_func] Epoch 480 | Train Loss=0.01968 | Test Loss=0.02947\n", + "[f_func] Epoch 485 | Train Loss=0.01962 | Test Loss=0.02931\n", + "[f_func] Epoch 490 | Train Loss=0.01957 | Test Loss=0.02915\n", + "[f_func] Epoch 495 | Train Loss=0.01951 | Test Loss=0.02900\n", + "[f_func] Epoch 500 | Train Loss=0.01946 | Test Loss=0.02886\n", + "\n", + "=== Training QuKAN Regressor on g_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[g_func] Epoch 5 | Train Loss=0.65610 | Test Loss=0.68772\n", + "[g_func] Epoch 10 | Train Loss=0.64824 | Test Loss=0.68407\n", + "[g_func] Epoch 15 | Train Loss=0.63970 | Test Loss=0.67713\n", + "[g_func] Epoch 20 | Train Loss=0.62698 | Test Loss=0.66348\n", + "[g_func] Epoch 25 | Train Loss=0.60783 | Test Loss=0.64048\n", + "[g_func] Epoch 30 | Train Loss=0.57958 | Test Loss=0.60540\n", + "[g_func] Epoch 35 | Train Loss=0.54054 | Test Loss=0.55712\n", + "[g_func] Epoch 40 | Train Loss=0.49163 | Test Loss=0.49835\n", + "[g_func] Epoch 45 | Train Loss=0.43717 | Test Loss=0.43584\n", + "[g_func] Epoch 50 | Train Loss=0.38301 | Test Loss=0.37819\n", + "[g_func] Epoch 55 | Train Loss=0.33287 | Test Loss=0.33048\n", + "[g_func] Epoch 60 | Train Loss=0.28848 | Test Loss=0.29218\n", + "[g_func] Epoch 65 | Train Loss=0.25123 | Test Loss=0.26109\n", + "[g_func] Epoch 70 | Train Loss=0.22128 | Test Loss=0.23590\n", + "[g_func] Epoch 75 | Train Loss=0.19732 | Test Loss=0.21612\n", + "[g_func] Epoch 80 | Train Loss=0.17842 | Test 
Loss=0.20104\n", + "[g_func] Epoch 85 | Train Loss=0.16370 | Test Loss=0.18934\n", + "[g_func] Epoch 90 | Train Loss=0.15212 | Test Loss=0.17992\n", + "[g_func] Epoch 95 | Train Loss=0.14308 | Test Loss=0.17249\n", + "[g_func] Epoch 100 | Train Loss=0.13604 | Test Loss=0.16679\n", + "[g_func] Epoch 105 | Train Loss=0.13061 | Test Loss=0.16241\n", + "[g_func] Epoch 110 | Train Loss=0.12646 | Test Loss=0.15897\n", + "[g_func] Epoch 115 | Train Loss=0.12332 | Test Loss=0.15633\n", + "[g_func] Epoch 120 | Train Loss=0.12097 | Test Loss=0.15437\n", + "[g_func] Epoch 125 | Train Loss=0.11919 | Test Loss=0.15292\n", + "[g_func] Epoch 130 | Train Loss=0.11785 | Test Loss=0.15178\n", + "[g_func] Epoch 135 | Train Loss=0.11682 | Test Loss=0.15087\n", + "[g_func] Epoch 140 | Train Loss=0.11601 | Test Loss=0.15014\n", + "[g_func] Epoch 145 | Train Loss=0.11537 | Test Loss=0.14952\n", + "[g_func] Epoch 150 | Train Loss=0.11484 | Test Loss=0.14897\n", + "[g_func] Epoch 155 | Train Loss=0.11441 | Test Loss=0.14848\n", + "[g_func] Epoch 160 | Train Loss=0.11405 | Test Loss=0.14803\n", + "[g_func] Epoch 165 | Train Loss=0.11374 | Test Loss=0.14763\n", + "[g_func] Epoch 170 | Train Loss=0.11348 | Test Loss=0.14725\n", + "[g_func] Epoch 175 | Train Loss=0.11326 | Test Loss=0.14691\n", + "[g_func] Epoch 180 | Train Loss=0.11307 | Test Loss=0.14659\n", + "[g_func] Epoch 185 | Train Loss=0.11290 | Test Loss=0.14630\n", + "[g_func] Epoch 190 | Train Loss=0.11276 | Test Loss=0.14604\n", + "[g_func] Epoch 195 | Train Loss=0.11264 | Test Loss=0.14579\n", + "[g_func] Epoch 200 | Train Loss=0.11252 | Test Loss=0.14557\n", + "[g_func] Epoch 205 | Train Loss=0.11243 | Test Loss=0.14538\n", + "[g_func] Epoch 210 | Train Loss=0.11234 | Test Loss=0.14519\n", + "[g_func] Epoch 215 | Train Loss=0.11226 | Test Loss=0.14502\n", + "[g_func] Epoch 220 | Train Loss=0.11219 | Test Loss=0.14487\n", + "[g_func] Epoch 225 | Train Loss=0.11212 | Test Loss=0.14472\n", + "[g_func] Epoch 230 | Train Loss=0.11206 
| Test Loss=0.14459\n", + "[g_func] Epoch 235 | Train Loss=0.11200 | Test Loss=0.14446\n", + "[g_func] Epoch 240 | Train Loss=0.11195 | Test Loss=0.14434\n", + "[g_func] Epoch 245 | Train Loss=0.11190 | Test Loss=0.14423\n", + "[g_func] Epoch 250 | Train Loss=0.11185 | Test Loss=0.14413\n", + "[g_func] Epoch 255 | Train Loss=0.11180 | Test Loss=0.14402\n", + "[g_func] Epoch 260 | Train Loss=0.11176 | Test Loss=0.14393\n", + "[g_func] Epoch 265 | Train Loss=0.11172 | Test Loss=0.14383\n", + "[g_func] Epoch 270 | Train Loss=0.11167 | Test Loss=0.14374\n", + "[g_func] Epoch 275 | Train Loss=0.11163 | Test Loss=0.14366\n", + "[g_func] Epoch 280 | Train Loss=0.11159 | Test Loss=0.14357\n", + "[g_func] Epoch 285 | Train Loss=0.11156 | Test Loss=0.14349\n", + "[g_func] Epoch 290 | Train Loss=0.11152 | Test Loss=0.14341\n", + "[g_func] Epoch 295 | Train Loss=0.11148 | Test Loss=0.14334\n", + "[g_func] Epoch 300 | Train Loss=0.11144 | Test Loss=0.14326\n", + "[g_func] Epoch 305 | Train Loss=0.11141 | Test Loss=0.14319\n", + "[g_func] Epoch 310 | Train Loss=0.11137 | Test Loss=0.14312\n", + "[g_func] Epoch 315 | Train Loss=0.11134 | Test Loss=0.14305\n", + "[g_func] Epoch 320 | Train Loss=0.11130 | Test Loss=0.14298\n", + "[g_func] Epoch 325 | Train Loss=0.11127 | Test Loss=0.14291\n", + "[g_func] Epoch 330 | Train Loss=0.11123 | Test Loss=0.14285\n", + "[g_func] Epoch 335 | Train Loss=0.11120 | Test Loss=0.14278\n", + "[g_func] Epoch 340 | Train Loss=0.11117 | Test Loss=0.14272\n", + "[g_func] Epoch 345 | Train Loss=0.11113 | Test Loss=0.14266\n", + "[g_func] Epoch 350 | Train Loss=0.11110 | Test Loss=0.14260\n", + "[g_func] Epoch 355 | Train Loss=0.11106 | Test Loss=0.14254\n", + "[g_func] Epoch 360 | Train Loss=0.11103 | Test Loss=0.14248\n", + "[g_func] Epoch 365 | Train Loss=0.11100 | Test Loss=0.14242\n", + "[g_func] Epoch 370 | Train Loss=0.11097 | Test Loss=0.14236\n", + "[g_func] Epoch 375 | Train Loss=0.11093 | Test Loss=0.14231\n", + "[g_func] Epoch 380 | Train 
Loss=0.11090 | Test Loss=0.14225\n", + "[g_func] Epoch 385 | Train Loss=0.11087 | Test Loss=0.14219\n", + "[g_func] Epoch 390 | Train Loss=0.11084 | Test Loss=0.14214\n", + "[g_func] Epoch 395 | Train Loss=0.11081 | Test Loss=0.14208\n", + "[g_func] Epoch 400 | Train Loss=0.11078 | Test Loss=0.14203\n", + "[g_func] Epoch 405 | Train Loss=0.11075 | Test Loss=0.14197\n", + "[g_func] Epoch 410 | Train Loss=0.11072 | Test Loss=0.14192\n", + "[g_func] Epoch 415 | Train Loss=0.11070 | Test Loss=0.14187\n", + "[g_func] Epoch 420 | Train Loss=0.11067 | Test Loss=0.14182\n", + "[g_func] Epoch 425 | Train Loss=0.11064 | Test Loss=0.14177\n", + "[g_func] Epoch 430 | Train Loss=0.11061 | Test Loss=0.14172\n", + "[g_func] Epoch 435 | Train Loss=0.11059 | Test Loss=0.14167\n", + "[g_func] Epoch 440 | Train Loss=0.11056 | Test Loss=0.14162\n", + "[g_func] Epoch 445 | Train Loss=0.11054 | Test Loss=0.14157\n", + "[g_func] Epoch 450 | Train Loss=0.11051 | Test Loss=0.14152\n", + "[g_func] Epoch 455 | Train Loss=0.11049 | Test Loss=0.14148\n", + "[g_func] Epoch 460 | Train Loss=0.11046 | Test Loss=0.14143\n", + "[g_func] Epoch 465 | Train Loss=0.11044 | Test Loss=0.14139\n", + "[g_func] Epoch 470 | Train Loss=0.11042 | Test Loss=0.14134\n", + "[g_func] Epoch 475 | Train Loss=0.11040 | Test Loss=0.14130\n", + "[g_func] Epoch 480 | Train Loss=0.11038 | Test Loss=0.14126\n", + "[g_func] Epoch 485 | Train Loss=0.11036 | Test Loss=0.14121\n", + "[g_func] Epoch 490 | Train Loss=0.11034 | Test Loss=0.14117\n", + "[g_func] Epoch 495 | Train Loss=0.11032 | Test Loss=0.14113\n", + "[g_func] Epoch 500 | Train Loss=0.11030 | Test Loss=0.14109\n", + "\n", + "=== Training QuKAN Regressor on h_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + 
"[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[h_func] Epoch 5 | Train Loss=1.88860 | Test Loss=2.34632\n", + "[h_func] Epoch 10 | Train Loss=1.81957 | Test Loss=2.26783\n", + "[h_func] Epoch 15 | Train Loss=1.75120 | Test Loss=2.19021\n", + "[h_func] Epoch 20 | Train Loss=1.68456 | Test Loss=2.11419\n", + "[h_func] Epoch 25 | Train Loss=1.61929 | Test Loss=2.03930\n", + "[h_func] Epoch 30 | Train Loss=1.55491 | Test Loss=1.96504\n", + "[h_func] Epoch 35 | Train Loss=1.49119 | Test Loss=1.89113\n", + "[h_func] Epoch 40 | Train Loss=1.42813 | Test Loss=1.81752\n", + "[h_func] Epoch 45 | Train Loss=1.36565 | Test Loss=1.74400\n", + "[h_func] Epoch 50 | Train Loss=1.30344 | Test Loss=1.66999\n", + "[h_func] Epoch 55 | Train Loss=1.24065 | Test Loss=1.59428\n", + "[h_func] Epoch 60 | Train Loss=1.17641 | Test Loss=1.51573\n", + "[h_func] Epoch 65 | Train Loss=1.11148 | Test Loss=1.43576\n", + "[h_func] Epoch 70 | Train Loss=1.05054 | Test Loss=1.36106\n", + "[h_func] Epoch 75 | Train Loss=0.99852 | Test Loss=1.29575\n", + "[h_func] Epoch 80 | Train Loss=0.94956 | Test Loss=1.23135\n", + "[h_func] Epoch 85 | Train Loss=0.90242 | Test Loss=1.16876\n", + "[h_func] Epoch 90 | Train Loss=0.85392 | Test Loss=1.10444\n", + "[h_func] Epoch 95 | Train Loss=0.80210 | Test Loss=1.03860\n", + "[h_func] Epoch 100 | Train Loss=0.75020 | Test Loss=0.97622\n", + "[h_func] Epoch 105 | Train Loss=0.70323 | Test Loss=0.92313\n", + "[h_func] Epoch 110 | Train Loss=0.66373 | Test Loss=0.87693\n", + "[h_func] Epoch 115 | Train Loss=0.62713 | Test Loss=0.83085\n", + "[h_func] Epoch 120 | Train Loss=0.59044 | Test Loss=0.78252\n", + "[h_func] Epoch 125 | Train Loss=0.55480 | Test Loss=0.73479\n", + "[h_func] Epoch 130 | Train Loss=0.52048 | Test Loss=0.68937\n", + "[h_func] Epoch 135 | Train Loss=0.48645 | Test Loss=0.64602\n", + "[h_func] Epoch 140 | Train Loss=0.45313 | Test Loss=0.60420\n", + 
"[h_func] Epoch 145 | Train Loss=0.42151 | Test Loss=0.56344\n", + "[h_func] Epoch 150 | Train Loss=0.39180 | Test Loss=0.52467\n", + "[h_func] Epoch 155 | Train Loss=0.36426 | Test Loss=0.48895\n", + "[h_func] Epoch 160 | Train Loss=0.33856 | Test Loss=0.45629\n", + "[h_func] Epoch 165 | Train Loss=0.31446 | Test Loss=0.42615\n", + "[h_func] Epoch 170 | Train Loss=0.29221 | Test Loss=0.39828\n", + "[h_func] Epoch 175 | Train Loss=0.27186 | Test Loss=0.37241\n", + "[h_func] Epoch 180 | Train Loss=0.25327 | Test Loss=0.34848\n", + "[h_func] Epoch 185 | Train Loss=0.23631 | Test Loss=0.32655\n", + "[h_func] Epoch 190 | Train Loss=0.22080 | Test Loss=0.30643\n", + "[h_func] Epoch 195 | Train Loss=0.20661 | Test Loss=0.28789\n", + "[h_func] Epoch 200 | Train Loss=0.19360 | Test Loss=0.27082\n", + "[h_func] Epoch 205 | Train Loss=0.18164 | Test Loss=0.25508\n", + "[h_func] Epoch 210 | Train Loss=0.17065 | Test Loss=0.24055\n", + "[h_func] Epoch 215 | Train Loss=0.16051 | Test Loss=0.22712\n", + "[h_func] Epoch 220 | Train Loss=0.15114 | Test Loss=0.21469\n", + "[h_func] Epoch 225 | Train Loss=0.14248 | Test Loss=0.20317\n", + "[h_func] Epoch 230 | Train Loss=0.13445 | Test Loss=0.19249\n", + "[h_func] Epoch 235 | Train Loss=0.12701 | Test Loss=0.18257\n", + "[h_func] Epoch 240 | Train Loss=0.12008 | Test Loss=0.17333\n", + "[h_func] Epoch 245 | Train Loss=0.11364 | Test Loss=0.16472\n", + "[h_func] Epoch 250 | Train Loss=0.10763 | Test Loss=0.15668\n", + "[h_func] Epoch 255 | Train Loss=0.10203 | Test Loss=0.14917\n", + "[h_func] Epoch 260 | Train Loss=0.09679 | Test Loss=0.14215\n", + "[h_func] Epoch 265 | Train Loss=0.09189 | Test Loss=0.13555\n", + "[h_func] Epoch 270 | Train Loss=0.08730 | Test Loss=0.12936\n", + "[h_func] Epoch 275 | Train Loss=0.08300 | Test Loss=0.12354\n", + "[h_func] Epoch 280 | Train Loss=0.07895 | Test Loss=0.11807\n", + "[h_func] Epoch 285 | Train Loss=0.07515 | Test Loss=0.11292\n", + "[h_func] Epoch 290 | Train Loss=0.07157 | Test 
Loss=0.10804\n", + "[h_func] Epoch 295 | Train Loss=0.06821 | Test Loss=0.10344\n", + "[h_func] Epoch 300 | Train Loss=0.06503 | Test Loss=0.09908\n", + "[h_func] Epoch 305 | Train Loss=0.06202 | Test Loss=0.09496\n", + "[h_func] Epoch 310 | Train Loss=0.05918 | Test Loss=0.09106\n", + "[h_func] Epoch 315 | Train Loss=0.05649 | Test Loss=0.08736\n", + "[h_func] Epoch 320 | Train Loss=0.05395 | Test Loss=0.08382\n", + "[h_func] Epoch 325 | Train Loss=0.05154 | Test Loss=0.08046\n", + "[h_func] Epoch 330 | Train Loss=0.04925 | Test Loss=0.07725\n", + "[h_func] Epoch 335 | Train Loss=0.04708 | Test Loss=0.07419\n", + "[h_func] Epoch 340 | Train Loss=0.04502 | Test Loss=0.07128\n", + "[h_func] Epoch 345 | Train Loss=0.04306 | Test Loss=0.06849\n", + "[h_func] Epoch 350 | Train Loss=0.04121 | Test Loss=0.06581\n", + "[h_func] Epoch 355 | Train Loss=0.03944 | Test Loss=0.06324\n", + "[h_func] Epoch 360 | Train Loss=0.03776 | Test Loss=0.06079\n", + "[h_func] Epoch 365 | Train Loss=0.03616 | Test Loss=0.05845\n", + "[h_func] Epoch 370 | Train Loss=0.03463 | Test Loss=0.05619\n", + "[h_func] Epoch 375 | Train Loss=0.03317 | Test Loss=0.05403\n", + "[h_func] Epoch 380 | Train Loss=0.03178 | Test Loss=0.05198\n", + "[h_func] Epoch 385 | Train Loss=0.03044 | Test Loss=0.05002\n", + "[h_func] Epoch 390 | Train Loss=0.02917 | Test Loss=0.04813\n", + "[h_func] Epoch 395 | Train Loss=0.02796 | Test Loss=0.04634\n", + "[h_func] Epoch 400 | Train Loss=0.02681 | Test Loss=0.04461\n", + "[h_func] Epoch 405 | Train Loss=0.02571 | Test Loss=0.04294\n", + "[h_func] Epoch 410 | Train Loss=0.02466 | Test Loss=0.04135\n", + "[h_func] Epoch 415 | Train Loss=0.02366 | Test Loss=0.03983\n", + "[h_func] Epoch 420 | Train Loss=0.02270 | Test Loss=0.03838\n", + "[h_func] Epoch 425 | Train Loss=0.02179 | Test Loss=0.03699\n", + "[h_func] Epoch 430 | Train Loss=0.02091 | Test Loss=0.03566\n", + "[h_func] Epoch 435 | Train Loss=0.02008 | Test Loss=0.03439\n", + "[h_func] Epoch 440 | Train 
Loss=0.01929 | Test Loss=0.03317\n", + "[h_func] Epoch 445 | Train Loss=0.01853 | Test Loss=0.03201\n", + "[h_func] Epoch 450 | Train Loss=0.01781 | Test Loss=0.03089\n", + "[h_func] Epoch 455 | Train Loss=0.01712 | Test Loss=0.02982\n", + "[h_func] Epoch 460 | Train Loss=0.01646 | Test Loss=0.02881\n", + "[h_func] Epoch 465 | Train Loss=0.01583 | Test Loss=0.02784\n", + "[h_func] Epoch 470 | Train Loss=0.01524 | Test Loss=0.02691\n", + "[h_func] Epoch 475 | Train Loss=0.01467 | Test Loss=0.02603\n", + "[h_func] Epoch 480 | Train Loss=0.01414 | Test Loss=0.02518\n", + "[h_func] Epoch 485 | Train Loss=0.01363 | Test Loss=0.02438\n", + "[h_func] Epoch 490 | Train Loss=0.01315 | Test Loss=0.02361\n", + "[h_func] Epoch 495 | Train Loss=0.01269 | Test Loss=0.02287\n", + "[h_func] Epoch 500 | Train Loss=0.01226 | Test Loss=0.02217\n", + "\n", + "=== Training QuKAN Regressor on k_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[k_func] Epoch 5 | Train Loss=6.31919 | Test Loss=6.59280\n", + "[k_func] Epoch 10 | Train Loss=6.15957 | Test Loss=6.42978\n", + "[k_func] Epoch 15 | Train Loss=5.99941 | Test Loss=6.26668\n", + "[k_func] Epoch 20 | Train Loss=5.84037 | Test Loss=6.10442\n", + "[k_func] Epoch 25 | Train Loss=5.68082 | Test Loss=5.94146\n", + "[k_func] Epoch 30 | Train Loss=5.51948 | Test Loss=5.77670\n", + "[k_func] Epoch 35 | Train Loss=5.35602 | Test Loss=5.60977\n", + "[k_func] Epoch 40 | Train Loss=5.19007 | Test Loss=5.44012\n", + "[k_func] Epoch 45 | Train Loss=5.02059 | Test Loss=5.26654\n", + "[k_func] Epoch 50 | Train Loss=4.84563 | Test Loss=5.08686\n", + "[k_func] 
Epoch 55 | Train Loss=4.66163 | Test Loss=4.89715\n", + "[k_func] Epoch 60 | Train Loss=4.46458 | Test Loss=4.69367\n", + "[k_func] Epoch 65 | Train Loss=4.25709 | Test Loss=4.48072\n", + "[k_func] Epoch 70 | Train Loss=4.05591 | Test Loss=4.27618\n", + "[k_func] Epoch 75 | Train Loss=3.86570 | Test Loss=4.08183\n", + "[k_func] Epoch 80 | Train Loss=3.67905 | Test Loss=3.89517\n", + "[k_func] Epoch 85 | Train Loss=3.50616 | Test Loss=3.72010\n", + "[k_func] Epoch 90 | Train Loss=3.34045 | Test Loss=3.55206\n", + "[k_func] Epoch 95 | Train Loss=3.18548 | Test Loss=3.39462\n", + "[k_func] Epoch 100 | Train Loss=3.03914 | Test Loss=3.24678\n", + "[k_func] Epoch 105 | Train Loss=2.90148 | Test Loss=3.10681\n", + "[k_func] Epoch 110 | Train Loss=2.77141 | Test Loss=2.97462\n", + "[k_func] Epoch 115 | Train Loss=2.64798 | Test Loss=2.84914\n", + "[k_func] Epoch 120 | Train Loss=2.53045 | Test Loss=2.72920\n", + "[k_func] Epoch 125 | Train Loss=2.41807 | Test Loss=2.61441\n", + "[k_func] Epoch 130 | Train Loss=2.31028 | Test Loss=2.50419\n", + "[k_func] Epoch 135 | Train Loss=2.20670 | Test Loss=2.39809\n", + "[k_func] Epoch 140 | Train Loss=2.10708 | Test Loss=2.29596\n", + "[k_func] Epoch 145 | Train Loss=2.01126 | Test Loss=2.19754\n", + "[k_func] Epoch 150 | Train Loss=1.91907 | Test Loss=2.10276\n", + "[k_func] Epoch 155 | Train Loss=1.83038 | Test Loss=2.01141\n", + "[k_func] Epoch 160 | Train Loss=1.74505 | Test Loss=1.92337\n", + "[k_func] Epoch 165 | Train Loss=1.66294 | Test Loss=1.83845\n", + "[k_func] Epoch 170 | Train Loss=1.58390 | Test Loss=1.75649\n", + "[k_func] Epoch 175 | Train Loss=1.50775 | Test Loss=1.67725\n", + "[k_func] Epoch 180 | Train Loss=1.43432 | Test Loss=1.60053\n", + "[k_func] Epoch 185 | Train Loss=1.36344 | Test Loss=1.52614\n", + "[k_func] Epoch 190 | Train Loss=1.29503 | Test Loss=1.45399\n", + "[k_func] Epoch 195 | Train Loss=1.22914 | Test Loss=1.38417\n", + "[k_func] Epoch 200 | Train Loss=1.16587 | Test Loss=1.31686\n", + 
"[k_func] Epoch 205 | Train Loss=1.10526 | Test Loss=1.25215\n", + "[k_func] Epoch 210 | Train Loss=1.04712 | Test Loss=1.18989\n", + "[k_func] Epoch 215 | Train Loss=0.99132 | Test Loss=1.12995\n", + "[k_func] Epoch 220 | Train Loss=0.93783 | Test Loss=1.07223\n", + "[k_func] Epoch 225 | Train Loss=0.88664 | Test Loss=1.01658\n", + "[k_func] Epoch 230 | Train Loss=0.83770 | Test Loss=0.96293\n", + "[k_func] Epoch 235 | Train Loss=0.79102 | Test Loss=0.91141\n", + "[k_func] Epoch 240 | Train Loss=0.74660 | Test Loss=0.86205\n", + "[k_func] Epoch 245 | Train Loss=0.70443 | Test Loss=0.81485\n", + "[k_func] Epoch 250 | Train Loss=0.66449 | Test Loss=0.76971\n", + "[k_func] Epoch 255 | Train Loss=0.62676 | Test Loss=0.72660\n", + "[k_func] Epoch 260 | Train Loss=0.59121 | Test Loss=0.68551\n", + "[k_func] Epoch 265 | Train Loss=0.55780 | Test Loss=0.64643\n", + "[k_func] Epoch 270 | Train Loss=0.52648 | Test Loss=0.60930\n", + "[k_func] Epoch 275 | Train Loss=0.49719 | Test Loss=0.57404\n", + "[k_func] Epoch 280 | Train Loss=0.46985 | Test Loss=0.54062\n", + "[k_func] Epoch 285 | Train Loss=0.44431 | Test Loss=0.50895\n", + "[k_func] Epoch 290 | Train Loss=0.42031 | Test Loss=0.47897\n", + "[k_func] Epoch 295 | Train Loss=0.39729 | Test Loss=0.45060\n", + "[k_func] Epoch 300 | Train Loss=0.37433 | Test Loss=0.42382\n", + "[k_func] Epoch 305 | Train Loss=0.35116 | Test Loss=0.39824\n", + "[k_func] Epoch 310 | Train Loss=0.32881 | Test Loss=0.37282\n", + "[k_func] Epoch 315 | Train Loss=0.30741 | Test Loss=0.34736\n", + "[k_func] Epoch 320 | Train Loss=0.28718 | Test Loss=0.32289\n", + "[k_func] Epoch 325 | Train Loss=0.26857 | Test Loss=0.30046\n", + "[k_func] Epoch 330 | Train Loss=0.25152 | Test Loss=0.28015\n", + "[k_func] Epoch 335 | Train Loss=0.23563 | Test Loss=0.26152\n", + "[k_func] Epoch 340 | Train Loss=0.22052 | Test Loss=0.24417\n", + "[k_func] Epoch 345 | Train Loss=0.20618 | Test Loss=0.22807\n", + "[k_func] Epoch 350 | Train Loss=0.19274 | Test 
Loss=0.21314\n", + "[k_func] Epoch 355 | Train Loss=0.18011 | Test Loss=0.19912\n", + "[k_func] Epoch 360 | Train Loss=0.16821 | Test Loss=0.18580\n", + "[k_func] Epoch 365 | Train Loss=0.15699 | Test Loss=0.17317\n", + "[k_func] Epoch 370 | Train Loss=0.14647 | Test Loss=0.16136\n", + "[k_func] Epoch 375 | Train Loss=0.13665 | Test Loss=0.15044\n", + "[k_func] Epoch 380 | Train Loss=0.12756 | Test Loss=0.14038\n", + "[k_func] Epoch 385 | Train Loss=0.11919 | Test Loss=0.13113\n", + "[k_func] Epoch 390 | Train Loss=0.11156 | Test Loss=0.12276\n", + "[k_func] Epoch 395 | Train Loss=0.10471 | Test Loss=0.11508\n", + "[k_func] Epoch 400 | Train Loss=0.09838 | Test Loss=0.10824\n", + "[k_func] Epoch 405 | Train Loss=0.09269 | Test Loss=0.10201\n", + "[k_func] Epoch 410 | Train Loss=0.08747 | Test Loss=0.09627\n", + "[k_func] Epoch 415 | Train Loss=0.08270 | Test Loss=0.09107\n", + "[k_func] Epoch 420 | Train Loss=0.07830 | Test Loss=0.08633\n", + "[k_func] Epoch 425 | Train Loss=0.07425 | Test Loss=0.08198\n", + "[k_func] Epoch 430 | Train Loss=0.07050 | Test Loss=0.07801\n", + "[k_func] Epoch 435 | Train Loss=0.06704 | Test Loss=0.07438\n", + "[k_func] Epoch 440 | Train Loss=0.06383 | Test Loss=0.07107\n", + "[k_func] Epoch 445 | Train Loss=0.06087 | Test Loss=0.06803\n", + "[k_func] Epoch 450 | Train Loss=0.05812 | Test Loss=0.06526\n", + "[k_func] Epoch 455 | Train Loss=0.05558 | Test Loss=0.06272\n", + "[k_func] Epoch 460 | Train Loss=0.05322 | Test Loss=0.06040\n", + "[k_func] Epoch 465 | Train Loss=0.05103 | Test Loss=0.05827\n", + "[k_func] Epoch 470 | Train Loss=0.04900 | Test Loss=0.05632\n", + "[k_func] Epoch 475 | Train Loss=0.04711 | Test Loss=0.05454\n", + "[k_func] Epoch 480 | Train Loss=0.04535 | Test Loss=0.05291\n", + "[k_func] Epoch 485 | Train Loss=0.04372 | Test Loss=0.05140\n", + "[k_func] Epoch 490 | Train Loss=0.04219 | Test Loss=0.05002\n", + "[k_func] Epoch 495 | Train Loss=0.04075 | Test Loss=0.04874\n", + "[k_func] Epoch 500 | Train 
Loss=0.03940 | Test Loss=0.04752\n", + "\n", + "=== Training QuKAN Regressor on m_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[m_func] Epoch 5 | Train Loss=1.01713 | Test Loss=1.21506\n", + "[m_func] Epoch 10 | Train Loss=0.98575 | Test Loss=1.17856\n", + "[m_func] Epoch 15 | Train Loss=0.95626 | Test Loss=1.14350\n", + "[m_func] Epoch 20 | Train Loss=0.92762 | Test Loss=1.10904\n", + "[m_func] Epoch 25 | Train Loss=0.89936 | Test Loss=1.07449\n", + "[m_func] Epoch 30 | Train Loss=0.87015 | Test Loss=1.03812\n", + "[m_func] Epoch 35 | Train Loss=0.83763 | Test Loss=0.99740\n", + "[m_func] Epoch 40 | Train Loss=0.79838 | Test Loss=0.94838\n", + "[m_func] Epoch 45 | Train Loss=0.75014 | Test Loss=0.89020\n", + "[m_func] Epoch 50 | Train Loss=0.69667 | Test Loss=0.82830\n", + "[m_func] Epoch 55 | Train Loss=0.64529 | Test Loss=0.77104\n", + "[m_func] Epoch 60 | Train Loss=0.59975 | Test Loss=0.72211\n", + "[m_func] Epoch 65 | Train Loss=0.55923 | Test Loss=0.67951\n", + "[m_func] Epoch 70 | Train Loss=0.52350 | Test Loss=0.64085\n", + "[m_func] Epoch 75 | Train Loss=0.49091 | Test Loss=0.60408\n", + "[m_func] Epoch 80 | Train Loss=0.45991 | Test Loss=0.56830\n", + "[m_func] Epoch 85 | Train Loss=0.43054 | Test Loss=0.53386\n", + "[m_func] Epoch 90 | Train Loss=0.40254 | Test Loss=0.50080\n", + "[m_func] Epoch 95 | Train Loss=0.37568 | Test Loss=0.46894\n", + "[m_func] Epoch 100 | Train Loss=0.34983 | Test Loss=0.43793\n", + "[m_func] Epoch 105 | Train Loss=0.32479 | Test Loss=0.40768\n", + "[m_func] Epoch 110 | Train Loss=0.30054 | Test Loss=0.37825\n", + "[m_func] Epoch 115 | 
Train Loss=0.27699 | Test Loss=0.34954\n", + "[m_func] Epoch 120 | Train Loss=0.25413 | Test Loss=0.32151\n", + "[m_func] Epoch 125 | Train Loss=0.23197 | Test Loss=0.29420\n", + "[m_func] Epoch 130 | Train Loss=0.21053 | Test Loss=0.26768\n", + "[m_func] Epoch 135 | Train Loss=0.18997 | Test Loss=0.24222\n", + "[m_func] Epoch 140 | Train Loss=0.17066 | Test Loss=0.21827\n", + "[m_func] Epoch 145 | Train Loss=0.15280 | Test Loss=0.19606\n", + "[m_func] Epoch 150 | Train Loss=0.13646 | Test Loss=0.17567\n", + "[m_func] Epoch 155 | Train Loss=0.12163 | Test Loss=0.15709\n", + "[m_func] Epoch 160 | Train Loss=0.10825 | Test Loss=0.14026\n", + "[m_func] Epoch 165 | Train Loss=0.09624 | Test Loss=0.12507\n", + "[m_func] Epoch 170 | Train Loss=0.08549 | Test Loss=0.11141\n", + "[m_func] Epoch 175 | Train Loss=0.07592 | Test Loss=0.09918\n", + "[m_func] Epoch 180 | Train Loss=0.06741 | Test Loss=0.08825\n", + "[m_func] Epoch 185 | Train Loss=0.05988 | Test Loss=0.07852\n", + "[m_func] Epoch 190 | Train Loss=0.05322 | Test Loss=0.06988\n", + "[m_func] Epoch 195 | Train Loss=0.04735 | Test Loss=0.06223\n", + "[m_func] Epoch 200 | Train Loss=0.04218 | Test Loss=0.05549\n", + "[m_func] Epoch 205 | Train Loss=0.03764 | Test Loss=0.04955\n", + "[m_func] Epoch 210 | Train Loss=0.03366 | Test Loss=0.04435\n", + "[m_func] Epoch 215 | Train Loss=0.03018 | Test Loss=0.03980\n", + "[m_func] Epoch 220 | Train Loss=0.02714 | Test Loss=0.03584\n", + "[m_func] Epoch 225 | Train Loss=0.02449 | Test Loss=0.03240\n", + "[m_func] Epoch 230 | Train Loss=0.02219 | Test Loss=0.02943\n", + "[m_func] Epoch 235 | Train Loss=0.02019 | Test Loss=0.02686\n", + "[m_func] Epoch 240 | Train Loss=0.01847 | Test Loss=0.02464\n", + "[m_func] Epoch 245 | Train Loss=0.01699 | Test Loss=0.02272\n", + "[m_func] Epoch 250 | Train Loss=0.01571 | Test Loss=0.02106\n", + "[m_func] Epoch 255 | Train Loss=0.01461 | Test Loss=0.01962\n", + "[m_func] Epoch 260 | Train Loss=0.01367 | Test Loss=0.01838\n", + "[m_func] 
Epoch 265 | Train Loss=0.01285 | Test Loss=0.01730\n", + "[m_func] Epoch 270 | Train Loss=0.01214 | Test Loss=0.01635\n", + "[m_func] Epoch 275 | Train Loss=0.01151 | Test Loss=0.01551\n", + "[m_func] Epoch 280 | Train Loss=0.01097 | Test Loss=0.01476\n", + "[m_func] Epoch 285 | Train Loss=0.01048 | Test Loss=0.01408\n", + "[m_func] Epoch 290 | Train Loss=0.01005 | Test Loss=0.01348\n", + "[m_func] Epoch 295 | Train Loss=0.00965 | Test Loss=0.01293\n", + "[m_func] Epoch 300 | Train Loss=0.00929 | Test Loss=0.01242\n", + "[m_func] Epoch 305 | Train Loss=0.00894 | Test Loss=0.01194\n", + "[m_func] Epoch 310 | Train Loss=0.00862 | Test Loss=0.01149\n", + "[m_func] Epoch 315 | Train Loss=0.00831 | Test Loss=0.01108\n", + "[m_func] Epoch 320 | Train Loss=0.00801 | Test Loss=0.01070\n", + "[m_func] Epoch 325 | Train Loss=0.00773 | Test Loss=0.01034\n", + "[m_func] Epoch 330 | Train Loss=0.00746 | Test Loss=0.01000\n", + "[m_func] Epoch 335 | Train Loss=0.00720 | Test Loss=0.00969\n", + "[m_func] Epoch 340 | Train Loss=0.00694 | Test Loss=0.00939\n", + "[m_func] Epoch 345 | Train Loss=0.00670 | Test Loss=0.00911\n", + "[m_func] Epoch 350 | Train Loss=0.00646 | Test Loss=0.00884\n", + "[m_func] Epoch 355 | Train Loss=0.00624 | Test Loss=0.00858\n", + "[m_func] Epoch 360 | Train Loss=0.00602 | Test Loss=0.00832\n", + "[m_func] Epoch 365 | Train Loss=0.00581 | Test Loss=0.00808\n", + "[m_func] Epoch 370 | Train Loss=0.00561 | Test Loss=0.00784\n", + "[m_func] Epoch 375 | Train Loss=0.00542 | Test Loss=0.00761\n", + "[m_func] Epoch 380 | Train Loss=0.00524 | Test Loss=0.00739\n", + "[m_func] Epoch 385 | Train Loss=0.00506 | Test Loss=0.00718\n", + "[m_func] Epoch 390 | Train Loss=0.00489 | Test Loss=0.00697\n", + "[m_func] Epoch 395 | Train Loss=0.00473 | Test Loss=0.00678\n", + "[m_func] Epoch 400 | Train Loss=0.00457 | Test Loss=0.00659\n", + "[m_func] Epoch 405 | Train Loss=0.00442 | Test Loss=0.00640\n", + "[m_func] Epoch 410 | Train Loss=0.00428 | Test Loss=0.00623\n", + 
"[m_func] Epoch 415 | Train Loss=0.00415 | Test Loss=0.00605\n", + "[m_func] Epoch 420 | Train Loss=0.00401 | Test Loss=0.00589\n", + "[m_func] Epoch 425 | Train Loss=0.00389 | Test Loss=0.00572\n", + "[m_func] Epoch 430 | Train Loss=0.00376 | Test Loss=0.00556\n", + "[m_func] Epoch 435 | Train Loss=0.00364 | Test Loss=0.00538\n", + "[m_func] Epoch 440 | Train Loss=0.00351 | Test Loss=0.00520\n", + "[m_func] Epoch 445 | Train Loss=0.00341 | Test Loss=0.00512\n", + "[m_func] Epoch 450 | Train Loss=0.00333 | Test Loss=0.00507\n", + "[m_func] Epoch 455 | Train Loss=0.00327 | Test Loss=0.00502\n", + "[m_func] Epoch 460 | Train Loss=0.00322 | Test Loss=0.00497\n", + "[m_func] Epoch 465 | Train Loss=0.00319 | Test Loss=0.00492\n", + "[m_func] Epoch 470 | Train Loss=0.00316 | Test Loss=0.00488\n", + "[m_func] Epoch 475 | Train Loss=0.00313 | Test Loss=0.00484\n", + "[m_func] Epoch 480 | Train Loss=0.00311 | Test Loss=0.00479\n", + "[m_func] Epoch 485 | Train Loss=0.00308 | Test Loss=0.00474\n", + "[m_func] Epoch 490 | Train Loss=0.00306 | Test Loss=0.00469\n", + "[m_func] Epoch 495 | Train Loss=0.00304 | Test Loss=0.00465\n", + "[m_func] Epoch 500 | Train Loss=0.00302 | Test Loss=0.00460\n", + "\n", + "=== Training QuKAN Regressor on n_func ===\n", + "[QuKANRegressor] built edges: 6 nodes × 1 inputs = 6 edges\n", + "[QCBM pretrain] 000 | MSE=0.007273 | TV=0.955602\n", + "[QCBM pretrain] 010 | MSE=0.000574 | TV=0.650723\n", + "[QCBM pretrain] 020 | MSE=0.000122 | TV=0.517162\n", + "[QCBM pretrain] 030 | MSE=0.000094 | TV=0.466575\n", + "[QCBM pretrain] 040 | MSE=0.000075 | TV=0.411801\n", + "[QCBM pretrain] 049 | MSE=0.000061 | TV=0.377527\n", + ">> QCBM frozen.\n", + "[n_func] Epoch 5 | Train Loss=0.99870 | Test Loss=0.96978\n", + "[n_func] Epoch 10 | Train Loss=0.93523 | Test Loss=0.90764\n", + "[n_func] Epoch 15 | Train Loss=0.87305 | Test Loss=0.84697\n", + "[n_func] Epoch 20 | Train Loss=0.81275 | Test Loss=0.78810\n", + "[n_func] Epoch 25 | Train Loss=0.75375 | Test 
Loss=0.73054\n", + "[n_func] Epoch 30 | Train Loss=0.69567 | Test Loss=0.67400\n", + "[n_func] Epoch 35 | Train Loss=0.63855 | Test Loss=0.61852\n", + "[n_func] Epoch 40 | Train Loss=0.58243 | Test Loss=0.56407\n", + "[n_func] Epoch 45 | Train Loss=0.52716 | Test Loss=0.51050\n", + "[n_func] Epoch 50 | Train Loss=0.47242 | Test Loss=0.45746\n", + "[n_func] Epoch 55 | Train Loss=0.41752 | Test Loss=0.40428\n", + "[n_func] Epoch 60 | Train Loss=0.36193 | Test Loss=0.35063\n", + "[n_func] Epoch 65 | Train Loss=0.30722 | Test Loss=0.29846\n", + "[n_func] Epoch 70 | Train Loss=0.25832 | Test Loss=0.25260\n", + "[n_func] Epoch 75 | Train Loss=0.21597 | Test Loss=0.21305\n", + "[n_func] Epoch 80 | Train Loss=0.17836 | Test Loss=0.17876\n", + "[n_func] Epoch 85 | Train Loss=0.14720 | Test Loss=0.15006\n", + "[n_func] Epoch 90 | Train Loss=0.12103 | Test Loss=0.12636\n", + "[n_func] Epoch 95 | Train Loss=0.10005 | Test Loss=0.10690\n", + "[n_func] Epoch 100 | Train Loss=0.08279 | Test Loss=0.08984\n", + "[n_func] Epoch 105 | Train Loss=0.06839 | Test Loss=0.07495\n", + "[n_func] Epoch 110 | Train Loss=0.05597 | Test Loss=0.06204\n", + "[n_func] Epoch 115 | Train Loss=0.04530 | Test Loss=0.05079\n", + "[n_func] Epoch 120 | Train Loss=0.03629 | Test Loss=0.04116\n", + "[n_func] Epoch 125 | Train Loss=0.02900 | Test Loss=0.03358\n", + "[n_func] Epoch 130 | Train Loss=0.02342 | Test Loss=0.02787\n", + "[n_func] Epoch 135 | Train Loss=0.01923 | Test Loss=0.02344\n", + "[n_func] Epoch 140 | Train Loss=0.01608 | Test Loss=0.02006\n", + "[n_func] Epoch 145 | Train Loss=0.01370 | Test Loss=0.01750\n", + "[n_func] Epoch 150 | Train Loss=0.01192 | Test Loss=0.01554\n", + "[n_func] Epoch 155 | Train Loss=0.01058 | Test Loss=0.01405\n", + "[n_func] Epoch 160 | Train Loss=0.00958 | Test Loss=0.01290\n", + "[n_func] Epoch 165 | Train Loss=0.00881 | Test Loss=0.01202\n", + "[n_func] Epoch 170 | Train Loss=0.00823 | Test Loss=0.01134\n", + "[n_func] Epoch 175 | Train Loss=0.00777 | Test 
Loss=0.01080\n", + "[n_func] Epoch 180 | Train Loss=0.00741 | Test Loss=0.01036\n", + "[n_func] Epoch 185 | Train Loss=0.00712 | Test Loss=0.00999\n", + "[n_func] Epoch 190 | Train Loss=0.00687 | Test Loss=0.00967\n", + "[n_func] Epoch 195 | Train Loss=0.00666 | Test Loss=0.00938\n", + "[n_func] Epoch 200 | Train Loss=0.00646 | Test Loss=0.00912\n", + "[n_func] Epoch 205 | Train Loss=0.00628 | Test Loss=0.00886\n", + "[n_func] Epoch 210 | Train Loss=0.00610 | Test Loss=0.00861\n", + "[n_func] Epoch 215 | Train Loss=0.00594 | Test Loss=0.00837\n", + "[n_func] Epoch 220 | Train Loss=0.00577 | Test Loss=0.00814\n", + "[n_func] Epoch 225 | Train Loss=0.00561 | Test Loss=0.00792\n", + "[n_func] Epoch 230 | Train Loss=0.00545 | Test Loss=0.00770\n", + "[n_func] Epoch 235 | Train Loss=0.00530 | Test Loss=0.00749\n", + "[n_func] Epoch 240 | Train Loss=0.00515 | Test Loss=0.00729\n", + "[n_func] Epoch 245 | Train Loss=0.00502 | Test Loss=0.00710\n", + "[n_func] Epoch 250 | Train Loss=0.00489 | Test Loss=0.00692\n", + "[n_func] Epoch 255 | Train Loss=0.00477 | Test Loss=0.00676\n", + "[n_func] Epoch 260 | Train Loss=0.00466 | Test Loss=0.00660\n", + "[n_func] Epoch 265 | Train Loss=0.00456 | Test Loss=0.00645\n", + "[n_func] Epoch 270 | Train Loss=0.00446 | Test Loss=0.00630\n", + "[n_func] Epoch 275 | Train Loss=0.00437 | Test Loss=0.00616\n", + "[n_func] Epoch 280 | Train Loss=0.00428 | Test Loss=0.00603\n", + "[n_func] Epoch 285 | Train Loss=0.00419 | Test Loss=0.00591\n", + "[n_func] Epoch 290 | Train Loss=0.00411 | Test Loss=0.00578\n", + "[n_func] Epoch 295 | Train Loss=0.00404 | Test Loss=0.00567\n", + "[n_func] Epoch 300 | Train Loss=0.00396 | Test Loss=0.00556\n", + "[n_func] Epoch 305 | Train Loss=0.00389 | Test Loss=0.00545\n", + "[n_func] Epoch 310 | Train Loss=0.00382 | Test Loss=0.00534\n", + "[n_func] Epoch 315 | Train Loss=0.00376 | Test Loss=0.00524\n", + "[n_func] Epoch 320 | Train Loss=0.00369 | Test Loss=0.00515\n", + "[n_func] Epoch 325 | Train 
Loss=0.00363 | Test Loss=0.00505\n", + "[n_func] Epoch 330 | Train Loss=0.00357 | Test Loss=0.00496\n", + "[n_func] Epoch 335 | Train Loss=0.00352 | Test Loss=0.00487\n", + "[n_func] Epoch 340 | Train Loss=0.00346 | Test Loss=0.00478\n", + "[n_func] Epoch 345 | Train Loss=0.00340 | Test Loss=0.00470\n", + "[n_func] Epoch 350 | Train Loss=0.00335 | Test Loss=0.00461\n", + "[n_func] Epoch 355 | Train Loss=0.00330 | Test Loss=0.00453\n", + "[n_func] Epoch 360 | Train Loss=0.00325 | Test Loss=0.00445\n", + "[n_func] Epoch 365 | Train Loss=0.00320 | Test Loss=0.00437\n", + "[n_func] Epoch 370 | Train Loss=0.00315 | Test Loss=0.00430\n", + "[n_func] Epoch 375 | Train Loss=0.00310 | Test Loss=0.00422\n", + "[n_func] Epoch 380 | Train Loss=0.00306 | Test Loss=0.00415\n", + "[n_func] Epoch 385 | Train Loss=0.00301 | Test Loss=0.00408\n", + "[n_func] Epoch 390 | Train Loss=0.00297 | Test Loss=0.00400\n", + "[n_func] Epoch 395 | Train Loss=0.00292 | Test Loss=0.00394\n", + "[n_func] Epoch 400 | Train Loss=0.00288 | Test Loss=0.00387\n", + "[n_func] Epoch 405 | Train Loss=0.00284 | Test Loss=0.00380\n", + "[n_func] Epoch 410 | Train Loss=0.00280 | Test Loss=0.00374\n", + "[n_func] Epoch 415 | Train Loss=0.00276 | Test Loss=0.00367\n", + "[n_func] Epoch 420 | Train Loss=0.00272 | Test Loss=0.00361\n", + "[n_func] Epoch 425 | Train Loss=0.00268 | Test Loss=0.00355\n", + "[n_func] Epoch 430 | Train Loss=0.00264 | Test Loss=0.00349\n", + "[n_func] Epoch 435 | Train Loss=0.00261 | Test Loss=0.00343\n", + "[n_func] Epoch 440 | Train Loss=0.00257 | Test Loss=0.00337\n", + "[n_func] Epoch 445 | Train Loss=0.00254 | Test Loss=0.00332\n", + "[n_func] Epoch 450 | Train Loss=0.00250 | Test Loss=0.00326\n", + "[n_func] Epoch 455 | Train Loss=0.00247 | Test Loss=0.00321\n", + "[n_func] Epoch 460 | Train Loss=0.00244 | Test Loss=0.00316\n", + "[n_func] Epoch 465 | Train Loss=0.00241 | Test Loss=0.00311\n", + "[n_func] Epoch 470 | Train Loss=0.00238 | Test Loss=0.00306\n", + "[n_func] Epoch 
475 | Train Loss=0.00235 | Test Loss=0.00302\n", + "[n_func] Epoch 480 | Train Loss=0.00232 | Test Loss=0.00297\n", + "[n_func] Epoch 485 | Train Loss=0.00229 | Test Loss=0.00293\n", + "[n_func] Epoch 490 | Train Loss=0.00227 | Test Loss=0.00288\n", + "[n_func] Epoch 495 | Train Loss=0.00224 | Test Loss=0.00284\n", + "[n_func] Epoch 500 | Train Loss=0.00222 | Test Loss=0.00280\n" + ] + } + ], + "source": [ + "# ============================================================\n", + "# Fully Quantum KAN (QuKAN) – Regression on Nonlinear Functions\n", + "# With build logs and QCBM pretraining\n", + "# ============================================================\n", + "\n", + "import math\n", + "import numpy as np\n", + "from dataclasses import dataclass\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import pennylane as qml\n", + "from sklearn.model_selection import train_test_split\n", + "import matplotlib.pyplot as plt\n", + "\n", + "torch.set_default_dtype(torch.float32)\n", + "\n", + "\n", + "# ---------------------------\n", + "# B-spline basis (for QCBM pretraining)\n", + "# ---------------------------\n", + "def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:\n", + " assert num_splines >= degree + 1\n", + " n = num_splines - 1\n", + " p = degree\n", + " if n - p > 0:\n", + " interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]\n", + " else:\n", + " interior = np.array([], dtype=float)\n", + " knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])\n", + "\n", + " def N(i, r, t):\n", + " if r == 0:\n", + " left, right = knots[i], knots[i + 1]\n", + " return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)\n", + " left_den = knots[i + r] - knots[i]\n", + " right_den = knots[i + r + 1] - knots[i + 1]\n", + " left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0\n", + " right_term = ((knots[i + r + 1] - t) 
/ right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0\n", + " return left_term + right_term\n", + "\n", + " tgrid = np.asarray(grid, dtype=float)\n", + " return np.vstack([N(i, p, tgrid) for i in range(num_splines)])\n", + "\n", + "\n", + "# ---------------------------\n", + "# QCBM over label+position\n", + "# ---------------------------\n", + "class QCBMState(nn.Module):\n", + " def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.L, self.P = n_label_qubits, n_pos_qubits\n", + " self.n_qubits = self.L + self.P\n", + " self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3).float())\n", + "\n", + " self.dev = qml.device(\"default.qubit\", wires=self.n_qubits)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights):\n", + " qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))\n", + " return qml.probs(wires=range(self.n_qubits))\n", + "\n", + " self._qprobs = qnode\n", + "\n", + " def forward(self):\n", + " return self._qprobs(self.theta.float()).to(torch.float32)\n", + "\n", + "\n", + "class LabelMixer(nn.Module):\n", + " def __init__(self, qcbm: QCBMState, depth=1, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.qcbm = qcbm\n", + " self.L, self.P = qcbm.L, qcbm.P\n", + " self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3).float())\n", + "\n", + " self.dev = qml.device(\"default.qubit\", wires=self.L + self.P)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(weights_qcbm, weights_label):\n", + " qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))\n", + " if self.L > 0:\n", + " qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))\n", + " return qml.probs(wires=range(self.L + self.P))\n", + "\n", + " self._qprobs = 
qnode\n", + "\n", + " def forward(self):\n", + " return self._qprobs(self.qcbm.theta.float(), self.phi.float()).to(torch.float32)\n", + "\n", + "\n", + "class QuantumBlock(nn.Module):\n", + " def __init__(self, k_frequencies=3, depth=1, seed=0):\n", + " super().__init__()\n", + " torch.manual_seed(seed)\n", + " self.K = k_frequencies\n", + " self.log_omega = nn.Parameter(torch.randn(self.K).float() * 0.05)\n", + " self.phase = nn.Parameter(torch.zeros(self.K).float())\n", + " self.w_cos = nn.Parameter(torch.randn(self.K).float() * 0.1)\n", + " self.w_sin = nn.Parameter(torch.randn(self.K).float() * 0.1)\n", + "\n", + " self.dev = qml.device(\"default.qubit\", wires=self.K)\n", + "\n", + " @qml.qnode(self.dev, interface=\"torch\", diff_method=\"backprop\")\n", + " def qnode(alpha_vec):\n", + " for k in range(self.K):\n", + " qml.RY(alpha_vec[k], wires=k)\n", + " for k in range(self.K - 1):\n", + " qml.CNOT(wires=[k, k + 1])\n", + " z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)]\n", + " x = [qml.expval(qml.PauliX(k)) for k in range(self.K)]\n", + " return z + x\n", + "\n", + " self._qnode = qnode\n", + "\n", + " def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor:\n", + " x01 = torch.clamp(x01_scalar.reshape(()).float(), 0.0, 1.0)\n", + " omega = F.softplus(self.log_omega.float()) + 1e-4\n", + " alpha = omega * (2 * math.pi * x01) + self.phase.float()\n", + " outs = self._qnode(alpha.float())\n", + " outs = torch.stack([torch.as_tensor(o, dtype=torch.float32) for o in outs], 0)\n", + " z, x = outs[:self.K], outs[self.K:]\n", + " return (self.w_cos.float() * z).sum() + (self.w_sin.float() * x).sum()\n", + "\n", + " def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor:\n", + " return torch.stack([self.forward_scalar(val) for val in x01_vec.float()], 0)\n", + "\n", + "\n", + "class QuKANResidualEdge(nn.Module):\n", + " def __init__(self, mixer: LabelMixer, n_label_qubits, n_pos_qubits, k=3):\n", + " super().__init__()\n", + " self.mixer 
= mixer\n", + " self.Nlabel, self.Npos = 2 ** n_label_qubits, 2 ** n_pos_qubits\n", + " self.wf = nn.Parameter(torch.tensor(0.5).float())\n", + " self.wq = nn.Parameter(torch.tensor(0.5).float())\n", + " self.qfour = QuantumBlock(k)\n", + "\n", + " def batch_forward(self, x_pos01, probs_flat):\n", + " lp = probs_flat.view(self.Nlabel, self.Npos)\n", + " idx = torch.round(torch.clamp(x_pos01.float(), 0, 1) * (self.Npos - 1)).long()\n", + " idx = torch.clamp(idx, 0, self.Npos - 1)\n", + " p_vals = lp[:, idx].sum(0).float()\n", + " qfr_vals = self.qfour.forward_batch(x_pos01.float())\n", + " return self.wf * p_vals + self.wq * qfr_vals\n", + "\n", + "\n", + "class QuKANRegressor(nn.Module):\n", + " def __init__(self, input_dim=1, hidden_nodes=6, seed=0):\n", + " super().__init__()\n", + " self.qcbm = QCBMState(2, 5, depth=3, seed=seed)\n", + " self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()\n", + " for m in range(hidden_nodes):\n", + " for j in range(input_dim):\n", + " mixer = LabelMixer(self.qcbm, depth=1, seed=seed + 97 * m + j)\n", + " edge = QuKANResidualEdge(mixer, 2, 5, k=3)\n", + " self.mixers.append(mixer)\n", + " self.edges.append(edge)\n", + "\n", + " print(f\"[QuKANRegressor] built edges: {hidden_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges\")\n", + "\n", + " self.readout = QuantumBlock(k_frequencies=3, seed=seed + 123)\n", + "\n", + " def pretrain_qcbm(self, degree=2, epochs=50, lr=5e-2):\n", + " num_spl, Npos = 2 ** self.qcbm.L, 2 ** self.qcbm.P\n", + " grid = np.linspace(0, 1, Npos)\n", + " B = np.maximum(bspline_basis_matrix(num_spl, degree, grid), 0.0)\n", + " B = (B + 1e-8) / B.sum(1, keepdims=True)\n", + " target = torch.tensor((B / num_spl).reshape(-1), dtype=torch.float32)\n", + "\n", + " opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr)\n", + " for ep in range(epochs):\n", + " opt.zero_grad()\n", + " probs = self.qcbm()\n", + " loss = F.mse_loss(probs, target)\n", + " loss.backward()\n", + " opt.step()\n", + " if ep 
% 10 == 0 or ep == epochs - 1:\n", + " tv = 0.5 * torch.sum(torch.abs(probs - target)).item()\n", + " print(f\"[QCBM pretrain] {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}\")\n", + "\n", + " self.qcbm.theta.requires_grad_(False)\n", + " print(\">> QCBM frozen.\")\n", + "\n", + " def forward(self, X):\n", + " X01 = torch.sigmoid(X.float())\n", + " edge_probs = [mix().float() for mix in self.mixers]\n", + " nodes, eidx = [], 0\n", + " for m in range(len(self.mixers) // X.shape[1]):\n", + " acc = torch.zeros(X.shape[0]).float()\n", + " for j in range(X.shape[1]):\n", + " out = self.edges[eidx].batch_forward(X01[:, j], edge_probs[eidx])\n", + " acc = acc + out\n", + " eidx += 1\n", + " nodes.append(acc)\n", + " H = torch.stack(nodes, 1).float()\n", + " return self.readout.forward_batch(H.mean(1))\n", + "\n", + "\n", + "# ---------------------------\n", + "# Target functions\n", + "# ---------------------------\n", + "def f_func(x): return torch.tanh(10*x + 0.5 + F.relu(x**2) * 10)\n", + "def g_func(x): return torch.sin(x) + torch.cos(5*x) * torch.exp(-x**2) + F.relu(x - 0.5)\n", + "def h_func(x): return torch.sigmoid(3*x) + F.relu(torch.sin(2*x) + x**3)\n", + "def k_func(x): return torch.tanh(5*x - 2) + 3 * F.relu(torch.cos(x**2))\n", + "def m_func(x): return F.softplus(x**2 - 1) + torch.tanh(4*x + 0.1)\n", + "def n_func(x): return torch.exp(-x**2 + 0.3*x) + F.relu(torch.tanh(2*x - 1))\n", + "\n", + "FUNCTION_MAP = {\n", + " \"f_func\": f_func,\n", + " \"g_func\": g_func,\n", + " \"h_func\": h_func,\n", + " \"k_func\": k_func,\n", + " \"m_func\": m_func,\n", + " \"n_func\": n_func,\n", + "}\n", + "\n", + "\n", + "def train_one_function(name, func, epochs=100, batch=64, seed=0):\n", + " x = torch.linspace(-1, 1, 500).unsqueeze(1).float()\n", + " y = func(x).float()\n", + " X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)\n", + "\n", + " model = QuKANRegressor(input_dim=1, hidden_nodes=6, seed=seed)\n", + " 
model.pretrain_qcbm()\n", + "\n", + " opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4)\n", + " mse = nn.MSELoss()\n", + "\n", + " train_losses, test_losses = [], []\n", + "\n", + " for ep in range(epochs):\n", + " model.train()\n", + " opt.zero_grad()\n", + " pred = model(X_train)\n", + " loss = mse(pred, y_train.squeeze())\n", + " loss.backward()\n", + " nn.utils.clip_grad_norm_(model.parameters(), 1.0)\n", + " opt.step()\n", + "\n", + " with torch.no_grad():\n", + " model.eval()\n", + " test_loss = mse(model(X_test), y_test.squeeze())\n", + "\n", + " train_losses.append(loss.item())\n", + " test_losses.append(test_loss.item())\n", + "\n", + " if (ep + 1) % 5 == 0 or ep == epochs - 1:\n", + " print(f\"[{name}] Epoch {ep+1} | Train Loss={loss.item():.5f} | Test Loss={test_loss.item():.5f}\")\n", + "\n", + " # ---------- Plotting ----------\n", + " model.eval()\n", + " with torch.no_grad():\n", + " preds = model(X_test).cpu().numpy()\n", + " true = y_test.cpu().numpy()\n", + " x_plot = X_test.cpu().numpy().squeeze()\n", + " sort_idx = x_plot.argsort()\n", + "\n", + " plt.figure(figsize=(12, 5))\n", + "\n", + " # Prediction vs Ground Truth\n", + " plt.subplot(1, 2, 1)\n", + " plt.plot(x_plot[sort_idx], true[sort_idx], label='Ground Truth', color='blue')\n", + " plt.plot(x_plot[sort_idx], preds[sort_idx], '--', label='Prediction', color='red')\n", + " plt.title(f\"{name} – Prediction vs Ground Truth\")\n", + " plt.xlabel(\"Input x\")\n", + " plt.ylabel(\"f(x)\")\n", + " plt.legend()\n", + " plt.grid(True)\n", + "\n", + " # Loss curves\\\n", + " \n", + " plt.subplot(1, 2, 2)\n", + " plt.plot(train_losses, label='Train Loss')\n", + " plt.plot(test_losses, label='Test Loss')\n", + " plt.title(f\"{name} – Loss over Epochs\")\n", + " plt.xlabel(\"Epoch\")\n", + " plt.ylabel(\"MSE\")\n", + " plt.legend()\n", + " plt.grid(True)\n", + "\n", + " plt.tight_layout()\n", + " plt.savefig(f\"{name}_results.png\") # single file with both plots\n", + " 
plt.close()\n", + "\n", + "\n", + "def main():\n", + " for name, fn in FUNCTION_MAP.items():\n", + " print(f\"\\n=== Training QuKAN Regressor on {name} ===\")\n", + " train_one_function(name, fn, epochs=500, batch=64, seed=0)\n", + "\n", + "\n", + "\n", + "if __name__ == \"__main__\":\n", + " main()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8cd4ec0-6db3-4604-a76c-b497d0999400", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "558eadb6-54a2-4763-9f07-8fd0b577b519", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6428869b-8e78-41c5-afb5-cc715428a530", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_digits.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_digits.py new file mode 100644 index 0000000..4996773 --- /dev/null +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_digits.py @@ -0,0 +1,263 @@ +import math, numpy as np +from dataclasses import dataclass, field + +import torch +import torch.nn as nn +import torch.nn.functional as F +import pennylane as qml + +from sklearn.datasets import load_digits +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import MinMaxScaler + +torch.set_default_dtype(torch.float32) + + +def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray: + assert num_splines >= degree + 1 + n 
def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:
    """Evaluate a clamped B-spline basis on `grid`.

    Returns a (num_splines, len(grid)) matrix whose i-th row holds the i-th
    Cox–de Boor basis function of the given degree, built on an open-uniform
    knot vector over [0, 1].  At every grid point the rows sum to 1
    (partition of unity) and all entries are non-negative.
    """
    assert num_splines >= degree + 1
    n = num_splines - 1
    p = degree
    if n - p > 0:
        interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]
    else:
        interior = np.array([], dtype=float)
    # Open-uniform (clamped) knots: degree+1 repeated knots at both ends.
    knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])

    def N(i, r, t):
        # Cox–de Boor recursion; r == 0 is the indicator of [knot_i, knot_{i+1}),
        # closed on the right at t == 1 so the last basis covers the endpoint.
        if r == 0:
            left, right = knots[i], knots[i + 1]
            return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)
        left_den = knots[i + r] - knots[i]
        right_den = knots[i + r + 1] - knots[i + 1]
        left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0
        right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0
        return left_term + right_term

    tgrid = np.asarray(grid, dtype=float)
    return np.vstack([N(i, p, tgrid) for i in range(num_splines)])


class QCBMState(nn.Module):
    """Quantum-circuit Born machine over a label register (L qubits) and a
    position register (P qubits).

    `forward()` returns the full 2**(L+P) probability vector produced by a
    StronglyEntanglingLayers ansatz with trainable angles `theta`.
    """

    def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.L, self.P = n_label_qubits, n_pos_qubits
        self.n_qubits = self.L + self.P
        self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))
        self.dev = qml.device("default.qubit", wires=self.n_qubits)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def qnode(weights):
            qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))
            return qml.probs(wires=range(self.n_qubits))

        self._qprobs = qnode

    def forward(self):
        return self._qprobs(self.theta).to(torch.float32)

    def freeze(self):
        # Called after spline pretraining; the shared QCBM then stays fixed.
        self.theta.requires_grad_(False)


class LabelMixer(nn.Module):
    """Applies a small trainable rotation on the label qubits of a shared
    QCBM state, giving each edge its own view of the distribution."""

    def __init__(self, qcbm: QCBMState, depth=1, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.qcbm = qcbm
        self.L, self.P = qcbm.L, qcbm.P
        self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))
        self.dev = qml.device("default.qubit", wires=self.L + self.P)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def qnode(weights_qcbm, weights_label):
            qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))
            if self.L > 0:
                qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))
            return qml.probs(wires=range(self.L + self.P))

        self._qprobs = qnode

    def forward(self):
        return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)


class QuantumBlock(nn.Module):
    """Trainable Fourier-feature map: weighted sum of cos/sin at K learned
    positive frequencies.

    NOTE(review): despite the name, this variant executes no circuit — it is
    the classical closed form of the Fourier residual.
    """

    def __init__(self, k_frequencies=3, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.K = k_frequencies
        self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)
        self.phase = nn.Parameter(torch.zeros(self.K))
        self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)
        self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)

    def forward_batch(self, x01_vec):
        """Map a batch of scalars in [0, 1] to scalars; shape (B,) -> (B,)."""
        x01_vec = torch.clamp(x01_vec, 0, 1)
        omega = F.softplus(self.log_omega) + 1e-4  # strictly positive frequencies
        # Build the (B, K) phase matrix in one shot instead of a per-sample
        # Python loop; numerically identical to the looped form.
        alpha = (2 * math.pi * x01_vec).unsqueeze(1) * omega.unsqueeze(0) + self.phase.unsqueeze(0)
        return torch.cos(alpha) @ self.w_cos + torch.sin(alpha) @ self.w_sin


class QuKANResidualEdge(nn.Module):
    """One KAN edge: a spline value read out of the (frozen) QCBM distribution
    plus a Fourier residual, combined with trainable weights wf / wq."""

    def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):
        super().__init__()
        self.mixer = mixer
        self.L, self.P = n_label_qubits, n_pos_qubits
        self.Nlabel, self.Npos = 2 ** self.L, 2 ** self.P
        self.wf = nn.Parameter(torch.tensor(float(w_init)))
        self.wq = nn.Parameter(torch.tensor(float(w_init)))
        self.qfour = QuantumBlock(fourier_k, seed=seed)

    def batch_forward(self, x_pos01, probs_flat):
        # Marginalize the mixed distribution over labels at the position bin
        # nearest each input, then add the Fourier residual.
        lp = probs_flat.view(self.Nlabel, self.Npos)
        idx = torch.round(torch.clamp(x_pos01, 0, 1) * (self.Npos - 1)).long()
        idx = torch.clamp(idx, 0, self.Npos - 1)
        p_vals = lp[:, idx].sum(0)
        qfr_vals = self.qfour.forward_batch(x_pos01)
        return self.wf * p_vals + self.wq * qfr_vals


@dataclass
class QuKANLayerCfg:
    """Hyper-parameters of one QuKAN layer."""
    n_nodes: int = 4
    n_label_qubits: int = 2
    n_pos_qubits: int = 6
    qcbm_depth: int = 3
    label_mixer_depth: int = 1
    fourier_k: int = 3
    mixers_trainable: bool = False


class QuKANLayer(nn.Module):
    """A KAN layer of `n_nodes` units; every (node, input) pair gets its own
    LabelMixer + residual edge, all backed by one shared QCBM."""

    def __init__(self, cfg: QuKANLayerCfg, seed=0):
        super().__init__()
        self.cfg = cfg
        self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)
        self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()
        self._built = False
        self._train_mixers = cfg.mixers_trainable

    def build(self, input_dim, seed=0):
        """Instantiate one mixer + edge per (node, input) pair."""
        for m in range(self.cfg.n_nodes):
            for j in range(input_dim):
                mixer = LabelMixer(self.qcbm, self.cfg.label_mixer_depth, seed + 97 * m + j)
                edge = QuKANResidualEdge(mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits,
                                         self.cfg.fourier_k, seed=seed + 991 * m + 13 * j)
                self.mixers.append(mixer)
                self.edges.append(edge)
        self._built = True
        print(f"built edges: {self.cfg.n_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges")

    def pretrain_qcbm_on_splines(self, degree=2, epochs=80, lr=5e-2, verbose=True):
        """Fit the QCBM distribution to a normalized B-spline basis, then freeze it."""
        num_spl, Npos = 2 ** self.cfg.n_label_qubits, 2 ** self.cfg.n_pos_qubits
        grid = np.linspace(0, 1, Npos)
        # Clamp tiny negative round-off from the recursion before normalizing
        # (consistent with the sibling fully_qkan_equations.py implementation).
        B = np.maximum(bspline_basis_matrix(num_spl, degree, grid), 0.0)
        B = (B + 1e-8) / B.sum(1, keepdims=True)
        target = torch.tensor((B / num_spl).reshape(-1), dtype=torch.float32)
        opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr)
        for ep in range(epochs):
            opt.zero_grad()
            probs = self.qcbm()
            loss = F.mse_loss(probs, target)
            loss.backward()
            opt.step()
            if verbose and (ep % 20 == 0 or ep == epochs - 1):
                tv = 0.5 * torch.sum(torch.abs(probs - target)).item()
                print(f"[QCBM pretrain] {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}")
        self.qcbm.freeze()
        print("QCBM frozen.")

    def forward(self, X, input_is_01=True):
        X01 = (X if input_is_01 else torch.sigmoid(X))
        # Mixer distributions are recomputed per forward pass; detach them
        # unless the mixers themselves are being trained.
        if self._train_mixers:
            edge_probs = [mix() for mix in self.mixers]
        else:
            with torch.no_grad():
                edge_probs = [mix() for mix in self.mixers]
        nodes, eidx = [], 0
        for m in range(self.cfg.n_nodes):
            acc = torch.zeros(X.shape[0], dtype=torch.float32)
            for j in range(X.shape[1]):
                acc = acc + self.edges[eidx].batch_forward(X01[:, j], edge_probs[eidx])
                eidx += 1
            nodes.append(acc)
        return torch.stack(nodes, 1)


@dataclass
class KANReadoutCfg:
    """Readout configuration: one Fourier block per (class, feature)."""
    n_classes: int
    in_dim: int
    fourier_k: int = 3


class KANReadout(nn.Module):
    """Maps layer activations to class logits via per-(class, feature)
    Fourier blocks plus a per-class bias."""

    def __init__(self, cfg: KANReadoutCfg, seed=0):
        super().__init__()
        self.cfg = cfg
        C, M = cfg.n_classes, cfg.in_dim
        self.qfr = nn.ModuleList([QuantumBlock(cfg.fourier_k, seed + 131 * c + m)
                                  for c in range(C) for m in range(M)])
        self.b = nn.Parameter(torch.zeros(C))

    def _idx(self, c, m):
        # Row-major index into the flat (class, feature) block list.
        return c * self.cfg.in_dim + m

    def forward(self, H):
        H01 = torch.sigmoid(H)  # squash activations into [0, 1] for the blocks
        logits = []
        for c in range(self.cfg.n_classes):
            acc = torch.zeros(H.shape[0], dtype=torch.float32)
            for m in range(H.shape[1]):
                acc = acc + self.qfr[self._idx(c, m)].forward_batch(H01[:, m])
            logits.append(acc + self.b[c])
        return torch.stack(logits, 1)


@dataclass
class QuKANNetCfg:
    """Two-layer QuKAN network configuration."""
    layer1: QuKANLayerCfg = field(default_factory=QuKANLayerCfg)
    layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_pos_qubits=6))
    n_classes: int = 10


class QuKANNet(nn.Module):
    """QuKAN classifier: two QuKAN layers followed by a KAN readout."""

    def __init__(self, cfg, input_dim, seed=0):
        super().__init__()
        self.l1 = QuKANLayer(cfg.layer1, seed)
        self.l1.build(input_dim, seed)
        self.l2 = QuKANLayer(cfg.layer2, seed + 1)
        self.l2.build(cfg.layer1.n_nodes, seed + 1)
        self.readout = KANReadout(KANReadoutCfg(cfg.n_classes, cfg.layer2.n_nodes), seed + 123)

    def pretrain_qcbms(self, degree=2, epochs=80, lr=5e-2):
        print("\n[Pretrain] Layer 1 QCBM")
        self.l1.pretrain_qcbm_on_splines(degree, epochs, lr)
        print("\n[Pretrain] Layer 2 QCBM")
        self.l2.pretrain_qcbm_on_splines(degree, epochs, lr)

    def forward(self, X):
        h1 = self.l1(X, True)    # inputs are pre-scaled to [0, 1]
        h2 = self.l2(h1, False)  # second layer squashes activations via sigmoid
        return self.readout(h2)


def run_digits(seed=0):
    """Train and evaluate QuKANNet on the first 1000 sklearn digits samples."""
    torch.manual_seed(seed)
    np.random.seed(seed)
    digits = load_digits()
    X, y = digits.data.astype(np.float32), digits.target.astype(np.int64)
    X, y = X[:1000], y[:1000]

    X = MinMaxScaler((0, 1)).fit_transform(X).astype(np.float32)
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)
    X_tr, X_te = torch.tensor(X_tr), torch.tensor(X_te)
    y_tr, y_te = torch.tensor(y_tr), torch.tensor(y_te)

    model = QuKANNet(QuKANNetCfg(), input_dim=64, seed=seed)
    model.pretrain_qcbms()

    opt = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=8e-4)
    sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=40)
    ce = nn.CrossEntropyLoss(label_smoothing=0.05)

    print("\nTraining QuKAN on Digits Dataset (1000 samples)")
    for ep in range(1, 41):
        model.train()
        perm = torch.randperm(X_tr.shape[0])
        Xb_all, yb_all = X_tr[perm], y_tr[perm]
        loss_sum, tot, corr = 0.0, 0, 0
        for i in range(0, Xb_all.shape[0], 64):
            xb, yb = Xb_all[i:i + 64], yb_all[i:i + 64]
            opt.zero_grad(set_to_none=True)
            logits = model(xb)
            loss = ce(logits, yb)
            loss.backward()
            opt.step()
            loss_sum += float(loss.item()) * xb.size(0)
            tot += xb.size(0)
            corr += (logits.argmax(1) == yb).sum().item()
        sched.step()
        train_acc = 100.0 * corr / tot
        # Fix: evaluate in eval mode and without building an autograd graph
        # over the whole test set.
        model.eval()
        with torch.no_grad():
            val_acc = (model(X_te).argmax(1) == y_te).float().mean().item() * 100.0
        print(f"Epoch {ep:03d} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%")


if __name__ == "__main__":
    run_digits(0)
dtype=float)[1:-1] + else: + interior = np.array([], dtype=float) + knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)]) + + def N(i, r, t): + if r == 0: + left, right = knots[i], knots[i + 1] + return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0) + left_den = knots[i + r] - knots[i] + right_den = knots[i + r + 1] - knots[i + 1] + left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0 + right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0 + return left_term + right_term + + tgrid = np.asarray(grid, dtype=float) + return np.vstack([N(i, p, tgrid) for i in range(num_splines)]) + + +class QCBMState(nn.Module): + def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.L, self.P = n_label_qubits, n_pos_qubits + self.n_qubits = self.L + self.P + self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3).float()) + + self.dev = qml.device("default.qubit", wires=self.n_qubits) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(weights): + qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits)) + return qml.probs(wires=range(self.n_qubits)) + + self._qprobs = qnode + + def forward(self): + return self._qprobs(self.theta.float()).to(torch.float32) + + +class LabelMixer(nn.Module): + def __init__(self, qcbm: QCBMState, depth=1, seed=0): + super().__init__() + torch.manual_seed(seed) + self.qcbm = qcbm + self.L, self.P = qcbm.L, qcbm.P + self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3).float()) + + self.dev = qml.device("default.qubit", wires=self.L + self.P) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(weights_qcbm, weights_label): + qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P)) + if self.L > 0: + 
qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L)) + return qml.probs(wires=range(self.L + self.P)) + + self._qprobs = qnode + + def forward(self): + return self._qprobs(self.qcbm.theta.float(), self.phi.float()).to(torch.float32) + + +class QuantumBlock(nn.Module): + def __init__(self, k_frequencies=3, depth=1, seed=0): + super().__init__() + torch.manual_seed(seed) + self.K = k_frequencies + self.log_omega = nn.Parameter(torch.randn(self.K).float() * 0.05) + self.phase = nn.Parameter(torch.zeros(self.K).float()) + self.w_cos = nn.Parameter(torch.randn(self.K).float() * 0.1) + self.w_sin = nn.Parameter(torch.randn(self.K).float() * 0.1) + + self.dev = qml.device("default.qubit", wires=self.K) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(alpha_vec): + for k in range(self.K): + qml.RY(alpha_vec[k], wires=k) + for k in range(self.K - 1): + qml.CNOT(wires=[k, k + 1]) + z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)] + x = [qml.expval(qml.PauliX(k)) for k in range(self.K)] + return z + x + + self._qnode = qnode + + def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor: + x01 = torch.clamp(x01_scalar.reshape(()).float(), 0.0, 1.0) + omega = F.softplus(self.log_omega.float()) + 1e-4 + alpha = omega * (2 * math.pi * x01) + self.phase.float() + outs = self._qnode(alpha.float()) + outs = torch.stack([torch.as_tensor(o, dtype=torch.float32) for o in outs], 0) + z, x = outs[:self.K], outs[self.K:] + return (self.w_cos.float() * z).sum() + (self.w_sin.float() * x).sum() + + def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor: + return torch.stack([self.forward_scalar(val) for val in x01_vec.float()], 0) + + +class QuKANResidualEdge(nn.Module): + def __init__(self, mixer: LabelMixer, n_label_qubits, n_pos_qubits, k=3): + super().__init__() + self.mixer = mixer + self.Nlabel, self.Npos = 2 ** n_label_qubits, 2 ** n_pos_qubits + self.wf = nn.Parameter(torch.tensor(0.5).float()) + self.wq = 
class QuKANResidualEdge(nn.Module):
    """Residual KAN edge: learned blend of a QCBM spline readout and a quantum Fourier term.

    output = wf * P(position bin of x, label marginalized) + wq * QuantumBlock(x)
    """

    def __init__(self, mixer: LabelMixer, n_label_qubits, n_pos_qubits, k=3):
        super().__init__()
        self.mixer = mixer
        self.Nlabel = 2 ** n_label_qubits
        self.Npos = 2 ** n_pos_qubits
        self.wf = nn.Parameter(torch.tensor(0.5).float())
        self.wq = nn.Parameter(torch.tensor(0.5).float())
        self.qfour = QuantumBlock(k)

    def batch_forward(self, x_pos01, probs_flat):
        # (Nlabel, Npos) view of the mixer's joint distribution.
        table = probs_flat.view(self.Nlabel, self.Npos)
        # Nearest position bin per sample; second clamp guards rounding edges.
        bins = torch.round(torch.clamp(x_pos01.float(), 0, 1) * (self.Npos - 1)).long()
        bins = torch.clamp(bins, 0, self.Npos - 1)
        spline_part = table[:, bins].sum(0).float()          # marginalize labels
        fourier_part = self.qfour.forward_batch(x_pos01.float())
        return self.wf * spline_part + self.wq * fourier_part
def f_func(x):
    """tanh(10x + 0.5 + 10·relu(x²))."""
    return torch.tanh(10 * x + 0.5 + F.relu(x ** 2) * 10)


def g_func(x):
    """sin(x) + cos(5x)·exp(−x²) + relu(x − 0.5)."""
    return torch.sin(x) + torch.cos(5 * x) * torch.exp(-x ** 2) + F.relu(x - 0.5)


def h_func(x):
    """sigmoid(3x) + relu(sin(2x) + x³)."""
    return torch.sigmoid(3 * x) + F.relu(torch.sin(2 * x) + x ** 3)


def k_func(x):
    """tanh(5x − 2) + 3·relu(cos(x²))."""
    return torch.tanh(5 * x - 2) + 3 * F.relu(torch.cos(x ** 2))


def m_func(x):
    """softplus(x² − 1) + tanh(4x + 0.1)."""
    return F.softplus(x ** 2 - 1) + torch.tanh(4 * x + 0.1)


def n_func(x):
    """exp(−x² + 0.3x) + relu(tanh(2x − 1))."""
    return torch.exp(-x ** 2 + 0.3 * x) + F.relu(torch.tanh(2 * x - 1))


# Registry of the benchmark targets the regressor is trained on.
FUNCTION_MAP = {
    "f_func": f_func,
    "g_func": g_func,
    "h_func": h_func,
    "k_func": k_func,
    "m_func": m_func,
    "n_func": n_func,
}
def main():
    """Train one QuKAN regressor per registered benchmark function."""
    for fn_name, fn in FUNCTION_MAP.items():
        print(f"\nTraining QuKAN Regressor on {fn_name}")
        train_one_function(fn_name, fn, epochs=500, batch=64, seed=0)


if __name__ == "__main__":
    main()
class LabelMixer(nn.Module):
    """Trainable rotation on the label qubits of a shared QCBM state.

    Re-applies the QCBM ansatz with its (possibly frozen) parameters, then a
    per-edge StronglyEntanglingLayers block on the label register only, and
    returns the joint label ⊗ position probability vector.
    """

    def __init__(self, qcbm: QCBMState, depth=2, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.qcbm = qcbm
        self.L, self.P = qcbm.L, qcbm.P
        self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))

        total = self.L + self.P
        self.dev = qml.device("default.qubit", wires=total)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def mixed_probs(weights_qcbm, weights_label):
            qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(total))
            if self.L > 0:
                qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))
            return qml.probs(wires=range(total))

        self._qprobs = mixed_probs

    def forward(self):
        return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)
class QuKANResidualEdge(nn.Module):
    """Residual KAN edge combining a QCBM spline readout with a quantum Fourier term.

    Edge output = wf * (QCBM label⊗position probability at the discretized x)
                + wq * (QuantumBlock Fourier feature of x)
    """

    def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=4, fourier_depth=1, seed=0, w_init=0.5):
        super().__init__()
        self.mixer = mixer
        self.L, self.P = n_label_qubits, n_pos_qubits
        self.Nlabel = 2 ** self.L
        self.Npos = 2 ** self.P
        self.wf = nn.Parameter(torch.tensor(float(w_init)))
        self.wq = nn.Parameter(torch.tensor(float(w_init)))
        self.qfour = QuantumBlock(fourier_k, fourier_depth, seed=seed)

    def batch_forward(self, x_pos01: torch.Tensor, probs_flat: torch.Tensor):
        """x_pos01: (B,) in [0,1]; probs_flat: (2^(L+P),) shared per edge."""
        label_by_pos = probs_flat.view(self.Nlabel, self.Npos)
        # Nearest position bin for each sample; clamp guards rounding at edges.
        bins = torch.round(torch.clamp(x_pos01, 0, 1) * (self.Npos - 1)).long()
        bins = torch.clamp(bins, 0, self.Npos - 1)
        spline_part = label_by_pos[:, bins].sum(0)   # marginalize the label register
        fourier_part = self.qfour.forward_batch(x_pos01)
        return self.wf * spline_part + self.wq * fourier_part
inputs...") + for m in range(self.cfg.n_nodes): + for j in range(input_dim): + mixer = LabelMixer(self.qcbm, self.cfg.label_mixer_depth, seed + 97 * m + j) + edge = QuKANResidualEdge( + mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits, + self.cfg.fourier_k, self.cfg.fourier_depth, seed + 991 * m + 13 * j + ) + self.mixers.append(mixer) + self.edges.append(edge) + print(f"[QuKANLayer] Built edges: {self.cfg.n_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges") + + def forward(self, X): + + X01 = torch.sigmoid(X) + + edge_probs = [mix() for mix in self.mixers] + nodes = [] + eidx = 0 + for m in range(self.cfg.n_nodes): + acc = torch.zeros(X.shape[0], dtype=torch.float32, device=X.device) + for j in range(X.shape[1]): + out = self.edges[eidx].batch_forward(X01[:, j], edge_probs[eidx]) + acc = acc + out + eidx += 1 + nodes.append(acc) + return torch.stack(nodes, 1) + +@dataclass +class KANReadoutCfg: + n_classes: int + in_dim: int + fourier_k: int = 3 + fourier_depth: int = 1 + +class KANReadout(nn.Module): + """ + KAN-style readout: for each output class, sum QuantumFourier transforms of each hidden unit (no Linear). 
class QuKANNet(nn.Module):
    """Two stacked QuKAN layers followed by a KAN-style quantum readout."""

    def __init__(self, cfg, input_dim, seed=0):
        super().__init__()
        print("[QuKANNet] Initializing network...")
        self.l1 = QuKANLayer(cfg.layer1, seed)
        self.l1.build(input_dim, seed)
        self.l2 = QuKANLayer(cfg.layer2, seed + 1)
        self.l2.build(cfg.layer1.n_nodes, seed + 1)
        self.readout = KANReadout(KANReadoutCfg(cfg.n_classes, cfg.layer2.n_nodes), seed + 123)
        print("[QuKANNet] Build complete.")

    def forward(self, X):
        hidden = self.l2(self.l1(X))
        return self.readout(hidden)
y_tr, y_te = train_test_split( + X, y, test_size=0.2, random_state=seed, stratify=y + ) + X_tr = torch.tensor(X_tr, dtype=torch.float32) + X_te = torch.tensor(X_te, dtype=torch.float32) + y_tr = torch.tensor(y_tr, dtype=torch.long) + y_te = torch.tensor(y_te, dtype=torch.long) + + + input_dim = X_tr.shape[1] + model = QuKANNet(QuKANNetCfg(), input_dim=input_dim, seed=seed) + + opt = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=1e-4) + ce = nn.CrossEntropyLoss(label_smoothing=0.05) + + print("\nTraining QuKAN on HIGGS (28 features)") + for ep in range(1, epochs + 1): + model.train() + perm = torch.randperm(X_tr.shape[0]) + xb_all, yb_all = X_tr[perm], y_tr[perm] + + tot, corr = 0, 0 + epoch_loss = 0.0 + + for i in range(0, xb_all.shape[0], batch_size): + xb = xb_all[i:i+batch_size] + yb = yb_all[i:i+batch_size] + + opt.zero_grad(set_to_none=True) + logits = model(xb) + loss = ce(logits, yb) + loss.backward() + opt.step() + + epoch_loss += loss.item() * xb.size(0) + tot += xb.size(0) + corr += (logits.argmax(1) == yb).sum().item() + + train_acc = 100.0 * corr / tot + with torch.no_grad(): + val_logits = model(X_te) + val_acc = (val_logits.argmax(1) == y_te).float().mean().item() * 100.0 + + avg_loss = epoch_loss / tot + print(f"Epoch {ep:03d} | Loss={avg_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%") + +if __name__ == "__main__": + CSV_PATH = r"C:\Users\riakh\Downloads\archive (26)\HIGGS.csv" + run_higgs(CSV_PATH, n_samples=2000, epochs=20, batch_size=128, seed=0) diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_iris.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_iris.py new file mode 100644 index 0000000..161afa9 --- /dev/null +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_iris.py @@ -0,0 +1,457 @@ + +import math +import numpy as np +from dataclasses import dataclass, field + +import torch +import torch.nn as nn +import torch.nn.functional as F +import pennylane as qml + +from 
def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:
    """Evaluate an open-uniform B-spline basis on [0, 1] via Cox–de Boor recursion.

    num_splines = n+1 basis functions of degree p. The knot vector has p+1
    repeated knots at each end and exactly (n-p) uniformly spaced interior
    knots, so the basis forms a partition of unity on [0, 1].

    Args:
        num_splines: number of basis functions (must be >= degree + 1).
        degree: polynomial degree p of each basis function.
        grid: evaluation points, expected in [0, 1].

    Returns:
        (num_splines, len(grid)) array of non-negative basis values.

    Raises:
        ValueError: if num_splines < degree + 1.
    """
    # Explicit validation instead of `assert`, which is stripped under -O.
    if num_splines < degree + 1:
        raise ValueError(
            f"num_splines ({num_splines}) must be >= degree + 1 ({degree + 1})"
        )
    n = num_splines - 1
    p = degree

    if n - p > 0:
        interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]
    else:
        interior = np.array([], dtype=float)

    knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])

    def N(i, r, t):
        """Cox–de Boor: basis i of degree r evaluated at array t."""
        if r == 0:
            left = knots[i]
            right = knots[i + 1]
            # Half-open [left, right); the last interval is closed at t == 1.0
            # so the final basis function reaches 1 at the right endpoint.
            return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)
        left_den = knots[i + r] - knots[i]
        right_den = knots[i + r + 1] - knots[i + 1]
        # Guards against 0/0 at repeated knots (term defined as 0 there).
        left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0.0
        right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0.0
        return left_term + right_term

    tgrid = np.asarray(grid, dtype=float)
    B = np.vstack([N(i, p, tgrid) for i in range(num_splines)])
    # Clip tiny negative round-off so downstream probability targets stay valid.
    return np.maximum(B, 0.0)
class LabelMixer(nn.Module):
    """Trainable label-register rotation layered on a shared QCBM state."""

    def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):
        super().__init__()
        torch.manual_seed(seed)
        self.qcbm = qcbm
        self.L = qcbm.L
        self.P = qcbm.P
        self.n_qubits = qcbm.n_qubits
        self.depth = depth

        self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))

        self.dev = qml.device("default.qubit", wires=self.n_qubits)
        all_wires = list(range(self.n_qubits))
        label_wires = list(range(self.L))

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def mixed_probs(weights_qcbm, weights_label):
            qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=all_wires)
            if self.L > 0:
                qml.templates.StronglyEntanglingLayers(weights_label, wires=label_wires)
            return qml.probs(wires=all_wires)

        self._qprobs = mixed_probs

    def forward(self):
        # Joint label ⊗ position distribution with the mixer applied.
        return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)
range(self.K)] + x = [qml.expval(qml.PauliX(k)) for k in range(self.K)] + return z + x + + self._qnode = qnode + + def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor: + x01 = torch.clamp(x01_scalar.reshape(()), 0.0, 1.0) + omega = F.softplus(self.log_omega) + 1e-4 + alpha = omega * (2.0 * math.pi * x01) + self.phase + outs = self._qnode(alpha.to(torch.float32)) + outs = torch.stack([o if isinstance(o, torch.Tensor) else torch.as_tensor(o, dtype=torch.float32) + for o in outs], dim=0).to(torch.float32) + z = outs[: self.K] + x = outs[self.K:] + return (self.w_cos * z).sum() + (self.w_sin * x).sum() + + def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor: + x01_vec = torch.clamp(x01_vec.to(torch.float32), 0.0, 1.0) + vals = [self.forward_scalar(x01_vec[i]) for i in range(x01_vec.shape[0])] + return torch.stack(vals, dim=0).to(torch.float32) + + + + +class QuKANResidualEdge(nn.Module): + def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int, + fourier_k: int = 4, fourier_depth: int = 1, seed: int = 0, w_init=0.5): + super().__init__() + self.mixer = mixer + self.L = n_label_qubits + self.P = n_pos_qubits + self.Nlabel = 2 ** self.L + self.Npos = 2 ** self.P + + self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) + self.wq = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) + + self.qfour = QuantumBlock(k_frequencies=fourier_k, entangle_depth=fourier_depth, seed=seed) + + def batch_forward(self, x_raw: torch.Tensor, x_pos01: torch.Tensor, probs_flat: torch.Tensor) -> torch.Tensor: + x_pos01 = x_pos01.to(torch.float32) + probs_flat = probs_flat.to(torch.float32) + + B = x_pos01.shape[0] + lp = probs_flat.view(self.Nlabel, self.Npos) + + idx = torch.round(torch.clamp(x_pos01, 0.0, 1.0) * (self.Npos - 1)).long() + idx = torch.clamp(idx, 0, self.Npos - 1) + + p_vals = lp[:, idx].sum(dim=0).to(torch.float32) + qfr_vals = self.qfour.forward_batch(x_pos01) + + out = (self.wf * p_vals + self.wq 
* qfr_vals).to(torch.float32) + return out + +@dataclass +class QuKANLayerCfg: + n_nodes: int = 6 + n_label_qubits: int = 2 + n_pos_qubits: int = 5 + qcbm_depth: int = 3 + label_mixer_depth: int = 2 + fourier_k: int = 4 + fourier_depth: int = 1 + +class QuKANLayer(nn.Module): + """ + KAN-style: node_m = sum_j f_edge_{m,j}(x_j) + Quantum part is independent of x (probabilities over pos bins); + x affects which position bin is read out and the Fourier phase. + """ + def __init__(self, cfg: QuKANLayerCfg, seed=0): + super().__init__() + torch.manual_seed(seed) + self.cfg = cfg + self.L = cfg.n_label_qubits + self.P = cfg.n_pos_qubits + + self.qcbm = QCBMState(self.L, self.P, depth=cfg.qcbm_depth, seed=seed) + self.mixers = nn.ModuleList() + self.edges = nn.ModuleList() + self._built = False + + def build(self, input_dim: int, seed: int = 0): + self.input_dim = input_dim + torch.manual_seed(seed) + for m in range(self.cfg.n_nodes): + for j in range(input_dim): + mixer = LabelMixer(self.qcbm, depth=self.cfg.label_mixer_depth, seed=seed + 97 * m + j) + edge = QuKANResidualEdge( + mixer, + self.L, self.P, + fourier_k=self.cfg.fourier_k, + fourier_depth=self.cfg.fourier_depth, + seed=seed + 991 * m + 13 * j, + w_init=0.5 + ) + self.mixers.append(mixer) + self.edges.append(edge) + self._built = True + print(f"built edges: nodes={self.cfg.n_nodes}, in_dim={input_dim}, total_edges={len(self.edges)}") + + def pretrain_qcbm_on_splines(self, degree=2, epochs=200, lr=5e-2, verbose=True): + num_splines = 2 ** self.L + Npos = 2 ** self.P + grid = np.linspace(0.0, 1.0, Npos, dtype=float) + B = bspline_basis_matrix(num_splines, degree, grid) + + B = B + 1e-8 + B = B / B.sum(axis=1, keepdims=True) + target = torch.tensor((B / num_splines).reshape(-1), dtype=torch.float32) + + opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr) + for ep in range(1, epochs + 1): + opt.zero_grad() + probs = self.qcbm().to(torch.float32) + loss = F.mse_loss(probs, target) + loss.backward() + opt.step() 
+ if verbose and (ep % 50 == 0 or ep == 1): + with torch.no_grad(): + tv = 0.5 * torch.sum(torch.abs(probs - target)).item() + print(f"[QCBM pretrain] epoch {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}") + self.qcbm.freeze() + + def forward(self, X_in: torch.Tensor, input_is_01: bool) -> torch.Tensor: + """ + X_in: (B, D) inputs to the layer + input_is_01: True if X_in is already in [0,1] (layer 1); False for layer 2 (we'll squash for position/phase) + returns: (B, M) node outputs + """ + assert self._built, "Call build(input_dim) first." + X_in = X_in.to(torch.float32) + B, D = X_in.shape + M = self.cfg.n_nodes + + edge_probs = [mix().to(torch.float32) for mix in self.mixers] + X01_pos = (X_in if input_is_01 else torch.sigmoid(X_in)).to(torch.float32) + + nodes = [] + eidx = 0 + for m in range(M): + acc = torch.zeros(B, dtype=torch.float32, device=X_in.device) + for j in range(D): + probs_flat = edge_probs[eidx] + edge = self.edges[eidx] + x_pos = X01_pos[:, j].to(torch.float32) + out_j = edge.batch_forward(x_pos, x_pos, probs_flat).to(torch.float32) + acc = acc + out_j + eidx += 1 + nodes.append(acc) + nodes = torch.stack(nodes, dim=1).to(torch.float32) + return nodes + + +@dataclass +class KANReadoutCfg: + n_classes: int + in_dim: int + fourier_k: int = 3 + fourier_depth: int = 1 + +class KANReadout(nn.Module): + def __init__(self, cfg: KANReadoutCfg, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.cfg = cfg + C, M = cfg.n_classes, cfg.in_dim + + self.qfr = nn.ModuleList([ + QuantumBlock(k_frequencies=cfg.fourier_k, + entangle_depth=cfg.fourier_depth, + seed=seed + 131 * c + m) + for c in range(C) for m in range(M) + ]) + self.b = nn.Parameter(torch.zeros(C, dtype=torch.float32)) + + def _edge_idx(self, c: int, m: int) -> int: + return c * self.cfg.in_dim + m + + def forward(self, H: torch.Tensor) -> torch.Tensor: + H = H.to(torch.float32) + B, M = H.shape + C = self.cfg.n_classes + H01 = torch.sigmoid(H) + + logits = [] + for c in 
class QuKANNet(nn.Module):
    """Two QuKAN layers plus a quantum-Fourier KAN readout for classification."""

    def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.cfg = cfg

        self.l1 = QuKANLayer(cfg.layer1, seed=seed)
        self.l1.build(input_dim=input_dim, seed=seed)

        self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)
        self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)

        readout_cfg = KANReadoutCfg(
            n_classes=cfg.n_classes,
            in_dim=cfg.layer2.n_nodes,
            fourier_k=3,
            fourier_depth=1,
        )
        self.readout = KANReadout(readout_cfg, seed=seed + 1234)

    def pretrain_qcbms(self, degree=2, epochs=200, lr=5e-2, verbose=True):
        """Fit both layers' QCBMs to B-spline targets, then freeze them."""
        print("[Pretrain] Layer 1 QCBM on degree-2 B-splines")
        self.l1.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)
        print("[Pretrain] Layer 2 QCBM on degree-2 B-splines")
        self.l2.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)

    def forward(self, X01: torch.Tensor) -> torch.Tensor:
        first = self.l1(X01.to(torch.float32), input_is_01=True).to(torch.float32)
        second = self.l2(first, input_is_01=False).to(torch.float32)
        return self.readout(second)
scaler.fit_transform(X).astype(np.float32) + + X_tr, X_te, y_tr, y_te = train_test_split( + X01, y, test_size=0.3, random_state=seed, stratify=y + ) + + X_tr = torch.tensor(X_tr, dtype=torch.float32) + X_te = torch.tensor(X_te, dtype=torch.float32) + y_tr = torch.tensor(y_tr, dtype=torch.long) + y_te = torch.tensor(y_te, dtype=torch.long) + + cfg = QuKANNetCfg( + layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1), + layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1), + n_classes=3, + ) + model = QuKANNet(cfg, input_dim=4, seed=seed) + + + model.pretrain_qcbms(degree=2, epochs=200, lr=5e-2, verbose=True) + + opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4) + sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=60) + ce = nn.CrossEntropyLoss(label_smoothing=0.03) + + print("\nTraining QuKAN on Iris") + epochs = 60 + B = 16 + for ep in range(1, epochs + 1): + model.train() + perm = torch.randperm(X_tr.shape[0]) + Xb_all, yb_all = X_tr[perm], y_tr[perm] + + tot, corr, loss_sum = 0, 0, 0.0 + for i in range(0, Xb_all.shape[0], B): + xb = Xb_all[i:i+B] + yb = yb_all[i:i+B] + opt.zero_grad(set_to_none=True) + logits = model(xb) + loss = ce(logits, yb) + loss.backward() + torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0) + opt.step() + + loss_sum += float(loss.item()) * xb.size(0) + tot += xb.size(0) + corr += (logits.argmax(1) == yb).sum().item() + + sched.step() + train_acc = 100.0 * corr / tot + train_loss = loss_sum / tot + + model.eval() + with torch.no_grad(): + logits_te = model(X_te) + val_acc = 100.0 * (logits_te.argmax(1) == y_te).float().mean().item() + + if ep % 2 == 1 or ep >= epochs - 10: + print(f"Epoch {ep:03d} | Loss={train_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%") + + print("Done.") + +if __name__ == "__main__": + run_iris(seed=0) diff 
--git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_social_networks_ad.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_social_networks_ad.py new file mode 100644 index 0000000..a6de402 --- /dev/null +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/fully_qkan_social_networks_ad.py @@ -0,0 +1,486 @@ +import os +import math +import numpy as np +from dataclasses import dataclass, field + +import torch +import torch.nn as nn +import torch.nn.functional as F +import pennylane as qml +import pandas as pd + +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import MinMaxScaler + + +CSV_PATH = "C:\\Users\\riakh\\Downloads\\Social_Network_Ads.csv" + + +torch.set_default_dtype(torch.float32) + + +def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray: + """ + Open-uniform B-spline basis on [0,1]. + num_splines = n+1, degree = p. Knot vector length must be n+p+2 with p+1 repeats at each end, + and exactly (n-p) interior knots. 
class QCBMState(nn.Module):
    """Shared quantum circuit Born machine over label + position qubit registers."""

    def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0):
        super().__init__()
        torch.manual_seed(seed)
        self.L = n_label_qubits
        self.P = n_pos_qubits
        self.n_qubits = self.L + self.P
        self.depth = depth

        self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))

        self.dev = qml.device("default.qubit", wires=self.n_qubits)
        all_wires = list(range(self.n_qubits))

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def born_probs(weights):
            qml.templates.StronglyEntanglingLayers(weights, wires=all_wires)
            return qml.probs(wires=all_wires)

        self._qprobs = born_probs

    def forward(self):
        return self._qprobs(self.theta).to(torch.float32)

    @torch.no_grad()
    def freeze(self):
        # Stop gradient flow through the pretrained distribution.
        self.theta.requires_grad_(False)
qcbm.n_qubits + self.depth = depth + + init = 0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32) + self.phi = nn.Parameter(init) + + self.dev = qml.device("default.qubit", wires=self.n_qubits) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(weights_qcbm, weights_label): + qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=list(range(self.n_qubits))) + if self.L > 0: + qml.templates.StronglyEntanglingLayers(weights_label, wires=list(range(self.L))) + return qml.probs(wires=list(range(self.n_qubits))) + + self._qprobs = qnode + + def forward(self): + return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32) + + + + +class QuantumBlock(nn.Module): + def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.K = k_frequencies + self.depth = entangle_depth + + self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05) + self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32)) + self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1) + self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1) + + self.dev = qml.device("default.qubit", wires=self.K) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(alpha_vec): + for k in range(self.K): + qml.RY(alpha_vec[k], wires=k) + for _ in range(self.depth): + for k in range(self.K): + qml.CNOT(wires=[k, (k + 1) % self.K]) + z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)] + x = [qml.expval(qml.PauliX(k)) for k in range(self.K)] + return z + x + + self._qnode = qnode + + def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor: + x01 = torch.clamp(x01_scalar.reshape(()), 0.0, 1.0) + omega = F.softplus(self.log_omega) + 1e-4 + alpha = omega * (2.0 * math.pi * x01) + self.phase + outs = self._qnode(alpha.to(torch.float32)) + outs = torch.stack([o if isinstance(o, torch.Tensor) else 
torch.as_tensor(o, dtype=torch.float32) + for o in outs], dim=0).to(torch.float32) + z = outs[: self.K] + x = outs[self.K:] + return (self.w_cos * z).sum() + (self.w_sin * x).sum() + + def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor: + x01_vec = torch.clamp(x01_vec.to(torch.float32), 0.0, 1.0) + vals = [self.forward_scalar(x01_vec[i]) for i in range(x01_vec.shape[0])] + return torch.stack(vals, dim=0).to(torch.float32) + + + + +class QuKANResidualEdge(nn.Module): + def __init__(self, mixer: LabelMixer, n_label_qubits: int, n_pos_qubits: int, + fourier_k: int = 4, fourier_depth: int = 1, seed: int = 0, w_init=0.5): + super().__init__() + self.mixer = mixer + self.L = n_label_qubits + self.P = n_pos_qubits + self.Nlabel = 2 ** self.L + self.Npos = 2 ** self.P + + self.wf = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) + self.wq = nn.Parameter(torch.tensor(float(w_init), dtype=torch.float32)) + + self.qfour = QuantumBlock(k_frequencies=fourier_k, entangle_depth=fourier_depth, seed=seed) + + def batch_forward(self, x_pos01: torch.Tensor, probs_flat: torch.Tensor) -> torch.Tensor: + """ + x_pos01: (B,) clamped to [0,1] (for position index AND Fourier phase) + probs_flat: (Nlabel*Npos,) from the edge's LabelMixer() + returns: (B,) + """ + x_pos01 = x_pos01.to(torch.float32) + probs_flat = probs_flat.to(torch.float32) + + B = x_pos01.shape[0] + lp = probs_flat.view(self.Nlabel, self.Npos) + + idx = torch.round(torch.clamp(x_pos01, 0.0, 1.0) * (self.Npos - 1)).long() + idx = torch.clamp(idx, 0, self.Npos - 1) + + p_vals = lp[:, idx].sum(dim=0).to(torch.float32) + qfr_vals = self.qfour.forward_batch(x_pos01) + + out = (self.wf * p_vals + self.wq * qfr_vals).to(torch.float32) + return out + + + + +@dataclass +class QuKANLayerCfg: + n_nodes: int = 6 + n_label_qubits: int = 2 + n_pos_qubits: int = 5 + qcbm_depth: int = 3 + label_mixer_depth: int = 2 + fourier_k: int = 4 + fourier_depth: int = 1 + +class QuKANLayer(nn.Module): + """ + KAN-style: 
node_m = sum_j f_edge_{m,j}(x_j) + Quantum part is independent of x (probabilities over pos bins); + x affects which position bin is read out and the Fourier phase. + """ + def __init__(self, cfg: QuKANLayerCfg, seed=0): + super().__init__() + torch.manual_seed(seed) + self.cfg = cfg + self.L = cfg.n_label_qubits + self.P = cfg.n_pos_qubits + + self.qcbm = QCBMState(self.L, self.P, depth=cfg.qcbm_depth, seed=seed) + self.mixers = nn.ModuleList() + self.edges = nn.ModuleList() + self._built = False + + def build(self, input_dim: int, seed: int = 0): + self.input_dim = input_dim + torch.manual_seed(seed) + for m in range(self.cfg.n_nodes): + for j in range(input_dim): + mixer = LabelMixer(self.qcbm, depth=self.cfg.label_mixer_depth, seed=seed + 97 * m + j) + edge = QuKANResidualEdge( + mixer, + self.L, self.P, + fourier_k=self.cfg.fourier_k, + fourier_depth=self.cfg.fourier_depth, + seed=seed + 991 * m + 13 * j, + w_init=0.5 + ) + self.mixers.append(mixer) + self.edges.append(edge) + self._built = True + print(f"built edges: nodes={self.cfg.n_nodes}, in_dim={input_dim}, total_edges={len(self.edges)}") + + def pretrain_qcbm_on_splines(self, degree=2, epochs=200, lr=5e-2, verbose=True): + num_splines = 2 ** self.L + Npos = 2 ** self.P + grid = np.linspace(0.0, 1.0, Npos, dtype=float) + B = bspline_basis_matrix(num_splines, degree, grid) + + B = B + 1e-8 + B = B / B.sum(axis=1, keepdims=True) + target = torch.tensor((B / num_splines).reshape(-1), dtype=torch.float32) + + opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr) + for ep in range(1, epochs + 1): + opt.zero_grad() + probs = self.qcbm().to(torch.float32) + loss = F.mse_loss(probs, target) + loss.backward() + opt.step() + if verbose and (ep % 50 == 0 or ep == 1): + with torch.no_grad(): + tv = 0.5 * torch.sum(torch.abs(probs - target)).item() + print(f"[QCBM pretrain] epoch {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}") + self.qcbm.freeze() + + def forward(self, X_in: torch.Tensor, input_is_01: bool) -> 
torch.Tensor: + """ + X_in: (B, D) inputs to the layer + input_is_01: True if X_in is already in [0,1] (layer 1); False for layer 2 (we'll squash for position/phase) + returns: (B, M) node outputs + """ + assert self._built, "Call build(input_dim) first." + X_in = X_in.to(torch.float32) + B, D = X_in.shape + M = self.cfg.n_nodes + + edge_probs = [mix().to(torch.float32) for mix in self.mixers] + X01_pos = (X_in if input_is_01 else torch.sigmoid(X_in)).to(torch.float32) + + nodes = [] + eidx = 0 + for m in range(M): + acc = torch.zeros(B, dtype=torch.float32, device=X_in.device) + for j in range(D): + probs_flat = edge_probs[eidx] + edge = self.edges[eidx] + x_pos = X01_pos[:, j].to(torch.float32) + out_j = edge.batch_forward(x_pos, probs_flat).to(torch.float32) + acc = acc + out_j + eidx += 1 + nodes.append(acc) + nodes = torch.stack(nodes, dim=1).to(torch.float32) + return nodes + + +@dataclass +class KANReadoutCfg: + n_classes: int + in_dim: int + fourier_k: int = 3 + fourier_depth: int = 1 + +class KANReadout(nn.Module): + def __init__(self, cfg: KANReadoutCfg, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.cfg = cfg + C, M = cfg.n_classes, cfg.in_dim + + self.qfr = nn.ModuleList([ + QuantumBlock(k_frequencies=cfg.fourier_k, + entangle_depth=cfg.fourier_depth, + seed=seed + 131 * c + m) + for c in range(C) for m in range(M) + ]) + self.b = nn.Parameter(torch.zeros(C, dtype=torch.float32)) + + def _edge_idx(self, c: int, m: int) -> int: + return c * self.cfg.in_dim + m + + def forward(self, H: torch.Tensor) -> torch.Tensor: + H = H.to(torch.float32) + B, M = H.shape + C = self.cfg.n_classes + H01 = torch.sigmoid(H) + + logits = [] + for c in range(C): + acc_c = torch.zeros(B, dtype=torch.float32, device=H.device) + for m in range(M): + qfr = self.qfr[self._edge_idx(c, m)] + acc_c = acc_c + qfr.forward_batch(H01[:, m]) + logits.append(acc_c + self.b[c]) + return torch.stack(logits, dim=1).to(torch.float32) + + + + +@dataclass +class 
@dataclass
class QuKANNetCfg:
    # Two hidden QuKAN layers followed by a KAN readout.
    layer1: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))
    layer2: QuKANLayerCfg = field(default_factory=lambda: QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, fourier_k=4, fourier_depth=1))
    n_classes: int = 2


class QuKANNet(nn.Module):
    """Two stacked QuKAN layers followed by a per-class KAN readout."""

    def __init__(self, cfg: QuKANNetCfg, input_dim: int, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.cfg = cfg

        self.l1 = QuKANLayer(cfg.layer1, seed=seed)
        self.l1.build(input_dim=input_dim, seed=seed)

        self.l2 = QuKANLayer(cfg.layer2, seed=seed + 1)
        self.l2.build(input_dim=cfg.layer1.n_nodes, seed=seed + 1)

        self.readout = KANReadout(
            KANReadoutCfg(
                n_classes=cfg.n_classes,
                in_dim=cfg.layer2.n_nodes,
                fourier_k=3,
                fourier_depth=1
            ),
            seed=seed + 1234
        )

    def pretrain_qcbms(self, degree=2, epochs=200, lr=5e-2, verbose=True):
        """Pretrain (then freeze) both layers' QCBMs on B-spline targets."""
        print("[Pretrain] Layer 1 QCBM on degree-2 B-splines")
        self.l1.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)
        print("[Pretrain] Layer 2 QCBM on degree-2 B-splines")
        self.l2.pretrain_qcbm_on_splines(degree=degree, epochs=epochs, lr=lr, verbose=verbose)

    def forward(self, X01: torch.Tensor) -> torch.Tensor:
        """(B, input_dim) features in [0,1] -> (B, n_classes) logits."""
        X01 = X01.to(torch.float32)
        h1 = self.l1(X01, input_is_01=True).to(torch.float32)
        h2 = self.l2(h1, input_is_01=False).to(torch.float32)
        return self.readout(h2)


def run_social(seed=0, epochs=60, batch_size=32):
    """Train and evaluate the fully quantum KAN on the Social_Network_Ads CSV.

    GENERALIZATION: epochs and batch_size were hard-coded (60 and 32, also
    baked into the scheduler's T_max); they are now keyword parameters with
    the same defaults, so existing callers are unaffected.
    """
    torch.manual_seed(seed)
    np.random.seed(seed)

    assert os.path.exists(CSV_PATH), f"CSV not found: {CSV_PATH}"
    df = pd.read_csv(CSV_PATH)

    # Case-insensitive column lookup.
    cols = [c.lower() for c in df.columns]
    col_map = {c.lower(): c for c in df.columns}
    needed = ["age", "estimatedsalary", "purchased"]
    for k in needed:
        assert k in cols, f"Column '{k}' not found in CSV. Found columns: {df.columns.tolist()}"

    X_np = df[[col_map["age"], col_map["estimatedsalary"]]].values.astype(np.float32)
    y_np = df[col_map["purchased"]].values.astype(np.int64)

    # Scale features into [0, 1] — the layers read them as position indices.
    scaler = MinMaxScaler(feature_range=(0.0, 1.0))
    X01 = scaler.fit_transform(X_np).astype(np.float32)

    X_tr, X_te, y_tr, y_te = train_test_split(
        X01, y_np, test_size=0.3, random_state=seed, stratify=y_np
    )

    X_tr = torch.tensor(X_tr, dtype=torch.float32)
    X_te = torch.tensor(X_te, dtype=torch.float32)
    y_tr = torch.tensor(y_tr, dtype=torch.long)
    y_te = torch.tensor(y_te, dtype=torch.long)

    cfg = QuKANNetCfg(
        layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5,
                             qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),
        layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5,
                             qcbm_depth=3, label_mixer_depth=2, fourier_k=4, fourier_depth=1),
        n_classes=2,
    )
    model = QuKANNet(cfg, input_dim=2, seed=seed)

    model.pretrain_qcbms(degree=2, epochs=200, lr=5e-2, verbose=True)

    opt = torch.optim.AdamW(model.parameters(), lr=2e-3, weight_decay=8e-4)
    sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=epochs)
    ce = nn.CrossEntropyLoss(label_smoothing=0.03)

    print("\nTraining QuKAN on Social_Network_Ads")
    for ep in range(1, epochs + 1):
        model.train()
        perm = torch.randperm(X_tr.shape[0])
        Xb_all, yb_all = X_tr[perm], y_tr[perm]

        tot, corr, loss_sum = 0, 0, 0.0
        for i in range(0, Xb_all.shape[0], batch_size):
            xb = Xb_all[i:i + batch_size]
            yb = yb_all[i:i + batch_size]
            opt.zero_grad(set_to_none=True)
            logits = model(xb)
            loss = ce(logits, yb)
            loss.backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            opt.step()

            loss_sum += float(loss.item()) * xb.size(0)
            tot += xb.size(0)
            corr += (logits.argmax(1) == yb).sum().item()

        sched.step()
        train_acc = 100.0 * corr / tot
        train_loss = loss_sum / tot

        model.eval()
        with torch.no_grad():
            logits_te = model(X_te)
            val_acc = 100.0 * (logits_te.argmax(1) == y_te).float().mean().item()

        if ep % 2 == 1 or ep >= epochs - 10:
            print(f"Epoch {ep:03d} | Loss={train_loss:.4f} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%")

    print("Done.")

    with torch.no_grad():
        pred = model(X_te).argmax(1).cpu().numpy()
        acc = (pred == y_te.cpu().numpy()).mean() * 100
        print(f"\nFinal Test Accuracy: {acc:.2f}%")


if __name__ == "__main__":
    run_social(seed=0)
import os
import math
import numpy as np
import pandas as pd
from dataclasses import dataclass, field

import torch
import torch.nn as nn
import torch.nn.functional as F
import pennylane as qml

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler

torch.set_default_dtype(torch.float32)

# BUG FIX: the original plain literal "C:\Users\..." is a SyntaxError on
# Python 3 — "\U" begins an 8-hex-digit unicode escape.  A raw string keeps
# the backslashes literal (same path the author intended).
CSV_PATH = r"C:\Users\riakh\Downloads\archive\Titanic-Dataset.csv"


def bspline_basis_matrix(num_splines: int, degree: int, grid: np.ndarray) -> np.ndarray:
    """Open-uniform (clamped) B-spline basis evaluated on `grid` in [0, 1].

    Returns a (num_splines, len(grid)) array via the Cox-de Boor recursion.
    The clamped knot vector (p + 1 repeats at each end, n - p interior
    knots) makes the basis a partition of unity on [0, 1].
    """
    assert num_splines >= degree + 1
    n = num_splines - 1
    p = degree
    if n - p > 0:
        interior = np.linspace(0.0, 1.0, (n - p) + 2, dtype=float)[1:-1]
    else:
        interior = np.array([], dtype=float)
    knots = np.concatenate([np.zeros(p + 1), interior, np.ones(p + 1)])

    def N(i, r, t):
        # Degree 0: indicator of the half-open knot span, closed at t == 1.
        if r == 0:
            left, right = knots[i], knots[i + 1]
            return np.where(((t >= left) & (t < right)) | ((right == 1.0) & (t == 1.0)), 1.0, 0.0)
        left_den = knots[i + r] - knots[i]
        right_den = knots[i + r + 1] - knots[i + 1]
        # 0/0 spans (repeated knots) contribute nothing by convention.
        left_term = ((t - knots[i]) / left_den) * N(i, r - 1, t) if left_den > 0 else 0
        right_term = ((knots[i + r + 1] - t) / right_den) * N(i + 1, r - 1, t) if right_den > 0 else 0
        return left_term + right_term

    tgrid = np.asarray(grid, dtype=float)
    return np.vstack([N(i, p, tgrid) for i in range(num_splines)])


class QCBMState(nn.Module):
    """QCBM over L label + P position qubits; forward() returns the Born
    probabilities of a StronglyEntanglingLayers ansatz."""

    def __init__(self, n_label_qubits, n_pos_qubits, depth=3, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.L, self.P = n_label_qubits, n_pos_qubits
        self.n_qubits = self.L + self.P
        # Small init keeps the prepared state near |0...0>.
        self.theta = nn.Parameter(0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32))
        self.dev = qml.device("default.qubit", wires=self.n_qubits)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def qnode(weights):
            qml.templates.StronglyEntanglingLayers(weights, wires=range(self.n_qubits))
            return qml.probs(wires=range(self.n_qubits))
        self._qprobs = qnode

    def forward(self):
        """Outcome probabilities over all 2**(L+P) basis states (float32)."""
        return self._qprobs(self.theta).to(torch.float32)

    def freeze(self):
        """Stop gradient flow into theta (after pretraining)."""
        self.theta.requires_grad_(False)


class LabelMixer(nn.Module):
    """Extra entangling block on the label qubits of a shared QCBM state."""

    def __init__(self, qcbm: QCBMState, depth=1, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.qcbm = qcbm
        self.L, self.P = qcbm.L, qcbm.P
        self.phi = nn.Parameter(0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32))
        self.dev = qml.device("default.qubit", wires=self.L + self.P)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def qnode(weights_qcbm, weights_label):
            qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=range(self.L + self.P))
            if self.L > 0:
                qml.templates.StronglyEntanglingLayers(weights_label, wires=range(self.L))
            return qml.probs(wires=range(self.L + self.P))
        self._qprobs = qnode

    def forward(self):
        """Probabilities over all (label, position) outcomes, float32."""
        return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)
class QuantumBlock(nn.Module):
    """Classical Fourier-feature residual block.

    K learnable frequencies/phases; output for input x in [0, 1] is
    sum_k w_cos[k]*cos(a_k) + w_sin[k]*sin(a_k) with
    a_k = softplus(log_omega_k) * 2*pi*x + phase_k.
    """

    def __init__(self, k_frequencies=3, seed=0):
        super().__init__()
        torch.manual_seed(seed)
        self.K = k_frequencies
        # Frequencies live in log space; softplus in forward keeps them > 0.
        self.log_omega = nn.Parameter(torch.randn(self.K) * 0.05)
        self.phase = nn.Parameter(torch.zeros(self.K))
        self.w_cos = nn.Parameter(torch.randn(self.K) * 0.1)
        self.w_sin = nn.Parameter(torch.randn(self.K) * 0.1)

    def forward_batch(self, x01_vec):
        """(B,) inputs clamped to [0,1] -> (B,) features.

        PERF FIX: the original looped over the batch in Python; this computes
        the identical result in one vectorised pass and handles B == 0
        (the loop version raised in torch.stack([])).
        """
        x01_vec = torch.clamp(x01_vec, 0, 1)
        omega = F.softplus(self.log_omega) + 1e-4
        # (B, K) angles: x * 2*pi*omega + phase, matching the scalar formula.
        alpha = x01_vec.unsqueeze(-1) * (2 * math.pi * omega) + self.phase
        return torch.cos(alpha) @ self.w_cos + torch.sin(alpha) @ self.w_sin


class QuKANResidualEdge(nn.Module):
    """One KAN edge: wf * spline-probability readout + wq * Fourier residual."""

    def __init__(self, mixer, n_label_qubits, n_pos_qubits, fourier_k=3, seed=0, w_init=0.5):
        super().__init__()
        self.mixer = mixer
        self.L, self.P = n_label_qubits, n_pos_qubits
        self.Nlabel, self.Npos = 2 ** self.L, 2 ** self.P
        self.wf = nn.Parameter(torch.tensor(float(w_init)))
        self.wq = nn.Parameter(torch.tensor(float(w_init)))
        self.qfour = QuantumBlock(fourier_k, seed=seed)

    def batch_forward(self, x_pos01, probs_flat):
        """x_pos01: (B,) in [0,1]; probs_flat: (Nlabel*Npos,) mixer output."""
        lp = probs_flat.view(self.Nlabel, self.Npos)
        # Nearest position bin per sample; the label register is summed out.
        idx = torch.round(torch.clamp(x_pos01, 0, 1) * (self.Npos - 1)).long()
        idx = torch.clamp(idx, 0, self.Npos - 1)
        p_vals = lp[:, idx].sum(0)
        qfr_vals = self.qfour.forward_batch(x_pos01)
        return self.wf * p_vals + self.wq * qfr_vals


@dataclass
class QuKANLayerCfg:
    n_nodes: int = 6
    n_label_qubits: int = 2
    n_pos_qubits: int = 5
    qcbm_depth: int = 3
    label_mixer_depth: int = 1
    fourier_k: int = 3
    mixers_trainable: bool = False


class QuKANLayer(nn.Module):
    """KAN layer: node_m = sum_j edge_{m,j}(x_j) over quantum residual edges."""

    def __init__(self, cfg: QuKANLayerCfg, seed=0):
        super().__init__()
        self.cfg = cfg
        self.qcbm = QCBMState(cfg.n_label_qubits, cfg.n_pos_qubits, cfg.qcbm_depth, seed)
        self.mixers, self.edges = nn.ModuleList(), nn.ModuleList()
        self._built = False
        self._train_mixers = cfg.mixers_trainable

    def build(self, input_dim, seed=0):
        """Create n_nodes * input_dim edges; call once before forward()."""
        for m in range(self.cfg.n_nodes):
            for j in range(input_dim):
                mixer = LabelMixer(self.qcbm, self.cfg.label_mixer_depth, seed + 97 * m + j)
                edge = QuKANResidualEdge(mixer, self.cfg.n_label_qubits, self.cfg.n_pos_qubits,
                                         self.cfg.fourier_k, seed=seed + 991 * m + 13 * j)
                self.mixers.append(mixer)
                self.edges.append(edge)
        self._built = True
        print(f"[QuKANLayer] built edges: {self.cfg.n_nodes} nodes × {input_dim} inputs = {len(self.edges)} edges")

    def pretrain_qcbm_on_splines(self, degree=2, epochs=80, lr=5e-2, verbose=True):
        """Fit the QCBM distribution to flattened B-spline targets, then freeze."""
        num_spl, Npos = 2 ** self.cfg.n_label_qubits, 2 ** self.cfg.n_pos_qubits
        grid = np.linspace(0, 1, Npos)
        B = bspline_basis_matrix(num_spl, degree, grid)
        B = (B + 1e-8) / B.sum(1, keepdims=True)
        target = torch.tensor((B / num_spl).reshape(-1), dtype=torch.float32)
        opt = torch.optim.Adam(self.qcbm.parameters(), lr=lr)
        for ep in range(epochs):
            opt.zero_grad()
            probs = self.qcbm()
            loss = F.mse_loss(probs, target)
            loss.backward()
            opt.step()
            if verbose and (ep % 20 == 0 or ep == epochs - 1):
                tv = 0.5 * torch.sum(torch.abs(probs - target)).item()
                print(f"[QCBM pretrain] {ep:03d} | MSE={loss.item():.6f} | TV={tv:.6f}")
        self.qcbm.freeze()
        print(">> QCBM frozen.")

    def forward(self, X, input_is_01=True):
        """(B, D) -> (B, n_nodes); inputs squashed to [0,1] unless flagged."""
        X01 = (X if input_is_01 else torch.sigmoid(X))
        if self._train_mixers:
            edge_probs = [mix() for mix in self.mixers]
        else:
            # Mixers frozen: skip autograd bookkeeping for their circuits.
            with torch.no_grad():
                edge_probs = [mix() for mix in self.mixers]
        nodes = []
        eidx = 0
        for m in range(self.cfg.n_nodes):
            # DEVICE FIX: allocate the accumulator on the input's device so
            # the layer also works off-CPU (identical result on CPU).
            acc = torch.zeros(X.shape[0], dtype=torch.float32, device=X.device)
            for j in range(X.shape[1]):
                acc = acc + self.edges[eidx].batch_forward(X01[:, j], edge_probs[eidx])
                eidx += 1
            nodes.append(acc)
        return torch.stack(nodes, 1)


@dataclass
class KANReadoutCfg:
    n_classes: int
    in_dim: int
    fourier_k: int = 3


class KANReadout(nn.Module):
    """Per-class Fourier readout: logit_c = b_c + sum_m f_{c,m}(sigmoid(h_m))."""

    def __init__(self, cfg: KANReadoutCfg, seed=0):
        super().__init__()
        self.cfg = cfg
        C, M = cfg.n_classes, cfg.in_dim
        self.qfr = nn.ModuleList([QuantumBlock(cfg.fourier_k, seed + 131 * c + m)
                                  for c in range(C) for m in range(M)])
        self.b = nn.Parameter(torch.zeros(C))

    def _idx(self, c, m):
        # Row-major index into the flat (class, input) ModuleList.
        return c * self.cfg.in_dim + m

    def forward(self, H):
        """(B, in_dim) hidden activations -> (B, n_classes) logits."""
        H01 = torch.sigmoid(H)
        logits = []
        for c in range(self.cfg.n_classes):
            acc = torch.zeros(H.shape[0], dtype=torch.float32, device=H.device)
            for m in range(H.shape[1]):
                acc = acc + self.qfr[self._idx(c, m)].forward_batch(H01[:, m])
            logits.append(acc + self.b[c])
        return torch.stack(logits, 1)
acc=acc+self.qfr[self._idx(c,m)].forward_batch(H01[:,m]) + logits.append(acc+self.b[c]) + return torch.stack(logits,1) + +@dataclass +class QuKANNetCfg: + layer1:QuKANLayerCfg=field(default_factory=QuKANLayerCfg) + layer2:QuKANLayerCfg=field(default_factory=QuKANLayerCfg) + n_classes:int=2 + +class QuKANNet(nn.Module): + def __init__(self,cfg,input_dim,seed=0): + super().__init__() + self.l1=QuKANLayer(cfg.layer1,seed); self.l1.build(input_dim,seed) + self.l2=QuKANLayer(cfg.layer2,seed+1); self.l2.build(cfg.layer1.n_nodes,seed+1) + self.readout=KANReadout(KANReadoutCfg(cfg.n_classes,cfg.layer2.n_nodes),seed+123) + def pretrain_qcbms(self,degree=2,epochs=80,lr=5e-2): + print("\n[Pretrain] Layer 1 QCBM"); self.l1.pretrain_qcbm_on_splines(degree,epochs,lr) + print("\n[Pretrain] Layer 2 QCBM"); self.l2.pretrain_qcbm_on_splines(degree,epochs,lr) + def forward(self,X): + h1=self.l1(X,True); h2=self.l2(h1,False); return self.readout(h2) + + +def _first_present(cols_map, names): + """Return the original-cased column name for the first present alias.""" + for n in names: + if n in cols_map: + return cols_map[n] + return None + +def load_titanic_features(csv_path: str): + assert os.path.exists(csv_path), f"CSV not found: {csv_path}" + df = pd.read_csv(csv_path) + + + cols_map = {c.lower(): c for c in df.columns} + + survived = _first_present(cols_map, ["survived"]) + pclass = _first_present(cols_map, ["pclass","p class","p_class"]) + sex = _first_present(cols_map, ["sex","gender"]) + age = _first_present(cols_map, ["age"]) + sibsp = _first_present(cols_map, ["sibsp","siblings/spouses aboard","siblingsaboard","siblings_spouses_aboard"]) + parch = _first_present(cols_map, ["parch","parents/children aboard","parentschildrenaboard","parents_children_aboard"]) + fare = _first_present(cols_map, ["fare"]) + embarked = _first_present(cols_map, ["embarked","port of embarkation","emb"]) + + for k,v in {"survived":survived,"pclass":pclass,"sex":sex,"age":age, + 
"sibsp":sibsp,"parch":parch,"fare":fare,"embarked":embarked}.items(): + if v is None: + raise ValueError(f"Could not find required column alias for '{k}'. Found columns: {list(df.columns)}") + + + sub = df[[survived, pclass, sex, age, sibsp, parch, fare, embarked]].copy() + sub.columns = [c.lower() for c in sub.columns] + + + sub["sex"] = sub["sex"].astype(str).str.lower().map({"male": 0, "m": 0, "female": 1, "f": 1}) + sub["embarked"] = sub["embarked"].astype(str).str.upper().map({"S": "S", "C": "C", "Q": "Q"}) + + sub["age"] = pd.to_numeric(sub["age"], errors="coerce") + sub["fare"] = pd.to_numeric(sub["fare"], errors="coerce") + sub["age"] = sub["age"].fillna(sub["age"].median()) + sub["fare"] = sub["fare"].fillna(sub["fare"].median()) + sub["sex"] = sub["sex"].fillna(0) + sub["embarked"] = sub["embarked"].fillna("S") + + + one_hot_emb = pd.get_dummies(sub["embarked"], prefix="emb", drop_first=False) + one_hot_cls = pd.get_dummies(sub["pclass"].astype(int), prefix="pcls", drop_first=False) + + X = pd.concat( + [ + one_hot_cls, + sub[["sex", "age", "sibsp", "parch", "fare"]].reset_index(drop=True), + one_hot_emb.reset_index(drop=True), + ], + axis=1, + ).astype(np.float32) + + y = sub["survived"].astype(np.int64).to_numpy() + + + scaler = MinMaxScaler((0.0, 1.0)) + X01 = scaler.fit_transform(X.to_numpy().astype(np.float32)).astype(np.float32) + return X01, y + +def run_titanic(seed=0): + torch.manual_seed(seed); np.random.seed(seed) + + X, y = load_titanic_features(CSV_PATH) + X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y) + + X_tr, X_te = torch.tensor(X_tr, dtype=torch.float32), torch.tensor(X_te, dtype=torch.float32) + y_tr, y_te = torch.tensor(y_tr, dtype=torch.long), torch.tensor(y_te, dtype=torch.long) + + input_dim = X_tr.shape[1] + print(f"Input dim: {input_dim}") + + cfg = QuKANNetCfg( + layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, 
def run_titanic(seed=0, epochs=40, batch_size=64):
    """Train and evaluate the fully quantum KAN on the Titanic CSV.

    GENERALIZATION: epochs and batch_size were hard-coded (40 and 64, with
    40 also baked into the scheduler's T_max); they are keyword parameters
    with identical defaults, so the existing call run_titanic(0) behaves
    the same.
    """
    torch.manual_seed(seed)
    np.random.seed(seed)

    X, y = load_titanic_features(CSV_PATH)
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=seed, stratify=y)

    X_tr, X_te = torch.tensor(X_tr, dtype=torch.float32), torch.tensor(X_te, dtype=torch.float32)
    y_tr, y_te = torch.tensor(y_tr, dtype=torch.long), torch.tensor(y_te, dtype=torch.long)

    input_dim = X_tr.shape[1]
    print(f"Input dim: {input_dim}")

    cfg = QuKANNetCfg(
        layer1=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, mixers_trainable=False),
        layer2=QuKANLayerCfg(n_nodes=6, n_label_qubits=2, n_pos_qubits=5, label_mixer_depth=1, fourier_k=3, mixers_trainable=False),
        n_classes=2
    )
    model = QuKANNet(cfg, input_dim=input_dim, seed=seed)
    model.pretrain_qcbms()

    opt = torch.optim.AdamW(model.parameters(), lr=1.5e-3, weight_decay=8e-4)
    sched = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=epochs)
    ce = nn.CrossEntropyLoss(label_smoothing=0.03)

    print("\nTraining QuKAN on Titanic")
    for ep in range(1, epochs + 1):
        model.train()
        perm = torch.randperm(X_tr.shape[0])
        Xb_all, yb_all = X_tr[perm], y_tr[perm]
        loss_sum, tot, corr = 0.0, 0, 0
        for i in range(0, Xb_all.shape[0], batch_size):
            xb, yb = Xb_all[i:i + batch_size], yb_all[i:i + batch_size]
            opt.zero_grad(set_to_none=True)
            logits = model(xb)
            loss = ce(logits, yb)
            loss.backward()
            opt.step()
            loss_sum += float(loss.item()) * xb.size(0)
            tot += xb.size(0)
            corr += (logits.argmax(1) == yb).sum().item()
        sched.step()
        train_acc = 100.0 * corr / tot
        # BUG FIX: validation previously ran in train mode with autograd
        # recording the whole forward pass; evaluate under eval()/no_grad().
        model.eval()
        with torch.no_grad():
            val_acc = (model(X_te).argmax(1) == y_te).float().mean().item() * 100.0
        print(f"Epoch {ep:03d} | Train Acc={train_acc:.2f}% | Val Acc={val_acc:.2f}%")


if __name__ == "__main__":
    run_titanic(0)
load_digits() X = digits.data[:1000] # 1000 samples diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/equations.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_equations.py similarity index 97% rename from Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/equations.py rename to Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_equations.py index 0e9637a..070788d 100644 --- a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/equations.py +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_equations.py @@ -2,7 +2,7 @@ import torch.nn as nn from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt -from HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer +from models.HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/iris.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_iris.py similarity index 96% rename from Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/iris.py rename to Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_iris.py index 63b08b4..76861f1 100644 --- a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/iris.py +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_iris.py @@ -2,7 +2,7 @@ import torch.nn as nn from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt -from HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer +from models.HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer from sklearn.datasets import load_iris import numpy as np from sklearn.preprocessing import MinMaxScaler diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/quark_gluon.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_quark_gluon.py similarity index 100% rename from Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/quark_gluon.py rename to 
Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_quark_gluon.py diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/social_networks_ad.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_social_networks_ad.py similarity index 96% rename from Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/social_networks_ad.py rename to Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_social_networks_ad.py index 3a3ebd7..8968ced 100644 --- a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/social_networks_ad.py +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_social_networks_ad.py @@ -3,7 +3,7 @@ import torch.nn as nn from sklearn.preprocessing import MinMaxScaler from sklearn.model_selection import train_test_split -from HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer +from models.HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer df = pd.read_csv("C://Users//riakh//Downloads//archive//Social_Network_Ads.csv") X = df[['Age', 'EstimatedSalary']].values diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/titanic.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_titanic.py similarity index 96% rename from Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/titanic.py rename to Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_titanic.py index cd3c732..8515e76 100644 --- a/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/titanic.py +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/experiments/hybrid_qkan_titanic.py @@ -3,7 +3,7 @@ import torch.nn as nn from sklearn.preprocessing import MinMaxScaler from sklearn.model_selection import train_test_split -from HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer +from models.HybridQKAN_model_components import QSVT, quantum_lcu_block, QuantumSumBlock, KANLayer df = pd.read_csv("C://Users//riakh//Downloads//archive//Titanic-Dataset.csv") diff --git 
import torch
import torch.nn as nn
import pennylane as qml
# BUG FIX: the original `from QCBM import QCBMState` only resolves when this
# package directory itself is on sys.path.  The package-relative form lets
# the module load as models.FullyQKAN_model_components.LabelMixer, matching
# how the Hybrid components are imported elsewhere in the repository.
from .QCBM import QCBMState


class LabelMixer(nn.Module):
    """
    Applies an extra entangling block on label qubits,
    after the QCBM has been prepared.

    Re-runs the QCBM ansatz (shared theta) and then a trainable
    StronglyEntanglingLayers block restricted to the L label wires, so each
    mixer produces its own label distribution over a shared position
    register.
    """

    def __init__(self, qcbm: QCBMState, depth: int = 2, seed: int = 0):
        super().__init__()
        torch.manual_seed(seed)
        self.qcbm = qcbm
        self.L = qcbm.L
        self.P = qcbm.P
        self.n_qubits = qcbm.n_qubits
        self.depth = depth

        # Small init keeps the mixer close to the identity at the start.
        init = 0.01 * torch.randn(depth, self.L, 3, dtype=torch.float32)
        self.phi = nn.Parameter(init)

        self.dev = qml.device("default.qubit", wires=self.n_qubits)

        @qml.qnode(self.dev, interface="torch", diff_method="backprop")
        def qnode(weights_qcbm, weights_label):
            qml.templates.StronglyEntanglingLayers(weights_qcbm, wires=list(range(self.n_qubits)))
            if self.L > 0:
                qml.templates.StronglyEntanglingLayers(weights_label, wires=list(range(self.L)))
            return qml.probs(wires=list(range(self.n_qubits)))

        self._qprobs = qnode

    def forward(self):
        """Probabilities over all 2**(L+P) (label, position) outcomes."""
        return self._qprobs(self.qcbm.theta, self.phi).to(torch.float32)
+ """ + def __init__(self, n_label_qubits: int, n_pos_qubits: int, depth: int = 3, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.L = n_label_qubits + self.P = n_pos_qubits + self.n_qubits = self.L + self.P + self.depth = depth + + init = 0.01 * torch.randn(depth, self.n_qubits, 3, dtype=torch.float32) + self.theta = nn.Parameter(init) + + self.dev = qml.device("default.qubit", wires=self.n_qubits) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(weights): + qml.templates.StronglyEntanglingLayers(weights, wires=list(range(self.n_qubits))) + return qml.probs(wires=list(range(self.n_qubits))) + + self._qprobs = qnode + + def forward(self): + return self._qprobs(self.theta).to(torch.float32) + + @torch.no_grad() + def freeze(self): + self.theta.requires_grad_(False) diff --git a/Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/QuantumBlock.py b/Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/QuantumBlock.py new file mode 100644 index 0000000..ecf26d2 --- /dev/null +++ b/Quantum_KAN_for_HEP_Ria_Khatoniar/models/FullyQKAN_model_components/QuantumBlock.py @@ -0,0 +1,52 @@ +import math +import torch +import torch.nn as nn +import torch.nn.functional as F +import pennylane as qml + +class QuantumFourierBlock(nn.Module): + """ + Quantum Fourier Residual block. + Maps input scalars in [0,1] into Fourier-like features + using quantum rotations and entangling layers. 
+ """ + def __init__(self, k_frequencies: int = 4, entangle_depth: int = 1, seed: int = 0): + super().__init__() + torch.manual_seed(seed) + self.K = k_frequencies + self.depth = entangle_depth + + self.log_omega = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.05) + self.phase = nn.Parameter(torch.zeros(self.K, dtype=torch.float32)) + self.w_cos = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1) + self.w_sin = nn.Parameter(torch.randn(self.K, dtype=torch.float32) * 0.1) + + self.dev = qml.device("default.qubit", wires=self.K) + + @qml.qnode(self.dev, interface="torch", diff_method="backprop") + def qnode(alpha_vec): + for k in range(self.K): + qml.RY(alpha_vec[k], wires=k) + for _ in range(self.depth): + for k in range(self.K): + qml.CNOT(wires=[k, (k + 1) % self.K]) + z = [qml.expval(qml.PauliZ(k)) for k in range(self.K)] + x = [qml.expval(qml.PauliX(k)) for k in range(self.K)] + return z + x + + self._qnode = qnode + + def forward_scalar(self, x01_scalar: torch.Tensor) -> torch.Tensor: + x01 = torch.clamp(x01_scalar.reshape(()), 0.0, 1.0) + omega = F.softplus(self.log_omega) + 1e-4 + alpha = omega * (2.0 * math.pi * x01) + self.phase + outs = self._qnode(alpha.to(torch.float32)) + outs = torch.stack([torch.as_tensor(o, dtype=torch.float32) for o in outs], dim=0) + z = outs[: self.K] + x = outs[self.K:] + return (self.w_cos * z).sum() + (self.w_sin * x).sum() + + def forward_batch(self, x01_vec: torch.Tensor) -> torch.Tensor: + x01_vec = torch.clamp(x01_vec.to(torch.float32), 0.0, 1.0) + vals = [self.forward_scalar(x01_vec[i]) for i in range(x01_vec.shape[0])] + return torch.stack(vals, dim=0).to(torch.float32)