From 7b1184890ff5347810e609b603ff66919cff75fd Mon Sep 17 00:00:00 2001
From: wejoncy
Date: Mon, 16 Dec 2024 14:10:29 +0800
Subject: [PATCH] better hash

---
 .../providers/coreml/builders/model_builder.cc   | 17 +++++++----------
 .../coreml/coreml_execution_provider.cc          |  7 ++++++-
 2 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/onnxruntime/core/providers/coreml/builders/model_builder.cc b/onnxruntime/core/providers/coreml/builders/model_builder.cc
index 92d577770804a..9c770fe05185f 100644
--- a/onnxruntime/core/providers/coreml/builders/model_builder.cc
+++ b/onnxruntime/core/providers/coreml/builders/model_builder.cc
@@ -391,7 +391,7 @@ void CreateEmptyFile(const std::string& filename) {
 #endif  // defined(COREML_ENABLE_MLPROGRAM)
 
 std::string GetModelOutputPath(const CoreMLOptions& coreml_options,
-                               const std::vector<std::string>& onnx_input_names) {
+                               const std::string& graph_name) {
   std::string path;
   if (coreml_options.ModelCachePath().empty()) {
     // path is used to create the ML Package directory for ML Program, and for the model directly otherwise.
@@ -400,14 +400,11 @@ std::string GetModelOutputPath(const CoreMLOptions& coreml_options,
       path += ".model.mlmodel";
     }
   } else {
-    // input names in onnx are unique. so we can use them as the key in the cache.
-    std::string inputs_collections = std::accumulate(
-        onnx_input_names.begin(), onnx_input_names.end(), std::string(),
-        [](const std::string& a, const std::string& b) { return a + "," + b; });
-    std::hash<std::string> hasher;
-    // different subgraph has different folders. so we need to hash the inputs.
-    path = std::string(coreml_options.ModelCachePath()) +
-           "/" + std::to_string(hasher(inputs_collections));
+    // graph_name is uniquely generated by gen_metadef_name in
+    // onnxruntime/core/providers/coreml/coreml_execution_provider.cc:
+    //   int metadef_id = metadef_id_generator_.GenerateId(graph_viewer, model_hash);
+    //   MakeString(COREML, "_", model_hash, "_", metadef_id);
+    path = std::string(coreml_options.ModelCachePath()) + "/" + graph_name;
     if (!coreml_options.CreateMLProgram()) {
       ORT_THROW_IF_ERROR(Env::Default().CreateFolder(path));
       path += "/mlmodel";
@@ -427,7 +424,7 @@ ModelBuilder::ModelBuilder(const GraphViewer& graph_viewer, const logging::Logge
       coreml_version_(coreml_version),
       coreml_options_(coreml_options),
       create_ml_program_(coreml_options.CreateMLProgram()),
-      model_output_path_(GetModelOutputPath(coreml_options, onnx_input_names)),
+      model_output_path_(GetModelOutputPath(coreml_options, graph_viewer.Name())),
       onnx_input_names_(std::move(onnx_input_names)),
       onnx_output_names_(std::move(onnx_output_names)),
       coreml_model_(std::make_unique<CoreML::Specification::Model>()) {
diff --git a/onnxruntime/core/providers/coreml/coreml_execution_provider.cc b/onnxruntime/core/providers/coreml/coreml_execution_provider.cc
index 5a2867e5524e4..835a70e8ef1e3 100644
--- a/onnxruntime/core/providers/coreml/coreml_execution_provider.cc
+++ b/onnxruntime/core/providers/coreml/coreml_execution_provider.cc
@@ -18,6 +18,7 @@
 #include "core/providers/coreml/model/host_utils.h"
 #include "core/providers/coreml/model/model.h"
 #include "core/providers/coreml/shape_utils.h"
+#include "core/graph/model.h"
 
 namespace onnxruntime {
 
@@ -57,7 +58,11 @@ CoreMLExecutionProvider::GetCapability(const onnxruntime::GraphViewer& graph_vie
   const auto gen_metadef_name = [&]() {
     HashValue model_hash;
     int metadef_id = metadef_id_generator_.GenerateId(graph_viewer, model_hash);
-    return MakeString(COREML, "_", model_hash, "_", metadef_id);
+    std::string user_provide_hash;
+    if (graph_viewer.GetGraph().GetModel().MetaData().count("CACHE_KEY") > 0) {
+      user_provide_hash = graph_viewer.GetGraph().GetModel().MetaData().at("CACHE_KEY");
+    }
+    return MakeString(user_provide_hash, "_", COREML, "_", model_hash, "_", metadef_id);
   };
 
   result = utils::CreateSupportedPartitions(graph_viewer, supported_nodes, {},
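For reference, the new gen_metadef_name above only prepends a user-supplied key when the ONNX model carries a "CACHE_KEY" entry in its metadata_props. The sketch below shows one way a model producer might attach that entry before handing the model to ONNX Runtime. It is a minimal illustration, not part of this patch: it assumes the onnx protobuf headers are available, and the helper name SetCoreMLCacheKey is hypothetical.

// Minimal sketch: stamp a user-chosen cache key into an ONNX model's
// metadata_props so the CoreML EP's gen_metadef_name can read it via
// MetaData().at("CACHE_KEY"). Helper name and workflow are assumptions.
#include <fstream>
#include <string>

#include "onnx/onnx_pb.h"

void SetCoreMLCacheKey(const std::string& model_path, const std::string& cache_key) {
  onnx::ModelProto model;
  std::ifstream in(model_path, std::ios::binary);
  model.ParseFromIstream(&in);

  // Reuse an existing CACHE_KEY entry if present, otherwise add one.
  onnx::StringStringEntryProto* entry = nullptr;
  for (auto& prop : *model.mutable_metadata_props()) {
    if (prop.key() == "CACHE_KEY") {
      entry = &prop;
      break;
    }
  }
  if (entry == nullptr) {
    entry = model.add_metadata_props();
    entry->set_key("CACHE_KEY");
  }
  entry->set_value(cache_key);

  std::ofstream out(model_path, std::ios::binary | std::ios::trunc);
  model.SerializeToOstream(&out);
}

With the change in coreml_execution_provider.cc, a model tagged this way is cached under a folder named roughly <ModelCachePath>/<cache_key>_CoreML_<model_hash>_<metadef_id>, so changing the key yields a fresh cache entry for the same graph.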