-
Notifications
You must be signed in to change notification settings - Fork 18
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #475 from wildmeshing/dzint/445-add-cache-class
Add `Cache` class
- Loading branch information
Showing
9 changed files
with
482 additions
and
3 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,7 @@ | ||
|
||
set(SRC_FILES | ||
Cache.cpp | ||
Cache.hpp | ||
HDF5Reader.hpp | ||
HDF5Reader.cpp | ||
HDF5Writer.hpp | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,231 @@ | ||
#include "Cache.hpp" | ||
|
||
#include <fmt/format.h> | ||
#include <chrono> | ||
#include <exception> | ||
#include <fstream> | ||
#include <iostream> | ||
#include <sstream> | ||
#include <wmtk/io/HDF5Writer.hpp> | ||
#include <wmtk/io/MeshReader.hpp> | ||
#include <wmtk/utils/Logger.hpp> | ||
|
||
#include <nlohmann/json.hpp> | ||
|
||
#include <filesystem> | ||
|
||
namespace fs = std::filesystem; | ||
|
||
long long nanoseconds_timestamp() | ||
{ | ||
return std::chrono::duration_cast<std::chrono::nanoseconds>( | ||
std::chrono::system_clock::now().time_since_epoch()) | ||
.count(); | ||
} | ||
|
||
std::string number_to_hex(long long l) | ||
{ | ||
return fmt::format("{0:x}", l); | ||
} | ||
|
||
namespace wmtk::io { | ||
|
||
std::filesystem::path Cache::create_unique_directory( | ||
const std::string& prefix, | ||
const std::filesystem::path& location, | ||
size_t max_tries) | ||
{ | ||
const fs::path tmp = location.empty() ? std::filesystem::temp_directory_path() : location; | ||
|
||
const std::string timestamp = number_to_hex(nanoseconds_timestamp()); | ||
|
||
fs::path unique_dir; | ||
for (size_t i = 0; i < max_tries; ++i) { | ||
unique_dir = tmp / (prefix + "_" + timestamp + "_" + number_to_hex(i)); | ||
|
||
if (std::filesystem::create_directory(unique_dir)) { | ||
return unique_dir; | ||
} | ||
} | ||
|
||
throw std::runtime_error("Could not generate a unique directory."); | ||
} | ||
|
||
Cache::Cache(const std::string& prefix, const std::filesystem::path location) | ||
: m_cache_dir(location) | ||
{ | ||
m_cache_dir = create_unique_directory(prefix, location); | ||
} | ||
|
||
Cache::~Cache() | ||
{ | ||
const size_t max_tries = 1000; | ||
for (size_t i = 0; fs::exists(m_cache_dir) && i < max_tries; ++i) { | ||
fs::remove_all(m_cache_dir); | ||
} | ||
|
||
if (fs::exists(m_cache_dir)) { | ||
wmtk::logger().warn("Could not remove cache folder {}", fs::absolute(m_cache_dir)); | ||
} | ||
} | ||
|
||
const std::filesystem::path& Cache::create_unique_file( | ||
const std::string& filename, | ||
const std::string& extension, | ||
size_t max_tries) | ||
{ | ||
const std::string timestamp = number_to_hex(nanoseconds_timestamp()); | ||
|
||
for (size_t i = 0; i < max_tries; ++i) { | ||
const fs::path p = | ||
m_cache_dir / (filename + "_" + timestamp + "_" + number_to_hex(i) + extension); | ||
|
||
if (fs::exists(p)) { | ||
continue; | ||
} | ||
|
||
// try to touch the file | ||
std::ofstream ofs(p); | ||
if (ofs.is_open()) { | ||
m_file_paths[filename] = p; | ||
ofs.close(); | ||
return m_file_paths[filename]; | ||
} | ||
ofs.close(); | ||
} | ||
|
||
throw std::runtime_error("Could not generate a unique file."); | ||
} | ||
|
||
const std::filesystem::path& Cache::get_file_path(const std::string& filename) | ||
{ | ||
const auto it = m_file_paths.find(filename); | ||
|
||
if (it == m_file_paths.end()) { | ||
// filename does not exist yet --> create it | ||
return create_unique_file(filename, ""); | ||
} else { | ||
return it->second; | ||
} | ||
} | ||
|
||
std::filesystem::path Cache::get_file_path(const std::string& filename) const | ||
{ | ||
const auto it = m_file_paths.find(filename); | ||
|
||
if (it == m_file_paths.end()) { | ||
// filename does not exist yet --> create it | ||
throw std::runtime_error("File with name '" + filename + "' does not exist in cache"); | ||
} else { | ||
return it->second; | ||
} | ||
} | ||
|
||
std::filesystem::path Cache::get_cache_path() const | ||
{ | ||
return m_cache_dir; | ||
} | ||
|
||
std::shared_ptr<Mesh> Cache::read_mesh(const std::string& name) const | ||
{ | ||
const fs::path p = get_file_path(name); | ||
return wmtk::read_mesh(p); | ||
} | ||
|
||
void Cache::write_mesh(Mesh& m, const std::string& name) | ||
{ | ||
const auto it = m_file_paths.find(name); | ||
|
||
fs::path p; | ||
|
||
if (it == m_file_paths.end()) { | ||
// file does not exist yet --> create it | ||
p = create_unique_file(name, ".hdf5"); | ||
m_file_paths[name] = p; | ||
} else { | ||
p = it->second; | ||
} | ||
|
||
HDF5Writer writer(p); | ||
m.serialize(writer); | ||
} | ||
|
||
bool Cache::export_cache(const std::filesystem::path& export_location) | ||
{ | ||
if (fs::exists(export_location)) { | ||
return false; | ||
} | ||
|
||
fs::path cache_content_path; | ||
|
||
// create a json with all cached names | ||
{ | ||
nlohmann::json cache_content; | ||
for (const auto& [first, second] : m_file_paths) { | ||
cache_content[first] = fs::relative(second, m_cache_dir).string(); | ||
} | ||
|
||
cache_content_path = create_unique_file(m_cache_content_name, ".json"); | ||
std::ofstream o(cache_content_path); | ||
o << std::setw(4) << cache_content << std::endl; | ||
o.close(); | ||
} | ||
|
||
// copy folder to export location | ||
fs::copy(m_cache_dir, export_location, fs::copy_options::recursive); | ||
|
||
// delete json | ||
fs::remove(cache_content_path); | ||
|
||
return true; | ||
} | ||
|
||
bool Cache::import_cache(const std::filesystem::path& import_location) | ||
{ | ||
if (!fs::exists(import_location)) { | ||
return false; | ||
} | ||
if (!m_file_paths.empty()) { | ||
return false; | ||
} | ||
|
||
// remove current directory | ||
fs::remove_all(m_cache_dir); | ||
// copy import | ||
fs::copy(import_location, m_cache_dir, fs::copy_options::recursive); | ||
|
||
// find json | ||
fs::path cache_content_path; | ||
for (const auto& f : fs::directory_iterator(m_cache_dir)) { | ||
const fs::path p = f.path(); | ||
if (p.stem().string().rfind(m_cache_content_name, 0) == 0) { | ||
cache_content_path = p; | ||
break; | ||
} | ||
} | ||
|
||
if (cache_content_path.empty()) { | ||
return false; | ||
} | ||
|
||
// read json | ||
{ | ||
std::ifstream i(cache_content_path); | ||
const nlohmann::json cache_content = nlohmann::json::parse(i); | ||
|
||
std::map<std::string, std::string> map_paths = | ||
cache_content.get<std::map<std::string, std::string>>(); | ||
|
||
// make file paths absolute | ||
for (auto& [first, second] : map_paths) { | ||
m_file_paths[first] = m_cache_dir / second; | ||
} | ||
} | ||
|
||
// delete json | ||
fs::remove(cache_content_path); | ||
|
||
return true; | ||
} | ||
|
||
} // namespace wmtk::io |
Oops, something went wrong.