LRUCache.h
#include <iostream>
#include <iterator>
#include <list>
#include <mutex>
#include <stdexcept>
#include <string>
#include <thread>
#include <unordered_map>

template <typename TKey, typename TValue> class LRUCache {
private:
    // A single cache entry; the key is stored alongside the value so that
    // eviction can also remove the matching look-up map entry.
    class _LRUEntry {
    public:
        _LRUEntry(const TKey &key, const TValue &value) : Key(key), Value(value) { }
        TKey Key;
        TValue Value;
    };

    // Look-up map payload: an iterator into the LRU list for O(1) access.
    class _LookUpEntry {
    public:
        typename std::list<_LRUEntry>::iterator LRUEntryRef;
    };

    // Cache store cum LRU tracker (front = most recently used)
    std::list<_LRUEntry> m_lruList;
    // Key look-up map
    std::unordered_map<TKey, _LookUpEntry> m_lookUpMap;

    float m_compactFactor;
    int m_capacity;
    bool m_threadSafe;

    static constexpr float DEFAULT_COMPACT_FACTOR = 0.4f;
    static constexpr int DEFAULT_CAPACITY = 50;
    static constexpr const char *LOG_TAG = "LRUCache";

    // For conditional thread-safety support (RAII way): acts as a no-op mutex
    // when thread safety is disabled, so std::lock_guard works either way.
    class _ConditionalMutex {
    public:
        _ConditionalMutex(bool enable) : m_enable(enable) { }
        void lock() { if (m_enable) m_lock.lock(); }
        void unlock() noexcept { if (m_enable) m_lock.unlock(); }
        bool try_lock() { return m_enable ? m_lock.try_lock() : true; }
    private:
        std::mutex m_lock;
        bool m_enable;
    };
    _ConditionalMutex m_cacheLock;
public:
    LRUCache(int capacity = DEFAULT_CAPACITY,
             float compactionFactor = DEFAULT_COMPACT_FACTOR,
             bool threadSafe = true) : m_cacheLock(threadSafe) {
        m_capacity = capacity;
        m_compactFactor = compactionFactor;
        m_threadSafe = threadSafe;
    }

    // Returns true if the key is present, without touching LRU order.
    bool Exists(const TKey &key) {
        std::lock_guard<_ConditionalMutex> guard(m_cacheLock);
        return m_lookUpMap.find(key) != m_lookUpMap.end();
    }

    // Returns a copy of the value; throws std::invalid_argument on a miss.
    TValue Get(const TKey &key) {
        std::lock_guard<_ConditionalMutex> guard(m_cacheLock);
        auto mapEntry = m_lookUpMap.find(key);
        if (mapEntry != m_lookUpMap.end()) {
            auto lruEntry = (mapEntry->second).LRUEntryRef;
            // move the item to front
            if (lruEntry != m_lruList.begin()) {
                m_lruList.splice(m_lruList.begin(), m_lruList, lruEntry, std::next(lruEntry));
            }
            // return the copy
            return lruEntry->Value;
        } else {
            throw std::invalid_argument("key");
        }
    }

    // Non-throwing overload: copies the value into outValue, returns true on a hit.
    bool Get(const TKey &key, TValue &outValue) {
        std::lock_guard<_ConditionalMutex> guard(m_cacheLock);
        auto mapEntry = m_lookUpMap.find(key);
        if (mapEntry != m_lookUpMap.end()) {
            auto lruEntry = (mapEntry->second).LRUEntryRef;
            // move the item to front
            if (lruEntry != m_lruList.begin()) {
                m_lruList.splice(m_lruList.begin(), m_lruList, lruEntry, std::next(lruEntry));
            }
            // copy the value
            outValue = lruEntry->Value;
            return true;
        } else {
            return false;
        }
    }

    // Inserts a new entry or updates an existing one, then compacts if at capacity.
    void Put(const TKey &key, const TValue &value) {
        std::lock_guard<_ConditionalMutex> guard(m_cacheLock);
        auto mapEntry = m_lookUpMap.find(key);
        if (mapEntry != m_lookUpMap.end()) {
            auto lruEntry = (mapEntry->second).LRUEntryRef;
            // move the item to front
            if (lruEntry != m_lruList.begin()) {
                m_lruList.splice(m_lruList.begin(), m_lruList, lruEntry, std::next(lruEntry));
            }
            // copy the new value
            lruEntry->Value = value;
        } else {
            m_lruList.emplace_front(key, value);
            m_lookUpMap.emplace(key, _LookUpEntry{m_lruList.begin()});
        }
        ensureCompaction();
    }
private:
    // Evicts the least recently used entries once the cache reaches capacity,
    // trimming it down to capacity * (1 - compactFactor) entries.
    void ensureCompaction() {
        // Note: std::list::size() is O(1) since C++11.
        int cacheOrigSize = static_cast<int>(m_lruList.size());
        if (cacheOrigSize < m_capacity) {
            return;
        }
        int cutPosition = static_cast<int>(m_capacity * (1 - m_compactFactor));
        Log("Evicting Cache, at capacity:" + std::to_string(cacheOrigSize) + "; Cut at:" +
            std::to_string(cutPosition));
        // Remove entries from the look-up map
        auto lruIter = m_lruList.begin();
        for (std::advance(lruIter, cutPosition);
             lruIter != m_lruList.end();
             lruIter++) {
            m_lookUpMap.erase(lruIter->Key);
        }
        // Trim the LRU list
        {
            auto lruIter = m_lruList.begin();
            std::advance(lruIter, cutPosition);
            m_lruList.erase(lruIter, m_lruList.end());
        }
        Log("Cache trimmed to:" + std::to_string(m_lruList.size()));
    }

    void Log(const std::string &msg) {
        // TODO - replace with a proper logging call.
        std::cout << LOG_TAG << ": " << msg << std::endl;
    }
};
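
The sketch below is not part of LRUCache.h; it is a minimal usage example written as a separate translation unit that includes the header. The capacity of 4, the 0.4 compaction factor, and the int-to-string mapping are illustrative choices only.

#include "LRUCache.h"
#include <cassert>
#include <string>

int main() {
    // Illustrative parameters: capacity 4, compact down to 40% headroom,
    // thread safety disabled for single-threaded use.
    LRUCache<int, std::string> cache(4, 0.4f, /*threadSafe=*/false);

    cache.Put(1, "one");
    cache.Put(2, "two");
    cache.Put(3, "three");

    // Hits refresh recency and return a copy of the stored value.
    assert(cache.Exists(2));
    assert(cache.Get(1) == "one");

    // The non-throwing overload reports misses via its return value.
    std::string value;
    if (!cache.Get(42, value)) {
        // miss: key 42 was never inserted
    }

    // Once the cache reaches capacity, ensureCompaction() trims it to
    // capacity * (1 - compactFactor) entries, dropping the least recently
    // used items from the tail of the internal list.
    cache.Put(4, "four");
    cache.Put(5, "five");
    return 0;
}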