Commit: Add TokenSniffer API wrapper with a persistent cache
Showing 9 changed files with 709 additions and 2 deletions.
@@ -1,6 +1,6 @@
 # Current

-- TODO
+- Add: TokenSniffer API wrapper with a persistent cache

 # 0.26
Submodule terms-of-service updated 4 files:
  +2 −31  README.md
  +0 −12  scripts/deploy-ethereum.sh
  +0 −84  scripts/update-ethereum.py
  +4 −2   terms_of_service/acceptance_message.py
@@ -0,0 +1,14 @@
Token analysis API
------------------

This is Python documentation for high-level token analysis API integrations.

Functionality includes:

- Checking if an ERC-20 token is a scam, honeypot or similar

.. autosummary::
   :toctree: _autosummary_token_analysis
   :recursive:

   eth_defi.token_analysis.tokensniffer
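
A minimal usage sketch of the new API, assembled from the names that appear in the commit's test file (CachedTokenSniffer, is_tradeable_token, fetch_token_info); the cache file location and environment variable handling here are illustrative only:

import os
from pathlib import Path

from eth_defi.token_analysis.tokensniffer import CachedTokenSniffer, is_tradeable_token

# Persistent cache file, so repeated queries for the same token do not consume API credits
db_file = Path("~/.cache/tokensniffer.sqlite").expanduser()

sniffer = CachedTokenSniffer(
    db_file,
    os.environ["TOKENSNIFFER_API_KEY"],
)

# Chain id 1 = Ethereum mainnet; token address taken from the test in this commit
data = sniffer.fetch_token_info(1, "0x873259322be8e50d80a4b868d186cc5ab148543a")
if not is_tradeable_token(data):
    print("Token looks like a scam or honeypot, skipping")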
@@ -0,0 +1,5 @@
"""Different token analysis backends.

- Scam detectors, honeypot detectors, etc.
"""
@@ -0,0 +1,3 @@
(new file containing only blank lines)
@@ -0,0 +1,88 @@
"""Key-value cache based on SQLite."""

import sqlite3
from pathlib import Path


class PersistentKeyValueStore(dict):
    """A simple key-value cache on top of sqlite3.

    Designed to cache JSON blobs from integrated API services like TokenSniffer.

    Based on https://stackoverflow.com/questions/47237807/use-sqlite-as-a-keyvalue-store
    """

    def __init__(self, filename: Path, autocommit=True):
        super().__init__()
        self.autocommit = autocommit
        assert isinstance(filename, Path), f"Expected pathlib.Path, got {filename}"
        self.filename = filename
        try:
            self.conn = sqlite3.connect(filename)
        except Exception as e:
            raise RuntimeError(f"Sqlite3 connect failed: {filename}") from e
        # A single table acts as the key-value namespace
        self.conn.execute("CREATE TABLE IF NOT EXISTS kv (key text unique, value text)")

    def close(self):
        """Flush pending writes and close the underlying connection."""
        self.conn.commit()
        self.conn.close()

    def __len__(self):
        rows = self.conn.execute('SELECT COUNT(*) FROM kv').fetchone()[0]
        return rows if rows is not None else 0

    def iterkeys(self):
        c = self.conn.cursor()
        for row in c.execute('SELECT key FROM kv'):
            yield row[0]

    def itervalues(self):
        c = self.conn.cursor()
        for row in c.execute('SELECT value FROM kv'):
            yield row[0]

    def iteritems(self):
        c = self.conn.cursor()
        for row in c.execute('SELECT key, value FROM kv'):
            yield row[0], row[1]

    def keys(self):
        return list(self.iterkeys())

    def values(self):
        return list(self.itervalues())

    def items(self):
        return list(self.iteritems())

    def __contains__(self, key):
        return self.conn.execute('SELECT 1 FROM kv WHERE key = ?', (key,)).fetchone() is not None

    def __getitem__(self, key):
        assert type(key) == str, f"Only string keys allowed, got {key}"
        item = self.conn.execute('SELECT value FROM kv WHERE key = ?', (key,)).fetchone()
        if item is None:
            raise KeyError(key)
        return item[0]

    def __setitem__(self, key, value):
        assert type(key) == str, f"Only string keys allowed, got {key}"
        assert type(value) == str, f"Only string values allowed, got {value}"
        self.conn.execute('REPLACE INTO kv (key, value) VALUES (?, ?)', (key, value))
        if self.autocommit:
            self.conn.commit()

    def __delitem__(self, key):
        if key not in self:
            raise KeyError(key)
        self.conn.execute('DELETE FROM kv WHERE key = ?', (key,))
        # Keep delete semantics consistent with __setitem__
        if self.autocommit:
            self.conn.commit()

    def __iter__(self):
        return self.iterkeys()

    def get(self, key, default=None):
        # Mirror dict.get(): fall back to the caller-provided default, not None
        if key in self:
            return self[key]
        return default
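
A quick standalone sketch of the cache in use (the module path eth_defi.token_analysis.sqlite_cache is assumed from the package layout above; any string key/value pair works, though the intended payloads are JSON blobs):

from pathlib import Path

from eth_defi.token_analysis.sqlite_cache import PersistentKeyValueStore  # assumed module path

cache = PersistentKeyValueStore(Path("/tmp/token-cache.sqlite"))
cache["0x873259322be8e50d80a4b868d186cc5ab148543a"] = '{"score": 0}'  # keys and values must be strings
assert "0x873259322be8e50d80a4b868d186cc5ab148543a" in cache
print(len(cache), cache.get("missing-key", "{}"))
cache.close()  # commits and closes the SQLite connection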
Large diffs are not rendered by default; the eth_defi/token_analysis/tokensniffer.py diff is not shown here.
@@ -0,0 +1,32 @@
"""TokenSniffer integration tests."""
import os

import pytest

from eth_defi.token_analysis.tokensniffer import CachedTokenSniffer, is_tradeable_token

TOKENSNIFFER_API_KEY = os.environ.get("TOKENSNIFFER_API_KEY")
pytestmark = pytest.mark.skipif(not TOKENSNIFFER_API_KEY, reason="This test needs TOKENSNIFFER_API_KEY set")


def test_token_sniffer_cached(tmp_path):
    """Check TokenSniffer API works."""

    db_file = tmp_path / "test.sqlite"

    sniffer = CachedTokenSniffer(
        db_file,
        TOKENSNIFFER_API_KEY,
    )

    # Ponzio the Cat
    # https://tradingstrategy.ai/trading-view/ethereum/tokens/0x873259322be8e50d80a4b868d186cc5ab148543a
    # The first call hits the TokenSniffer API
    data = sniffer.fetch_token_info(1, "0x873259322be8e50d80a4b868d186cc5ab148543a")
    assert data["cached"] is False

    # The second call for the same token is served from the SQLite cache
    data = sniffer.fetch_token_info(1, "0x873259322be8e50d80a4b868d186cc5ab148543a")
    assert data["cached"] is True

    # A known scam token must not be flagged as tradeable
    assert not is_tradeable_token(data)

    info = sniffer.get_diagnostics()
    assert type(info) == str