Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add TokenSniffer integration #226

Merged
merged 4 commits into from
Sep 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Current

- TODO
- Add: TokenSniffer API wrapper with a persistent cache

# 0.26

Expand Down
1 change: 1 addition & 0 deletions docs/source/api/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ API documentation
chainlink/index
foundry/index
etherscan/index
token_analysis/index
event_reader/index
price_oracle/index
data_research/index
Expand Down
14 changes: 14 additions & 0 deletions docs/source/api/token_analysis/index.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
Token analysis API
------------------

This is Python documentation for high-level token analysis API integrations.

Functionality includes:

- Checking if an ERC-20 token is a scam, honeypot or similar

.. autosummary::
:toctree: _autosummary_token_analysis
:recursive:

eth_defi.token_analysis.tokensniffer
5 changes: 5 additions & 0 deletions eth_defi/token_analysis/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
"""Different token analyis backends.

- Scam detectors, honey pot detectors, etc.

"""
3 changes: 3 additions & 0 deletions eth_defi/token_analysis/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@



88 changes: 88 additions & 0 deletions eth_defi/token_analysis/sqlite_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
"""Key value cache based on SQLite

"""

import sqlite3
from pathlib import Path


class PersistentKeyValueStore(dict):
    """A simple persistent string key-value cache backed by SQLite.

    Designed to cache JSON blobs from integrated API services like TokenSniffer.

    Both keys and values must be :py:class:`str`. Entries live in a single
    ``kv`` table with a unique ``key`` column; ``REPLACE`` gives upsert
    semantics on write.

    Based on https://stackoverflow.com/questions/47237807/use-sqlite-as-a-keyvalue-store
    """

    def __init__(self, filename: Path, autocommit: bool = True):
        """Open (or create) the SQLite database file.

        :param filename: Path to the SQLite database file.
        :param autocommit: When ``True`` (default), every mutation is committed immediately.
        :raises RuntimeError: If the SQLite connection cannot be opened.
        """
        super().__init__()
        self.autocommit = autocommit
        assert isinstance(filename, Path), f"Expected pathlib.Path, got {filename}"
        self.filename = filename
        try:
            self.conn = sqlite3.connect(filename)
        except Exception as e:
            # Include the offending path so connection failures are debuggable
            raise RuntimeError(f"Sqlite3 connect failed: {filename}") from e
        self.conn.execute("CREATE TABLE IF NOT EXISTS kv (key text unique, value text)")

    def close(self):
        """Flush any pending writes and close the underlying connection."""
        self.conn.commit()
        self.conn.close()

    def __len__(self):
        rows = self.conn.execute('SELECT COUNT(*) FROM kv').fetchone()[0]
        return rows if rows is not None else 0

    def iterkeys(self):
        """Yield all keys, streaming from the database."""
        c = self.conn.cursor()
        for row in c.execute('SELECT key FROM kv'):
            yield row[0]

    def itervalues(self):
        """Yield all values, streaming from the database."""
        c = self.conn.cursor()
        for row in c.execute('SELECT value FROM kv'):
            yield row[0]

    def iteritems(self):
        """Yield all ``(key, value)`` pairs, streaming from the database."""
        c = self.conn.cursor()
        for row in c.execute('SELECT key, value FROM kv'):
            yield row[0], row[1]

    def keys(self):
        return list(self.iterkeys())

    def values(self):
        return list(self.itervalues())

    def items(self):
        return list(self.iteritems())

    def __contains__(self, key):
        return self.conn.execute('SELECT 1 FROM kv WHERE key = ?', (key,)).fetchone() is not None

    def __getitem__(self, key):
        """:raises KeyError: If *key* is not present."""
        assert isinstance(key, str), f"Only string keys allowed, got {key}"
        item = self.conn.execute('SELECT value FROM kv WHERE key = ?', (key,)).fetchone()
        if item is None:
            raise KeyError(key)
        return item[0]

    def __setitem__(self, key, value):
        """Insert or overwrite *key* with *value* (upsert via ``REPLACE``)."""
        assert isinstance(key, str), f"Only string keys allowed, got {key}"
        assert isinstance(value, str), f"Only string values allowed, got {value}"
        self.conn.execute('REPLACE INTO kv (key, value) VALUES (?,?)', (key, value))
        if self.autocommit:
            self.conn.commit()

    def __delitem__(self, key):
        """:raises KeyError: If *key* is not present."""
        if key not in self:
            raise KeyError(key)
        self.conn.execute('DELETE FROM kv WHERE key = ?', (key,))
        # Match __setitem__ semantics: deletions persist immediately under autocommit.
        # (Previously deletions were only flushed on close().)
        if self.autocommit:
            self.conn.commit()

    def __iter__(self):
        return self.iterkeys()

    def get(self, key, default=None):
        """Return the value for *key*, or *default* if missing.

        Fixes the earlier bug where *default* was ignored and ``None``
        was always returned for missing keys.
        """
        try:
            return self[key]
        except KeyError:
            return default
564 changes: 564 additions & 0 deletions eth_defi/token_analysis/tokensniffer.py

Large diffs are not rendered by default.

32 changes: 32 additions & 0 deletions tests/token_analysis/test_token_sniffer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""TokenSniffer integration tests."""
import os

import pytest

from eth_defi.token_analysis.tokensniffer import CachedTokenSniffer, is_tradeable_token

TOKENSNIFFER_API_KEY = os.environ.get("TOKENSNIFFER_API_KEY")
pytestmark = pytest.mark.skipif(not TOKENSNIFFER_API_KEY, reason="This test needs TOKENSNIFFER_API_KEY set")


def test_token_sniffer_cached(tmp_path):
    """Fetch a known scam token twice and verify the second hit is cached."""

    cache_path = tmp_path / "test.sqlite"

    sniffer = CachedTokenSniffer(
        cache_path,
        TOKENSNIFFER_API_KEY,
    )

    # Ponzio the Cat
    # https://tradingstrategy.ai/trading-view/ethereum/tokens/0x873259322be8e50d80a4b868d186cc5ab148543a
    chain_id = 1
    ponzio = "0x873259322be8e50d80a4b868d186cc5ab148543a"

    # First fetch hits the live API
    first = sniffer.fetch_token_info(chain_id, ponzio)
    assert first["cached"] is False

    # Second fetch must be served from the SQLite cache
    second = sniffer.fetch_token_info(chain_id, ponzio)
    assert second["cached"] is True

    # Ponzio scores as a non-tradeable scam token
    assert not is_tradeable_token(second)

    diagnostics = sniffer.get_diagnostics()
    assert type(diagnostics) == str
Loading