diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 856961a..0b85cf8 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -16,13 +16,14 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
submodules: true
- - name: Set up Python 3.7
- uses: actions/setup-python@v1
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
with:
- python-version: 3.7
+        python-version: "3.12"
+ cache: 'pip'
- name: Build and publish container
run: |
export BRANCH=${GITHUB_REF##*/}
diff --git a/.github/workflows/test_metrics.yml b/.github/workflows/test_metrics.yml
new file mode 100644
index 0000000..d005486
--- /dev/null
+++ b/.github/workflows/test_metrics.yml
@@ -0,0 +1,30 @@
+name: Test metrics
+on: [push, pull_request]
+
+jobs:
+ test_proxy:
+ defaults:
+ run:
+ working-directory: metrics
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+        python-version: ["3.12"]
+ env:
+ ETH_ENDPOINT: ${{ secrets.ETH_ENDPOINT }}
+ PYTHONPATH: ${{ github.workspace }}/metrics
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ cache: 'pip'
+ - name: Install python dependencies
+ run: pip install -r requirements.txt && pip install -r requirements-dev.txt
+ - name: Lint with ruff
+ run: ruff check src/
+ - name: Run metrics tests
+ run: pytest tests/
diff --git a/.github/workflows/test.yml b/.github/workflows/test_proxy.yml
similarity index 60%
rename from .github/workflows/test.yml
rename to .github/workflows/test_proxy.yml
index 7abb402..4fcefeb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test_proxy.yml
@@ -1,4 +1,4 @@
-name: Test
+name: Test proxy
on: [push, pull_request]
jobs:
@@ -6,30 +6,22 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: [3.7]
+      python-version: ["3.12"]
env:
ETH_PRIVATE_KEY: ${{ secrets.ETH_PRIVATE_KEY }}
ENDPOINT: ${{ secrets.ENDPOINT }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
MANAGER_TAG: "1.9.3-beta.0"
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
submodules: true
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
+ cache: 'pip'
- name: Install python dependencies
run: bash ./scripts/install_python_dependencies_dev.sh
- name: Lint with flake8
run: flake8 .
- # - name: Deploy manager contracts
- # run: |
- # bash ./helper-scripts/install_python_dependencies_dev.sh
- # - name: Run tests
- # run: |
- # bash ./scripts/run_manager_tests.sh
- # - name: Codecov
- # run: |
- # codecov -t $CODECOV_TOKEN
diff --git a/.gitignore b/.gitignore
index 93e39be..54612d9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,4 +13,9 @@ metrics.json
conf/upstreams/*.conf
conf/chains/*.conf
-portal-metrics.log
\ No newline at end of file
+portal-metrics.log
+
+mysql_data/
+tools/
+.ruff_cache/
+.DS_Store
\ No newline at end of file
diff --git a/README.md b/README.md
index 8ec27e4..a5a54a1 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,18 @@ JSON-RPC endpoints for SKALE chains. It is based on NGINX.
3. Export all required environment variables (see below)
4. Run `scripts/run_proxy.sh`
+#### Pre-commit hook
+
+```bash
+ruff check --config metrics/pyproject.toml metrics/src/
+```
+
+#### Format code
+
+```bash
+ruff format src/
+```
+
#### Required environment variables
- `ETH_ENDPOINT` - endpoint of the Ethereum network where `skale-manager` contracts are deployed
diff --git a/docker-compose.yml b/docker-compose.yml
index 2a9dc05..fd67abe 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,4 +1,3 @@
-version: '3'
services:
skale-proxy:
environment:
@@ -20,8 +19,9 @@ services:
max-file: "5"
max-size: "200m"
restart: unless-stopped
+
nginx:
- image: nginx:1.20.2
+ image: nginx:1.24.0
container_name: proxy_nginx
network_mode: host
volumes:
@@ -34,10 +34,15 @@ services:
options:
max-file: "200"
max-size: "500m"
+
metrics:
environment:
ETH_ENDPOINT: ${ETH_ENDPOINT}
NETWORK_NAME: ${NETWORK_NAME}
+ MYSQL_USER: ${MYSQL_USER}
+ MYSQL_PASSWORD: ${MYSQL_PASSWORD}
+ MYSQL_HOST: db
+ MYSQL_DATABASE: metrics
image: metrics:latest
container_name: metrics
build:
@@ -50,4 +55,32 @@ services:
options:
max-file: "5"
max-size: "50m"
- restart: unless-stopped
\ No newline at end of file
+ restart: unless-stopped
+ depends_on:
+ - db
+ networks:
+ - proxy
+
+ db:
+ image: mysql:8.0
+ container_name: db
+ restart: always
+ environment:
+ - MYSQL_DATABASE=metrics
+ - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
+ - MYSQL_USER=${MYSQL_USER}
+ - MYSQL_PASSWORD=${MYSQL_PASSWORD}
+ volumes:
+ - mysql_data:/var/lib/mysql
+ - ./mysql-init:/docker-entrypoint-initdb.d
+ networks:
+ - proxy
+ ports:
+ - "3306:3306"
+
+volumes:
+ mysql_data:
+
+networks:
+ proxy:
+ driver: bridge
diff --git a/metrics/Dockerfile b/metrics/Dockerfile
index f71693b..67eae95 100644
--- a/metrics/Dockerfile
+++ b/metrics/Dockerfile
@@ -1,6 +1,11 @@
FROM python:3.12.3-bookworm
-RUN apt-get update
+RUN apt-get update && apt-get install -y \
+ default-mysql-client \
+ build-essential \
+ libssl-dev \
+ libffi-dev \
+ python3-dev
RUN mkdir /usr/src/metrics /data
WORKDIR /usr/src/metrics
@@ -13,4 +18,4 @@ COPY . .
ENV PYTHONPATH="/usr/src/metrics"
ENV COLUMNS=80
-CMD python /usr/src/metrics/src/main.py
+CMD ["python", "/usr/src/metrics/src/main.py"]
\ No newline at end of file
diff --git a/metrics/pyproject.toml b/metrics/pyproject.toml
new file mode 100644
index 0000000..c0a8f1e
--- /dev/null
+++ b/metrics/pyproject.toml
@@ -0,0 +1,12 @@
+[tool.ruff]
+line-length = 100
+indent-width = 4
+lint.select = ["Q"]
+
+target-version = "py312"
+
+[tool.ruff.format]
+quote-style = "single"
+
+[tool.ruff.lint.flake8-quotes]
+inline-quotes = 'single'
diff --git a/metrics/requirements-dev.txt b/metrics/requirements-dev.txt
new file mode 100644
index 0000000..67a0b14
--- /dev/null
+++ b/metrics/requirements-dev.txt
@@ -0,0 +1,5 @@
+ruff==0.6.4
+pytest==8.3.3
+Faker==28.4.1
+eth-typing==4.1.0
+pytest-aiohttp==1.0.5
\ No newline at end of file
diff --git a/metrics/requirements.txt b/metrics/requirements.txt
index 1174916..9451feb 100644
--- a/metrics/requirements.txt
+++ b/metrics/requirements.txt
@@ -1,2 +1,6 @@
-web3==6.19.0
-requests==2.32.3
\ No newline at end of file
+web3==6.15.1
+requests==2.32.3
+aiohttp==3.9.3
+peewee==3.17.1
+PyMySQL==1.1.0
+cryptography==42.0.5
\ No newline at end of file
diff --git a/metrics/src/collector.py b/metrics/src/collector.py
index 65f8691..187a807 100644
--- a/metrics/src/collector.py
+++ b/metrics/src/collector.py
@@ -21,93 +21,160 @@
import logging
import asyncio
import aiohttp
-from datetime import datetime
-from typing import Any, Dict, List, Tuple
+from datetime import datetime, timedelta
+from typing import Tuple, Optional, Dict
+from aiohttp import ClientError, ClientSession
+
+from src.explorer import get_address_counters_url, get_chain_stats
+from src.gas import calc_avg_gas_price
+from src.db import update_transaction_counts, get_address_transaction_counts
+from src.utils import transform_to_dict, decimal_default
+from src.config import (
+ METRICS_FILEPATH,
+ API_ERROR_TIMEOUT,
+ API_ERROR_RETRIES,
+ GITHUB_RAW_URL,
+ OFFCHAIN_KEY,
+)
+from src.metrics_types import AddressCounter, AddressCountersMap, MetricsData, ChainMetrics
-import requests
-
-from explorer import get_address_counters_url, get_chain_stats
-from gas import calc_avg_gas_price
-from config import METRICS_FILEPATH
logger = logging.getLogger(__name__)
-def get_metadata_url(network_name: str):
- return f'https://raw.githubusercontent.com/skalenetwork/skale-network/master/metadata/{network_name}/chains.json' # noqa
+def get_metadata_url(network_name: str) -> str:
+ return f'{GITHUB_RAW_URL}/skalenetwork/skale-network/master/metadata/{network_name}/chains.json'
-def download_metadata(network_name: str):
+async def download_metadata(session, network_name: str) -> Dict:
url = get_metadata_url(network_name)
- response = requests.get(url)
- response.raise_for_status()
- return response.json()
+ async with session.get(url) as response:
+        metadata_str = await response.text()
+        return json.loads(metadata_str)
+
+
+def get_empty_address_counter() -> AddressCounter:
+ return {
+ 'gas_usage_count': '0',
+ 'token_transfers_count': '0',
+ 'transactions_count': '0',
+ 'validations_count': '0',
+ 'transactions_last_day': 0,
+ 'transactions_last_7_days': 0,
+ 'transactions_last_30_days': 0,
+ }
-async def get_address_counters(session, network, chain_name, address):
- url = get_address_counters_url(network, chain_name, address)
+async def fetch_address_data(session: ClientSession, url: str) -> AddressCounter:
async with session.get(url) as response:
+ if response.status == 404:
+ data = await response.json()
+ if data.get('message') == 'Not found':
+ logger.warning(f'Address not found at {url}. Returning empty counter.')
+ return get_empty_address_counter()
+ response.raise_for_status()
return await response.json()
-async def get_all_address_counters(network, chain_name, addresses):
- results = {}
- async with aiohttp.ClientSession() as session:
- tasks = []
- for address in addresses:
- tasks.append(get_address_counters(session, network, chain_name, address))
-
- responses = await asyncio.gather(*tasks)
-
- for address, response in zip(addresses, responses):
- results[address] = response
-
- return results
-
-
-async def _fetch_counters_for_app(network_name, chain_name, app_name, app_info):
+async def get_address_counters(
+ session: ClientSession, network: str, chain_name: str, app_name: str, address: str
+) -> AddressCounter:
+ url = get_address_counters_url(network, chain_name, address)
+ for attempt in range(API_ERROR_RETRIES):
+ try:
+ data = await fetch_address_data(session, url)
+
+ today = datetime.now().date()
+ yesterday = today - timedelta(days=1)
+ week_ago = today - timedelta(days=7)
+ month_ago = today - timedelta(days=30)
+
+ transactions_last_day = await get_address_transaction_counts(
+ chain_name, app_name, address, yesterday, yesterday
+ )
+ transactions_last_7_days = await get_address_transaction_counts(
+ chain_name, app_name, address, week_ago, yesterday
+ )
+ transactions_last_30_days = await get_address_transaction_counts(
+ chain_name, app_name, address, month_ago, yesterday
+ )
+
+ data['transactions_last_day'] = transactions_last_day
+ data['transactions_last_7_days'] = transactions_last_7_days
+ data['transactions_last_30_days'] = transactions_last_30_days
+
+ await update_transaction_counts(chain_name, app_name, address, data)
+
+ return data
+ except ClientError as e:
+ if attempt < API_ERROR_RETRIES - 1:
+ logger.warning(f'Attempt {attempt + 1} failed for {url}. Retrying... Error: {e}')
+ await asyncio.sleep(API_ERROR_TIMEOUT)
+ else:
+ logger.error(f'All attempts failed for {url}. Error: {e}')
+ raise
+ raise Exception(f'Failed to fetch data for {url}')
+
+
+async def get_all_address_counters(
+ session, network, chain_name, app_name, addresses
+) -> AddressCountersMap:
+ results = [
+ await get_address_counters(session, network, chain_name, app_name, address)
+ for address in addresses
+ ]
+ return dict(zip(addresses, results))
+
+
+async def fetch_counters_for_app(
+ session, network_name, chain_name, app_name, app_info
+) -> Tuple[str, Optional[AddressCountersMap]]:
logger.info(f'fetching counters for app {app_name}')
if 'contracts' in app_info:
- counters = await get_all_address_counters(network_name, chain_name, app_info['contracts'])
+ counters = await get_all_address_counters(
+ session, network_name, chain_name, app_name, app_info['contracts']
+ )
return app_name, counters
return app_name, None
-async def fetch_counters_for_apps(chain_info, network_name, chain_name):
- tasks = []
- for app_name, app_info in chain_info['apps'].items():
- task = _fetch_counters_for_app(network_name, chain_name, app_name, app_info)
- tasks.append(task)
+async def fetch_counters_for_apps(session, chain_info, network_name, chain_name):
+ tasks = [
+ fetch_counters_for_app(session, network_name, chain_name, app_name, app_info)
+ for app_name, app_info in chain_info['apps'].items()
+ ]
return await asyncio.gather(*tasks)
-def transform_to_dict(apps_counters: List[Tuple[str, Any]] | None) -> Dict[str, Any]:
- if not apps_counters:
- return {}
- results = {}
- for app_name, counters in apps_counters:
- results[app_name] = counters
- return results
-
-
-def collect_metrics(network_name: str):
- metadata = download_metadata(network_name)
- metrics = {}
- for chain_name, chain_info in metadata.items():
- apps_counters = None
- chain_stats = get_chain_stats(network_name, chain_name)
- if 'apps' in chain_info:
- apps_counters = asyncio.run(
- fetch_counters_for_apps(chain_info, network_name, chain_name))
- metrics[chain_name] = {
- 'chain_stats': chain_stats,
- 'apps_counters': transform_to_dict(apps_counters),
+async def collect_metrics(network_name: str) -> MetricsData:
+ async with aiohttp.ClientSession() as session:
+ metadata = await download_metadata(session, network_name)
+ metrics: Dict[str, ChainMetrics] = {}
+
+ for chain_name, chain_info in metadata.items():
+ if chain_name == OFFCHAIN_KEY:
+ continue
+ chain_stats = await get_chain_stats(session, network_name, chain_name)
+ apps_counters = None
+
+ if 'apps' in chain_info:
+ apps_counters = await fetch_counters_for_apps(
+ session, chain_info, network_name, chain_name
+ )
+
+ metrics[chain_name] = {
+ 'chain_stats': chain_stats,
+ 'apps_counters': transform_to_dict(apps_counters),
+ }
+
+ data: MetricsData = {
+ 'metrics': metrics,
+ 'gas': int(calc_avg_gas_price()),
+ 'last_updated': int(datetime.now().timestamp()),
}
- data = {
- 'metrics': metrics,
- 'gas': int(calc_avg_gas_price()),
- 'last_updated': int(datetime.now().timestamp())
- }
- logger.info(f'Saving metrics to {METRICS_FILEPATH}')
- with open(METRICS_FILEPATH, 'w') as f:
- json.dump(data, f, indent=4, sort_keys=True)
+
+ logger.info(f'Saving metrics to {METRICS_FILEPATH}')
+ with open(METRICS_FILEPATH, 'w') as f:
+ json.dump(data, f, indent=4, sort_keys=True, default=decimal_default)
+
+ return data
diff --git a/metrics/src/config.py b/metrics/src/config.py
index 5c73a6e..fc2b07c 100644
--- a/metrics/src/config.py
+++ b/metrics/src/config.py
@@ -28,27 +28,38 @@
'mainnet': 'mainnet.skalenodes.com',
'legacy': 'legacy-proxy.skaleserver.com',
'regression': 'regression-proxy.skalenodes.com',
- 'testnet': 'testnet.skalenodes.com'
+ 'testnet': 'testnet.skalenodes.com',
}
BASE_EXPLORER_URLS = {
'mainnet': 'explorer.mainnet.skalenodes.com',
'legacy': 'legacy-explorer.skalenodes.com',
'regression': 'regression-explorer.skalenodes.com',
- 'testnet': 'explorer.testnet.skalenodes.com'
+ 'testnet': 'explorer.testnet.skalenodes.com',
}
+STATS_API = {'mainnet': 'https://stats.explorer.mainnet.skalenodes.com/v2/stats'}
-STATS_API = {
- 'mainnet': 'https://stats.explorer.mainnet.skalenodes.com/v2/stats',
-}
-
-HTTPS_PREFIX = "https://"
+HTTPS_PREFIX = 'https://'
BLOCK_SAMPLING = 100
-GAS_ESTIMATION_ITERATIONS = 300
+GAS_ESTIMATION_ITERATIONS = 1  # NOTE(review): reduced from 300 -- confirm this is intentional, not a debug leftover
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
PROJECT_PATH = os.path.join(DIR_PATH, os.pardir)
NGINX_WWW_FOLDER = os.path.join(PROJECT_PATH, 'www')
METRICS_FILEPATH = os.path.join(NGINX_WWW_FOLDER, 'metrics.json')
+
+METRICS_CHECK_INTERVAL = 300
+METRICS_ERROR_CHECK_INTERVAL = 30
+API_ERROR_TIMEOUT = 2
+API_ERROR_RETRIES = 3
+
+GITHUB_RAW_URL = 'https://raw.githubusercontent.com'
+OFFCHAIN_KEY = '__offchain'
+
+DB_CONNECTION_RETRIES = 30
+DB_CONNECTION_INTERVAL = 2
+
+TRANSACTION_COUNT_FIELD = 'transactions_count'
+BACKFILL_DB_DAYS = 30
diff --git a/metrics/src/db.py b/metrics/src/db.py
new file mode 100644
index 0000000..ba5c270
--- /dev/null
+++ b/metrics/src/db.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of portal-metrics
+#
+# Copyright (C) 2024 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import logging
+from datetime import date, timedelta
+from typing import List, Dict, Any
+from decimal import Decimal
+
+from peewee import fn, IntegrityError, DoesNotExist
+
+from src.models import db, Address, TransactionCount
+from src.config import TRANSACTION_COUNT_FIELD, BACKFILL_DB_DAYS
+from src.explorer import get_current_total_transactions
+
+logger = logging.getLogger(__name__)
+
+
+async def bootstrap_db(session, apps_data: Dict[str, Dict[str, List[str]]]) -> None:
+ today = date.today()
+ thirty_days_ago = today - timedelta(days=BACKFILL_DB_DAYS)
+ with db.atomic():
+ if Address.select().count() > 0:
+ logger.info('Database is not empty. Skipping bootstrap.')
+ return
+ logger.info('Bootstrapping database with initial data...')
+ for chain_name, chain_data in apps_data.items():
+ for app_name, addresses in chain_data.items():
+ for address in addresses:
+ logger.info(f'Bootstrapping data for {address} on {chain_name}...')
+ addr = Address.create(chain_name=chain_name, address=address, app_name=app_name)
+ total_transactions = await get_current_total_transactions(
+ session, chain_name, address
+ )
+ for day in range(BACKFILL_DB_DAYS):
+ current_date = thirty_days_ago + timedelta(days=day)
+ try:
+ TransactionCount.create(
+ address=addr,
+ date=current_date,
+ total_transactions=total_transactions,
+ daily_transactions=0,
+ )
+ except IntegrityError:
+ logger.warning(
+ f'Record already exists for {address} on {current_date}. Skipping.'
+ )
+ logger.info('Database bootstrap completed successfully.')
+
+
+async def update_transaction_counts(
+ chain_name: str, app_name: str, address: str, contract_data: Dict[str, Any]
+) -> None:
+ today = date.today()
+
+ with db.atomic():
+ addr, _ = Address.get_or_create(chain_name=chain_name, address=address, app_name=app_name)
+
+ total_transactions = int(contract_data.get(TRANSACTION_COUNT_FIELD, 0))
+
+ yesterday_count = (
+ TransactionCount.select(fn.MAX(TransactionCount.total_transactions))
+ .where((TransactionCount.address == addr) & (TransactionCount.date < today))
+ .scalar()
+ )
+
+ if yesterday_count is None:
+ yesterday_count = 0
+
+ daily_transactions = total_transactions - yesterday_count
+
+ TransactionCount.replace(
+ address=addr,
+ date=today,
+ total_transactions=total_transactions,
+ daily_transactions=daily_transactions,
+ ).execute()
+
+ logger.info(
+ f'Updated transaction count for {address} on {today}: '
+ f'total={total_transactions}, daily={daily_transactions}'
+ )
+
+
+async def get_or_create_address(chain_name: str, address: str, app_name: str) -> Address:
+ try:
+ return Address.get(Address.address == address)
+ except DoesNotExist:
+ return Address.create(chain_name=chain_name, address=address, app_name=app_name)
+
+
+async def get_address_transaction_counts(
+ chain_name: str, app_name: str, address: str, start_date: date, end_date: date
+) -> int:
+ with db.atomic():
+ addr = await get_or_create_address(chain_name, address, app_name)
+ result = (
+ TransactionCount.select(fn.SUM(TransactionCount.daily_transactions))
+ .where(
+ (TransactionCount.address == addr)
+ & (TransactionCount.date.between(start_date, end_date))
+ )
+ .scalar()
+ or 0
+ )
+ return int(result) if isinstance(result, Decimal) else result
diff --git a/metrics/src/explorer.py b/metrics/src/explorer.py
index 7b4a0c1..e58013a 100644
--- a/metrics/src/explorer.py
+++ b/metrics/src/explorer.py
@@ -18,10 +18,9 @@
# along with this program. If not, see .
import logging
-import requests
from typing import Any
-from config import BASE_EXPLORER_URLS, HTTPS_PREFIX
+from src.config import BASE_EXPLORER_URLS, HTTPS_PREFIX, NETWORK_NAME
logger = logging.getLogger(__name__)
@@ -31,11 +30,11 @@ def _get_explorer_url(network, chain_name):
return HTTPS_PREFIX + chain_name + '.' + explorer_base_url
-def get_chain_stats(network: str, chain_name: str) -> Any:
+async def get_chain_stats(session, network: str, chain_name: str) -> Any:
try:
explorer_url = _get_explorer_url(network, chain_name)
- response = requests.get(f'{explorer_url}/api/v2/stats')
- return response.json()
+ async with session.get(f'{explorer_url}/api/v2/stats') as response:
+ return await response.json()
except Exception as e:
logger.exception(e)
logger.error(f'Failed to get chain stats: {e}')
@@ -45,3 +44,10 @@ def get_chain_stats(network: str, chain_name: str) -> Any:
def get_address_counters_url(network: str, chain_name: str, address: str) -> str:
explorer_url = _get_explorer_url(network, chain_name)
return f'{explorer_url}/api/v2/addresses/{address}/counters'
+
+
+async def get_current_total_transactions(session, chain_name: str, address: str) -> int:
+ url = get_address_counters_url(NETWORK_NAME, chain_name, address)
+ async with session.get(url) as response:
+ data = await response.json()
+ return int(data.get('transactions_count', 0))
diff --git a/metrics/src/gas.py b/metrics/src/gas.py
index ab3c5b7..b0e971c 100644
--- a/metrics/src/gas.py
+++ b/metrics/src/gas.py
@@ -19,7 +19,7 @@
import logging
from web3 import Web3
-from config import ENDPOINT, GAS_ESTIMATION_ITERATIONS, BLOCK_SAMPLING
+from src.config import ENDPOINT, GAS_ESTIMATION_ITERATIONS, BLOCK_SAMPLING
logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ def calc_avg_gas_price():
block_number = w3.eth.block_number
total_gas_used = 0
- logger.info(f'Getting historic block gas prices...')
+ logger.info('Getting historic block gas prices...')
for index in range(GAS_ESTIMATION_ITERATIONS):
block_number = block_number - BLOCK_SAMPLING * index
if block_number < 0:
diff --git a/metrics/src/logs.py b/metrics/src/logs.py
index 5d023ee..d3d8033 100644
--- a/metrics/src/logs.py
+++ b/metrics/src/logs.py
@@ -35,8 +35,8 @@
def get_file_handler(log_filepath, log_level):
formatter = Formatter(LOG_FORMAT)
f_handler = py_handlers.RotatingFileHandler(
- log_filepath, maxBytes=LOG_FILE_SIZE_BYTES,
- backupCount=LOG_BACKUP_COUNT)
+ log_filepath, maxBytes=LOG_FILE_SIZE_BYTES, backupCount=LOG_BACKUP_COUNT
+ )
f_handler.setFormatter(formatter)
f_handler.setLevel(log_level)
return f_handler
diff --git a/metrics/src/main.py b/metrics/src/main.py
index b6b5326..62bdabb 100644
--- a/metrics/src/main.py
+++ b/metrics/src/main.py
@@ -18,31 +18,115 @@
# along with this program. If not, see .
import sys
+import asyncio
+import aiohttp
import logging
from time import sleep
+from datetime import datetime
-from logs import init_default_logger
-from collector import collect_metrics
-from config import MONITOR_INTERVAL, ERROR_TIMEOUT, NETWORK_NAME, PROXY_ENDPOINTS
+from src.logs import init_default_logger
+from src.collector import collect_metrics, download_metadata
+from src.config import (
+ NETWORK_NAME,
+ PROXY_ENDPOINTS,
+ METRICS_CHECK_INTERVAL,
+ METRICS_ERROR_CHECK_INTERVAL,
+ DB_CONNECTION_RETRIES,
+ DB_CONNECTION_INTERVAL,
+ OFFCHAIN_KEY,
+)
+from src.models import db, Address, TransactionCount
+from src.db import bootstrap_db
logger = logging.getLogger(__name__)
+def run_migrations():
+ logger.info('Running database migrations...')
+ try:
+ with db:
+ db.create_tables([Address, TransactionCount])
+ logger.info('Database migrations completed successfully.')
+ except Exception as e:
+ logger.error(f'Error running migrations: {e}')
+ sys.exit(1)
+
+
def run_metrics_loop():
if NETWORK_NAME not in PROXY_ENDPOINTS:
logger.error(f'Unsupported network: {NETWORK_NAME}')
sys.exit(1)
+
+ logger.info(f'Starting metrics collection loop for network: {NETWORK_NAME}')
+ last_run_date = None
+
while True:
- logger.info('Metrics collector iteration started...')
+ current_date = datetime.now().date()
+
+ if last_run_date is None or current_date > last_run_date:
+ logger.info(f'Daily metrics collection started for {NETWORK_NAME}...')
+ try:
+ with db.connection_context():
+ asyncio.run(collect_metrics(NETWORK_NAME))
+ last_run_date = current_date
+ logger.info(f'Daily metrics collection completed for {NETWORK_NAME}.')
+ logger.info(f'Sleeping for {METRICS_CHECK_INTERVAL} seconds.')
+ sleep(METRICS_CHECK_INTERVAL)
+ except Exception as e:
+ logger.exception(f'Error during metrics collection for {NETWORK_NAME}: {e}')
+ logger.info(f'Sleeping for {METRICS_ERROR_CHECK_INTERVAL} seconds after error.')
+ sleep(METRICS_ERROR_CHECK_INTERVAL)
+ else:
+ logger.info(
+                f'Not time for collection yet. '
+                f'Last run: {last_run_date}, Current date: {current_date}'
+ )
+ logger.info(f'Sleeping for {METRICS_CHECK_INTERVAL} seconds.')
+ sleep(METRICS_CHECK_INTERVAL)
+
+
+def wait_for_db():
+ for _ in range(DB_CONNECTION_RETRIES):
try:
- collect_metrics(NETWORK_NAME)
- logger.info(f'Metrics collector iteration done, sleeping for {MONITOR_INTERVAL}s...')
- sleep(MONITOR_INTERVAL)
+ db.connect()
+ db.close()
+ logger.info('Successfully connected to the database.')
+ return
except Exception as e:
- logger.error(f'Something went wrong: {e}')
- sleep(ERROR_TIMEOUT)
+ logger.exception(e)
+ logger.warning(
+ f'Database connection failed. Retrying in {DB_CONNECTION_INTERVAL} seconds...'
+ )
+ sleep(DB_CONNECTION_INTERVAL)
+
+ logger.error('Failed to connect to the database after multiple attempts.')
+ sys.exit(1)
+
+
+async def bootstrap_database():
+ logger.info('Checking if database needs bootstrapping...')
+ try:
+ async with aiohttp.ClientSession() as session:
+ metadata = await download_metadata(session, NETWORK_NAME)
+
+ apps_data = {}
+ for chain_name, chain_info in metadata.items():
+ if chain_name != OFFCHAIN_KEY and 'apps' in chain_info:
+ apps_data[chain_name] = {
+ app_name: app_info.get('contracts', [])
+ for app_name, app_info in chain_info['apps'].items()
+ }
+
+ await bootstrap_db(session, apps_data)
+ except Exception as e:
+ logger.exception(f'Error bootstrapping database: {e}')
+ sys.exit(1)
if __name__ == '__main__':
init_default_logger()
+ logger.info(f'Starting metrics collector for network: {NETWORK_NAME}')
+ wait_for_db()
+ run_migrations()
+ asyncio.run(bootstrap_database())
run_metrics_loop()
diff --git a/metrics/src/metrics_types.py b/metrics/src/metrics_types.py
new file mode 100644
index 0000000..8ee6c5e
--- /dev/null
+++ b/metrics/src/metrics_types.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of portal-metrics
+#
+# Copyright (C) 2024 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+from typing import TypedDict, Optional, Dict, NewType
+
+AddressType = NewType('AddressType', str)
+
+
+class AddressCounter(TypedDict):
+ gas_usage_count: str
+ token_transfers_count: str
+ transactions_count: str
+ validations_count: str
+ transactions_last_day: int
+ transactions_last_7_days: int
+ transactions_last_30_days: int
+
+
+AddressCountersMap = Dict[AddressType, AddressCounter]
+
+
+class GasPrices(TypedDict):
+ average: float
+ fast: float
+ slow: float
+
+
+class ChainStats(TypedDict):
+ average_block_time: float
+ coin_image: Optional[str]
+ coin_price: Optional[float]
+ coin_price_change_percentage: Optional[float]
+ gas_price_updated_at: str
+ gas_prices: GasPrices
+ gas_prices_update_in: int
+ gas_used_today: str
+ market_cap: str
+ network_utilization_percentage: float
+ static_gas_price: Optional[float]
+ total_addresses: str
+ total_blocks: str
+ total_gas_used: str
+ total_transactions: str
+ transactions_today: str
+ tvl: Optional[float]
+
+
+class AppCounters(TypedDict):
+ app_name: str
+ counters: AddressCountersMap
+
+
+class ChainMetrics(TypedDict):
+ chain_stats: ChainStats
+ apps_counters: Dict[str, AppCounters]
+
+
+class MetricsData(TypedDict):
+ metrics: Dict[str, ChainMetrics]
+ gas: int
+ last_updated: int
diff --git a/metrics/src/migrations.py b/metrics/src/migrations.py
new file mode 100644
index 0000000..6fdbe9b
--- /dev/null
+++ b/metrics/src/migrations.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of portal-metrics
+#
+# Copyright (C) 2024 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+from src.models import db, Address, TransactionCount
+
+
+def create_tables():
+ with db:
+ db.create_tables([Address, TransactionCount])
+
+
+if __name__ == '__main__':
+ create_tables()
+ print('Tables created successfully')
diff --git a/metrics/src/models.py b/metrics/src/models.py
new file mode 100644
index 0000000..280d313
--- /dev/null
+++ b/metrics/src/models.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of portal-metrics
+#
+# Copyright (C) 2024 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import os
+from playhouse.pool import PooledMySQLDatabase
+from peewee import Model, CharField, ForeignKeyField, DateField, IntegerField
+
+
+db = PooledMySQLDatabase(
+ os.getenv('MYSQL_DATABASE'),
+ user=os.getenv('MYSQL_USER'),
+ password=os.getenv('MYSQL_PASSWORD'),
+ host=os.getenv('MYSQL_HOST'),
+ port=int(os.getenv('MYSQL_PORT', 3306)),
+ max_connections=8,
+ stale_timeout=300,
+)
+
+
+class BaseModel(Model):
+ class Meta:
+ database = db
+
+
+class Address(BaseModel):
+ chain_name = CharField()
+ address = CharField()
+ app_name = CharField()
+
+
+class TransactionCount(BaseModel):
+ address = ForeignKeyField(Address, backref='transaction_counts')
+ date = DateField()
+ total_transactions = IntegerField()
+ daily_transactions = IntegerField()
+
+ class Meta:
+ indexes = ((('address', 'date'), True),)
diff --git a/metrics/src/utils.py b/metrics/src/utils.py
new file mode 100644
index 0000000..fdc6a63
--- /dev/null
+++ b/metrics/src/utils.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of portal-metrics
+#
+# Copyright (C) 2024 SKALE Labs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+from typing import Any, Dict, List, Tuple
+from decimal import Decimal
+
+
+def decimal_default(obj):  # `default` hook for json.dumps: Decimal -> float
+    if isinstance(obj, Decimal):
+        return float(obj)
+    raise TypeError(f'Object of type {type(obj).__name__} is not JSON serializable')
+
+
+def transform_to_dict(apps_counters: List[Tuple[str, Any]] | None) -> Dict[str, Any]:
+    if not apps_counters:  # None or empty input -> empty mapping
+        return {}
+    return {app_name: counters for app_name, counters in apps_counters if counters is not None}  # drop apps with missing counters
diff --git a/metrics/tests/conftest.py b/metrics/tests/conftest.py
new file mode 100644
index 0000000..d21cf29
--- /dev/null
+++ b/metrics/tests/conftest.py
@@ -0,0 +1,127 @@
+import os
+import json
+import pytest
+from faker import Faker
+from aiohttp import web
+
+
+fake = Faker()
+
+SAMPLE_APPS = {
+ 'app1': {'contracts': ['0x1111', '0x2222', '0x3333']},
+ 'app2': {'contracts': ['0x4444', '0x5555', '0x6666', '0x7777', '0x8888']},
+ 'app3': {'contracts': ['0x9999']},
+ 'app4': {'contracts': ['0xaaaa', '0xbbbb']},
+ 'app5': {'contracts': ['0xcccc', '0xdddd', '0xeeee', '0xffff']},
+}
+
+SAMPLE_CHAIN_INFO = {'apps': SAMPLE_APPS}
+
+SAMPLE_METADATA = {
+ 'chain1': SAMPLE_CHAIN_INFO,
+ 'chain2': {
+ 'apps': {
+ 'app6': {'contracts': ['0x1234', '0x5678']},
+ 'app7': {'contracts': ['0x90ab', '0xcdef', '0x1122', '0x3344']},
+ }
+ },
+ 'chain3': {
+ 'apps': {
+ 'app8': {'contracts': ['0x5566', '0x7788', '0x99aa', '0xbbcc', '0xddee', '0xff00']},
+ 'app9': {'contracts': ['0x1357']},
+ 'app10': {'contracts': ['0x2468', '0x369c', '0x48bd']},
+ }
+ },
+}
+
+CHAIN_STATS = {
+ 'average_block_time': 5.0,
+ 'total_transactions': '1000000',
+ 'gas_price': {'average': 20.0, 'fast': 25.0, 'slow': 15.0},
+}
+
+TEST_NETWORK = 'testnet'
+TEST_CHAIN = 'chain2'
+TEST_ADDRESS = '0x1234'
+
+
+@pytest.fixture
+def sample_apps():
+ return SAMPLE_APPS
+
+
+@pytest.fixture
+def sample_chain_info():
+ return SAMPLE_CHAIN_INFO
+
+
+@pytest.fixture
+def sample_metadata():
+ return SAMPLE_METADATA
+
+
+def load_counters():
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    json_file_path = os.path.join(current_dir, 'counters.json')  # canned fixture data beside this file
+
+    with open(json_file_path, 'r') as file:
+        return json.load(file)
+
+
+def get_latest_day_counters(counters):
+    return counters['2024-09-17']  # latest date present in tests/counters.json
+
+
+@pytest.fixture
+def counters():
+ return load_counters()
+
+
+@pytest.fixture
+def latest_day_counters():
+    return get_latest_day_counters(load_counters())  # fix: the counters argument is required
+
+
+@pytest.fixture
+def mock_chain_stats_data():
+ return CHAIN_STATS
+
+
+async def chain_stats_api(request):
+ return web.json_response(CHAIN_STATS)
+
+
+async def address_counter_api(request):
+    # Mock counters endpoint: look the address up across all chains/apps in the fixture.
+    address = request.match_info['address']
+    if not address:
+        return web.json_response({'error': 'Address parameter is required'}, status=400)
+
+    all_counters = get_latest_day_counters(load_counters())
+
+    for chain in all_counters.values():
+        for app in chain.values():
+            if address in app:
+                return web.json_response(app[address])
+
+    return web.json_response({}, status=404)
+
+
+def create_app():
+    app = web.Application()  # mock explorer API served to the aiohttp test client
+    app.router.add_route('GET', '/api/v2/stats', chain_stats_api)
+    app.router.add_route('GET', '/api/v2/addresses/{address}/counters', address_counter_api)
+    return app
+
+
+@pytest.fixture
+def mock_explorer_url(monkeypatch):
+ def mock_get_explorer_url(network, chain_name):
+ return ''
+
+ monkeypatch.setattr('src.explorer._get_explorer_url', mock_get_explorer_url)
+
+
+@pytest.fixture
+async def client(aiohttp_client):
+ return await aiohttp_client(create_app())
diff --git a/metrics/tests/counters.json b/metrics/tests/counters.json
new file mode 100644
index 0000000..59b3cef
--- /dev/null
+++ b/metrics/tests/counters.json
@@ -0,0 +1,464 @@
+{
+ "2024-09-11": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12345",
+ "token_transfers_count": "123",
+ "transactions_count": "1234",
+ "validations_count": "12",
+ "transactions_last_day": 50,
+ "transactions_last_7_days": 250,
+ "transactions_last_30_days": 1000
+ },
+ "0x2222": {
+ "gas_usage_count": "23456",
+ "token_transfers_count": "234",
+ "transactions_count": "2345",
+ "validations_count": "23",
+ "transactions_last_day": 60,
+ "transactions_last_7_days": 300,
+ "transactions_last_30_days": 1200
+ },
+ "0x3333": {
+ "gas_usage_count": "34567",
+ "token_transfers_count": "345",
+ "transactions_count": "3456",
+ "validations_count": "34",
+ "transactions_last_day": 70,
+ "transactions_last_7_days": 350,
+ "transactions_last_30_days": 1400
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "45678",
+ "token_transfers_count": "456",
+ "transactions_count": "4567",
+ "validations_count": "45",
+ "transactions_last_day": 80,
+ "transactions_last_7_days": 400,
+ "transactions_last_30_days": 1600
+ },
+ "0x5555": {
+ "gas_usage_count": "56789",
+ "token_transfers_count": "567",
+ "transactions_count": "5678",
+ "validations_count": "56",
+ "transactions_last_day": 90,
+ "transactions_last_7_days": 450,
+ "transactions_last_30_days": 1800
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16890",
+ "token_transfers_count": "168",
+ "transactions_count": "1689",
+ "validations_count": "16",
+ "transactions_last_day": 5,
+ "transactions_last_7_days": 25,
+ "transactions_last_30_days": 100
+ }
+ }
+ }
+ },
+ "2024-09-12": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12395",
+ "token_transfers_count": "125",
+ "transactions_count": "1284",
+ "validations_count": "13",
+ "transactions_last_day": 52,
+ "transactions_last_7_days": 252,
+ "transactions_last_30_days": 1052
+ },
+ "0x2222": {
+ "gas_usage_count": "23516",
+ "token_transfers_count": "236",
+ "transactions_count": "2405",
+ "validations_count": "24",
+ "transactions_last_day": 62,
+ "transactions_last_7_days": 302,
+ "transactions_last_30_days": 1262
+ },
+ "0x3333": {
+ "gas_usage_count": "34637",
+ "token_transfers_count": "347",
+ "transactions_count": "3526",
+ "validations_count": "35",
+ "transactions_last_day": 72,
+ "transactions_last_7_days": 352,
+ "transactions_last_30_days": 1472
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "45758",
+ "token_transfers_count": "458",
+ "transactions_count": "4647",
+ "validations_count": "46",
+ "transactions_last_day": 82,
+ "transactions_last_7_days": 402,
+ "transactions_last_30_days": 1682
+ },
+ "0x5555": {
+ "gas_usage_count": "56879",
+ "token_transfers_count": "569",
+ "transactions_count": "5768",
+ "validations_count": "57",
+ "transactions_last_day": 92,
+ "transactions_last_7_days": 452,
+ "transactions_last_30_days": 1892
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16895",
+ "token_transfers_count": "169",
+ "transactions_count": "1694",
+ "validations_count": "17",
+ "transactions_last_day": 6,
+ "transactions_last_7_days": 26,
+ "transactions_last_30_days": 106
+ }
+ }
+ }
+ },
+ "2024-09-13": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12447",
+ "token_transfers_count": "127",
+ "transactions_count": "1336",
+ "validations_count": "14",
+ "transactions_last_day": 54,
+ "transactions_last_7_days": 254,
+ "transactions_last_30_days": 1106
+ },
+ "0x2222": {
+ "gas_usage_count": "23578",
+ "token_transfers_count": "238",
+ "transactions_count": "2467",
+ "validations_count": "25",
+ "transactions_last_day": 64,
+ "transactions_last_7_days": 304,
+ "transactions_last_30_days": 1326
+ },
+ "0x3333": {
+ "gas_usage_count": "34709",
+ "token_transfers_count": "349",
+ "transactions_count": "3598",
+ "validations_count": "36",
+ "transactions_last_day": 74,
+ "transactions_last_7_days": 354,
+ "transactions_last_30_days": 1546
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "45840",
+ "token_transfers_count": "460",
+ "transactions_count": "4729",
+ "validations_count": "47",
+ "transactions_last_day": 84,
+ "transactions_last_7_days": 404,
+ "transactions_last_30_days": 1766
+ },
+ "0x5555": {
+ "gas_usage_count": "56971",
+ "token_transfers_count": "571",
+ "transactions_count": "5860",
+ "validations_count": "58",
+ "transactions_last_day": 94,
+ "transactions_last_7_days": 454,
+ "transactions_last_30_days": 1986
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16901",
+ "token_transfers_count": "170",
+ "transactions_count": "1700",
+ "validations_count": "18",
+ "transactions_last_day": 7,
+ "transactions_last_7_days": 27,
+ "transactions_last_30_days": 113
+ }
+ }
+ }
+ },
+ "2024-09-14": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12501",
+ "token_transfers_count": "129",
+ "transactions_count": "1390",
+ "validations_count": "15",
+ "transactions_last_day": 56,
+ "transactions_last_7_days": 256,
+ "transactions_last_30_days": 1162
+ },
+ "0x2222": {
+ "gas_usage_count": "23642",
+ "token_transfers_count": "240",
+ "transactions_count": "2531",
+ "validations_count": "26",
+ "transactions_last_day": 66,
+ "transactions_last_7_days": 306,
+ "transactions_last_30_days": 1392
+ },
+ "0x3333": {
+ "gas_usage_count": "34783",
+ "token_transfers_count": "351",
+ "transactions_count": "3672",
+ "validations_count": "37",
+ "transactions_last_day": 76,
+ "transactions_last_7_days": 356,
+ "transactions_last_30_days": 1622
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "45924",
+ "token_transfers_count": "462",
+ "transactions_count": "4813",
+ "validations_count": "48",
+ "transactions_last_day": 86,
+ "transactions_last_7_days": 406,
+ "transactions_last_30_days": 1852
+ },
+ "0x5555": {
+ "gas_usage_count": "57065",
+ "token_transfers_count": "573",
+ "transactions_count": "5954",
+ "validations_count": "59",
+ "transactions_last_day": 96,
+ "transactions_last_7_days": 456,
+ "transactions_last_30_days": 2082
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16908",
+ "token_transfers_count": "171",
+ "transactions_count": "1707",
+ "validations_count": "19",
+ "transactions_last_day": 8,
+ "transactions_last_7_days": 28,
+ "transactions_last_30_days": 121
+ }
+ }
+ }
+ },
+ "2024-09-15": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12557",
+ "token_transfers_count": "131",
+ "transactions_count": "1446",
+ "validations_count": "16",
+ "transactions_last_day": 58,
+ "transactions_last_7_days": 258,
+ "transactions_last_30_days": 1220
+ },
+ "0x2222": {
+ "gas_usage_count": "23708",
+ "token_transfers_count": "242",
+ "transactions_count": "2597",
+ "validations_count": "27",
+ "transactions_last_day": 68,
+ "transactions_last_7_days": 308,
+ "transactions_last_30_days": 1460
+ },
+ "0x3333": {
+ "gas_usage_count": "34859",
+ "token_transfers_count": "353",
+ "transactions_count": "3748",
+ "validations_count": "38",
+ "transactions_last_day": 78,
+ "transactions_last_7_days": 358,
+ "transactions_last_30_days": 1700
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "46010",
+ "token_transfers_count": "464",
+ "transactions_count": "4899",
+ "validations_count": "49",
+ "transactions_last_day": 88,
+ "transactions_last_7_days": 408,
+ "transactions_last_30_days": 1940
+ },
+ "0x5555": {
+ "gas_usage_count": "57161",
+ "token_transfers_count": "575",
+ "transactions_count": "6050",
+ "validations_count": "60",
+ "transactions_last_day": 98,
+ "transactions_last_7_days": 458,
+ "transactions_last_30_days": 2180
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16916",
+ "token_transfers_count": "172",
+ "transactions_count": "1715",
+ "validations_count": "20",
+ "transactions_last_day": 9,
+ "transactions_last_7_days": 29,
+ "transactions_last_30_days": 130
+ }
+ }
+ }
+ },
+ "2024-09-16": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12615",
+ "token_transfers_count": "133",
+ "transactions_count": "1504",
+ "validations_count": "17",
+ "transactions_last_day": 60,
+ "transactions_last_7_days": 260,
+ "transactions_last_30_days": 1280
+ },
+ "0x2222": {
+ "gas_usage_count": "23776",
+ "token_transfers_count": "244",
+ "transactions_count": "2665",
+ "validations_count": "28",
+ "transactions_last_day": 70,
+ "transactions_last_7_days": 310,
+ "transactions_last_30_days": 1530
+ },
+ "0x3333": {
+ "gas_usage_count": "34937",
+ "token_transfers_count": "355",
+ "transactions_count": "3826",
+ "validations_count": "39",
+ "transactions_last_day": 80,
+ "transactions_last_7_days": 360,
+ "transactions_last_30_days": 1780
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "46098",
+ "token_transfers_count": "466",
+ "transactions_count": "4987",
+ "validations_count": "50",
+ "transactions_last_day": 90,
+ "transactions_last_7_days": 410,
+ "transactions_last_30_days": 2030
+ },
+ "0x5555": {
+ "gas_usage_count": "57259",
+ "token_transfers_count": "577",
+ "transactions_count": "6148",
+ "validations_count": "61",
+ "transactions_last_day": 100,
+ "transactions_last_7_days": 460,
+ "transactions_last_30_days": 2280
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16925",
+ "token_transfers_count": "173",
+ "transactions_count": "1724",
+ "validations_count": "21",
+ "transactions_last_day": 10,
+ "transactions_last_7_days": 30,
+ "transactions_last_30_days": 140
+ }
+ }
+ }
+ },
+ "2024-09-17": {
+ "chain1": {
+ "app1": {
+ "0x1111": {
+ "gas_usage_count": "12675",
+ "token_transfers_count": "135",
+ "transactions_count": "1564",
+ "validations_count": "18",
+ "transactions_last_day": 62,
+ "transactions_last_7_days": 262,
+ "transactions_last_30_days": 1342
+ },
+ "0x2222": {
+ "gas_usage_count": "23846",
+ "token_transfers_count": "246",
+ "transactions_count": "2735",
+ "validations_count": "29",
+ "transactions_last_day": 72,
+ "transactions_last_7_days": 312,
+ "transactions_last_30_days": 1602
+ },
+ "0x3333": {
+ "gas_usage_count": "35017",
+ "token_transfers_count": "357",
+ "transactions_count": "3906",
+ "validations_count": "40",
+ "transactions_last_day": 82,
+ "transactions_last_7_days": 362,
+ "transactions_last_30_days": 1862
+ }
+ },
+ "app2": {
+ "0x4444": {
+ "gas_usage_count": "46188",
+ "token_transfers_count": "468",
+ "transactions_count": "5077",
+ "validations_count": "51",
+ "transactions_last_day": 92,
+ "transactions_last_7_days": 412,
+ "transactions_last_30_days": 2122
+ },
+ "0x5555": {
+ "gas_usage_count": "57359",
+ "token_transfers_count": "579",
+ "transactions_count": "6248",
+ "validations_count": "62",
+ "transactions_last_day": 102,
+ "transactions_last_7_days": 462,
+ "transactions_last_30_days": 2382
+ }
+ }
+ },
+ "chain2": {
+ "app6": {
+ "0x1234": {
+ "gas_usage_count": "16935",
+ "token_transfers_count": "174",
+ "transactions_count": "1734",
+ "validations_count": "22",
+ "transactions_last_day": 11,
+ "transactions_last_7_days": 31,
+ "transactions_last_30_days": 151
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/metrics/tests/test_metrics_collector.py b/metrics/tests/test_metrics_collector.py
new file mode 100644
index 0000000..3738cf3
--- /dev/null
+++ b/metrics/tests/test_metrics_collector.py
@@ -0,0 +1,22 @@
+import pytest
+from typing import Dict
+from src.collector import get_chain_stats, fetch_address_data
+from conftest import TEST_NETWORK, TEST_CHAIN
+
+pytestmark = pytest.mark.asyncio
+pytest_plugins = ('pytest_asyncio',)
+
+
+async def test_get_chain_stats(mock_chain_stats_data: Dict, client, mock_explorer_url) -> None:
+ result = await get_chain_stats(client, TEST_NETWORK, TEST_CHAIN)
+ assert isinstance(result, dict)
+ assert result == mock_chain_stats_data
+
+
+async def test_fetch_address_data(client, mock_explorer_url) -> None:
+ result = await fetch_address_data(client, '/api/v2/addresses/0x1234/counters')
+ assert isinstance(result, dict)
+ assert result['gas_usage_count'] == '16935'
+ assert result['token_transfers_count'] == '174'
+ assert result['transactions_count'] == '1734'
+ assert result['validations_count'] == '22'
diff --git a/pytest.ini b/pytest.ini
index 5785bf3..bef33e0 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -4,3 +4,4 @@ log_cli_level = INFO
log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)
log_cli_date_format=%Y-%m-%d %H:%M:%S
filterwarnings = ignore::DeprecationWarning
+asyncio_mode = auto
diff --git a/requirements-dev.txt b/requirements-dev.txt
index a1d9774..0a81b7e 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,3 +1 @@
-flake8==3.7.8
-pytest==5.4.3
-skale.py==5.7dev7
+flake8==7.1.1
\ No newline at end of file
diff --git a/scripts/backup_db.sh b/scripts/backup_db.sh
new file mode 100644
index 0000000..9d069db
--- /dev/null
+++ b/scripts/backup_db.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+set -e
+
+[ $# -eq 0 ] && { echo "Usage: $0 <backup_dir>"; exit 1; }
+
+BACKUP_DIR="$1"
+CONTAINER_NAME="db"
+DB_NAME="metrics"
+
+
+mkdir -p "$BACKUP_DIR"
+
+TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
+BACKUP_FILE="$BACKUP_DIR/${DB_NAME}_${TIMESTAMP}.sql"
+
+docker exec "$CONTAINER_NAME" /usr/bin/mysqldump -u root -p"${MYSQL_ROOT_PASSWORD}" "$DB_NAME" > "$BACKUP_FILE"
+
+gzip "$BACKUP_FILE"
+
+echo "Backup completed: ${BACKUP_FILE}.gz"
\ No newline at end of file
diff --git a/scripts/restore_db.sh b/scripts/restore_db.sh
new file mode 100644
index 0000000..3f98f83
--- /dev/null
+++ b/scripts/restore_db.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e
+
+[ $# -eq 0 ] && { echo "Usage: $0 <backup_file>"; exit 1; }
+
+BACKUP_FILE="$1"
+CONTAINER_NAME="db"
+DB_NAME="metrics"
+
+[ ! -f "$BACKUP_FILE" ] && { echo "Backup file not found: $BACKUP_FILE"; exit 1; }
+
+if [[ $BACKUP_FILE == *.gz ]]; then
+    gunzip -c "$BACKUP_FILE" | docker exec -i "$CONTAINER_NAME" /usr/bin/mysql -u root -p"${MYSQL_ROOT_PASSWORD}" "$DB_NAME"
+else
+    docker exec -i "$CONTAINER_NAME" /usr/bin/mysql -u root -p"${MYSQL_ROOT_PASSWORD}" "$DB_NAME" < "$BACKUP_FILE"
+fi
+
+echo "Database restored successfully from $BACKUP_FILE"
\ No newline at end of file