
Cache index (#273)
* use seconds instead of ms

* use 5 seconds instead of 100

* use 10 seconds for updater

* remove old state sync, add new gen server, add gen server start on app start

* refactor query and server

* use lowest block number in fetcher

* add updater

* fix updater

* remove unused modules

* remove N+1 queries, remove unneeded fetch to rpc

* update env vars

* add PK to tx and tx receipts

* Fix some bugs

* replace blocks PK

* Fix network bugs

* Uncomment stuff

* Fix fetcher when there are holes in blocks

* Fix migrations for sqlite

* add count table

* fix match

* Add a cache for the index

* Use actual counts on index

* add support for sqlite

* Handle nil case for `counts`

---------

Co-authored-by: SantiagoPittella <[email protected]>
jrchatruc and SantiagoPittella authored Sep 27, 2023
1 parent b77254d commit bc33710
Showing 28 changed files with 529 additions and 544 deletions.
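The centerpiece of the change is the new StarknetExplorer.IndexCache process added to the supervision tree (see the lib/starknet_explorer/application.ex hunk below); the module itself is among the files whose diff did not load on this page. The sketch below shows what such a cache typically looks like: a GenServer that periodically recomputes the data the index page needs and serves it from memory. The load_index_data/0 helper and the 5-second refresh interval (taken from the "use 5 seconds instead of 100" bullet above) are assumptions, not code from this commit.

defmodule StarknetExplorer.IndexCache do
  @moduledoc """
  Sketch only: caches the data shown on the index page and refreshes it on a
  timer, so page loads do not hit the database/RPC on every request.
  """
  use GenServer

  # Assumed interval; the commit messages mention moving from 100 ms to 5 s.
  @refresh_interval :timer.seconds(5)

  def start_link(opts), do: GenServer.start_link(__MODULE__, opts, name: __MODULE__)

  # Read path used by the index page.
  def get, do: GenServer.call(__MODULE__, :get)

  @impl true
  def init(_opts) do
    send(self(), :refresh)
    {:ok, %{}}
  end

  @impl true
  def handle_call(:get, _from, state), do: {:reply, state, state}

  @impl true
  def handle_info(:refresh, _state) do
    # Hypothetical loader: the real module would pull the latest blocks and
    # transactions plus the counts kept in the new "count table".
    new_state = load_index_data()
    Process.send_after(self(), :refresh, @refresh_interval)
    {:noreply, new_state}
  end

  defp load_index_data, do: %{blocks: [], transactions: [], counts: %{}}
end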
6 changes: 3 additions & 3 deletions .github/workflows/deploy-production.yaml
@@ -55,8 +55,8 @@ jobs:
NEWRELIC_APP_NAME: ${{ vars.NEWRELIC_APP_NAME }}
SENTRY_ENV: "production"
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
ENABLE_LISTENER: "true"
ENABLE_UPDATER: "true"
ENABLE_FETCHER: "true"
ENABLE_MAINNET_SYNC: "true"
ENABLE_TESTNET_SYNC: "true"
ENABLE_TESTNET2_SYNC: "true"
run: |
ansible-playbook -i ansible/inventory.yaml ansible/playbooks/deployment.yaml
6 changes: 3 additions & 3 deletions .github/workflows/deploy-testing.yaml
@@ -53,9 +53,9 @@ jobs:
SSH_HOST: ${{ vars.SSH_HOST }}
GIT_BRANCH: ${{ github.head_ref || github.ref_name }}
ANSIBLE_STDOUT_CALLBACK: "yaml"
ENABLE_LISTENER: "true"
ENABLE_UPDATER: "true"
ENABLE_FETCHER: ${{ vars.ENABLE_FETCHER }}
ENABLE_MAINNET_SYNC: "true"
ENABLE_TESTNET_SYNC: "true"
ENABLE_TESTNET2_SYNC: "true"
ENABLE_GATEWAY_DATA: "true"
NEWRELIC_KEY: ${{ secrets.NEWRELIC_KEY }}
NEWRELIC_APP_NAME: ${{ vars.NEWRELIC_APP_NAME }}
13 changes: 6 additions & 7 deletions ansible/playbooks/deployment.yaml
@@ -52,9 +52,9 @@
PORT: "4100"
SENTRY_ENV: "{{ lookup('ansible.builtin.env', 'SENTRY_ENV') }}"
SENTRY_DSN: "{{ lookup('ansible.builtin.env', 'SENTRY_DSN') }}"
ENABLE_LISTENER: "true"
ENABLE_UPDATER: "true"
ENABLE_FETCHER: "true"
ENABLE_MAINNET_SYNC: "true"
ENABLE_TESTNET_SYNC: "true"
ENABLE_TESTNET2_SYNC: "true"
args:
chdir: /home/starknet_explorer/tmp/madara_explorer
register: build_output
@@ -88,15 +88,14 @@
aws_secret_access_key: "{{ lookup('ansible.builtin.env', 'AWS_SECRET_ACCESS_KEY') }}"
aws_region: "{{ lookup('ansible.builtin.env', 'AWS_REGION') }}"
prover_storage: "{{ lookup('ansible.builtin.env', 'PROVER_STORAGE') }}"
enable_listener: "{{ lookup('ansible.builtin.env', 'ENABLE_LISTENER') }}"
enable_gateway_data: "{{ lookup('ansible.builtin.env', 'ENABLE_GATEWAY_DATA') }}"
newrelic_key: "{{ lookup('ansible.builtin.env', 'NEWRELIC_KEY') }}"
newrelic_app_name: "{{ lookup('ansible.builtin.env', 'NEWRELIC_APP_NAME') }}"
sentry_env: "{{ lookup('ansible.builtin.env', 'SENTRY_ENV') }}"
sentry_dsn: "{{ lookup('ansible.builtin.env', 'SENTRY_DSN') }}"
enable_listener: "true"
enable_updater: "true"
enable_fetcher: "true"
enable_mainnet_sync: "true"
enable_testnet_sync: "true"
enable_testnet2_sync: "true"

- name: Create user systemd directory
ansible.builtin.file:
7 changes: 3 additions & 4 deletions ansible/playbooks/templates/.env.j2
@@ -10,12 +10,11 @@ AWS_ACCESS_KEY_ID={{ aws_access_key_id }}
AWS_SECRET_ACCESS_KEY={{ aws_secret_access_key }}
AWS_REGION={{ aws_region }}
PROVER_STORAGE={{ prover_storage }}
ENABLE_LISTENER={{ enable_listener }}
ENABLE_GATEWAY_DATA={{ enable_gateway_data }}
NEWRELIC_KEY={{ newrelic_key }}
NEWRELIC_APP_NAME={{ newrelic_app_name }}
SENTRY_DSN={{ sentry_dsn }}
SENTRY_ENV={{ sentry_env }}
ENABLE_FETCHER={{ enable_fetcher }}
ENABLE_LISTENER={{ enable_listener }}
ENABLE_UPDATER={{ enable_updater }}
ENABLE_MAINNET_SYNC={{ enable_mainnet_sync }}
ENABLE_TESTNET_SYNC={{ enable_testnet_sync }}
ENABLE_TESTNET2_SYNC={{ enable_testnet2_sync }}
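The three deployment files and this template carry the same rename: the workflows export ENABLE_MAINNET_SYNC / ENABLE_TESTNET_SYNC / ENABLE_TESTNET2_SYNC, the playbook passes them through as enable_*_sync vars, and the template renders them into the service's .env. With every network enabled, the relevant part of the rendered file would read as follows (illustrative output, assuming the lookups above resolve to "true"):

ENABLE_GATEWAY_DATA=true
ENABLE_MAINNET_SYNC=true
ENABLE_TESTNET_SYNC=true
ENABLE_TESTNET2_SYNC=true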
48 changes: 44 additions & 4 deletions lib/starknet_explorer/application.ex
@@ -11,10 +11,50 @@ defmodule StarknetExplorer.Application do
# @networks
# |> Enum.flat_map(fn net -> cache_supervisor_spec(net) end)

mainnet_state_sync =
if System.get_env("ENABLE_MAINNET_SYNC") == "true" do
# Start the State Sync System server for mainnet.
[
Supervisor.child_spec(
{StarknetExplorer.Blockchain.StateSyncSystem,
[network: :mainnet, name: :mainnet_state_sync]},
id: :mainnet_state_sync
)
]
else
[]
end

testnet_state_sync =
if System.get_env("ENABLE_TESTNET_SYNC") == "true" do
# Start the State Sync System server for testnet.
[
Supervisor.child_spec(
{StarknetExplorer.Blockchain.StateSyncSystem,
[network: :testnet, name: :testnet_state_sync]},
id: :testnet_state_sync
)
]
else
[]
end

testnet2_state_sync =
if System.get_env("ENABLE_TESTNET2_SYNC") == "true" do
# Start the State Sync System server for testnet2.
[
Supervisor.child_spec(
{StarknetExplorer.Blockchain.StateSyncSystem,
[network: :testnet2, name: :testnet2_state_sync]},
id: :testnet2_state_sync
)
]
else
[]
end

children =
[
# Start the Blockchain supervisor
StarknetExplorer.Blockchain.BlockchainSupervisor,
# Start the Telemetry supervisor
StarknetExplorerWeb.Telemetry,
# Start the Ecto repository
@@ -27,8 +67,8 @@
StarknetExplorerWeb.Endpoint,
# Start a worker by calling: StarknetExplorer.Worker.start_link(arg)
# {StarknetExplorer.Worker, arg}
{DynamicSupervisor, strategy: :one_for_one, name: StarknetExplorer.BlockFetcher}
]
StarknetExplorer.IndexCache
] ++ testnet2_state_sync ++ testnet_state_sync ++ mainnet_state_sync

# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
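The three per-network blocks added above are structurally identical. Purely as an illustration (this refactor is not part of the commit), the same child specs could be built from a single list:

state_sync_children =
  for {env_var, network} <- [
        {"ENABLE_MAINNET_SYNC", :mainnet},
        {"ENABLE_TESTNET_SYNC", :testnet},
        {"ENABLE_TESTNET2_SYNC", :testnet2}
      ],
      System.get_env(env_var) == "true" do
    Supervisor.child_spec(
      {StarknetExplorer.Blockchain.StateSyncSystem,
       [network: network, name: :"#{network}_state_sync"]},
      id: :"#{network}_state_sync"
    )
  end

Appending children ++ state_sync_children would then replace the three manual ++ appends at the end of the list.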
115 changes: 114 additions & 1 deletion lib/starknet_explorer/block/block.ex
@@ -53,11 +53,62 @@ defmodule StarknetExplorer.Block do
def changeset(block = %__MODULE__{}, attrs) do
block
|> cast(attrs, @cast_fields)
end

def changeset_with_validations(block = %__MODULE__{}, attrs) do
block
|> cast(attrs, @cast_fields)
|> validate_required(@required_fields)
|> unique_constraint(:number)
|> unique_constraint(:hash)
end

@doc """
Given a block from the RPC response, our block from SQL, and the transaction receipts,
update them in the DB.
"""
def update_from_rpc_response(
block_from_sql,
_block_from_rpc = %{
"status" => status,
"gas_fee_in_wei" => gas_fee_in_wei,
"execution_resources" => execution_resources
},
receipts
) do
tx_receipts =
Enum.map(receipts, fn {tx_hash, rpc_receipt} ->
sql_receipt =
Enum.find(block_from_sql.transactions, fn tx ->
tx.receipt.transaction_hash == tx_hash
end).receipt

{sql_receipt, rpc_receipt}
end)

StarknetExplorer.Repo.transaction(fn ->
block_changeset =
Ecto.Changeset.change(block_from_sql,
status: status,
gas_fee_in_wei: gas_fee_in_wei,
execution_resources: execution_resources
)

Repo.update!(block_changeset)

Enum.each(tx_receipts, fn {tx_receipt, rpc_tx_receipt} ->
tx_receipt_changeset =
Ecto.Changeset.change(tx_receipt,
actual_fee: rpc_tx_receipt["actual_fee"],
finality_status: rpc_tx_receipt["finality_status"],
execution_status: rpc_tx_receipt["execution_status"]
)

Repo.update!(tx_receipt_changeset)
end)
end)
end
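
# Expected argument shapes, inferred from the pattern matches above
# (hash and values are illustrative, not taken from the commit):
#
#   block_from_rpc = %{
#     "status" => "ACCEPTED_ON_L1",
#     "gas_fee_in_wei" => "0x4f0",
#     "execution_resources" => %{"steps" => 3210}
#   }
#
#   receipts = %{
#     "0xabc123" => %{
#       "actual_fee" => "0x1a",
#       "finality_status" => "ACCEPTED_ON_L1",
#       "execution_status" => "SUCCEEDED"
#     }
#   }
#
#   Block.update_from_rpc_response(block_from_sql, block_from_rpc, receipts)
#
# block_from_sql must have its transactions and their receipts preloaded,
# e.g. via get_by_number_with_receipts_preload/2 further down.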

@doc """
Given a block from the RPC response and the transaction receipts,
insert them into the DB.
@@ -88,7 +139,7 @@

transaction_result =
StarknetExplorer.Repo.transaction(fn ->
block_changeset = Block.changeset(%Block{}, block)
block_changeset = Block.changeset_with_validations(%Block{}, block)

{:ok, block} = Repo.insert(block_changeset)

@@ -137,6 +188,45 @@
struct(__MODULE__, rpc_block)
end

@doc """
Finds the bottom of the highest contiguous run of blocks stored so far, which is not
the same as the lowest stored block. Example: with blocks [5, 6, 20, 21, 22] stored,
the run at the top is [20, 21, 22], so the block of interest is `20`, not `5`.
The block fetcher uses this to keep walking downwards in order; a lower, disconnected
block can be inserted at any time (for example when someone visits the details page of
an old block), so those holes have to be accounted for.
"""
def get_lowest_block_number(network) do
Repo.query(
"SELECT number - 1
FROM blocks block
WHERE NOT EXISTS
(
SELECT NULL
FROM blocks mi
WHERE mi.number = block.number - 1 AND mi.network = $1
) AND block.network = $1
ORDER BY number DESC
LIMIT 1",
[network]
)
end
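
# How the anti-join behaves (illustrative walk-through, not part of the commit):
# with blocks [5, 6, 20, 21, 22] stored for a network, the inner NOT EXISTS
# keeps the rows whose predecessor is missing, i.e. 5 (no block 4) and 20
# (no block 19). ORDER BY number DESC LIMIT 1 keeps 20, and the outer SELECT
# yields number - 1 = 19: the next block the fetcher should pull while walking
# downwards. The same check in plain Elixir:
#
#   blocks = [5, 6, 20, 21, 22]
#
#   blocks
#   |> Enum.filter(fn n -> (n - 1) not in blocks end)
#   |> Enum.max()
#   |> Kernel.-(1)
#   # => 19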

@doc """
Returns the highest block number stored in the DB.
"""
def block_height(network) do
query =
from(b in Block,
where: b.network == ^network,
order_by: [desc: b.number],
limit: 1
)

Repo.one(query)
end

@doc """
Returns the highest block number fetched from the RPC.
"""
@@ -217,6 +307,16 @@
|> Repo.preload(:transactions)
end

def get_by_number_with_receipts_preload(num, network) do
query =
from b in Block,
where: b.number == ^num and b.network == ^network,
preload: [transactions: :receipt]

Repo.one(query)
|> Repo.preload(:transactions)
end

def get_by_height(height, network) when is_integer(height) do
query =
from b in Block,
@@ -225,6 +325,19 @@
Repo.one(query)
end

def get_lowest_not_completed_block(network) do
query =
from b in Block,
where:
b.status != "ACCEPTED_ON_L1" or is_nil(b.gas_fee_in_wei) or b.gas_fee_in_wei == "" or
is_nil(b.execution_resources),
where: b.network == ^network,
limit: 1,
order_by: [asc: b.number]

Repo.one(query)
end
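
# Sketch of how the updater from the commit messages ("add updater", "use 10
# seconds for updater") could tie these functions together. Only the Block
# functions are from this module; fetch_block_from_rpc/2 is a hypothetical
# stand-in for the RPC call:
#
#   def handle_info(:update, %{network: network} = state) do
#     with db_block when not is_nil(db_block) <-
#            get_lowest_not_completed_block(network),
#          db_block <- get_by_number_with_receipts_preload(db_block.number, network),
#          {:ok, rpc_block, receipts} <- fetch_block_from_rpc(db_block.number, network) do
#       update_from_rpc_response(db_block, rpc_block, receipts)
#     end
#
#     Process.send_after(self(), :update, :timer.seconds(10))
#     {:noreply, state}
#   end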

def get_with_not_finalized_blocks(limit \\ 10, network) do
query =
from b in Block,