Merge pull request #284 from emeraldpay/fix/unstable-head-block
splix authored Mar 29, 2024
2 parents d379d5d + 16768d0 · commit c9e760c
Showing 11 changed files with 539 additions and 264 deletions.
@@ -28,7 +28,7 @@ import io.emeraldpay.dshackle.reader.StandardRpcReader
 import io.emeraldpay.dshackle.upstream.CurrentMultistreamHolder
 import io.emeraldpay.dshackle.upstream.ForkWatchFactory
 import io.emeraldpay.dshackle.upstream.Head
-import io.emeraldpay.dshackle.upstream.MergedHead
+import io.emeraldpay.dshackle.upstream.MergedPowHead
 import io.emeraldpay.dshackle.upstream.bitcoin.BitcoinCacheUpdate
 import io.emeraldpay.dshackle.upstream.bitcoin.BitcoinRpcHead
 import io.emeraldpay.dshackle.upstream.bitcoin.BitcoinRpcUpstream
@@ -188,7 +188,7 @@ open class ConfiguredUpstreams(
         val head: Head = conn.zeroMq?.let { zeroMq ->
             val server = ZMQServer(zeroMq.host, zeroMq.port, "hashblock")
             val zeroMqHead = BitcoinZMQHead(server, directApi, extractBlock)
-            MergedHead(listOf(rpcHead, zeroMqHead))
+            MergedPowHead(listOf(rpcHead, zeroMqHead))
         } ?: rpcHead

         val subscriptions = conn.zeroMq?.let { zeroMq ->
62 changes: 0 additions & 62 deletions src/main/kotlin/io/emeraldpay/dshackle/upstream/MergedHead.kt

This file was deleted.

128 changes: 128 additions & 0 deletions src/main/kotlin/io/emeraldpay/dshackle/upstream/MergedPosHead.kt
@@ -0,0 +1,128 @@
package io.emeraldpay.dshackle.upstream

import io.emeraldpay.dshackle.cache.Caches
import io.emeraldpay.dshackle.cache.CachesEnabled
import io.emeraldpay.dshackle.data.BlockContainer
import org.slf4j.LoggerFactory
import org.springframework.context.Lifecycle
import reactor.core.Disposable
import reactor.core.publisher.Flux
import java.util.concurrent.locks.ReentrantReadWriteLock
import java.util.function.Function
import kotlin.concurrent.read
import kotlin.concurrent.write

class MergedPosHead(
    private val sources: Iterable<Pair<Int, Head>>
) : AbstractHead(), Lifecycle, CachesEnabled {

    companion object {
        private val log = LoggerFactory.getLogger(MergedPosHead::class.java)
    }
    private var subscription: Disposable? = null

    private val lock = ReentrantReadWriteLock()
    private val headLimit = 16
    private var head: List<Pair<Int, BlockContainer>> = emptyList()

    override fun isRunning(): Boolean {
        return subscription != null
    }

    override fun start() {
        sources.forEach {
            val head = it.second
            if (head is Lifecycle && !head.isRunning) {
                head.start()
            }
        }
        subscription?.dispose()
        subscription = super.follow(merge(sources.map { Pair(it.first, it.second.getFlux()) }))
    }

    fun merge(sources: Iterable<Pair<Int, Flux<BlockContainer>>>): Flux<BlockContainer> {
        return Flux.merge(
            sources.map {
                it.second.transform(process(it.first))
            }
        ).distinctUntilChanged {
            it.hash
        }
    }

    fun process(priority: Int): Function<Flux<BlockContainer>, Flux<BlockContainer>> {
        return Function { source ->
            source.handle { block, sink ->
                if (onNext(priority, block)) {
                    val top = lock.read {
                        head.lastOrNull()
                    }
                    if (top != null) {
                        sink.next(top.second)
                    }
                }
            }
        }
    }

    private fun onNext(priority: Int, block: BlockContainer): Boolean {
        val prev = lock.read {
            head.find { it.second.height == block.height }
        }
        if (prev != null && prev.first > priority) {
            return false
        }
        lock.write {
            // first, check if the existing data for this height came from a higher-priority source
            val prev = head.find { it.second.height == block.height }
            if (prev != null && prev.first > priority) {
                return false
            }

            // otherwise add it to the list
            val fresh = if (head.isEmpty()) {
                // just the first run, so nothing to compare against yet
                listOf(Pair(priority, block))
            } else if (head.last().second.height < block.height) {
                // a new block, just add it on top
                head + Pair(priority, block)
            } else if (head.all { it.first < priority }) {
                // the list is filled by a lower-priority upstream that may be invalid, so replace the whole list
                listOf(Pair(priority, block))
            } else {
                // we already have that height in the list, and the checks above guarantee it is not from a higher-priority source.
                // there are two options: it's the same block or a different one.
                // if it sits in the middle, keep the rest of the list anyway, because a higher-priority upstream would fix it with the following updates
                head.map {
                    if (it.second.height == block.height) {
                        Pair(priority, block)
                    } else {
                        it
                    }
                }
            }
            head = fresh.takeLast(headLimit)
            return true
        }
    }

    override fun stop() {
        sources.forEach {
            val head = it.second
            if (head is Lifecycle && head.isRunning) {
                head.stop()
            }
        }
        subscription?.dispose()
        subscription = null
    }

    override fun setCaches(caches: Caches) {
        sources.forEach {
            val head = it.second
            if (head is CachesEnabled) {
                head.setCaches(caches)
            }
        }
    }
}
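
The key change in this class is the priority rule inside onNext: a block for a height that is already tracked is ignored unless it comes from a source whose priority is at least as high, which keeps a lagging or unstable upstream from flapping the head backwards. Below is a minimal, self-contained sketch of that selection rule; Block and select are simplified stand-ins written for this note, not dshackle types.

data class Block(val height: Long, val hash: String)

// Keep the best-known block per height, preferring the source with the higher priority number,
// mirroring the branches of MergedPosHead.onNext as a pure function.
fun select(current: List<Pair<Int, Block>>, priority: Int, block: Block): List<Pair<Int, Block>> {
    val prev = current.find { it.second.height == block.height }
    if (prev != null && prev.first > priority) {
        return current // a higher-priority source already reported this height
    }
    return when {
        current.isEmpty() -> listOf(priority to block)
        current.last().second.height < block.height -> current + (priority to block)
        current.all { it.first < priority } -> listOf(priority to block)
        else -> current.map { if (it.second.height == block.height) priority to block else it }
    }.takeLast(16)
}

fun main() {
    var head = emptyList<Pair<Int, Block>>()
    head = select(head, 0, Block(100, "0xaaa")) // low-priority source reports height 100
    head = select(head, 1, Block(100, "0xbbb")) // higher-priority source overrides the same height
    head = select(head, 0, Block(100, "0xccc")) // the late low-priority update is ignored
    println(head) // [(1, Block(height=100, hash=0xbbb))]
}

Running the example shows the low-priority update at an already-known height being discarded while the higher-priority source's block stays on top.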
137 changes: 137 additions & 0 deletions src/main/kotlin/io/emeraldpay/dshackle/upstream/MergedPowHead.kt
@@ -0,0 +1,137 @@
/**
 * Copyright (c) 2020 EmeraldPay, Inc
 * Copyright (c) 2019 ETCDEV GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.emeraldpay.dshackle.upstream

import io.emeraldpay.dshackle.cache.Caches
import io.emeraldpay.dshackle.cache.CachesEnabled
import io.emeraldpay.dshackle.data.BlockContainer
import org.springframework.context.Lifecycle
import reactor.core.Disposable
import reactor.core.publisher.Flux
import java.util.concurrent.locks.ReentrantReadWriteLock
import java.util.function.Function
import kotlin.concurrent.read
import kotlin.concurrent.write

class MergedPowHead(
    private val sources: Iterable<Head>
) : AbstractHead(), Lifecycle, CachesEnabled {

    private var subscription: Disposable? = null

    private val lock = ReentrantReadWriteLock()
    private val headLimit = 16
    private var head: List<BlockContainer> = emptyList()

    override fun isRunning(): Boolean {
        return subscription != null
    }

    override fun start() {
        sources.forEach { head ->
            if (head is Lifecycle && !head.isRunning) {
                head.start()
            }
        }
        subscription?.dispose()
        subscription = super.follow(merge(sources.map { it.getFlux() }))
    }

    fun merge(sources: Iterable<Flux<BlockContainer>>): Flux<BlockContainer> {
        return Flux.merge(
            sources.map {
                it.transform(process())
            }
        ).distinctUntilChanged {
            it.hash
        }
    }

    fun process(): Function<Flux<BlockContainer>, Flux<BlockContainer>> {
        return Function { source ->
            source.handle { block, sink ->
                if (onNext(block)) {
                    val top = lock.read {
                        head.lastOrNull()
                    }
                    if (top != null) {
                        sink.next(top)
                    }
                }
            }
        }
    }

    private fun onNext(block: BlockContainer): Boolean {
        val prev = lock.read {
            head.find { it.height == block.height }
        }
        if (prev != null && prev.difficulty > block.difficulty) {
            return false
        }
        lock.write {
            // first, check if the existing data for this height has a higher difficulty
            val prev = head.find { it.height == block.height }
            if (prev != null && prev.difficulty > block.difficulty) {
                return false
            }

            // otherwise add it to the list
            val fresh = if (head.isEmpty()) {
                // just the first run, so nothing to compare against yet
                listOf(block)
            } else if (head.last().height < block.height) {
                // a new block, just add it on top
                head + block
            } else {
                // we already have that height in the list, and the check above guarantees it does not have a higher difficulty.
                // there are two options: it's the same block or a different one.
                // if it sits in the middle, keep the rest of the list anyway, because the following updates would fix it
                head.map {
                    if (it.height == block.height) {
                        block
                    } else {
                        it
                    }
                }
            }
            head = fresh
                // drop all blocks on top of this one if their difficulty is lower
                .filterNot { it.height > block.height && it.difficulty < block.difficulty }
                .takeLast(headLimit)
            return true
        }
    }

    override fun stop() {
        sources.forEach { head ->
            if (head is Lifecycle && head.isRunning) {
                head.stop()
            }
        }
        subscription?.dispose()
        subscription = null
    }

    override fun setCaches(caches: Caches) {
        sources.forEach {
            if (it is CachesEnabled) {
                it.setCaches(caches)
            }
        }
    }
}
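
The PoW counterpart breaks ties by difficulty rather than source priority: a block at a known height only replaces the stored one when its difficulty is not lower, and lighter blocks stacked above a heavier replacement are dropped so the merged head can move to the heavier chain. A simplified, stand-alone sketch of that rule follows; Block and select here are illustrative stand-ins, not the BlockContainer type used above.

import java.math.BigInteger

data class Block(val height: Long, val difficulty: BigInteger, val hash: String)

// Keep the best-known chain tip, preferring the block with the higher difficulty at each height
// and dropping lighter blocks stacked above a heavier one.
fun select(current: List<Block>, block: Block): List<Block> {
    val prev = current.find { it.height == block.height }
    if (prev != null && prev.difficulty > block.difficulty) {
        return current // the existing block at this height is heavier, keep it
    }
    val updated = when {
        current.isEmpty() -> listOf(block)
        current.last().height < block.height -> current + block
        else -> current.map { if (it.height == block.height) block else it }
    }
    // drop all blocks above this one if their difficulty is lower
    return updated.filterNot { it.height > block.height && it.difficulty < block.difficulty }
}

fun main() {
    var head = emptyList<Block>()
    head = select(head, Block(100, BigInteger.valueOf(1000), "0xaaa"))
    head = select(head, Block(101, BigInteger.valueOf(900), "0xbbb"))  // lighter block on top
    head = select(head, Block(100, BigInteger.valueOf(1200), "0xccc")) // heavier fork at 100 wins, 101 is dropped
    println(head.map { it.hash }) // [0xccc]
}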
@@ -26,7 +26,7 @@ import io.emeraldpay.dshackle.upstream.EmptyHead
 import io.emeraldpay.dshackle.upstream.HardcodedReader
 import io.emeraldpay.dshackle.upstream.Head
 import io.emeraldpay.dshackle.upstream.IntegralRpcReader
-import io.emeraldpay.dshackle.upstream.MergedHead
+import io.emeraldpay.dshackle.upstream.MergedPowHead
 import io.emeraldpay.dshackle.upstream.Multistream
 import io.emeraldpay.dshackle.upstream.Upstream
 import io.emeraldpay.dshackle.upstream.VerifyingReader
@@ -104,7 +104,7 @@ open class BitcoinMultistream(
                 }
             }
         } else {
-            val newHead = MergedHead(sourceUpstreams.map { it.getHead() }).apply {
+            val newHead = MergedPowHead(sourceUpstreams.map { it.getHead() }).apply {
                 this.start()
             }
             val lagObserver = BitcoinHeadLagObserver(newHead, sourceUpstreams)
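
The hunk above is where the Bitcoin multistream now builds its aggregated head from MergedPowHead. A rough sketch of how such a merged head could be started and observed, where headA and headB are hypothetical Head instances standing in for the upstreams' heads:

// Hypothetical wiring, for illustration only; headA and headB stand in for upstream Head instances.
val merged = MergedPowHead(listOf(headA, headB)).apply {
    start()
}
merged.getFlux().subscribe { block ->
    // every emission is the current best block across the merged upstreams
    println("new head: height=${block.height} hash=${block.hash}")
}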
