Forked off for Hydra network
wigy-opensource-developer committed Apr 7, 2020
1 parent bcfa4e9 commit 3ea7761
Showing 6 changed files with 68 additions and 63 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog

## [2.0.0] - 2020-04-07

- Support for Hydra peers

## [1.3.0] - 2020-02-04

- Support for ARK 2.6 peers

## [1.2.10] - 2019-08-06

25 changes: 11 additions & 14 deletions README.md
@@ -1,31 +1,28 @@
# Crawler
# Hydra Crawler

Crawler scans the ARK network to get information about the peers in the network.

#### ❤️ Support maintenance and development of plugins
If you find this or other plugins useful please consider

- voting for `deadlock` delegate
- donating to `AWtgFYbvtLDYccJvC5MChk4dpiUy2Krt2U`

to support development new plugins and tools for Ark's Ecosystem and maintenance of existing ones. Full list of contributions can be found on [https://arkdelegatesio/delegate/deadlock/](https://arkdelegates.io/delegate/deadlock/contributions/). 🖖
Hydra Crawler scans the Hydra network to get information about the peers in the network.
This is a fork of the [ARK Crawler](https://github.com/deadlock-delegate/crawler/), so make sure
you sponsor them for their hard work.

## Installation

`npm install`

## Usage

`npm start http://<ip>:<port>`
`node . http://<ip>:<port>`

For port use the p2p port, which is 4001 for Ark's mainnet or 4002 for Ark's devnet.
For port use the p2p port, which is 4701 for [Hydra mainnet](http://hydra.iop.global/) or
4702 for [Hydra devnet](http://dev.hydra.iop.global/).

## Credits

- [roks0n](https://github.com/roks0n)
- [dmvt](https://github.com/dmvt)
- [All Contributors](../../../../contributors)
- [wigy](https://github.com/wigy-opensource-developer/)
- [All Contributors](../../contributors)

## License

[MIT](LICENSE) © roks0n
ARK Delegate: [MIT](LICENSE) © roks0n
Hydra patches: [MIT](LICENSE) © Decentralized Society Foundation, Panama
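
As a concrete illustration of the usage line above: against a devnet peer the invocation would be `node . http://127.0.0.1:4702`, where the IP is a placeholder for any reachable Hydra peer's p2p endpoint.
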
30 changes: 18 additions & 12 deletions package-lock.json

Some generated files are not rendered by default.

22 changes: 12 additions & 10 deletions package.json
@@ -1,29 +1,31 @@
{
"name": "@deadlock-delegate/crawler",
"version": "1.3.0",
"description": "Crawler scans the ARK network to get information about the peers in the network.",
"main": "src/crawler.js",
"name": "@internet-of-people/hydra-crawler",
"version": "2.0.0",
"description": "Crawler scans the Hydra network to get information about the peers in the network.",
"main": "app.js",
"scripts": {
"start": "node app.js",
"lint": "eslint ./ --fix"
},
"repository": {
"type": "git",
"url": "git+https://github.com/deadlock-delegate/crawler.git"
"url": "git+https://github.com/wigy-opensource-developer/hydra-crawler.git"
},
"engines": {
"node": "^8.11.2"
"node": "12.16.0"
},
"keywords": [
"ark acosystem",
"ark ecosystem",
"network",
"scanner",
"blockchain"
"blockchain",
"hydra",
"iop"
],
"author": "roks0n",
"license": "MIT",
"homepage": "https://github.com/deadlock-delegate/crawler#readme",
"homepage": "https://github.com/wigy-opensource-developer/hydra-crawler#readme",
"devDependencies": {
"@types/lodash": "^4.14.149",
"eslint": "^6.8.0",
"eslint-config-standard": "^14.1.0",
"eslint-plugin-import": "^2.20.1",
43 changes: 20 additions & 23 deletions src/crawler.js
@@ -1,8 +1,8 @@
const { map } = require('lodash')
const Peers = require('./peer')
const Connections = require('./peer')

const VISITED = 1
const NOT_VISITED = 0
const VISITED = 1
let NETWORK_P2P_PORT = null

class Crawler {
@@ -11,9 +11,6 @@ class Crawler {
* @method constructor
*/
constructor (timeout = 2500, disconnect = true, sampleSize = 10) {
this.headers = {}
this.timeout = timeout
this.socket = undefined
this.disconnect = disconnect
this.request = {
data: {},
@@ -22,8 +19,7 @@
}
}
this.sampleSize = sampleSize

this.peers = new Peers(this.timeout)
this.connections = new Connections(timeout)
}

/**
@@ -40,8 +36,8 @@

NETWORK_P2P_PORT = peer.port

if (!this.peers.get(peer.ip)) {
this.peers.add(peer.ip, NETWORK_P2P_PORT)
if (!this.connections.get(peer.ip)) {
this.connections.add(peer.ip, NETWORK_P2P_PORT)
}

try {
@@ -51,7 +47,7 @@
await this.scanNetwork()
if (this.disconnect) {
console.log('... disconnecting from all peers')
this.peers.disconnectAll()
this.connections.disconnectAll()
}
} catch (err) {
console.error(err)
@@ -60,46 +56,47 @@
return this
}

async discoverPeers (peer) {
async discoverPeers (currentNode) {
return new Promise((resolve, reject) => {
const connection = this.peers.get(peer.ip)
const connection = this.connections.get(currentNode.ip)
if (!connection) {
reject(new Error(`No connection exists for ${peer.ip}:${peer.port}`))
reject(new Error(`No connection exists for ${currentNode.ip}:${currentNode.port}`))
}
connection.emit(
'p2p.peer.getPeers',
this.request,
(err, response) => {
if (err) {
console.error(`Error when calling p2p.peer.getPeers on ${peer.ip}: ${err}`)
console.error(`Error when calling p2p.peer.getPeers on ${currentNode.ip}: ${err}`)
return resolve()
}

if (peer.ip in this.samplePeers) {
this.samplePeers[peer.ip] = VISITED
if (currentNode.ip in this.samplePeers) {
this.samplePeers[currentNode.ip] = VISITED
}

response.data.map((peer) => {
if (!(peer.ip in this.nodes)) {
this.nodes[peer.ip] = peer
}

if (!this.peers.get(peer.ip)) {
this.peers.add(peer.ip, NETWORK_P2P_PORT)
if (!this.connections.get(peer.ip)) {
this.connections.add(peer.ip, NETWORK_P2P_PORT)
}
})

if (this.samplePeers[peer.ip] === VISITED) {
if (this.samplePeers[currentNode.ip] === VISITED) {
return resolve()
}

// note: this is not very efficient on large arrays
const samplePeers = response.data
.filter(p => this.samplePeers[p.ip] !== VISITED)
.map(x => ({ x, r: Math.random() }))
.sort((a, b) => a.r - b.r)
.map(a => a.x)
.slice(0, this.sampleSize)
.filter(a => a.ip !== peer.ip)
.filter(a => a.ip !== currentNode.ip)
.map((peer) => {
this.samplePeers[peer.ip] = NOT_VISITED
return this.discoverPeers(peer)
@@ -113,7 +110,7 @@
scanNetwork () {
const promises = map(this.nodes, (peer) => {
return new Promise((resolve, reject) => {
const connection = this.peers.get(peer.ip)
const connection = this.connections.get(peer.ip)
if (!connection) {
return resolve()
}
@@ -129,8 +126,8 @@
height: response.data.state.header.height,
id: response.data.state.header.id
})
this.nodes[peer.ip].height = response.data.state.header.height
this.nodes[peer.ip].id = response.data.state.header.id
peer.height = response.data.state.header.height
peer.id = response.data.state.header.id
return resolve()
}
)
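
To show how the renamed `connections` wiring and the `nodes` map fit together, here is a minimal driver sketch. It assumes `src/crawler.js` exports the `Crawler` class, that the crawl entry point seen above is named `run(peer)`, and that a devnet peer listens at the placeholder address; none of these assumptions appear in this diff.

```js
// Hypothetical driver for the Crawler class shown in the diff above.
// Assumptions: src/crawler.js exports the class, the crawl entry point is
// named `run(peer)`, and a Hydra devnet peer listens at the placeholder address.
const Crawler = require('./src/crawler')

const main = async () => {
  const crawler = new Crawler(2500, true, 10) // timeout (ms), disconnect after crawl, sample size
  const seed = { ip: '127.0.0.1', port: 4702 } // placeholder devnet seed peer

  await crawler.run(seed) // discovers peers via p2p.peer.getPeers, then scans them

  // crawler.nodes is keyed by IP; each entry gains `height` and `id` after scanNetwork()
  for (const [ip, node] of Object.entries(crawler.nodes)) {
    console.log(`${ip} height=${node.height} id=${node.id}`)
  }
}

main().catch(console.error)
```
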
4 changes: 0 additions & 4 deletions src/peer.js
@@ -38,10 +38,6 @@ class Peers {
return this.connections.get(ip)
}

map () {
return this.connections
}

disconnectAll () {
for (const [ip, connection] of this.connections.entries()) {
connection.destroy()
