Commit
scan network via p2p instead of api, dynamically fetch network config
roks0n committed May 5, 2019
1 parent a489b3d commit a9cfb9a
Showing 6 changed files with 1,517 additions and 48 deletions.
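Conceptually, the change makes the crawler discover nodes by asking each peer for its peer list over the p2p port and walking the network from there, rather than relying on a single public API node, picking up network configuration from the peers it contacts along the way. A minimal sketch of that discovery loop, assuming a peer answers with a JSON peer list on its p2p port — the /peer/list path and the response shape are illustrative assumptions, not the code in this commit:

const http = require('http')

const visited = new Set()

// Ask one peer for its peer list over the p2p port.
// NOTE: the /peer/list path and the { peers: [...] } response shape are
// assumptions for illustration.
const fetchPeers = (ip, port) => new Promise((resolve) => {
  http.get({ host: ip, port, path: '/peer/list' }, (res) => {
    let body = ''
    res.on('data', (chunk) => { body += chunk })
    res.on('end', () => {
      try { resolve(JSON.parse(body).peers || []) } catch (err) { resolve([]) }
    })
  }).on('error', () => resolve([]))
})

// Walk the network recursively, visiting every peer once.
const crawl = async (ip, port) => {
  if (visited.has(ip)) return
  visited.add(ip)
  const peers = await fetchPeers(ip, port)
  await Promise.all(peers.map((peer) => crawl(peer.ip, peer.port)))
}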
16 changes: 16 additions & 0 deletions .eslintrc
@@ -0,0 +1,16 @@
+{
+  "env": {
+    "es6": true,
+    "node": true,
+    "jest": true
+  },
+  "extends": [
+    "standard"
+  ],
+  "parserOptions": {
+    "sourceType": "module"
+  },
+  "rules": {
+    "indent": ["error", 2]
+  }
+}
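The new .eslintrc extends the shareable standard config, so linting assumes eslint, eslint-config-standard, and its peer plugins are installed as dev dependencies. A hypothetical package.json excerpt — the lint script and version numbers are illustrative, not taken from this commit:

{
  "scripts": {
    "lint": "eslint ."
  },
  "devDependencies": {
    "eslint": "^5.16.0",
    "eslint-config-standard": "^12.0.0",
    "eslint-plugin-import": "^2.17.0",
    "eslint-plugin-node": "^8.0.0",
    "eslint-plugin-promise": "^4.1.0",
    "eslint-plugin-standard": "^4.0.0"
  }
}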
16 changes: 13 additions & 3 deletions README.md
@@ -1,8 +1,6 @@
 # Crawler
 
-Network scanning tool for ARK.
-
-This currently works for ARK v1 and will be updated to work with V2's API structure shortly.
+Crawler scans the ARK network to get information about the peers in the network.
 
 ## Installation
 
@@ -11,3 +9,15 @@ This currently works for ARK v1 and will be updated to work with V2's API struct
 ## Usage
 
 `npm start http://<ip>:<port>`
+
+For port use the p2p port, which is 4001 for Ark's mainnet.
+
+## Credits
+
+- [roks0n](https://github.com/roks0n)
+- [dmvt](https://github.com/dmvt)
+- [All Contributors](../../../../contributors)
+
+## License
+
+[MIT](LICENSE) © roks0n
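For example, a mainnet scan starting from a seed peer would be invoked as follows (the address is illustrative; 167.99.243.111 is the node the old hard-coded default pointed at):

`npm start http://167.99.243.111:4001`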
26 changes: 15 additions & 11 deletions app.js
@@ -1,14 +1,13 @@
 const Crawler = require('./src/crawler')
 const { URL } = require('url')
-const { forEach, keys } = require('lodash')
 
 const crawler = new Crawler()
 const args = process.argv.slice(2)
 
 const report = (crawler) => {
-  let blockStats = {}
+  const blockStats = {}
 
-  forEach(crawler.heights, (item) => {
+  for (const item of crawler.heights) {
     if (blockStats[item.height]) {
       blockStats[item.height].count += 1
       blockStats[item.height].ids[item.id] += 1
@@ -18,25 +17,30 @@ const report = (crawler) => {
       blockStats[item.height].ids = {}
       blockStats[item.height].ids[item.id] = 1
     }
-  })
+  }
 
   console.log(`===========================================`)
-  console.log(`Total nodes visited: ${keys(crawler.nodes).length}`)
+  console.log(`Total nodes visited: ${Object.keys(crawler.nodes).length}`)
   console.log(`Total nodes online: ${crawler.heights.length}`)
   console.log(`------------------------------------------`)
   console.log(`Block stats:`)
-  console.log(blockStats)
+  for (const stat in blockStats) {
+    console.log(`${blockStats[stat].count} nodes on height ${stat} with hashes:`)
+    for (const hash in blockStats[stat].ids) {
+      console.log(` - ${hash} (${blockStats[stat].ids[hash]} nodes)`)
+    }
+  }
   console.log(`------------------------------------------`)
   console.log(`Finished scanning in ${new Date() - crawler.startTime}ms`)
 
   process.exit(0)
 }
 
-let node = {ip: '167.99.243.111', port: 4003}
-
+let node = { ip: undefined, port: undefined }
 if (args.length === 1) {
-    const url = new URL(args[0])
-    node.ip = url.hostname
-    node.port = url.port
+  const url = new URL(args[0])
+  node.ip = url.hostname
+  node.port = url.port
 }
+
 crawler.run(node).then(report)
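With these changes, report no longer dumps the raw blockStats object; it aggregates peer-reported heights into per-height counts keyed by block id and prints a readable summary. Run against hypothetical data (the height and ids below are made up for illustration), the new loop behaves like this:

// Hypothetical data, for illustration only.
const blockStats = {
  8254000: { count: 3, ids: { a1b2c3: 2, d4e5f6: 1 } }
}

for (const stat in blockStats) {
  console.log(`${blockStats[stat].count} nodes on height ${stat} with hashes:`)
  for (const hash in blockStats[stat].ids) {
    console.log(` - ${hash} (${blockStats[stat].ids[hash]} nodes)`)
  }
}
// Prints:
// 3 nodes on height 8254000 with hashes:
//  - a1b2c3 (2 nodes)
//  - d4e5f6 (1 nodes)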
