Merge pull request #1 from deadlock-delegate/version
merge existing project
roks0n authored May 5, 2019
2 parents 95a7493 + a9cfb9a commit 8992211
Showing 7 changed files with 1,793 additions and 0 deletions.
16 changes: 16 additions & 0 deletions .eslintrc
@@ -0,0 +1,16 @@
{
  "env": {
    "es6": true,
    "node": true,
    "jest": true
  },
  "extends": [
    "standard"
  ],
  "parserOptions": {
    "sourceType": "module"
  },
  "rules": {
    "indent": ["error", 2]
  }
}
4 changes: 4 additions & 0 deletions .gitignore
@@ -59,3 +59,7 @@ typings/

# next.js build output
.next
.idea
*.log
node_modules
.DS_Store
23 changes: 23 additions & 0 deletions README.md
@@ -0,0 +1,23 @@
# Crawler

Crawler scans the ARK network to get information about the peers in the network.

## Installation

`npm install`

## Usage

`npm start http://<ip>:<port>`

For the port, use the p2p port, which is 4001 on Ark's mainnet.
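
For example, to scan starting from a peer at a placeholder address (substitute the IP of a real relay):

`npm start http://203.0.113.10:4001`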

## Credits

- [roks0n](https://github.com/roks0n)
- [dmvt](https://github.com/dmvt)
- [All Contributors](../../../../contributors)

## License

[MIT](LICENSE) © roks0n
46 changes: 46 additions & 0 deletions app.js
@@ -0,0 +1,46 @@
const Crawler = require('./src/crawler')
const { URL } = require('url')

const crawler = new Crawler()
const args = process.argv.slice(2)

// Print a summary of the scan: how many nodes were visited, how many answered,
// and how the network is distributed across block heights and block ids.
const report = (crawler) => {
  const blockStats = {}

  for (const item of crawler.heights) {
    if (blockStats[item.height]) {
      blockStats[item.height].count += 1
      // initialise block ids that have not been seen at this height yet
      blockStats[item.height].ids[item.id] = (blockStats[item.height].ids[item.id] || 0) + 1
    } else {
      blockStats[item.height] = {}
      blockStats[item.height].count = 1
      blockStats[item.height].ids = {}
      blockStats[item.height].ids[item.id] = 1
    }
  }

  console.log(`===========================================`)
  console.log(`Total nodes visited: ${Object.keys(crawler.nodes).length}`)
  console.log(`Total nodes online: ${crawler.heights.length}`)
  console.log(`------------------------------------------`)
  console.log(`Block stats:`)
  for (const stat in blockStats) {
    console.log(`${blockStats[stat].count} nodes on height ${stat} with hashes:`)
    for (const hash in blockStats[stat].ids) {
      console.log(` - ${hash} (${blockStats[stat].ids[hash]} nodes)`)
    }
  }
  console.log(`------------------------------------------`)
  console.log(`Finished scanning in ${new Date() - crawler.startTime}ms`)

  process.exit(0)
}

// Optional seed node passed as a single http://<ip>:<port> argument
let node = { ip: undefined, port: undefined }
if (args.length === 1) {
  const url = new URL(args[0])
  node.ip = url.hostname
  node.port = url.port
}

crawler.run(node).then(report)
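
app.js expects `./src/crawler` to export a `Crawler` class whose `run(node)` promise resolves once the scan finishes, and whose `heights`, `nodes` and `startTime` properties `report` reads afterwards. The actual `src/crawler.js` is part of this commit but not visible in this view, so the following is only a rough sketch of that assumed interface, not the committed implementation.

// Minimal sketch of the Crawler interface app.js relies on; illustrative only,
// not the committed src/crawler.js (which is among the files not shown above).
class Crawler {
  constructor () {
    this.startTime = new Date() // report() uses this to compute the scan duration
    this.nodes = {}             // peers discovered so far, keyed by IP
    this.heights = []           // one { height, id } entry per peer that answered
  }

  async run (node) {
    // Starting from the seed node ({ ip, port }), discover peers, record each
    // peer's latest block height and block id, and continue until no new peers
    // remain. The crawling itself is omitted in this sketch.
    return this // report() reads heights/nodes/startTime off the resolved value
  }
}

module.exports = Crawler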