Merge branch 'David-DT-feature/add-location'
wigy-opensource-developer committed Feb 9, 2021
2 parents 080b327 + d6ca922 commit 131255e
Showing 5 changed files with 88 additions and 9 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -63,3 +63,5 @@ typings/
*.log
node_modules
.DS_Store

.vscode
27 changes: 25 additions & 2 deletions app.js
@@ -7,10 +7,10 @@ const Crawler = require('./src/crawler')

const report = (crawler) => {
const blockStats = {}
const nodeStats = {}
const versionStats = {}

const nodes = Object.values(crawler.nodes)

for (const node of nodes) {
if (node.height === undefined || node.id === undefined) {
continue
@@ -36,13 +36,37 @@ const report = (crawler) => {
version: node.version
}
}

if (nodeStats[node.ip]) {
continue
} else {
nodeStats[node.ip] = {
ip: node.ip,
location: node.location,
version: node.version,
height: node.height
}
}
}

const allDelays = nodes.filter(item => item.latency).map(item => item.latency)
const averageDelay = (allDelays.reduce((a, b) => a + b, 0) / allDelays.length).toFixed(2)
const maxDelay = Math.max(...allDelays)
const minDelay = Math.min(...allDelays)

// Node stats;
console.log('Individual node stats');
for (const node of orderBy(Object.values(nodeStats),['ip'],['desc'])) {
console.log(`\nIP: ${node.ip}`)
console.log(`Version: ${node.version} at height: ${node.height}`)
if (node.location) {
console.log(`Location: ${node.location.city},${node.location.region},${node.location.country}`)
console.log(`Organization: ${node.location.org}`)
} else {
console.log('Could not fetch location data')
}
}

console.log('===========================================')
console.log(`All nodes: ${Object.keys(crawler.nodes).length}`)
console.log(`Nodes online: ${crawler.heights.length}`)
@@ -89,7 +113,6 @@ const main = async () => {
if ('list' in input) {
input = input.list
}

for (const node of input) {
crawler.add(node)
}
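For context, here is a minimal standalone sketch of the new per-node report loop added above, assuming lodash is installed and using a made-up nodeStats map shaped like the one built in report() (the IPs, versions, heights and locations are placeholders):

const { orderBy } = require('lodash')

// Hypothetical data in the same shape as the nodeStats map built in report()
const nodeStats = {
  '203.0.113.7': {
    ip: '203.0.113.7',
    version: '2.6.31',
    height: 12345678,
    location: { city: 'Helsinki', region: 'Uusimaa', country: 'FI', org: 'AS64500 Example Oy' }
  },
  '198.51.100.2': {
    ip: '198.51.100.2',
    version: '2.6.31',
    height: 12345677
    // no location: the ipinfo.io lookup failed for this node
  }
}

// orderBy compares the IP strings lexicographically, descending, as in report()
for (const node of orderBy(Object.values(nodeStats), ['ip'], ['desc'])) {
  console.log(`\nIP: ${node.ip}`)
  console.log(`Version: ${node.version} at height: ${node.height}`)
  if (node.location) {
    console.log(`Location: ${node.location.city},${node.location.region},${node.location.country}`)
    console.log(`Organization: ${node.location.org}`)
  } else {
    console.log('Could not fetch location data')
  }
}

Note that the sort key is a string, so the descending order is lexicographic rather than numeric by octet.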
5 changes: 5 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion package.json
@@ -36,6 +36,7 @@
"dependencies": {
"lodash": "^4.17.15",
"moment": "2.24.0",
"socketcluster-client": "^14.2.2"
"socketcluster-client": "^14.2.2",
"xmlhttprequest": "^1.8.0"
}
}
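The only dependency change is the new xmlhttprequest package, which provides a browser-style XMLHttpRequest constructor for Node. A minimal sketch of pulling it in, mirroring the require added to src/crawler.js below (the IP in the URL is a placeholder):

// Node has no global XMLHttpRequest; the xmlhttprequest package supplies one
const XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest

const request = new XMLHttpRequest()
request.open('GET', 'https://ipinfo.io/198.51.100.2/json')
request.onreadystatechange = function () {
  if (request.readyState === 4) {
    console.log(request.status, request.responseText)
  }
}
request.send()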
60 changes: 54 additions & 6 deletions src/crawler.js
@@ -1,18 +1,24 @@
const { map, sample } = require('lodash')
const Connections = require('./peer')
const XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest

const GET_PEERS_FAILED = -2
const CONNECTION_FAILED = -1
const NOT_VISITED = 0
const GET_PEERS_SUCCESS = 1
let NETWORK_P2P_PORT = null

function delay(millisec) {
return new Promise(resolve => {
setTimeout(() => { resolve() }, millisec);
})
}
class Crawler {
/**
* Initializes the internal request reactor.
* @method constructor
*/
- constructor (timeout = 2500, disconnect = true) {
+ constructor(timeout = 2500, disconnect = true) {
this.disconnect = disconnect
this.request = {
data: {},
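The hunk above also introduces a small promise-based delay helper. A standalone sketch of the pattern, assuming nothing beyond plain Node (the 200 ms pause matches the one used by addLocationToNodes further down):

// Promisified setTimeout: lets async code pause without blocking the event loop
function delay (millisec) {
  return new Promise(resolve => {
    setTimeout(() => { resolve() }, millisec)
  })
}

async function pacedExample () {
  for (const item of ['a', 'b', 'c']) {
    console.log('processing', item)
    await delay(200) // wait 200 ms between iterations
  }
}

pacedExample()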
@@ -26,7 +32,7 @@ class Crawler {
this.traversalState = {}
}

- add (peer) {
+ add(peer) {
if (!NETWORK_P2P_PORT) {
NETWORK_P2P_PORT = peer.port
} else {
@@ -50,15 +56,16 @@
}
}



/**
* Runs a height check on the entire network connected to the initial peer.
* @method run
* @param {object} peer {ip: [address], port: [4001]}
* @return {Promise}
*/
- async run () {
+ async run() {
this.startTime = new Date()

try {
console.log('... discovering network peers')
while (true) {
@@ -72,14 +79,15 @@
console.log('... disconnecting from all peers')
this.connections.disconnectAll()
}
await this.addLocationToNodes()
} catch (err) {
console.error(err)
} finally {
this.endTime = new Date()
}
}

- async discoverPeers (ip) {
+ async discoverPeers(ip) {
return new Promise((resolve, reject) => {
const connection = this.connections.get(ip)
if (!connection) {
@@ -109,7 +117,7 @@
})
}

- scanNetwork () {
+ scanNetwork() {
const promises = map(this.nodes, (peer) => {
return new Promise((resolve, reject) => {
const connection = this.connections.get(peer.ip)
@@ -139,6 +147,46 @@

return Promise.all(promises)
}

async addLocationToNodes() {
for (const node of Object.values(this.nodes)) {
try {
const location = await this.fetchLocationFromIp(node.ip)
this.nodes[node.ip].location = location
await delay(200)
} catch (error) {
console.error(error)
await delay(20000)
}
}
}

async fetchLocationFromIp(ip) {
return new Promise((resolve, reject) => {
let request = new XMLHttpRequest()

request.open('GET', `https://ipinfo.io/${ip}/json`)
request.send()

request.onreadystatechange = function () {
if (request.readyState != 4) {
return
}

if (request.status == 200) {
const json = JSON.parse(request.responseText);
delete json.ip
delete json.anycast
delete json.readme
resolve(json)
} else if (request.status == 429) {
reject(new Error("Too many requests"))
} else {
reject(new Error(`Location API failed and returned status ${request.status}: ${request.responseText}`))
}
}
})
}
}

module.exports = Crawler
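Putting the pieces together, run() now calls addLocationToNodes() after the peer scan, so each entry in crawler.nodes may carry a location object parsed from the ipinfo.io response. A rough usage sketch, assuming a reachable seed peer (the IP and port are placeholders and error handling is omitted):

const Crawler = require('./src/crawler')

const crawler = new Crawler()
crawler.add({ ip: '203.0.113.7', port: 4001 }) // hypothetical seed peer

crawler.run().then(() => {
  for (const node of Object.values(crawler.nodes)) {
    // location holds the ipinfo.io JSON (city, region, country, org, ...) with the
    // ip, anycast and readme fields stripped, or is undefined if the lookup failed
    console.log(node.ip, node.location ? node.location.country : 'unknown')
  }
})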
