From 4ec1f34c32df83b14232137f95c7c014a85ec478 Mon Sep 17 00:00:00 2001
From: 2DT
Date: Tue, 9 Feb 2021 15:08:36 +0100
Subject: [PATCH] Add node location data derived from ipinfo
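
The crawler now looks up every discovered node's IP on ipinfo.io and
prints a per-node section in the report: version and height, plus city,
region, country and organization whenever the lookup succeeded. Lookups
run once the crawl has finished, are throttled to one request per
200 ms, and back off for 20 s when a request fails (for example when
ipinfo.io rate-limits us with HTTP 429); a node whose lookup failed is
reported without location data.

To exercise the lookup on its own (a quick sketch; it assumes the
Crawler constructor can be called without arguments):

    const Crawler = require('./src/crawler')

    new Crawler().fetchLocationFromIp('8.8.8.8')
      .then(location => console.log(location))
      .catch(console.error)

Also adds the xmlhttprequest package, which provides the browser-style
XMLHttpRequest API used for the lookup, and ignores the .vscode folder.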
---
 .gitignore        |  2 ++
 app.js            | 25 +++++++++++++++++++++++--
 package-lock.json |  5 +++++
 package.json      |  3 ++-
 src/crawler.js    | 56 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 5 files changed, 87 insertions(+), 4 deletions(-)

diff --git a/.gitignore b/.gitignore
index 3b36221..1f52330 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,3 +63,5 @@ typings/
 *.log
 node_modules
 .DS_Store
+
+.vscode
diff --git a/app.js b/app.js
index e9864c0..d41ab1b 100644
--- a/app.js
+++ b/app.js
@@ -7,10 +7,10 @@ const Crawler = require('./src/crawler')
 const report = (crawler) => {
   const blockStats = {}
+  const nodeStats = {}
   const versionStats = {}
 
   const nodes = Object.values(crawler.nodes)
-
   for (const node of nodes) {
     if (node.height === undefined || node.id === undefined) {
       continue
     }
@@ -36,6 +36,15 @@ const report = (crawler) => {
         version: node.version
       }
     }
+
+    if (!nodeStats[node.ip]) {
+      nodeStats[node.ip] = {
+        ip: node.ip,
+        location: node.location,
+        version: node.version,
+        height: node.height
+      }
+    }
   }
 
   const allDelays = nodes.filter(item => item.latency).map(item => item.latency)
@@ -43,6 +52,19 @@
   const maxDelay = Math.max(...allDelays)
   const minDelay = Math.min(...allDelays)
 
+  // Per-node stats
+  console.log('Individual node stats')
+  for (const node of orderBy(Object.values(nodeStats), ['ip'], ['desc'])) {
+    console.log(`\nIP: ${node.ip}`)
+    console.log(`Version: ${node.version} at height: ${node.height}`)
+    if (node.location) {
+      console.log(`Location: ${node.location.city}, ${node.location.region}, ${node.location.country}`)
+      console.log(`Organization: ${node.location.org}`)
+    } else {
+      console.log('Could not fetch location data')
+    }
+  }
+
   console.log('===========================================')
   console.log(`All nodes: ${Object.keys(crawler.nodes).length}`)
   console.log(`Nodes online: ${crawler.heights.length}`)
@@ -89,7 +111,6 @@ const main = async () => {
   if ('list' in input) {
     input = input.list
   }
-
   for (const node of input) {
     crawler.add(node)
   }
diff --git a/package-lock.json b/package-lock.json
index d046a02..0bc5488 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1699,6 +1699,11 @@
       "requires": {
         "async-limiter": "^1.0.0"
       }
+    },
+    "xmlhttprequest": {
+      "version": "1.8.0",
+      "resolved": "https://registry.npmjs.org/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz",
+      "integrity": "sha1-Z/4HXFwk/vOfnWX197f+dRcZaPw="
     }
   }
 }
diff --git a/package.json b/package.json
index 27f3ad8..819b93f 100644
--- a/package.json
+++ b/package.json
@@ -36,6 +36,7 @@
   "dependencies": {
     "lodash": "^4.17.15",
     "moment": "2.24.0",
-    "socketcluster-client": "^14.2.2"
+    "socketcluster-client": "^14.2.2",
+    "xmlhttprequest": "^1.8.0"
   }
 }
diff --git a/src/crawler.js b/src/crawler.js
index 946cdc8..bdc66ac 100644
--- a/src/crawler.js
+++ b/src/crawler.js
@@ -1,5 +1,6 @@
 const { map, sample } = require('lodash')
 const Connections = require('./peer')
+const XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest
 
 const GET_PEERS_FAILED = -2
 const CONNECTION_FAILED = -1
@@ -7,6 +8,11 @@ const NOT_VISITED = 0
 const GET_PEERS_SUCCESS = 1
 let NETWORK_P2P_PORT = null
 
+function delay (ms) {
+  return new Promise(resolve => {
+    setTimeout(resolve, ms)
+  })
+}
 class Crawler {
   /**
    * Initializes the internal request reactor.
@@ -50,6 +56,8 @@ class Crawler {
     }
   }
 
+
+
   /**
    * Runs a height check on the entire network connected to the initial peer.
    * @method run
@@ -58,7 +66,6 @@
    */
   async run () {
     this.startTime = new Date()
-
     try {
       console.log('... discovering network peers')
       while (true) {
@@ -72,6 +79,7 @@
         console.log('... disconnecting from all peers')
         this.connections.disconnectAll()
       }
+      await this.addLocationToNodes()
     } catch (err) {
       console.error(err)
     } finally {
@@ -139,6 +147,52 @@
     return Promise.all(promises)
   }
 
+
+  async addLocationToNodes () {
+    for (const node of Object.values(this.nodes)) {
+      try {
+        const location = await this.fetchLocationFromIp(node.ip)
+        this.nodes[node.ip].location = location
+        // Throttle lookups to stay within the ipinfo.io rate limit
+        await delay(200)
+      } catch (error) {
+        // Log the failure, leave this node without location data and
+        // back off before querying the API again
+        console.error(error)
+        await delay(20000)
+      }
+    }
+  }
+
+  async fetchLocationFromIp (ip) {
+    return new Promise((resolve, reject) => {
+      const request = new XMLHttpRequest()
+
+      request.open('GET', `https://ipinfo.io/${ip}/json`)
+
+      request.onreadystatechange = function () {
+        if (request.readyState !== 4) {
+          return
+        }
+
+        if (request.status === 200) {
+          // Keep only the geo fields of the ipinfo.io response
+          const json = JSON.parse(request.responseText)
+          delete json.ip
+          delete json.anycast
+          delete json.readme
+          resolve(json)
+        } else if (request.status === 429) {
+          reject(new Error('Too many requests'))
+        } else {
+          reject(new Error(`Location API failed and returned status ${request.status}: ${request.responseText}`))
+        }
+      }
+
+      // Send only after the handler is registered
+      request.send()
+    })
+  }
 }
 
 module.exports = Crawler
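
--
For comparison, the same lookup could be written against Node's built-in
https module, which would avoid the extra xmlhttprequest dependency. A
rough sketch that mirrors the error handling of fetchLocationFromIp
above:

    const https = require('https')

    function fetchLocationFromIp (ip) {
      return new Promise((resolve, reject) => {
        https.get(`https://ipinfo.io/${ip}/json`, response => {
          let body = ''
          response.on('data', chunk => { body += chunk })
          response.on('end', () => {
            if (response.statusCode === 200) {
              const json = JSON.parse(body)
              delete json.ip
              delete json.anycast
              delete json.readme
              resolve(json)
            } else if (response.statusCode === 429) {
              reject(new Error('Too many requests'))
            } else {
              reject(new Error(`Location API failed and returned status ${response.statusCode}: ${body}`))
            }
          })
        }).on('error', reject)
      })
    }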