forked from dessalines/torrent-tracker-health
-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.js
154 lines (134 loc) · 4.01 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
var utils = require('./utils'),
Client = require('bittorrent-tracker'),
readTorrent = require('read-torrent');
/**
 * Reads one or more torrents (file path, directory, magnet/infohash list)
 * and returns the successfully-parsed, infohash-deduplicated torrents
 * together with the normalized options.
 *
 * Fix: the original wrapped an `async` executor in `new Promise(...)`;
 * any exception thrown inside that executor (e.g. from collectUris or
 * rearrange) was silently swallowed and the promise never settled.
 * A plain async function propagates rejections correctly.
 *
 * @param {string|string[]} uri - torrent uri(s), directory, or infohash file
 * @param {object} options - raw options, normalized via utils.rearrange
 * @returns {Promise<{torrents: object[], options: object}>}
 */
async function read(uri, options) {
  // If it's a torrent directory or infohash file, collect them up
  const uris = utils.collectUris(uri);
  options = utils.rearrange(options);
  // Read all in parallel; keep failures as Error values so one bad
  // torrent doesn't abort the whole batch.
  const results = await Promise.all(
    uris.map((u) => singleRead(u, options).catch((e) => e))
  );
  // Drop the ones that errored during read
  const ok = results.filter((r) => !(r instanceof Error));
  // Deduplicate by infohash (first occurrence wins)
  const torrents = ok.filter(
    (t, i) => ok.findIndex((o) => o.hash === t.hash) === i
  );
  return {
    torrents: torrents,
    options: options,
  };
}
/**
 * Reads a single torrent URI via read-torrent and resolves with a
 * trimmed-down record ({name, hash, length, created, files}), or rejects
 * with the underlying read error.
 *
 * @param {string} uri - torrent file path / magnet link / infohash
 * @param {object} options - currently unused here; kept for interface parity
 * @returns {Promise<object>}
 */
function singleRead(uri, options) {
  return new Promise((resolve, reject) => {
    readTorrent(uri, (err, info) => {
      // Guard clause: surface read failures immediately
      if (err) {
        utils.debug('Error in read-torrent: ' + err.message + ' for torrent uri: ' + uri);
        reject(err);
        return;
      }
      // Make sure info.announce is an array
      if (!Array.isArray(info.announce)) {
        info.announce = info.announce ? [info.announce] : [];
      }
      // Removing some extra fields from files
      if (info.files) {
        for (const file of info.files) {
          delete file.name;
          delete file.offset;
        }
      }
      resolve({
        name: info.name,
        hash: info.infoHash,
        length: info.length,
        // Fall back to "now" when the torrent carries no creation date
        created: info.created ? info.created : new Date().toISOString(),
        files: info.files,
      });
    });
  });
}
/**
 * Scrapes every configured tracker for every torrent hash (in batches of
 * options.batchSize) and attaches the per-tracker peer counts to each
 * torrent as entries in its `fetches` array.
 *
 * Fixes: the original used `.map()` purely for side effects (return value
 * discarded) — replaced with a `for...of` loop; and it dumped the entire
 * request with an unconditional `console.log` — now routed through
 * `utils.debug` like every other log line in this file.
 *
 * @param {{torrents: object[], options: object}} req - output of read()
 * @returns {Promise<object>} the same req, with fetches attached
 */
async function scrapeAll(req) {
  // Loop over trackers and hashes in batches
  const hashes = req.torrents.map((t) => t.hash);
  const slices = utils.chunk(hashes, req.options.batchSize);
  for (const subhashes of slices) {
    for (const trUri of req.options.trackers) {
      const data = await scrape(trUri, subhashes);
      // Add the peer counts to the req
      for (const [hash, counts] of Object.entries(data)) {
        const torrent = req.torrents.find((t) => t.hash === hash);
        // Error responses from scrape() have no matching hash key; skip them
        if (!torrent) continue;
        if (!torrent.fetches) torrent.fetches = [];
        torrent.fetches.push({
          seeders: counts.complete,
          completed: counts.downloaded,
          leechers: counts.incomplete,
          tracker: trUri,
        });
      }
    }
  }
  utils.debug(`Req: ${JSON.stringify(req)}`);
  return req;
}
/**
 * Scrapes a single tracker for a batch of infohashes.
 * Never rejects: tracker errors (including timeouts) resolve with an
 * `{tracker, error}` record so one dead tracker doesn't sink the batch.
 *
 * @param {string} trUri - tracker announce URI
 * @param {string[]} infohashes - batch of infohashes to scrape
 * @returns {Promise<object>} map of infohash -> scrape data, or an error record
 */
function scrape(trUri, infohashes) {
  return new Promise((resolve) => {
    Client.scrape({ announce: trUri, infoHash: infohashes }, (err, data) => {
      // Guard clause: report failures as a resolved error record
      if (err) {
        const timedOut = err.message === 'timed out' || err.code === 'ETIMEDOUT';
        if (timedOut) {
          utils.debug('Scrape timed out for ' + trUri);
        } else {
          utils.debug('Error in torrent-tracker: ' + err.message);
        }
        resolve({
          tracker: trUri,
          error: err.message,
        });
        return;
      }
      utils.debug('Scrape successful for ' + trUri);
      // Coerce single fetch as same structure as multiple
      const firstHash = infohashes[0];
      if (data[firstHash] == undefined) {
        data = { [firstHash]: data };
      }
      resolve(data);
    });
  });
}
/**
 * Reduces the raw scrape results to one record per torrent, keeping the
 * fetch with the most seeders — unless options.showAllFetches is set, in
 * which case all fetches are passed through untouched.
 *
 * Fix: the original called `f.fetches.reduce(...)` unconditionally, which
 * throws when a torrent has no fetches at all (every tracker errored or
 * timed out: `fetches` is undefined, or reduce() is called on an empty
 * array with no initial value). Guarded with a neutral zero-count fetch.
 *
 * @param {{torrents: object[], options: object}} res - output of scrapeAll()
 * @returns {{results: object[], options: object}}
 */
function calc(res) {
  const options = res.options;
  const torrents = res.torrents;
  // Early return if they want all the fetches
  if (options.showAllFetches) {
    return {
      results: torrents,
      options: options,
    };
  }
  const maxes = torrents.map((f) => {
    const fetches = f.fetches || [];
    const maxFetch = fetches.length
      ? fetches.reduce((a, b) => (a.seeders > b.seeders ? a : b))
      : { tracker: undefined, seeders: 0, leechers: 0, completed: 0 };
    return {
      name: f.name,
      hash: f.hash,
      length: f.length,
      created: f.created,
      files: f.files,
      tracker: maxFetch.tracker,
      seeders: maxFetch.seeders,
      leechers: maxFetch.leechers,
      completed: maxFetch.completed,
    };
  });
  return {
    results: maxes,
    options: options,
  };
}
module.exports = function (uri, options) {
return read(uri, options)
.then(scrapeAll)
.then(calc);
}