Skip to content

Commit

Permalink
added hardlink support, added support to find torrents in client not …
Browse files Browse the repository at this point in the history
…seeded in a path, added torrent filter support
JohnDoee committed Mar 10, 2024

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
1 parent 5176305 commit 100a794
Showing 7 changed files with 565 additions and 43 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelog

## [Unreleased] -

### Add

- Way to find torrents in a client not seeded from specified paths
- Hardlink (inode) support for several commands and features
- Query support for several commands that interact with already seeding torrents

## [1.3.0] - 2024-02-17

### Add
199 changes: 185 additions & 14 deletions src/autotorrent/__main__.py
Original file line number Diff line number Diff line change
@@ -30,6 +30,7 @@
create_link_path,
humanize_bytes,
parse_torrent,
filter_torrents,
)

DEFAULT_CONFIG_FILE = """[autotorrent]
@@ -45,6 +46,7 @@
fast_resume = false
ignore_file_patterns = [ ]
ignore_directory_patterns = [ ]
scan_hardlinks = false
"""

BASE_CONFIG_FILE = """[autotorrent]
@@ -69,6 +71,7 @@
fast_resume = false
ignore_file_patterns = [ ]
ignore_directory_patterns = [ ]
scan_hardlinks = false
[clients]
@@ -87,15 +90,19 @@ def parse_config_file(path, utf8_compat_mode=False):

database_path = path.parent / Path(parsed_config["database_path"])
parsed_config["db"] = db = Database(
database_path, utf8_compat_mode=utf8_compat_mode
database_path,
utf8_compat_mode=utf8_compat_mode,
)
parsed_config["indexer"] = indexer = Indexer(
db,
ignore_file_patterns=parsed_config["ignore_file_patterns"],
ignore_directory_patterns=parsed_config["ignore_directory_patterns"],
include_inodes=parsed_config["scan_hardlinks"],
)
parsed_config["rewriter"] = rewriter = PathRewriter(parsed_config["same_paths"])
parsed_config["matcher"] = matcher = Matcher(rewriter, db)
parsed_config["matcher"] = matcher = Matcher(
rewriter, db, include_inodes=parsed_config["scan_hardlinks"]
)

rw_file_cache_chown = parsed_config.get("rw_file_cache_chown")

@@ -191,9 +198,23 @@ def cli(ctx, config, verbose, utf8_compat_mode):
default=False,
)
@click.option("-d", "--depth", type=int, default=0)
@click.option(
"-i",
"--include-indirect-seeded",
help="Include indirectly seeded files, i.e. hardlinked files. Deleting these files will not make the client stop seeding.",
flag_value=True,
default=False,
)
@click.argument("path", nargs=-1, type=click.Path(exists=True))
@click.pass_context
def ls(ctx, summary, depth, path):
def ls(ctx, summary, depth, include_indirect_seeded, path):
if include_indirect_seeded and not ctx.obj["scan_hardlinks"]:
raise click.BadOptionUsage(
option_name="include_indirect_seeded",
message="This option can only be used if scan_hardlinks is enabled in the config.\nRemember to rescan clients afterwards.",
ctx=ctx,
)

if path:
paths = [Path(p) for p in path]
else:
@@ -211,17 +232,17 @@ def scan_paths(paths):
for path in paths:
p = Path(os.path.abspath(path))
map_result = matcher.map_path_to_clients(p)
seeded_size = map_result.seeded_size + (
include_indirect_seeded and map_result.indirect_seeded_size or 0
)
percent = (
map_result.total_size
and int((map_result.seeded_size / map_result.total_size) * 100)
and int((seeded_size / map_result.total_size) * 100)
or 0
)
if (
map_result.total_size == map_result.seeded_size
and map_result.total_size > 0
):
if map_result.total_size == seeded_size and map_result.total_size > 0:
color = "green"
elif map_result.seeded_size:
elif seeded_size:
color = "yellow"
if percent == 0:
percent = 1
@@ -232,7 +253,7 @@ def scan_paths(paths):

stats["count"] += 1
stats["total_size"] += map_result.total_size
stats["total_seed_size"] += map_result.seeded_size
stats["total_seed_size"] += seeded_size

click.echo(
f"[{click.style((str(percent) + '%').rjust(4), fg=color)}] {os.fsencode(path).decode(errors='replace')}"
@@ -273,9 +294,23 @@ def dive_paths(paths, depth):
flag_value=True,
default=False,
)
@click.option(
"-i",
"--include-indirect-seeded",
help="Include indirectly seeded files, i.e. hardlinked files. Deleting these files will not make the client stop seeding.",
flag_value=True,
default=False,
)
@click.argument("path", nargs=-1, type=click.Path(exists=True))
@click.pass_context
def find_unseeded(ctx, escape_paths, path):
def find_unseeded(ctx, escape_paths, include_indirect_seeded, path):
if include_indirect_seeded and not ctx.obj["scan_hardlinks"]:
raise click.BadOptionUsage(
option_name="include_indirect_seeded",
message="This option can only be used if scan_hardlinks is enabled in the config.\nRemember to rescan clients afterwards.",
ctx=ctx,
)

if path:
paths = [Path(p) for p in path]
else:
@@ -291,7 +326,9 @@ def find_unseeded(ctx, escape_paths, path):
if f.is_symlink():
continue
ff = f
is_seeded = len(mapped_file.clients) > 0
is_seeded = len(mapped_file.clients) > 0 or (
include_indirect_seeded and len(mapped_file.indirect_clients) > 0
)
while p in ff.parents or p == ff:
if not is_seeded and ff in path_seeds:
break
@@ -314,6 +351,133 @@ def find_unseeded(ctx, escape_paths, path):
click.echo(unseeded_path)


@cli.command(
    help="Find torrents not in current paths. This is useful when, e.g., torrents are seeded but not yet sorted into folders (with links)."
)
@click.option(
    "-s",
    "--summary",
    help="End the listing with a summary",
    flag_value=True,
    default=False,
)
@click.option(
    "-i",
    "--include-indirect-seeded",
    help="Include indirectly seeded files, i.e. hardlinked files. Deleting these files will not make the client stop seeding.",
    flag_value=True,
    default=False,
)
@click.option(
    "--remove-from-client",
    help="Remove the unmoved torrents from clients, i.e. the ones NOT found in path.",
    flag_value=True,
    default=False,
)
@click.option("-l", "--client", help="Check a specific client", type=str)
@click.option("-q", "--query", help="SQL query to match against torrents", type=str)
@click.argument("path", nargs=-1, type=click.Path(exists=True))
@click.pass_context
def find_unmoved(
    ctx, summary, include_indirect_seeded, remove_from_client, client, query, path
):
    """List (and optionally remove) seeded torrents whose files were not found
    under the given paths.

    Scans each path, maps the files back to client torrents, then reports the
    torrents each client seeds that were NOT encountered during the scan.
    """
    # Indirect (hardlink) matching requires inode data, which is only
    # collected when scan_hardlinks is enabled.
    if include_indirect_seeded and not ctx.obj["scan_hardlinks"]:
        raise click.BadOptionUsage(
            option_name="include_indirect_seeded",
            message="This option can only be used if scan_hardlinks is enabled in the config.\nRemember to rescan clients afterwards.",
            ctx=ctx,
        )

    db = ctx.obj["db"]
    matcher = ctx.obj["matcher"]
    clients = ctx.obj["clients"]
    # Restrict to a single client when --client was given.
    clients = {
        name: c["client"]
        for (name, c) in clients.items()
        if not client or name == client
    }

    if not clients:
        click.echo("No clients found")
        quit(1)

    if path:
        paths = [Path(p) for p in path]
    else:
        paths = Path(".").iterdir()

    # infohashes seen on disk, per client name.
    found_infohashes = {client_name: set() for client_name in clients.keys()}
    for scan_path in paths:
        p = Path(os.path.abspath(scan_path))
        map_result = matcher.map_path_to_clients(p)
        for f, mapped_file in map_result.files.items():
            # Build a NEW list here: `file_clients += ...` would extend
            # mapped_file.clients in place and corrupt the shared MappedFile.
            file_clients = list(mapped_file.clients)
            if include_indirect_seeded:
                file_clients.extend(mapped_file.indirect_clients)
            for file_client, infohash in file_clients:
                if file_client not in clients:
                    continue
                found_infohashes[file_client].add(infohash)

    client_stats = {}
    # Use client_name to avoid shadowing the --client option parameter.
    for client_name in clients.keys():
        total_found_count, total_missing_count, total_found_size, total_missing_size = (
            0,
            0,
            0,
            0,
        )
        missing_infohashes = []
        seeded_infohashes = db.get_seeded_infohashes(client_name)
        usable_infohashes = None
        if query:
            usable_infohashes = set(
                filter_torrents(
                    clients[client_name], [s[0] for s in seeded_infohashes], query
                )
            )
        for infohash, name, size, count in seeded_infohashes:
            if usable_infohashes is not None and infohash not in usable_infohashes:
                logger.debug(
                    "Skipping %s / %s because it does not match filter", infohash, name
                )
                continue

            # The LEFT JOIN aggregate can yield NULL (None) for torrents with
            # no matching files; count them as size 0 instead of crashing.
            size = size or 0
            if infohash in found_infohashes[client_name]:
                total_found_count += 1
                total_found_size += size
            else:
                total_missing_count += 1
                total_missing_size += size
                missing_infohashes.append((infohash, name))

        client_stats[client_name] = {
            "total_found_count": total_found_count,
            "total_missing_count": total_missing_count,
            "total_found_size": total_found_size,
            "total_missing_size": total_missing_size,
            "missing_infohashes": missing_infohashes,
        }

    for client_name, stats in client_stats.items():
        # Make the dry-run case explicit instead of always printing "Removing".
        verb = "Removing" if remove_from_client else "Would remove"
        click.echo(f"{verb} {len(stats['missing_infohashes'])} from {client_name}")
        for infohash, name in stats["missing_infohashes"]:
            if remove_from_client:
                click.echo(f"Removing torrent {infohash} / {name} from {client_name}")
                clients[client_name].remove(infohash)
            else:
                click.echo(f"Would remove {name} ({infohash}) from {client_name}")

    if summary:
        click.echo("Summary:")
        for client_name, stats in client_stats.items():
            click.echo(f"Client {client_name}")
            click.echo(f" Total found: {stats['total_found_count']}")
            click.echo(
                f" Total found size: {humanize_bytes(stats['total_found_size'])}"
            )
            click.echo(f" Total missing: {stats['total_missing_count']}")
            click.echo(
                f" Total missing size: {humanize_bytes(stats['total_missing_size'])}"
            )


@cli.command(help="Checks if the config file exists and is loadable.")
@click.pass_context
def check_config(ctx):
@@ -324,9 +488,10 @@ def check_config(ctx):
help="Remove all torrents seeding data from a path. Does not delete the actual data."
)
@click.option("-l", "--client", help="Remove from a specific client", type=str)
@click.option("-q", "--query", help="SQL query to match against torrents", type=str)
@click.argument("path", nargs=-1, type=click.Path(exists=True), required=True)
@click.pass_context
def rm(ctx, client, path):
def rm(ctx, client, query, path):
matcher = ctx.obj["matcher"]
clients = ctx.obj["clients"]
clients = {
@@ -349,6 +514,12 @@ def rm(ctx, client, path):
continue
infohashes_to_remove.setdefault(client_name, set()).add(infohash)

if query:
for client_name, infohashes in list(infohashes_to_remove.items()):
infohashes_to_remove[client_name] = set(filter_torrents(
clients[client_name], infohashes, query
))

if not infohashes_to_remove:
click.echo("Nothing found to remove")
quit()
@@ -474,7 +645,7 @@ def add(

if not exact and not re.findall(r"\{[^\}]+\}", store_path):
click.echo(
f"Store path does not contain any variables and therefore will be the same for each torrent."
"Store path does not contain any variables and therefore will be the same for each torrent."
)
quit(1)

87 changes: 79 additions & 8 deletions src/autotorrent/db.py
Original file line number Diff line number Diff line change
@@ -13,7 +13,8 @@
)

InsertTorrentFile = namedtuple(
"InsertTorrentFile", ["infohash", "name", "download_path", "paths"]
"InsertTorrentFile",
["infohash", "name", "download_path", "paths"],
)


@@ -68,6 +69,13 @@ def create_tables(self):
UNIQUE(path, torrent_id)
)"""
)
try:
c.execute("""ALTER TABLE client_torrentfiles ADD COLUMN inode INTEGER""")
except sqlite3.OperationalError:
pass
c.execute(
"""CREATE INDEX IF NOT EXISTS client_torrentfiles_inode ON client_torrentfiles (inode)"""
)
self.db.commit()

def commit(self):
@@ -217,15 +225,17 @@ def insert_torrent_files_paths(self, client, insert_torrent_files):

for itf in insert_torrent_files:
insert_args = []
for path, size in itf.paths:
for path, size, inode in itf.paths:
path = decode_str(path, try_fix=self.utf8_compat_mode)
if path is None:
continue

insert_args.append((infohash_id_mapping[itf.infohash], path, size))
insert_args.append(
(infohash_id_mapping[itf.infohash], path, size, inode)
)

c.executemany(
"INSERT OR IGNORE INTO client_torrentfiles (torrent_id, path, size) VALUES (?, ?, ?)",
"INSERT OR IGNORE INTO client_torrentfiles (torrent_id, path, size, inode) VALUES (?, ?, ?, ?)",
insert_args,
)
self.commit()
@@ -266,16 +276,77 @@ def remove_non_existing_infohashes(self, client, infohashes):
],
)

def get_seeded_paths(self, paths):
    def get_seeded_paths(self, paths, inodes):
        """Look up which client torrents seed the given file paths.

        Args:
            paths: iterable of file paths to match exactly against the
                indexed client_torrentfiles rows.
            inodes: mapping of inode number -> list of (path, st_dev) pairs,
                used to find hardlinked copies of seeded files. Pass a falsy
                value to skip the inode (hardlink) lookup entirely.

        Returns:
            A (seeded_files, indirect_seeded_files) tuple of SeededFile lists.
            Direct matches are by path; indirect matches are hardlinks that
            share an inode with a seeded file.
        """
        c = self.db.cursor()
        # Exact path matches. NOTE(review): with an empty `paths` this builds
        # `IN ()`, which SQLite rejects — presumably callers always pass at
        # least one path; confirm at the call site.
        c.execute(
            f"""SELECT client_torrentfiles.torrent_id, name, download_path, infohash, client, path, size FROM client_torrentfiles
            LEFT JOIN client_torrents ON client_torrents.id = client_torrentfiles.torrent_id
            WHERE path IN ({','.join(['?'] * len(paths))})""",
            [decode_str(p, try_fix=self.utf8_compat_mode) for p in paths],
        )

        seeded_files = []
        indirect_seeded_files = []
        # (torrent_id, client, path) triples already reported as direct
        # matches, so the inode pass does not double-count them.
        seen_files = set()

        for (
            torrent_id,
            name,
            download_path,
            infohash,
            client,
            path,
            size,
        ) in c.fetchall():
            seeded_files.append(
                SeededFile(name, Path(path), download_path, infohash, client, size)
            )
            seen_files.add((torrent_id, client, path))

        if inodes:
            # Second pass: find rows whose stored inode matches one of the
            # scanned files' inodes (i.e. hardlinked copies).
            c.execute(
                f"""SELECT client_torrentfiles.torrent_id, inode, name, download_path, infohash, client, path, size FROM client_torrentfiles
                LEFT JOIN client_torrents ON client_torrents.id = client_torrentfiles.torrent_id
                WHERE inode IN ({','.join(['?'] * len(inodes))})""",
                list(inodes.keys()),
            )
            for (
                torrent_id,
                inode,
                name,
                download_path,
                infohash,
                client,
                path,
                size,
            ) in c.fetchall():
                if (torrent_id, client, path) in seen_files:
                    continue
                seen_files.add((torrent_id, client, path))
                full_path = Path(path)
                # The index may be stale; skip rows whose file is gone.
                if not full_path.is_file():
                    continue
                stat = full_path.stat()
                # Inode numbers are only unique per device, so require the
                # device to match before treating it as the same file.
                for p, dev in inodes[inode]:
                    if dev == stat.st_dev:
                        indirect_seeded_files.append(
                            SeededFile(name, p, download_path, infohash, client, size)
                        )
                        break
        return seeded_files, indirect_seeded_files

def get_seeded_infohashes(self, client):
c = self.db.cursor()
c.execute(
f"""SELECT infohash, name, sum(size), count(*)
FROM client_torrents
LEFT JOIN client_torrentfiles ON client_torrents.id = client_torrentfiles.torrent_id
AND client_torrentfiles.path LIKE (client_torrents.download_path || '%')
WHERE client = ?
GROUP BY infohash, name""",
(client,),
)

return [
SeededFile(name, Path(path), download_path, infohash, client, size)
for (name, download_path, infohash, client, path, size) in c.fetchall()
(infohash, name, size, count) for (infohash, size, count) in c.fetchall()
]
16 changes: 13 additions & 3 deletions src/autotorrent/indexer.py
Original file line number Diff line number Diff line change
@@ -82,7 +82,12 @@ class IndexAction(Enum):


class Indexer:
def __init__(self, db, ignore_file_patterns=None, ignore_directory_patterns=None):
def __init__(
self,
db,
ignore_file_patterns=None,
ignore_directory_patterns=None,
):
self.db = db
self.ignore_file_patterns = ignore_file_patterns or []
self.ignore_directory_patterns = ignore_directory_patterns or []
@@ -186,6 +191,10 @@ def scan_clients(self, clients, full_scan=False, fast_scan=False):
def _scan_client(self, client_name, client, fast_scan):
torrents = client.list()
insert_queue = []

def get_file_inode(path):
return path.stat().st_ino

for torrent in torrents:
_, current_download_path = self.db.get_torrent_file_info(
client_name, torrent.infohash
@@ -209,10 +218,11 @@ def _scan_client(self, client_name, client, fast_scan):
paths = []
for f in files:
f_path = download_path / f.path
paths.append((str(f_path), f.size))
inode = get_file_inode(f_path)
paths.append((str(f_path), f.size, inode))
f_path_resolved = f_path.resolve()
if f_path_resolved != f_path:
paths.append((str(f_path_resolved), f.size))
paths.append((str(f_path_resolved), f.size, inode))
insert_queue.append(
InsertTorrentFile(torrent.infohash, torrent.name, download_path, paths)
)
63 changes: 48 additions & 15 deletions src/autotorrent/matcher.py
Original file line number Diff line number Diff line change
@@ -14,8 +14,10 @@

MatchedFile = namedtuple("MatchedFile", ["torrent_file", "searched_files"])
MatchResult = namedtuple("MatchResult", ["root_path", "matched_files", "size"])
MappedFile = namedtuple("MappedFile", ["size", "clients"])
MapResult = namedtuple("MapResult", ["total_size", "seeded_size", "files"])
MappedFile = namedtuple("MappedFile", ["size", "clients", "indirect_clients"])
MapResult = namedtuple(
"MapResult", ["total_size", "seeded_size", "indirect_seeded_size", "files"]
)
DynamicMatchResult = namedtuple(
"DynamicMatchResult", ["success", "missing_size", "matched_files", "touched_files"]
)
@@ -35,9 +37,10 @@ def is_relative_to(path, *other):


class Matcher:
def __init__(self, rewriter, db):
def __init__(self, rewriter, db, include_inodes=False):
self.rewriter = rewriter
self.db = db
self.include_inodes = include_inodes

def _match_filelist_exact(
self,
@@ -247,14 +250,14 @@ def _select_best_candidate(
for match_result in candidates:
candidate_result = {}
for matched_file in match_result.matched_files:
candidate_result[
matched_file.torrent_file.path
] = self._match_best_file(
torrent,
matched_file.torrent_file,
matched_file.searched_files,
hash_probe=hash_probe,
match_hash_size=match_hash_size,
candidate_result[matched_file.torrent_file.path] = (
self._match_best_file(
torrent,
matched_file.torrent_file,
matched_file.searched_files,
hash_probe=hash_probe,
match_hash_size=match_hash_size,
)
)
evaluated_candidates.append(candidate_result)
return sorted(
@@ -435,21 +438,36 @@ def map_path_to_clients(self, path):

def flush_check_queue():
logger.debug("Flushing queue")
path_inodes = {}
for p in path_check_queue:
resolved_p = p.resolve()
size = p.stat().st_size
stat = p.stat()
size = stat.st_size
if self.include_inodes:
if stat.st_ino not in path_inodes:
path_inodes[stat.st_ino] = []
path_inodes[stat.st_ino].append((p, stat.st_dev))
if resolved_p not in real_files_seen:
total["size"] += size
real_files_seen.add(resolved_p)

real_files_mapping[p] = resolved_p
path_seeded[p] = MappedFile(size=size, clients=[])
path_seeded[p] = MappedFile(size=size, clients=[], indirect_clients=[])

for seeded_file in self.db.get_seeded_paths(path_check_queue):
seeded_files, indirect_seeded_files = self.db.get_seeded_paths(
path_check_queue, path_inodes
)

for seeded_file in seeded_files:
path_seeded[seeded_file.path].clients.append(
(seeded_file.client, seeded_file.infohash)
)

for indirect_seeded_file in indirect_seeded_files:
path_seeded[indirect_seeded_file.path].indirect_clients.append(
(indirect_seeded_file.client, indirect_seeded_file.infohash)
)

path_check_queue.clear()

def looper(path, initial_path=False):
@@ -480,6 +498,7 @@ def looper(path, initial_path=False):
looper(path)
flush_check_queue()
seeded_size = 0
indirect_seeded_size = 0
already_counted_paths = set()
for p, mapped_file in path_seeded.items():
if not mapped_file.clients:
@@ -492,6 +511,20 @@ def looper(path, initial_path=False):
already_counted_paths.add(resolved_p)
seeded_size += mapped_file.size

for p, mapped_file in path_seeded.items():
if not mapped_file.indirect_clients:
continue

resolved_p = real_files_mapping[p]
if resolved_p in already_counted_paths:
continue

already_counted_paths.add(resolved_p)
indirect_seeded_size += mapped_file.size

return MapResult(
total_size=total["size"], seeded_size=seeded_size, files=path_seeded
total_size=total["size"],
seeded_size=seeded_size,
indirect_seeded_size=indirect_seeded_size,
files=path_seeded,
)
79 changes: 77 additions & 2 deletions src/autotorrent/utils.py
Original file line number Diff line number Diff line change
@@ -6,13 +6,13 @@
import platform
import re
import shutil
import sqlite3
from collections import namedtuple
from fnmatch import fnmatch
from pathlib import Path, PurePath

import chardet
import click
from libtc import TorrentProblems

from .exceptions import FailedToCreateLinkException, FailedToParseTorrentException

@@ -489,7 +489,7 @@ def verify_hash(self, fnmatches, file_mapping):
if (
inner_piece_status
and all(inner_piece_status)
and all([p != False for p in edge_piece_status])
and all([p is not False for p in edge_piece_status])
):
file_status_mapping[torrent_file] = "hash-success"
elif not inner_piece_status and all(edge_piece_status):
@@ -879,3 +879,78 @@ def add_status_formatter(status, torrent_path, message):

status_msg = f"[{click.style(status_spec[1], fg=status_spec[0])}]"
click.echo(f" {status_msg:18s} {torrent_path.name!r} {message}")


def filter_torrents(client, torrents_or_infohashes, query):
    """Filter torrents (or infohashes) with an SQL WHERE clause.

    All torrents are fetched from *client* and loaded into an in-memory
    SQLite table; *query* is used as the WHERE clause to select matching
    infohashes.

    Args:
        client: torrent client with a ``list()`` method.
        torrents_or_infohashes: iterable of infohash strings and/or objects
            with an ``infohash`` attribute.
        query: SQL WHERE-clause body, e.g. ``"label = 'tv' AND ratio > 1"``.

    Returns:
        The subset of *torrents_or_infohashes* whose infohash matched.

    NOTE: *query* is interpolated directly into the SQL statement. It is a
    deliberate user-facing feature (the user queries their own in-memory
    table), so it must only ever come from the local CLI user.
    """
    db = sqlite3.connect(":memory:")
    try:
        c = db.cursor()
        c.execute(
            """CREATE TABLE torrents (
                infohash TEXT UNIQUE,
                name TEXT,
                size INTEGER,
                state TEXT,
                progress REAL,
                uploaded INTEGER,
                added DATETIME,
                tracker TEXT,
                upload_rate INTEGER,
                download_rate INTEGER,
                label TEXT,
                ratio REAL,
                complete BOOL
            )"""
        )
        db.commit()

        torrents = client.list()
        # Insert in batches of 500 rows to stay well below SQLite's
        # bound-parameter limits.
        while torrents:
            c.executemany(
                "INSERT INTO torrents VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                [
                    (
                        t.infohash,
                        t.name,
                        t.size,
                        t.state,
                        t.progress,
                        t.uploaded,
                        t.added,
                        t.tracker,
                        t.upload_rate,
                        t.download_rate,
                        t.label,
                        # ratio = uploaded / downloaded-so-far. A real
                        # conditional keeps a legitimate ratio of exactly 0.0
                        # (the old `and/or` idiom collapsed 0.0 into NULL),
                        # and the size guard avoids ZeroDivisionError.
                        (
                            t.uploaded / ((t.progress * t.size) / 100)
                            if t.progress > 0 and t.size
                            else None
                        ),
                        t.progress == 100.0,
                    )
                    for t in torrents[:500]
                ],
            )
            torrents = torrents[500:]

        matching_infohashes = {
            infohash
            for (infohash,) in c.execute(
                f"SELECT infohash FROM torrents WHERE {query}"
            )
        }
    finally:
        # The table is throwaway; always release the connection.
        db.close()

    return [
        entry
        for entry in torrents_or_infohashes
        if (isinstance(entry, str) and entry in matching_infohashes)
        or (
            hasattr(entry, "infohash")
            and entry.infohash in matching_infohashes
        )
    ]
156 changes: 155 additions & 1 deletion tests/test_scan_ls_clients.py
Original file line number Diff line number Diff line change
@@ -270,7 +270,10 @@ def test_multiple_clients(tmp_path, indexer, matcher, client, client2):
)


def test_symlink(tmp_path, indexer, matcher, client, client2, client3):
@pytest.mark.parametrize("include_inodes", [False, True])
def test_symlink(tmp_path, indexer, matcher, client, client2, client3, include_inodes):
indexer.include_inodes = include_inodes
matcher.include_inodes = include_inodes
infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709"
name = "test torrent 1"
download_path = tmp_path / "test torrent 1"
@@ -477,3 +480,154 @@ def test_rewrite(tmp_path, rewriter, indexer, matcher, client):
assert len(map_result.files) == 2
for mf in map_result.files.values():
assert len(mf.clients) == 0


@pytest.mark.parametrize("include_inodes", [False, True])
def test_hardlink(tmp_path, indexer, matcher, client, client2, client3, include_inodes):
    """End-to-end check of hardlink (inode) tracking.

    Runs with inode scanning both disabled and enabled and verifies that
    hardlinked copies of seeded files are reported as *indirectly* seeded
    only when include_inodes is on.
    """
    # Force the fixtures into the parametrized inode mode.
    indexer.include_inodes = include_inodes
    matcher.include_inodes = include_inodes
    infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709"
    name = "test torrent 1"
    download_path = tmp_path / "test torrent 1"
    files = []
    size = 0
    # (filename, size in bytes, include in the torrent's file list)
    seeded_files = [("file1", 400, True), ("file2", 600, True)]
    for fn, fsize, add_to_files in seeded_files:
        fp = download_path / Path(fn)
        fp.parent.mkdir(parents=True, exist_ok=True)
        fp.write_bytes(b"a" * fsize)
        if add_to_files:
            size += fsize
            files.append(TorrentFile(fn, fsize, 100))

    # Seed the original payload in the first client.
    client._inject_torrent(
        TorrentData(
            infohash,
            name,
            size,
            TorrentState.ACTIVE,
            100,
            1000,
            datetime(2020, 1, 1, 1, 1),
            "example.com",
            0,
            0,
            None,
        ),
        files,
        download_path,
    )

    # A symlinked view of the download folder ...
    download_path_symlink = tmp_path / "test torrent 2"
    download_path_symlink.symlink_to(download_path)

    # ... and a folder of hardlinked copies created through that symlink
    # (Path.hardlink_to requires Python 3.10+).
    download_path_hardlink_files = tmp_path / "test torrent 3"
    download_path_hardlink_files.mkdir()
    (download_path_hardlink_files / "file1").hardlink_to(download_path_symlink / "file1")
    (download_path_hardlink_files / "file2").hardlink_to(download_path_symlink / "file2")
    indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False)

    # The original path is directly seeded by test_client.
    map_result = matcher.map_path_to_clients(download_path)
    assert map_result.seeded_size == 1000
    assert len(map_result.files) == 2
    for mf in map_result.files.values():
        assert len(mf.clients) == 1

    # The symlink view is not (yet) seeded by any client.
    map_result = matcher.map_path_to_clients(download_path_symlink)
    assert map_result.seeded_size == 0
    assert map_result.total_size == 1000
    assert len(map_result.files) == 2
    for f, mf in map_result.files.items():
        assert len(mf.clients) == 0

    # Neither is the hardlink folder.
    map_result = matcher.map_path_to_clients(download_path_hardlink_files)
    assert map_result.seeded_size == 0
    assert map_result.total_size == 1000
    assert len(map_result.files) == 2
    for f, mf in map_result.files.items():
        assert len(mf.clients) == 0

    # Second client seeds the same torrent through the symlinked path.
    client2._inject_torrent(
        TorrentData(
            infohash,
            name,
            size,
            TorrentState.ACTIVE,
            100,
            1000,
            datetime(2020, 1, 1, 1, 1),
            "example.com",
            0,
            0,
            None,
        ),
        files,
        download_path_symlink,
    )

    indexer.scan_clients({"test_client2": client2, "test_client3": client3}, full_scan=False, fast_scan=False)

    # The original path is now directly seeded by both clients; with inode
    # scanning on, test_client2's symlink-resolved copy also shows up as an
    # indirect (hardlink) match.
    map_result = matcher.map_path_to_clients(download_path)
    assert map_result.seeded_size == 1000
    assert len(map_result.files) == 2
    for mf in map_result.files.values():
        assert len(mf.clients) == 2
        assert sorted(mf.clients) == sorted(
            [
                ("test_client", "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
                ("test_client2", "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
            ]
        )
        if include_inodes:
            assert sorted(mf.indirect_clients) == sorted(
                [
                    ("test_client2", "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
                ]
            )
            assert len(mf.indirect_clients) == 1
        else:
            assert len(mf.indirect_clients) == 0


    map_result = matcher.map_path_to_clients(download_path_symlink)
    assert map_result.seeded_size == 1000
    assert map_result.total_size == 1000
    assert len(map_result.files) == 2
    for f, mf in map_result.files.items():
        assert len(mf.clients) == 1

    # The hardlink folder is still not directly seeded; indirect seeded size
    # only appears when inode scanning is enabled.
    map_result = matcher.map_path_to_clients(download_path_hardlink_files)
    assert map_result.seeded_size == 0
    if include_inodes:
        assert map_result.indirect_seeded_size == 1000
    else:
        assert map_result.indirect_seeded_size == 0
    assert map_result.total_size == 1000
    assert len(map_result.files) == 2

    # Third client seeds the hardlink folder directly.
    client3._inject_torrent(
        TorrentData(
            infohash,
            name,
            size,
            TorrentState.ACTIVE,
            100,
            1000,
            datetime(2020, 1, 1, 1, 1),
            "example.com",
            0,
            0,
            None,
        ),
        files,
        download_path_hardlink_files,
    )

    indexer.scan_clients({"test_client3": client3}, full_scan=False, fast_scan=False)

    # Now the hardlink folder counts as directly seeded.
    map_result = matcher.map_path_to_clients(download_path_hardlink_files)
    assert map_result.seeded_size == 1000
    assert map_result.total_size == 1000
    assert len(map_result.files) == 2
    for f, mf in map_result.files.items():
        assert len(mf.clients) == 1

0 comments on commit 100a794

Please sign in to comment.