Skip to content

Commit dafb2d2

Browse files
Leaner Generators
1 parent 3bc44b9 commit dafb2d2

File tree

2 files changed

+18
-18
lines changed

2 files changed

+18
-18
lines changed

src/board_game_scraper/spiders/bgg.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -128,13 +128,13 @@ def __init__(
128128
if self.premium_users_dir:
129129
self.logger.info("Premium users dir: <%s>", self.premium_users_dir)
130130

131-
def start_requests(self) -> Generator[Request, None, None]:
131+
def start_requests(self) -> Generator[Request]:
132132
yield from self.premium_users_requests_from_dir()
133133
yield from self.user_and_collection_requests_from_files()
134134
yield from self.game_requests_from_files()
135135
yield from super().start_requests()
136136

137-
def game_requests_from_files(self) -> Generator[Request, None, None]:
137+
def game_requests_from_files(self) -> Generator[Request]:
138138
bgg_ids = frozenset(
139139
extract_field_from_files(
140140
file_paths=self.game_files,
@@ -149,7 +149,7 @@ def game_requests_from_files(self) -> Generator[Request, None, None]:
149149
)
150150
yield from self.game_requests(bgg_ids=bgg_ids, page=1, priority=1)
151151

152-
def user_and_collection_requests_from_files(self) -> Generator[Request, None, None]:
152+
def user_and_collection_requests_from_files(self) -> Generator[Request]:
153153
user_names = frozenset(
154154
extract_field_from_files(
155155
file_paths=self.user_files,
@@ -168,7 +168,7 @@ def user_and_collection_requests_from_files(self) -> Generator[Request, None, No
168168
for user_name in user_names:
169169
yield self.user_request(user_name=user_name, priority=3)
170170

171-
def premium_users_requests_from_dir(self) -> Generator[Request, None, None]:
171+
def premium_users_requests_from_dir(self) -> Generator[Request]:
172172
premium_users = frozenset(load_premium_users(dirs=self.premium_users_dir))
173173
self.logger.info(
174174
"Loaded %d premium user(s) from <%s> to request",
@@ -197,7 +197,7 @@ def _get_sitemap_body(self, response: Response) -> bytes:
197197
self.logger.warning("YOLO – trying to parse sitemap from <%s>", response.url)
198198
return response.body
199199

200-
def _parse_sitemap(self, response: Response) -> Generator[Request, None, None]:
200+
def _parse_sitemap(self, response: Response) -> Generator[Request]:
201201
"""
202202
@url https://boardgamegeek.com/sitemap_geekitems_boardgame_1
203203
@returns requests 500 500
@@ -223,7 +223,7 @@ def game_requests(
223223
page: int = 1,
224224
priority: int = 0,
225225
**kwargs: Any,
226-
) -> Generator[Request, None, None]:
226+
) -> Generator[Request]:
227227
bgg_ids = frozenset(bgg_ids)
228228

229229
if page == 1:
@@ -321,7 +321,7 @@ def api_url(self, action: str, **kwargs: str | None) -> str:
321321
def parse_games(
322322
self,
323323
response: TextResponse,
324-
) -> Generator[Request | GameItem | CollectionItem, None, None]:
324+
) -> Generator[Request | GameItem | CollectionItem]:
325325
"""
326326
@url https://boardgamegeek.com/xmlapi2/thing?id=13,822,36218&type=boardgame&ratingcomments=1&stats=1&videos=1&pagesize=100
327327
@returns requests 0 0
@@ -400,7 +400,7 @@ def parse_collection(
400400
self,
401401
response: TextResponse,
402402
bgg_user_name: str | None = None,
403-
) -> Generator[Request | CollectionItem, None, None]:
403+
) -> Generator[Request | CollectionItem]:
404404
"""
405405
@url https://boardgamegeek.com/xmlapi2/collection?username=markus+shepherd&subtype=boardgame&excludesubtype=boardgameexpansion&stats=1&version=0
406406
@returns requests 100
@@ -822,7 +822,7 @@ def extract_page_number(
822822
def value_id(
823823
items: Selector | SelectorList | Iterable[Selector],
824824
sep: str = ":",
825-
) -> Generator[str, None, None]:
825+
) -> Generator[str]:
826826
for item in arg_to_iter(items):
827827
item = cast(Selector, item)
828828
value = item.xpath("@value").get() or ""
@@ -841,7 +841,7 @@ def remove_rank(value: str | None) -> str | None:
841841
def value_id_rank(
842842
items: Selector | SelectorList | Iterable[Selector],
843843
sep: str = ":",
844-
) -> Generator[str, None, None]:
844+
) -> Generator[str]:
845845
for item in arg_to_iter(items):
846846
item = cast(Selector, item)
847847
value = remove_rank(item.xpath("@friendlyname").get()) or ""
@@ -875,7 +875,7 @@ def parse_int_from_elem(
875875

876876
def parse_player_count(
877877
poll: Selector,
878-
) -> Generator[tuple[int, int, int, int], None, None]:
878+
) -> Generator[tuple[int, int, int, int]]:
879879
for result in poll.xpath("results"):
880880
numplayers = normalize_space(result.xpath("@numplayers").get())
881881
players = parse_int(numplayers)
@@ -908,7 +908,7 @@ def parse_votes(
908908
attr: str = "value",
909909
*,
910910
enum: bool = False,
911-
) -> Generator[int, None, None]:
911+
) -> Generator[int]:
912912
if not poll:
913913
return
914914

src/board_game_scraper/utils/files.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ def extract_field_from_jsonlines_file(
2626
field: str,
2727
encoding: str = "utf-8",
2828
converter: Callable[[Any], Any] | None = None,
29-
) -> Generator[Any, None, None]:
29+
) -> Generator[Any]:
3030
LOGGER.info("Extracting field <%s> from JSON lines file <%s>", field, file_path)
3131
with file_path.open(mode="r", encoding=encoding) as file:
3232
for line in file:
@@ -48,7 +48,7 @@ def extract_field_from_csv_file(
4848
field: str,
4949
encoding: str = "utf-8",
5050
converter: Callable[[Any], Any] | None = None,
51-
) -> Generator[Any, None, None]:
51+
) -> Generator[Any]:
5252
LOGGER.info("Extracting field <%s> from CSV file <%s>", field, file_path)
5353
with file_path.open(mode="r", encoding=encoding) as file:
5454
reader = csv.DictReader(file)
@@ -66,7 +66,7 @@ def extract_field_from_files(
6666
field: str,
6767
encoding: str = "utf-8",
6868
converter: Callable[[Any], Any] | None = None,
69-
) -> Generator[Any, None, None]:
69+
) -> Generator[Any]:
7070
for file_path_str in arg_to_iter(file_paths):
7171
file_path = Path(file_path_str).resolve()
7272
if not file_path.exists():
@@ -112,7 +112,7 @@ def parse_file_paths(paths: Iterable[Path | str] | str | None) -> tuple[Path, ..
112112
def _load_yaml(
113113
path: str | Path,
114114
encoding: str = "utf-8",
115-
) -> Generator[dict[str, Any], None, None]:
115+
) -> Generator[dict[str, Any]]:
116116
path = Path(path).resolve()
117117
LOGGER.info("Loading YAML from <%s>", path)
118118
try:
@@ -125,7 +125,7 @@ def _load_yaml(
125125
def _load_yamls(
126126
paths: Iterable[str | Path],
127127
encoding: str = "utf-8",
128-
) -> Generator[dict[str, Any], None, None]:
128+
) -> Generator[dict[str, Any]]:
129129
for path in paths:
130130
yield from _load_yaml(path, encoding)
131131

@@ -135,7 +135,7 @@ def load_premium_users(
135135
files: str | Path | Iterable[str | Path] | None = None,
136136
compare_date: datetime | str | None = None,
137137
encoding: str = "utf-8",
138-
) -> Generator[str, None, None]:
138+
) -> Generator[str]:
139139
"""Load premium users from YAML files and compare against given date."""
140140

141141
compare_date = parse_date(compare_date) or now()

0 commit comments

Comments (0)