@@ -54,11 +54,11 @@ async def get_proxies(self) -> None:
         """
         Asynchronously fill the self.proxies queue with fresh proxies.
         """
-        self.logger.info('Initializing parsers ...')
+        self.logger.info('[aProxyRelay] Initializing parsers ...')
         ggs = []
         scrapes = []
         for item in proxy_list:
-            self.logger.info(f'Loading: {item["parser"].__name__}')
+            self.logger.info(f'[aProxyRelay] Loading: {item["parser"].__name__}')
             parser = item['parser']
             for zone in self.zones:
                 url = await parser.format_url(url=item['url'], zone=zone)
@@ -68,34 +68,32 @@ async def get_proxies(self) -> None:
                     scrapes.append(url)
         ggs = list(set(ggs))
         scrapes = list(set(scrapes))
-        self.logger.info(f'Parsers loaded: GG: {len(ggs)}, Other: {len(scrapes)}, Total: {len(ggs + scrapes)} ...')
+        self.logger.info(f'[aProxyRelay] Parsers loaded: GG: {len(ggs)}, Other: {len(scrapes)}, Total: {len(ggs + scrapes)} ...')

         if self.scrape:
             async with ClientSession(conn_timeout=self.timeout) as session:
                 await self._fetch_proxy_page(scrapes, session)
-            self.logger.info(f'Scraper: Found {self._queue_filter.qsize()} competent proxy servers')
+            self.logger.info(f'[aProxyRelay] Scraper: Found {self._queue_filter.qsize()} competent proxy servers')
         else:
-            self.logger.info('Scraper: Skip discovery of new proxy servers ...')
+            self.logger.info('[aProxyRelay] Scraper: Skip discovery of new proxy servers ...')

         if self.filter and self.scrape:
-            self.logger.info(
-                f'Validating: Proxies ({self._queue_filter.qsize()}), checking if proxies meet connection requirements ...'
-            )
+            self.logger.info(f'[aProxyRelay] Validating: Proxies ({self._queue_filter.qsize()}), checking if proxies meet connection requirements ...')  # noqa: B950
             async with ClientSession(conn_timeout=15) as session:
                 await self._test_all_proxies(session)
-            self.logger.info(f'Filter: Found {self._filtered_failed} incompetent and {self._filtered_available} available proxy servers in {datetime.now(UTC) - self.started}')  # noqa: B950
+            self.logger.info(f'[aProxyRelay] Filter: Found {self._filtered_failed} incompetent and {self._filtered_available} available proxy servers in {datetime.now(UTC) - self.started}')  # noqa: B950
         else:
             while not self._queue_filter.empty():
                 _target = self._queue_filter.get()
                 _target['proxy'] = f"{_target['protocol'].replace('https', 'http')}://{_target['ip']}:{_target['port']}"
                 self.proxies.put(_target)
-            self.logger.info('Filter: Skip tests for scraped proxy servers ...')
+            self.logger.info('[aProxyRelay] Filter: Skip tests for scraped proxy servers ...')

         async with ClientSession(conn_timeout=self.timeout) as session:
             await self._fetch_proxy_servers(ggs, session)

-        self.logger.info(f'Scraper: Found {self._filtered_ggs} additional available proxy servers')
-        self.logger.info(f'Found {self.proxies.qsize()} working proxies, took {datetime.now(UTC) - self.started}, Please wait...')
+        self.logger.info(f'[aProxyRelay] Scraper: Found {self._filtered_ggs} additional available proxy servers')
+        self.logger.info(f'[aProxyRelay] Found {self.proxies.qsize()} working proxies, took {datetime.now(UTC) - self.started}, Please wait...')  # noqa: B950

     async def process_targets(self) -> None:
         """
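As for the change itself, it repeats the `[aProxyRelay]` literal at every call site. A `logging.LoggerAdapter` would centralize the tag instead; a minimal sketch, where `PrefixAdapter` and the `'aproxyrelay'` logger name are illustrative rather than taken from this codebase:

```python
import logging


class PrefixAdapter(logging.LoggerAdapter):
    """Prepend a fixed tag to every message so call sites stay short."""

    def process(self, msg, kwargs):
        return f'[aProxyRelay] {msg}', kwargs


logging.basicConfig(level=logging.INFO)
logger = PrefixAdapter(logging.getLogger('aproxyrelay'), {})
logger.info('Initializing parsers ...')  # logs: [aProxyRelay] Initializing parsers ...
```

Either way the hardcoded prefix works; the adapter only pays off if the tag ever changes or more context fields get added later.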