Commit

Correcting the return string for the Response object in all functions/methods
D4Vinci committed Nov 15, 2024
1 parent 5e8275c commit 90e38af
Showing 4 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion scrapling/engines/camo.py
@@ -72,7 +72,7 @@ def fetch(self, url: str) -> Response:
"""Opens up the browser and do your request based on your chosen options.
:param url: Target url.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
with Camoufox(
proxy=self.proxy,
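Both return strings describe the same object; the new one makes the relationship explicit: the returned `Response` behaves like an `Adaptor` (so it can be queried directly) and additionally carries the HTTP metadata. A minimal usage sketch of what the corrected docstring describes, assuming the public `StealthyFetcher` wrapper that calls `CamoufoxEngine.fetch()` and using a placeholder URL and selector:

```python
# Usage sketch -- the URL and selector are placeholders.
from scrapling import StealthyFetcher

page = StealthyFetcher().fetch('https://example.com')

# Attributes added on top of the Adaptor API, per the corrected return string:
print(page.status, page.reason)   # e.g. 200 OK
print(page.cookies)               # cookies returned by the server
print(page.headers)               # response headers
print(page.request_headers)       # headers that were actually sent

# Everything an Adaptor offers is still available for parsing:
print(page.css('title::text'))
```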
2 changes: 1 addition & 1 deletion scrapling/engines/pw.py
@@ -116,7 +116,7 @@ def fetch(self, url: str) -> Response:
"""Opens up the browser and do your request based on your chosen options.
:param url: Target url.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
if not self.stealth:
from playwright.sync_api import sync_playwright
10 changes: 5 additions & 5 deletions scrapling/engines/static.py
@@ -48,7 +48,7 @@ def _prepare_response(self, response: httpxResponse) -> Response:
"""Takes httpx response and generates `Response` object from it.
:param response: httpx response object
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
return Response(
url=str(response.url),
@@ -69,7 +69,7 @@ def get(self, url: str, stealthy_headers: Optional[bool] = True, **kwargs: Dict)
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request had came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.get()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
headers = self._headers_job(kwargs.pop('headers', {}), url, stealthy_headers)
request = httpx.get(url=url, headers=headers, follow_redirects=self.follow_redirects, timeout=self.timeout, **kwargs)
@@ -81,7 +81,7 @@ def post(self, url: str, stealthy_headers: Optional[bool] = True, **kwargs: Dict
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request had came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.post()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
headers = self._headers_job(kwargs.pop('headers', {}), url, stealthy_headers)
request = httpx.post(url=url, headers=headers, follow_redirects=self.follow_redirects, timeout=self.timeout, **kwargs)
@@ -93,7 +93,7 @@ def delete(self, url: str, stealthy_headers: Optional[bool] = True, **kwargs: Di
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request had came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.delete()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
headers = self._headers_job(kwargs.pop('headers', {}), url, stealthy_headers)
request = httpx.delete(url=url, headers=headers, follow_redirects=self.follow_redirects, timeout=self.timeout, **kwargs)
@@ -105,7 +105,7 @@ def put(self, url: str, stealthy_headers: Optional[bool] = True, **kwargs: Dict)
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request had came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.put()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
headers = self._headers_job(kwargs.pop('headers', {}), url, stealthy_headers)
request = httpx.put(url=url, headers=headers, follow_redirects=self.follow_redirects, timeout=self.timeout, **kwargs)
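All four verb methods in `StaticEngine` share the same contract: `stealthy_headers` is on by default, any extra keyword arguments are forwarded to the matching `httpx` call, and the result is wrapped by `_prepare_response` into the `Response` described above. A hedged sketch, assuming the public `Fetcher` wrapper around this engine and using placeholder URLs, headers, and payload:

```python
# Hedged sketch -- URLs, payload, and header values are placeholders.
from scrapling import Fetcher

fetcher = Fetcher()

# Default: stealthy_headers=True generates browser-like headers plus a
# Google-search referer for the target domain.
page = fetcher.get('https://example.com')

# Extra keyword arguments are passed straight to httpx.post(), so regular
# httpx options such as `data` work unchanged.
result = fetcher.post(
    'https://example.com/api',
    stealthy_headers=False,
    headers={'User-Agent': 'my-custom-agent'},
    data={'q': 'scrapling'},
)
print(result.status, result.headers.get('content-type'))
```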
12 changes: 6 additions & 6 deletions scrapling/fetchers.py
@@ -17,7 +17,7 @@ def get(self, url: str, follow_redirects: bool = True, timeout: Optional[Union[i
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request had came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.get()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
response_object = StaticEngine(follow_redirects, timeout, adaptor_arguments=self.adaptor_arguments).get(url, stealthy_headers, **kwargs)
return response_object
@@ -30,7 +30,7 @@ def post(self, url: str, follow_redirects: bool = True, timeout: Optional[Union[
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.post()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
response_object = StaticEngine(follow_redirects, timeout, adaptor_arguments=self.adaptor_arguments).post(url, stealthy_headers, **kwargs)
return response_object
@@ -43,7 +43,7 @@ def put(self, url: str, follow_redirects: bool = True, timeout: Optional[Union[i
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.put()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
response_object = StaticEngine(follow_redirects, timeout, adaptor_arguments=self.adaptor_arguments).put(url, stealthy_headers, **kwargs)
return response_object
@@ -56,7 +56,7 @@ def delete(self, url: str, follow_redirects: bool = True, timeout: Optional[Unio
:param stealthy_headers: If enabled (default), Fetcher will create and add real browser's headers and
create a referer header as if this request came from Google's search of this URL's domain.
:param kwargs: Any additional keyword arguments are passed directly to `httpx.delete()` function so check httpx documentation for details.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
response_object = StaticEngine(follow_redirects, timeout, adaptor_arguments=self.adaptor_arguments).delete(url, stealthy_headers, **kwargs)
return response_object
@@ -97,7 +97,7 @@ def fetch(
:param google_search: Enabled by default, Scrapling will set the referer header to be as if this request came from a Google search for this website's domain name.
:param extra_headers: A dictionary of extra headers to add to the request. _The referer set by the `google_search` argument takes priority over the referer set here if used together._
:param proxy: The proxy to be used with requests, it can be a string or a dictionary with the keys 'server', 'username', and 'password' only.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
engine = CamoufoxEngine(
proxy=proxy,
@@ -167,7 +167,7 @@ def fetch(
:param cdp_url: Instead of launching a new browser instance, connect to this CDP URL to control real browsers/NSTBrowser through CDP.
:param nstbrowser_mode: Enables NSTBrowser mode, it have to be used with `cdp_url` argument or it will get completely ignored.
:param nstbrowser_config: The config you want to send with requests to the NSTBrowser. If left empty, Scrapling defaults to an optimized NSTBrowser's docker browserless config.
- :return: A Response object with `url`, `text`, `content`, `status`, `reason`, `encoding`, `cookies`, `headers`, `request_headers`, and the `adaptor` class for parsing, of course.
+ :return: A `Response` object that is the same as `Adaptor` object except it has these added attributes: `status`, `reason`, `cookies`, `headers`, and `request_headers`
"""
engine = PlaywrightEngine(
proxy=proxy,
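The two browser-based fetchers return the same `Response`, with the extra knobs named in the docstrings above: `google_search`, `extra_headers`, and `proxy` for the Camoufox path, and `cdp_url` plus the NSTBrowser options for the Playwright path. A hedged sketch with placeholder proxy credentials, headers, and CDP URL:

```python
# Hedged sketch -- proxy, header values, and the CDP URL are placeholders;
# the argument names follow the docstrings shown in this diff.
from scrapling import PlayWrightFetcher, StealthyFetcher

# Camoufox-based fetch: the google_search referer (enabled by default)
# takes priority over any referer passed through extra_headers.
page = StealthyFetcher().fetch(
    'https://example.com',
    extra_headers={'Accept-Language': 'en-US'},
    proxy={'server': 'http://127.0.0.1:8080', 'username': 'user', 'password': 'pass'},
)
print(page.status, page.cookies)

# Playwright-based fetch: connect to an already-running browser over CDP
# instead of launching a new instance.
page = PlayWrightFetcher().fetch(
    'https://example.com',
    cdp_url='ws://localhost:9222/devtools/browser/<browser-id>',
)
print(page.reason, page.request_headers)
```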
