
Commit

feat(animepahe): refactor API calls to use query parameters and improve stream retrieval logic
Benexl committed Dec 2, 2024
1 parent 000bae9 commit 30fa985
Showing 2 changed files with 73 additions and 66 deletions.
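The core of the api.py change is to stop interpolating query strings into the endpoint URL and instead pass them through the HTTP client's params argument. A minimal, standalone sketch of that pattern, using the requests library directly for illustration (the provider's own self.session may be a differently configured client):

    import requests

    session = requests.Session()
    # values are passed as a dict and URL-encoded automatically instead of
    # being concatenated into the endpoint string by hand
    response = session.get(
        "https://animepahe.ru/api",
        params={"m": "search", "q": "one piece"},
    )
    response.raise_for_status()
    results = response.json()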
133 changes: 68 additions & 65 deletions fastanime/libs/anime_provider/animepahe/api.py
@@ -1,6 +1,5 @@
 import logging
 import random
-import re
 import time
 from typing import TYPE_CHECKING
 
@@ -15,17 +14,16 @@
 from .constants import (
     ANIMEPAHE_BASE,
     ANIMEPAHE_ENDPOINT,
+    JUICY_STREAM_REGEX,
     REQUEST_HEADERS,
     SERVER_HEADERS,
 )
 from .extractors import process_animepahe_embed_page
 
 if TYPE_CHECKING:
     from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
-JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
-logger = logging.getLogger(__name__)
 
-KWIK_RE = re.compile(r"Player\|(.+?)'")
+logger = logging.getLogger(__name__)
 
 
 class AnimePahe(AnimeProvider):
@@ -35,9 +33,8 @@ class AnimePahe(AnimeProvider):
 
     @debug_provider
     def search_for_anime(self, search_keywords: str, *args, **kwargs):
-        url = f"{ANIMEPAHE_ENDPOINT}m=search&q={search_keywords}"
         response = self.session.get(
-            url,
+            ANIMEPAHE_ENDPOINT, params={"m": "search", "q": search_keywords}
         )
         response.raise_for_status()
         data: "AnimePaheSearchPage" = response.json()
@@ -76,12 +73,10 @@ def _pages_loader(
         self,
         data,
         session_id,
-        url,
+        params,
         page,
     ):
-        response = self.session.get(
-            url,
-        )
+        response = self.session.get(ANIMEPAHE_ENDPOINT, params=params)
         response.raise_for_status()
         if not data:
             data.update(response.json())
@@ -102,12 +97,16 @@ def _pages_loader(
                 )
             )
             page += 1
-            url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
             self._pages_loader(
                 data,
                 session_id,
-                url,
-                page,
+                params={
+                    "m": "release",
+                    "page": page,
+                    "id": session_id,
+                    "sort": "episode_asc",
+                },
+                page=page,
             )
         return data
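_pages_loader now builds the m=release request from a params dict and recurses with page + 1 until the episode listing is exhausted. A rough iterative sketch of the same idea; the last_page and data response fields used for the stopping condition are assumptions, since the actual check sits in lines collapsed above:

    import requests

    def fetch_all_episode_pages(session, session_id):
        # collect every page of the release listing; "last_page" and "data"
        # are assumed response fields, not confirmed by this diff
        episodes = []
        page = 1
        while True:
            response = session.get(
                "https://animepahe.ru/api",
                params={"m": "release", "id": session_id,
                        "sort": "episode_asc", "page": page},
            )
            response.raise_for_status()
            payload = response.json()
            episodes.extend(payload["data"])
            if page >= payload.get("last_page", page):
                break
            page += 1
        return episodes

    # usage sketch: fetch_all_episode_pages(requests.Session(), "some-session-id")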

@@ -118,13 +117,16 @@ def get_anime(self, session_id: str, *args):
         anime_result: "AnimePaheSearchResult" = d
         data: "AnimePaheAnimePage" = {} # pyright:ignore
 
-        url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
-
         data = self._pages_loader(
             data,
             session_id,
-            url,
-            page,
+            params={
+                "m": "release",
+                "id": session_id,
+                "sort": "episode_asc",
+                "page": page,
+            },
+            page=page,
         )
 
         if not data:
@@ -159,42 +161,52 @@ def get_anime(self, session_id: str, *args):
         }
 
     @debug_provider
-    def _get_streams(self, res_dict, streams, translation_type):
-        embed_url = res_dict["data-src"]
-        data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
-        # filter streams by translation_type
-        if data_audio != translation_type:
-            return
-
-        if not embed_url:
-            logger.warning(
-                "[ANIMEPAHE-WARN]: embed url not found please report to the developers"
+    def _get_server(self, episode, res_dicts, anime_title, translation_type):
+        # get all links
+        streams = {
+            "server": "kwik",
+            "links": [],
+            "episode_title": f"{episode['title'] or anime_title}; Episode {episode['episode']}",
+            "subtitles": [],
+            "headers": {},
+        }
+        for res_dict in res_dicts:
+            # get embed url
+            embed_url = res_dict["data-src"]
+            data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
+            # filter streams by translation_type
+            if data_audio != translation_type:
+                continue
+
+            if not embed_url:
+                logger.warning(
+                    "[ANIMEPAHE-WARN]: embed url not found please report to the developers"
                 )
+                continue
+            # get embed page
+            embed_response = self.session.get(
+                embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
+            )
+            embed_response.raise_for_status()
+            embed_page = embed_response.text
+
+            decoded_js = process_animepahe_embed_page(embed_page)
+            if not decoded_js:
+                logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
+                continue
+            juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
+            if not juicy_stream:
+                logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
+                continue
+            juicy_stream = juicy_stream.group(1)
+            # add the link
+            streams["links"].append(
+                {
+                    "quality": res_dict["data-resolution"],
+                    "translation_type": data_audio,
+                    "link": juicy_stream,
+                }
             )
-            return
-        # get embed page
-        embed_response = self.session.get(
-            embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
-        )
-        embed_response.raise_for_status()
-        embed_page = embed_response.text
-
-        decoded_js = process_animepahe_embed_page(embed_page)
-        if not decoded_js:
-            logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
-            return
-        juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
-        if not juicy_stream:
-            logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
-            return
-        juicy_stream = juicy_stream.group(1)
-        # add the link
-        streams["links"].append(
-            {
-                "quality": res_dict["data-resolution"],
-                "translation_type": data_audio,
-                "link": juicy_stream,
-            }
-        )
         return streams
 
     @debug_provider
@@ -239,19 +251,10 @@ def get_episode_streams(
         # data-audio
         # data-resolution
         res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
-
-        # get all links
-        streams = {
-            "server": "kwik",
-            "links": [],
-            "episode_title": f"{episode['title'] or anime_title}; Episode {episode['episode']}",
-            "subtitles": [],
-            "headers": {},
-        }
-        for res_dict in res_dicts:
-            # get embed url
-            if _streams := self._get_streams(res_dict, streams, translation_type):
-                yield _streams
+        if _server := self._get_server(
+            episode, res_dicts, anime_title, translation_type
+        ):
+            yield _server
 
 
 if __name__ == "__main__":
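With this refactor, get_episode_streams yields a single "kwik" server dict whose links list holds one entry per resolution, instead of re-yielding a growing dict for every resolution. A small sketch of how a caller might pick a link from that dict; the key names come from _get_server above, while treating the data-resolution values as plain numeric strings like "720" is an assumption:

    def pick_best_link(server: dict) -> str:
        # each entry carries "quality", "translation_type" and "link",
        # as assembled in _get_server()
        best = max(server["links"], key=lambda link: int(link["quality"]))
        return best["link"]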
6 changes: 5 additions & 1 deletion fastanime/libs/anime_provider/animepahe/constants.py
@@ -1,6 +1,8 @@
+import re
+
 ANIMEPAHE = "animepahe.ru"
 ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
-ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
+ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"
 
 SERVERS_AVAILABLE = ["kwik"]
 REQUEST_HEADERS = {
@@ -31,3 +33,5 @@
     "Priority": "u=4",
     "TE": "trailers",
 }
+JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
+KWIK_RE = re.compile(r"Player\|(.+?)'")
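Both regexes now live in constants.py next to the other module-level configuration and are imported where needed. A quick, self-contained sketch of what JUICY_STREAM_REGEX pulls out of the decoded embed JavaScript (the one-line script below is made up for illustration):

    import re

    JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")

    decoded_js = "const source='https://example.invalid/owo/uwu.m3u8';"
    match = JUICY_STREAM_REGEX.search(decoded_js)
    if match:
        stream_url = match.group(1)  # -> https://example.invalid/owo/uwu.m3u8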
