Skip to content

Commit

Permalink
Added anime description
Browse files Browse the repository at this point in the history
  • Loading branch information
Tmaster055 committed Nov 12, 2024
1 parent fe846f5 commit f58fb20
Show file tree
Hide file tree
Showing 6 changed files with 155 additions and 81 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -405,6 +405,7 @@ These packages are automatically installed when you set up AniWorld Downloader u
- [x] Allow changing final output folder
- [x] Add option to select a random anime optionally via genre
- [x] Add additional installation variants.
- [x] Add anime description
- **Bug Fixes**
- [x] Fix season episode count.
Expand Down
47 changes: 47 additions & 0 deletions src/aniworld/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import threading
import random
import signal
import textwrap
from concurrent.futures import ThreadPoolExecutor, as_completed

import npyscreen
Expand Down Expand Up @@ -39,6 +40,8 @@
show_messagebox,
check_internet_connection,
adventure,
get_description,
get_description_with_ID
)


Expand Down Expand Up @@ -190,6 +193,20 @@ def process_url(url):
)
logging.debug("Episode selector created")

self.add(npyscreen.FixedText, value="")

self.display_text = False

self.toggle_button = self.add(
npyscreen.ButtonPress,
name="Description",
max_height=1,
when_pressed_function=self.go_to_second_form,
scroll_exit=True
)



self.action_selector.when_value_edited = self.update_directory_visibility
logging.debug("Set update_directory_visibility as callback for action_selector")

Expand Down Expand Up @@ -321,6 +338,35 @@ def on_cancel(self):
self.cancel_timer()
self.parentApp.setNextForm(None)

    def go_to_second_form(self):
        # Button callback for the "Description" button: switch to the
        # description form registered under the "SECOND" form id in onStart().
        self.parentApp.switchForm("SECOND")

class SecondForm(npyscreen.ActionFormV2):
    """Read-only TUI form that shows the anime description.

    Combines the description scraped from the aniworld.to page with the
    og:description pulled from MyAnimeList. OK and Cancel both return to
    the main episode form.
    """

    def create(self):
        anime_slug = self.parentApp.anime_slug
        anime_title = format_anime_title(anime_slug)

        # Fall back to a placeholder when a lookup returns nothing, so a
        # failed fetch can't hand None/"" to textwrap and crash the form.
        text_content1 = get_description(anime_slug) or "No description available."
        # NOTE(review): season is hard-coded to 1 here — the form has no
        # season context; confirm whether the selected season should be used.
        text_content2 = get_description_with_ID(anime_title, 1) or "No description available."

        wrapped_text1 = "\n".join(textwrap.wrap(text_content1, width=100))
        wrapped_text2 = "\n".join(textwrap.wrap(text_content2, width=100))

        text_content = f"{wrapped_text1}\n\n{wrapped_text2}"

        self.expandable_text = self.add(
            npyscreen.MultiLineEdit,
            value=text_content,
            max_height=30,
            editable=False
        )

    def on_ok(self):
        # Return to the main episode form.
        self.parentApp.switchForm("MAIN")

    def on_cancel(self):
        # Cancel behaves like OK: just go back.
        self.parentApp.switchForm("MAIN")

class AnimeApp(npyscreen.NPSAppManaged):
def __init__(self, anime_slug):
Expand All @@ -338,6 +384,7 @@ def onStart(self):
"MAIN", EpisodeForm,
name=name
)
self.addForm("SECOND", SecondForm, name="Description")


def parse_arguments():
Expand Down
79 changes: 1 addition & 78 deletions src/aniworld/aniskip/aniskip.py
Original file line number Diff line number Diff line change
@@ -1,93 +1,16 @@
import json
import re
import tempfile
from typing import Dict
import logging
from bs4 import BeautifulSoup
import requests
import aniworld.globals as aniworld_globals
from aniworld.common import raise_runtime_error, ftoi, get_season_episode_count, fetch_url_content
from aniworld.common import raise_runtime_error, ftoi, get_season_episode_count, fetch_ID

CHAPTER_FORMAT = "\n[CHAPTER]\nTIMEBASE=1/1000\nSTART={}\nEND={}\nTITLE={}\n"
OPTION_FORMAT = "skip-{}_start={},skip-{}_end={}"


def fetch_ID(anime_title, season):
    """Resolve the MyAnimeList ID for *anime_title*.

    Queries MAL's prefix-search endpoint, picks the first non-OVA result,
    and — for season > 1 — follows the "Sequel (TV)" link on each anime
    page until the requested season is reached.

    Returns the MAL ID, or None when the title or a required sequel
    cannot be resolved.
    """
    logging.debug("Fetching MAL ID for: %s", anime_title)

    # Strip a trailing "(N episodes)" suffix, then URL-encode spaces.
    name = re.sub(r' \(\d+ episodes\)', '', anime_title)
    logging.debug("Processed name: %s", name)
    keyword = re.sub(r'\s+', '%20', name)
    logging.debug("Keyword for search: %s", keyword)

    response = requests.get(
        f"https://myanimelist.net/search/prefix.json?type=anime&keyword={keyword}",
        headers={"User-Agent": aniworld_globals.DEFAULT_USER_AGENT},
        timeout=10
    )
    logging.debug("Response status code: %d", response.status_code)

    if response.status_code != 200:
        # Previously only logged and fell through to .json(); bail out early.
        logging.debug("Failed to fetch MyAnimeList data.")
        return None

    mal_metadata = response.json()
    logging.debug("MAL metadata: %s", json.dumps(mal_metadata, indent=2))
    results = [entry['name'] for entry in mal_metadata['categories'][0]['items']]
    logging.debug("Results: %s", results)

    filtered_choices = [choice for choice in results if 'OVA' not in choice]
    logging.debug("Filtered choices: %s", filtered_choices)
    if not filtered_choices:
        # Guard: indexing [0] on an empty list raised IndexError before.
        logging.debug("No non-OVA results for: %s", anime_title)
        return None
    best_match = filtered_choices[0]
    logging.debug("Best match: %s", best_match)

    anime_id = None
    for entry in mal_metadata['categories'][0]['items']:
        if entry['name'] == best_match:
            logging.debug("Found MAL ID: %s for %s", entry['id'], best_match)
            anime_id = entry['id']
            break

    if anime_id is None:
        # Don't walk the sequel chain without a starting ID.
        return None

    # Each loop iteration advances one season via the "Sequel (TV)" link.
    while season > 1:
        url = f"https://myanimelist.net/anime/{anime_id}"

        # Timeout and UA header were missing on this request (hang risk).
        response = requests.get(
            url,
            headers={"User-Agent": aniworld_globals.DEFAULT_USER_AGENT},
            timeout=10
        )
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')

        sequel_div = soup.find(
            "div",
            string=lambda text: text and "Sequel" in text and "(TV)" in text
        )
        if not sequel_div:
            logging.debug("Sequel (TV) not found")
            return None

        title_div = sequel_div.find_next("div", class_="title")
        if not title_div:
            logging.debug("No 'title'-Div found")
            return None

        link_element = title_div.find("a")
        if not link_element:
            logging.debug("No Link found in 'title'-Div")
            return None

        link_url = link_element.get("href")
        logging.debug("Found Link: %s", link_url)
        match = re.search(r'/anime/(\d+)', link_url)
        if not match:
            logging.debug("No Anime-ID found")
            return None

        anime_id = match.group(1)
        logging.debug("Anime ID: %s", anime_id)
        season -= 1

    return anime_id

def check_episodes(ID):
url = f"https://myanimelist.net/anime/{ID}"

Expand Down
5 changes: 4 additions & 1 deletion src/aniworld/common/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,10 @@
get_windows_version,
check_internet_connection,
show_messagebox,
get_season_episode_count
get_season_episode_count,
get_description,
get_description_with_ID,
fetch_ID
)

from .ascii_art import display_ascii_art
Expand Down
98 changes: 98 additions & 0 deletions src/aniworld/common/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -1424,5 +1424,103 @@ def set_temp_wallpaper():
logging.debug(f"An error occurred: {e}")


def fetch_ID(anime_title, season):
    """Resolve the MyAnimeList ID for *anime_title*.

    Queries MAL's prefix-search endpoint, picks the first non-OVA result,
    and — for season > 1 — follows the "Sequel (TV)" link on each anime
    page until the requested season is reached.

    Returns the MAL ID, or None when the title or a required sequel
    cannot be resolved.
    """
    logging.debug("Fetching MAL ID for: %s", anime_title)

    # Strip a trailing "(N episodes)" suffix, then URL-encode spaces.
    name = re.sub(r' \(\d+ episodes\)', '', anime_title)
    logging.debug("Processed name: %s", name)
    keyword = re.sub(r'\s+', '%20', name)
    logging.debug("Keyword for search: %s", keyword)

    response = requests.get(
        f"https://myanimelist.net/search/prefix.json?type=anime&keyword={keyword}",
        headers={"User-Agent": aniworld_globals.DEFAULT_USER_AGENT},
        timeout=10
    )
    logging.debug("Response status code: %d", response.status_code)

    if response.status_code != 200:
        # Previously only logged and fell through to .json(); bail out early.
        logging.debug("Failed to fetch MyAnimeList data.")
        return None

    mal_metadata = response.json()
    logging.debug("MAL metadata: %s", json.dumps(mal_metadata, indent=2))
    results = [entry['name'] for entry in mal_metadata['categories'][0]['items']]
    logging.debug("Results: %s", results)

    filtered_choices = [choice for choice in results if 'OVA' not in choice]
    logging.debug("Filtered choices: %s", filtered_choices)
    if not filtered_choices:
        # Guard: indexing [0] on an empty list raised IndexError before.
        logging.debug("No non-OVA results for: %s", anime_title)
        return None
    best_match = filtered_choices[0]
    logging.debug("Best match: %s", best_match)

    anime_id = None
    for entry in mal_metadata['categories'][0]['items']:
        if entry['name'] == best_match:
            logging.debug("Found MAL ID: %s for %s", entry['id'], best_match)
            anime_id = entry['id']
            break

    if anime_id is None:
        # Don't walk the sequel chain without a starting ID.
        return None

    # Each loop iteration advances one season via the "Sequel (TV)" link.
    while season > 1:
        url = f"https://myanimelist.net/anime/{anime_id}"

        # Timeout and UA header were missing on this request (hang risk).
        response = requests.get(
            url,
            headers={"User-Agent": aniworld_globals.DEFAULT_USER_AGENT},
            timeout=10
        )
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')

        sequel_div = soup.find(
            "div",
            string=lambda text: text and "Sequel" in text and "(TV)" in text
        )
        if not sequel_div:
            logging.debug("Sequel (TV) not found")
            return None

        title_div = sequel_div.find_next("div", class_="title")
        if not title_div:
            logging.debug("No 'title'-Div found")
            return None

        link_element = title_div.find("a")
        if not link_element:
            logging.debug("No Link found in 'title'-Div")
            return None

        link_url = link_element.get("href")
        logging.debug("Found Link: %s", link_url)
        match = re.search(r'/anime/(\d+)', link_url)
        if not match:
            logging.debug("No Anime-ID found")
            return None

        anime_id = match.group(1)
        logging.debug("Anime ID: %s", anime_id)
        season -= 1

    return anime_id


def get_description(anime_slug: str) -> str:
    """Return the full description text for an anime from aniworld.to.

    The full text lives in the ``data-full-description`` attribute of the
    ``<p class="seri_des">`` element (the visible paragraph is truncated).
    Returns an empty string when the element or attribute is missing,
    instead of raising TypeError on the missing tag.
    """
    url = f"https://aniworld.to/anime/stream/{anime_slug}"

    # Timeout was missing: a stalled connection would hang the UI.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    tag = soup.find('p', class_='seri_des')
    if tag is None:
        logging.debug("No description element found for slug: %s", anime_slug)
        return ""

    return tag.get('data-full-description', "")


def get_description_with_ID(anime_title: str, season: int) -> str:
    """Return the MyAnimeList og:description for the given title and season.

    Bug fix: the *season* argument was previously ignored (``season=1``
    was hard-coded into the ``fetch_ID`` call), so every season showed
    season 1's description.

    Returns an empty string when the MAL ID or the description meta tag
    cannot be resolved.
    """
    anime_id = fetch_ID(anime_title=anime_title, season=season)
    if anime_id is None:
        logging.debug("Could not resolve MAL ID for: %s", anime_title)
        return ""

    url = f"https://myanimelist.net/anime/{anime_id}"

    # Timeout was missing: a stalled connection would hang the UI.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    meta = soup.find('meta', property='og:description')
    if meta is None:
        logging.debug("No og:description meta tag for MAL ID: %s", anime_id)
        return ""
    return meta['content']


if __name__ == "__main__":
    # This module is import-only; there is no standalone entry point.
    pass
6 changes: 4 additions & 2 deletions src/aniworld/execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,15 +236,17 @@ def perform_action(params: Dict[str, Any]) -> None:

if action == "Watch":
if not only_command:
countdown()
if not platform.system() == "Windows":
countdown()
handle_watch_action(
link, mpv_title, aniskip_selected, aniskip_options, only_command
)
elif action == "Download":
handle_download_action(params)
elif action == "Syncplay":
if not only_command:
countdown()
if not platform.system() == "Windows":
countdown()
setup_autostart()
setup_autoexit()
handle_syncplay_action(
Expand Down

0 comments on commit f58fb20

Please sign in to comment.