diff --git a/comet/api/core.py b/comet/api/core.py index 57f96e5..34d5d2e 100644 --- a/comet/api/core.py +++ b/comet/api/core.py @@ -9,25 +9,52 @@ templates = Jinja2Templates("comet/templates") main = APIRouter() + @main.get("/", status_code=200) async def root(): return RedirectResponse("/configure") + @main.get("/health", status_code=200) async def health(): return {"status": "ok"} + indexers = settings.INDEXER_MANAGER_INDEXERS web_config = { "indexers": [indexer.replace(" ", "_").lower() for indexer in indexers], - "languages": [language.replace(" ", "_") for language in RTN.patterns.language_code_mapping.keys()], - "resolutions": ["360p", "480p", "576p", "720p", "1080p", "1440p", "2160p", "4K", "Unknown"] + "languages": [ + language.replace(" ", "_") + for language in RTN.patterns.language_code_mapping.keys() + ], + "resolutions": [ + "360p", + "480p", + "576p", + "720p", + "1080p", + "1440p", + "2160p", + "4K", + "Unknown", + ], } + @main.get("/configure") @main.get("/{b64config}/configure") async def configure(request: Request): - return templates.TemplateResponse("index.html", {"request": request, "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" else "", "webConfig": web_config}) + return templates.TemplateResponse( + "index.html", + { + "request": request, + "CUSTOM_HEADER_HTML": settings.CUSTOM_HEADER_HTML + if settings.CUSTOM_HEADER_HTML and settings.CUSTOM_HEADER_HTML != "None" + else "", + "webConfig": web_config, + }, + ) + @main.get("/manifest.json") @main.get("/{b64config}/manifest.json") @@ -39,18 +66,9 @@ async def manifest(): "description": "Stremio's fastest torrent/debrid search add-on.", "logo": "https://i.imgur.com/jmVoVMu.jpeg", "background": "https://i.imgur.com/WwnXB3k.jpeg", - "resources": [ - "stream" - ], - "types": [ - "movie", - "series" - ], - "idPrefixes": [ - "tt" - ], + "resources": ["stream"], + "types": ["movie", "series"], + "idPrefixes": ["tt"], "catalogs": [], - "behaviorHints": { - "configurable": True - } + "behaviorHints": {"configurable": True}, } diff --git a/comet/api/stream.py b/comet/api/stream.py index 3395599..91b344e 100644 --- a/comet/api/stream.py +++ b/comet/api/stream.py @@ -8,42 +8,50 @@ from fastapi.responses import RedirectResponse from RTN import Torrent, parse, sort_torrents, title_match -from comet.utils.general import (bytes_to_size, config_check, check_info_hash, - generate_download_link, get_indexer_manager, - get_torrent_hash, is_video, translate, get_balanced_hashes) +from comet.debrid.manager import getDebrid + +from comet.utils.general import ( + bytes_to_size, + config_check, + get_indexer_manager, + get_torrent_hash, + is_video, + translate, + get_balanced_hashes, +) from comet.utils.logger import logger from comet.utils.models import database, rtn, settings streams = APIRouter() + @streams.get("/stream/{type}/{id}.json") @streams.get("/{b64config}/stream/{type}/{id}.json") async def stream(request: Request, b64config: str, type: str, id: str): config = config_check(b64config) if not config: - return { - "streams": [ - { - "name": "[⚠️] Comet", - "title": "Invalid Comet config.", - "url": "https://comet.fast" - } - ] - } - + return { + "streams": [ + { + "name": "[⚠️] Comet", + "title": "Invalid Comet config.", + "url": "https://comet.fast", + } + ] + } + connector = aiohttp.TCPConnector(limit=0) async with aiohttp.ClientSession(connector=connector) as session: - check_debrid = await session.get("https://api.real-debrid.com/rest/1.0/user", headers={ 
- "Authorization": f"Bearer {config['debridApiKey']}" - }) - check_debrid = await check_debrid.text() - if not '"type": "premium"' in check_debrid: + debrid = getDebrid(session, config) + + check_debrid = await debrid.check_premium() + if not check_debrid: return { "streams": [ { - "name": "[⚠️] Comet", - "title": "Invalid Real-Debrid account.", - "url": "https://comet.fast" + "name": "[⚠️] Comet", + "title": f"Invalid {config['debridService']} account.", + "url": "https://comet.fast", } ] } @@ -57,7 +65,9 @@ async def stream(request: Request, b64config: str, type: str, id: str): season = int(info[1]) episode = int(info[2]) - get_metadata = await session.get(f"https://v3.sg.media-imdb.com/suggestion/a/{id}.json") + get_metadata = await session.get( + f"https://v3.sg.media-imdb.com/suggestion/a/{id}.json" + ) metadata = await get_metadata.json() name = metadata["d"][0]["l"] @@ -66,47 +76,76 @@ async def stream(request: Request, b64config: str, type: str, id: str): if type == "series": logName = f"{name} S{season:02d}E{episode:02d}" - cache_key = hashlib.md5(json.dumps({"debridService": config["debridService"], "name": name, "season": season, "episode": episode, "indexers": config["indexers"]}).encode("utf-8")).hexdigest() - cached = await database.fetch_one(f"SELECT EXISTS (SELECT 1 FROM cache WHERE cacheKey = '{cache_key}')") + cache_key = hashlib.md5( + json.dumps( + { + "debridService": config["debridService"], + "name": name, + "season": season, + "episode": episode, + "indexers": config["indexers"], + } + ).encode("utf-8") + ).hexdigest() + cached = await database.fetch_one( + f"SELECT EXISTS (SELECT 1 FROM cache WHERE cacheKey = '{cache_key}')" + ) if cached[0] != 0: logger.info(f"Cache found for {logName}") - timestamp = await database.fetch_one(f"SELECT timestamp FROM cache WHERE cacheKey = '{cache_key}'") + timestamp = await database.fetch_one( + f"SELECT timestamp FROM cache WHERE cacheKey = '{cache_key}'" + ) if timestamp[0] + settings.CACHE_TTL < time.time(): - await database.execute(f"DELETE FROM cache WHERE cacheKey = '{cache_key}'") + await database.execute( + f"DELETE FROM cache WHERE cacheKey = '{cache_key}'" + ) logger.info(f"Cache expired for {logName}") else: - sorted_ranked_files = await database.fetch_one(f"SELECT results FROM cache WHERE cacheKey = '{cache_key}'") + sorted_ranked_files = await database.fetch_one( + f"SELECT results FROM cache WHERE cacheKey = '{cache_key}'" + ) sorted_ranked_files = json.loads(sorted_ranked_files[0]) - + balanced_hashes = await get_balanced_hashes(sorted_ranked_files, config) results = [] for hash in sorted_ranked_files: for resolution in balanced_hashes: if hash in balanced_hashes[resolution]: - results.append({ - "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}", - "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}", - "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}" - }) + results.append( + { + "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}", + "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}", + "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}", 
+ } + ) continue - return { - "streams": results - } + return {"streams": results} else: logger.info(f"No cache found for {logName} with user configuration") indexer_manager_type = settings.INDEXER_MANAGER_TYPE - logger.info(f"Start of {indexer_manager_type} search for {logName} with indexers {config['indexers']}") + logger.info( + f"Start of {indexer_manager_type} search for {logName} with indexers {config['indexers']}" + ) tasks = [] - tasks.append(get_indexer_manager(session, indexer_manager_type, config["indexers"], name)) + tasks.append( + get_indexer_manager(session, indexer_manager_type, config["indexers"], name) + ) if type == "series": - tasks.append(get_indexer_manager(session, indexer_manager_type, config["indexers"], f"{name} S0{season}E0{episode}")) + tasks.append( + get_indexer_manager( + session, + indexer_manager_type, + config["indexers"], + f"{name} S0{season}E0{episode}", + ) + ) search_response = await asyncio.gather(*tasks) torrents = [] @@ -117,17 +156,19 @@ async def stream(request: Request, b64config: str, type: str, id: str): for result in results: torrents.append(result) - logger.info(f"{len(torrents)} torrents found for {logName} with {indexer_manager_type}") + logger.info( + f"{len(torrents)} torrents found for {logName} with {indexer_manager_type}" + ) zilean_hashes_count = 0 try: if settings.ZILEAN_URL: - get_dmm = await session.post(f"{settings.ZILEAN_URL}/dmm/search", json={ - "queryText": name - }) + get_dmm = await session.post( + f"{settings.ZILEAN_URL}/dmm/search", json={"queryText": name} + ) get_dmm = await get_dmm.json() - if not "status" in get_dmm: + if "status" not in get_dmm: for result in get_dmm: zilean_hashes_count += 1 @@ -135,21 +176,25 @@ async def stream(request: Request, b64config: str, type: str, id: str): object = { "Title": result["filename"], "InfoHash": result["infoHash"], - "zilean": True + "zilean": True, } if indexer_manager_type == "prowlarr": object = { "title": result["filename"], "infoHash": result["infoHash"], - "zilean": True + "zilean": True, } torrents.append(object) - logger.info(f"{zilean_hashes_count} torrents found for {logName} with Zilean API") + logger.info( + f"{zilean_hashes_count} torrents found for {logName} with Zilean API" + ) except: - logger.warning(f"Exception while getting torrents for {logName} with Zilean API") + logger.warning( + f"Exception while getting torrents for {logName} with Zilean API" + ) if len(torrents) == 0: return {"streams": []} @@ -159,7 +204,11 @@ async def stream(request: Request, b64config: str, type: str, id: str): for torrent in torrents: # Only title match check if from Zilean if "zilean" in torrent: - parsed_torrent = parse(torrent["Title"] if indexer_manager_type == "jackett" else torrent["title"]) + parsed_torrent = parse( + torrent["Title"] + if indexer_manager_type == "jackett" + else torrent["title"] + ) if not title_match(name.lower(), parsed_torrent.parsed_title.lower()): filtered += 1 continue @@ -167,20 +216,20 @@ async def stream(request: Request, b64config: str, type: str, id: str): tasks.append(get_torrent_hash(session, indexer_manager_type, torrent)) logger.info(f"{filtered} filtered torrents from Zilean API for {logName}") - + torrent_hashes = await asyncio.gather(*tasks) torrent_hashes = list(set([hash for hash in torrent_hashes if hash])) logger.info(f"{len(torrent_hashes)} info hashes found for {logName}") torrent_hashes = list(set([hash for hash in torrent_hashes if hash])) - + if len(torrent_hashes) == 0: return {"streams": []} tasks = [] for hash in 
torrent_hashes: - tasks.append(check_info_hash(config["debridApiKey"], hash)) + tasks.append(debrid.check_hash_cache(hash)) responses = await asyncio.gather(*tasks) @@ -188,28 +237,31 @@ async def stream(request: Request, b64config: str, type: str, id: str): for response in responses: if not response: continue - + availability.update(await response.json()) files = {} for hash, details in availability.items(): - if not "rd" in details: + if "rd" not in details: continue if type == "series": for variants in details["rd"]: for index, file in variants.items(): filename = file["filename"] - + if not is_video(filename): continue filename_parsed = parse(filename) - if season in filename_parsed.season and episode in filename_parsed.episode: + if ( + season in filename_parsed.season + and episode in filename_parsed.episode + ): files[hash] = { "index": index, "title": filename, - "size": file["filesize"] + "size": file["filesize"], } continue @@ -224,32 +276,36 @@ async def stream(request: Request, b64config: str, type: str, id: str): files[hash] = { "index": index, "title": filename, - "size": file["filesize"] + "size": file["filesize"], } ranked_files = set() for hash in files: ranked_file = rtn.rank(files[hash]["title"], hash) ranked_files.add(ranked_file) - + sorted_ranked_files = sort_torrents(ranked_files) - logger.info(f"{len(sorted_ranked_files)} cached files found on Real-Debrid for {logName}") + logger.info( + f"{len(sorted_ranked_files)} cached files found on Real-Debrid for {logName}" + ) if len(sorted_ranked_files) == 0: return {"streams": []} - + sorted_ranked_files = { key: (value.model_dump() if isinstance(value, Torrent) else value) for key, value in sorted_ranked_files.items() } - for hash in sorted_ranked_files: # needed for caching + for hash in sorted_ranked_files: # needed for caching sorted_ranked_files[hash]["data"]["title"] = files[hash]["title"] sorted_ranked_files[hash]["data"]["size"] = files[hash]["size"] sorted_ranked_files[hash]["data"]["index"] = files[hash]["index"] - + json_data = json.dumps(sorted_ranked_files).replace("'", "''") - await database.execute(f"INSERT OR IGNORE INTO cache (cacheKey, results, timestamp) VALUES ('{cache_key}', '{json_data}', {time.time()})") + await database.execute( + f"INSERT OR IGNORE INTO cache (cacheKey, results, timestamp) VALUES ('{cache_key}', '{json_data}', {time.time()})" + ) logger.info(f"Results have been cached for {logName}") balanced_hashes = await get_balanced_hashes(sorted_ranked_files, config) @@ -257,17 +313,18 @@ async def stream(request: Request, b64config: str, type: str, id: str): for hash in sorted_ranked_files: for resolution in balanced_hashes: if hash in balanced_hashes[resolution]: - results.append({ - "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}", - "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}", - "url": f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}" - }) + results.append( + { + "name": f"[RD⚡] Comet {sorted_ranked_files[hash]['data']['resolution'][0] if len(sorted_ranked_files[hash]['data']['resolution']) > 0 else 'Unknown'}", + "title": f"{sorted_ranked_files[hash]['data']['title']}\n💾 {bytes_to_size(sorted_ranked_files[hash]['data']['size'])}", + "url": 
f"{request.url.scheme}://{request.url.netloc}/{b64config}/playback/{hash}/{sorted_ranked_files[hash]['data']['index']}", + } + ) continue - return { - "streams": results - } + return {"streams": results} + @streams.head("/{b64config}/playback/{hash}/{index}") async def playback(b64config: str, hash: str, index: str): @@ -275,16 +332,21 @@ async def playback(b64config: str, hash: str, index: str): if not config: return - download_link = await generate_download_link(config["debridApiKey"], hash, index) + async with aiohttp.ClientSession() as session: + debrid = getDebrid(session, config) + download_link = await debrid.generate_download_link(hash, index) return RedirectResponse(download_link, status_code=302) + @streams.get("/{b64config}/playback/{hash}/{index}") async def playback(b64config: str, hash: str, index: str): config = config_check(b64config) if not config: return - download_link = await generate_download_link(config["debridApiKey"], hash, index) + async with aiohttp.ClientSession() as session: + debrid = getDebrid(session, config) + download_link = await debrid.generate_download_link(hash, index) - return RedirectResponse(download_link, status_code=302) \ No newline at end of file + return RedirectResponse(download_link, status_code=302) diff --git a/comet/debrid/__init__.py b/comet/debrid/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/comet/debrid/manager.py b/comet/debrid/manager.py new file mode 100644 index 0000000..7b50905 --- /dev/null +++ b/comet/debrid/manager.py @@ -0,0 +1,8 @@ +import aiohttp + +from .realdebrid import RealDebrid + + +def getDebrid(session: aiohttp.ClientSession, config: dict): + if config["debridService"] == "realdebrid": + return RealDebrid(session, config["debridApiKey"]) diff --git a/comet/debrid/realdebrid.py b/comet/debrid/realdebrid.py new file mode 100644 index 0000000..137aac3 --- /dev/null +++ b/comet/debrid/realdebrid.py @@ -0,0 +1,91 @@ +import aiohttp + +from comet.utils.logger import logger +from comet.utils.models import settings + + +class RealDebrid: + def __init__(self, session: aiohttp.ClientSession, debrid_api_key: str): + session.headers["Authorization"] = f"Bearer {debrid_api_key}" + self.session = session + + async def check_premium(self): + try: + check_premium = await self.session.get( + "https://api.real-debrid.com/rest/1.0/user" + ) + check_premium = await check_premium.text() + if '"type": "premium"' not in check_premium: + return False + + return True + except Exception as e: + logger.warning( + f"Exception while checking premium status on Real Debrid: {e}" + ) + return False + + async def check_hash_cache(self, hash: str): + try: + response = await self.session.get( + f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{hash}" + ) + return response + except Exception as e: + logger.warning( + f"Exception while checking hash cache on Real Debrid for {hash}: {e}" + ) + return + + async def generate_download_link(self, hash: str, index: str): + try: + check_blacklisted = await self.session.get("https://real-debrid.com/vpn") + check_blacklisted = await check_blacklisted.text() + proxy = None + if ( + "Your ISP or VPN provider IP address is currently blocked on our website" + in check_blacklisted + ): + proxy = settings.DEBRID_PROXY_URL + if not proxy: + logger.warning( + "Real-Debrid blacklisted server's IP. No proxy found." + ) + return "https://comet.fast" + else: + logger.warning( + f"Real-Debrid blacklisted server's IP. 
Switching to proxy {proxy} for {hash}|{index}" + ) + + add_magnet = await self.session.post( + "https://api.real-debrid.com/rest/1.0/torrents/addMagnet", + data={"magnet": f"magnet:?xt=urn:btih:{hash}"}, + proxy=proxy, + ) + add_magnet = await add_magnet.json() + + get_magnet_info = await self.session.get(add_magnet["uri"], proxy=proxy) + get_magnet_info = await get_magnet_info.json() + + await self.session.post( + f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{add_magnet['id']}", + data={"files": index}, + proxy=proxy, + ) + + get_magnet_info = await self.session.get(add_magnet["uri"], proxy=proxy) + get_magnet_info = await get_magnet_info.json() + + unrestrict_link = await self.session.post( + "https://api.real-debrid.com/rest/1.0/unrestrict/link", + data={"link": get_magnet_info["links"][0]}, + proxy=proxy, + ) + unrestrict_link = await unrestrict_link.json() + + return unrestrict_link["download"] + except Exception as e: + logger.warning( + f"Exception while getting download link from Real Debrid for {hash}|{index}: {e}" + ) + return "https://comet.fast" diff --git a/comet/main.py b/comet/main.py index b0e5b2a..0c4ded3 100644 --- a/comet/main.py +++ b/comet/main.py @@ -19,6 +19,7 @@ from comet.utils.logger import logger from comet.utils.models import settings + class LoguruMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next): start_time = time.time() @@ -35,12 +36,14 @@ async def dispatch(self, request: Request, call_next): ) return response + @asynccontextmanager async def lifespan(app: FastAPI): await setup_database() yield await teardown_database() + app = FastAPI( title="Comet", summary="Stremio's fastest torrent/debrid search add-on.", @@ -63,6 +66,7 @@ async def lifespan(app: FastAPI): app.include_router(main) app.include_router(streams) + class Server(uvicorn.Server): def install_signal_handlers(self): pass @@ -83,12 +87,14 @@ def run_in_thread(self): self.should_exit = True sys.exit(0) + def signal_handler(sig, frame): # This will handle kubernetes/docker shutdowns better # Toss anything that needs to be gracefully shutdown here logger.log("COMET", "Exiting Gracefully.") sys.exit(0) + signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) @@ -97,20 +103,32 @@ def signal_handler(sig, frame): host=settings.FASTAPI_HOST, port=settings.FASTAPI_PORT, workers=settings.FASTAPI_WORKERS, - log_config=None + log_config=None, ) server = Server(config=config) + def start_log(): - logger.log("COMET", f"Server started on http://{settings.FASTAPI_HOST}:{settings.FASTAPI_PORT} - {settings.FASTAPI_WORKERS} workers") - logger.log("COMET", f"Database: {settings.DATABASE_PATH} - TTL: {settings.CACHE_TTL}s") + logger.log( + "COMET", + f"Server started on http://{settings.FASTAPI_HOST}:{settings.FASTAPI_PORT} - {settings.FASTAPI_WORKERS} workers", + ) + logger.log( + "COMET", f"Database: {settings.DATABASE_PATH} - TTL: {settings.CACHE_TTL}s" + ) logger.log("COMET", f"Debrid Proxy: {settings.DEBRID_PROXY_URL}") - logger.log("COMET", f"Indexer Manager: {settings.INDEXER_MANAGER_TYPE}|{settings.INDEXER_MANAGER_URL} - Timeout: {settings.INDEXER_MANAGER_TIMEOUT}s") + logger.log( + "COMET", + f"Indexer Manager: {settings.INDEXER_MANAGER_TYPE}|{settings.INDEXER_MANAGER_URL} - Timeout: {settings.INDEXER_MANAGER_TIMEOUT}s", + ) logger.log("COMET", f"Indexers: {settings.INDEXER_MANAGER_INDEXERS}") logger.log("COMET", f"Get Torrent Timeout: {settings.GET_TORRENT_TIMEOUT}s") logger.log("COMET", f"Zilean API: 
{settings.ZILEAN_URL}") - logger.log("COMET", f"Custom Header HTML Enabled: {bool(settings.CUSTOM_HEADER_HTML)}") - + logger.log( + "COMET", f"Custom Header HTML Enabled: {bool(settings.CUSTOM_HEADER_HTML)}" + ) + + with server.run_in_thread(): start_log() try: @@ -122,4 +140,4 @@ def start_log(): logger.error(f"Unexpected error: {e}") logger.exception(traceback.format_exc()) finally: - logger.log("COMET", "Server Shutdown") \ No newline at end of file + logger.log("COMET", "Server Shutdown") diff --git a/comet/utils/db.py b/comet/utils/db.py index 7ba165d..56b005a 100644 --- a/comet/utils/db.py +++ b/comet/utils/db.py @@ -3,24 +3,26 @@ from comet.utils.logger import logger from comet.utils.models import database, settings + async def setup_database(): - """Setup the database by ensuring the directory and file exist, and creating the necessary tables.""" try: # Ensure the database directory exists os.makedirs(os.path.dirname(settings.DATABASE_PATH), exist_ok=True) - + # Ensure the database file exists if not os.path.exists(settings.DATABASE_PATH): open(settings.DATABASE_PATH, "a").close() - + await database.connect() - await database.execute("CREATE TABLE IF NOT EXISTS cache (cacheKey BLOB PRIMARY KEY, timestamp INTEGER, results TEXT)") + await database.execute( + "CREATE TABLE IF NOT EXISTS cache (cacheKey BLOB PRIMARY KEY, timestamp INTEGER, results TEXT)" + ) except Exception as e: logger.error(f"Error setting up the database: {e}") + async def teardown_database(): - """Teardown the database by disconnecting.""" try: await database.disconnect() except Exception as e: - logger.error(f"Error tearing down the database: {e}") \ No newline at end of file + logger.error(f"Error tearing down the database: {e}") diff --git a/comet/utils/general.py b/comet/utils/general.py index 63f2449..af701d2 100644 --- a/comet/utils/general.py +++ b/comet/utils/general.py @@ -10,47 +10,181 @@ from comet.utils.models import settings translation_table = { - "ā": "a", "ă": "a", "ą": "a", "ć": "c", "č": "c", "ç": "c", - "ĉ": "c", "ċ": "c", "ď": "d", "đ": "d", "è": "e", "é": "e", - "ê": "e", "ë": "e", "ē": "e", "ĕ": "e", "ę": "e", "ě": "e", - "ĝ": "g", "ğ": "g", "ġ": "g", "ģ": "g", "ĥ": "h", "î": "i", - "ï": "i", "ì": "i", "í": "i", "ī": "i", "ĩ": "i", "ĭ": "i", - "ı": "i", "ĵ": "j", "ķ": "k", "ĺ": "l", "ļ": "l", "ł": "l", - "ń": "n", "ň": "n", "ñ": "n", "ņ": "n", "ʼn": "n", "ó": "o", - "ô": "o", "õ": "o", "ö": "o", "ø": "o", "ō": "o", "ő": "o", - "œ": "oe", "ŕ": "r", "ř": "r", "ŗ": "r", "š": "s", "ş": "s", - "ś": "s", "ș": "s", "ß": "ss", "ť": "t", "ţ": "t", "ū": "u", - "ŭ": "u", "ũ": "u", "û": "u", "ü": "u", "ù": "u", "ú": "u", - "ų": "u", "ű": "u", "ŵ": "w", "ý": "y", "ÿ": "y", "ŷ": "y", - "ž": "z", "ż": "z", "ź": "z", "æ": "ae", "ǎ": "a", "ǧ": "g", - "ə": "e", "ƒ": "f", "ǐ": "i", "ǒ": "o", "ǔ": "u", "ǚ": "u", - "ǜ": "u", "ǹ": "n", "ǻ": "a", "ǽ": "ae", "ǿ": "o" + "ā": "a", + "ă": "a", + "ą": "a", + "ć": "c", + "č": "c", + "ç": "c", + "ĉ": "c", + "ċ": "c", + "ď": "d", + "đ": "d", + "è": "e", + "é": "e", + "ê": "e", + "ë": "e", + "ē": "e", + "ĕ": "e", + "ę": "e", + "ě": "e", + "ĝ": "g", + "ğ": "g", + "ġ": "g", + "ģ": "g", + "ĥ": "h", + "î": "i", + "ï": "i", + "ì": "i", + "í": "i", + "ī": "i", + "ĩ": "i", + "ĭ": "i", + "ı": "i", + "ĵ": "j", + "ķ": "k", + "ĺ": "l", + "ļ": "l", + "ł": "l", + "ń": "n", + "ň": "n", + "ñ": "n", + "ņ": "n", + "ʼn": "n", + "ó": "o", + "ô": "o", + "õ": "o", + "ö": "o", + "ø": "o", + "ō": "o", + "ő": "o", + "œ": "oe", + "ŕ": "r", + "ř": "r", + "ŗ": "r", + "š": "s", + "ş": "s", + "ś": "s", 
+ "ș": "s", + "ß": "ss", + "ť": "t", + "ţ": "t", + "ū": "u", + "ŭ": "u", + "ũ": "u", + "û": "u", + "ü": "u", + "ù": "u", + "ú": "u", + "ų": "u", + "ű": "u", + "ŵ": "w", + "ý": "y", + "ÿ": "y", + "ŷ": "y", + "ž": "z", + "ż": "z", + "ź": "z", + "æ": "ae", + "ǎ": "a", + "ǧ": "g", + "ə": "e", + "ƒ": "f", + "ǐ": "i", + "ǒ": "o", + "ǔ": "u", + "ǚ": "u", + "ǜ": "u", + "ǹ": "n", + "ǻ": "a", + "ǽ": "ae", + "ǿ": "o", } translation_table = str.maketrans(translation_table) info_hash_pattern = re.compile(r"\b([a-fA-F0-9]{40})\b") + def translate(title: str): return title.translate(translation_table) + def is_video(title: str): - return title.endswith(tuple([".mkv", ".mp4", ".avi", ".mov", ".flv", ".wmv", ".webm", ".mpg", ".mpeg", ".m4v", ".3gp", ".3g2", ".ogv", ".ogg", ".drc", ".gif", ".gifv", ".mng", ".avi", ".mov", ".qt", ".wmv", ".yuv", ".rm", ".rmvb", ".asf", ".amv", ".m4p", ".m4v", ".mpg", ".mp2", ".mpeg", ".mpe", ".mpv", ".mpg", ".mpeg", ".m2v", ".m4v", ".svi", ".3gp", ".3g2", ".mxf", ".roq", ".nsv", ".flv", ".f4v", ".f4p", ".f4a", ".f4b"])) + return title.endswith( + tuple( + [ + ".mkv", + ".mp4", + ".avi", + ".mov", + ".flv", + ".wmv", + ".webm", + ".mpg", + ".mpeg", + ".m4v", + ".3gp", + ".3g2", + ".ogv", + ".ogg", + ".drc", + ".gif", + ".gifv", + ".mng", + ".avi", + ".mov", + ".qt", + ".wmv", + ".yuv", + ".rm", + ".rmvb", + ".asf", + ".amv", + ".m4p", + ".m4v", + ".mpg", + ".mp2", + ".mpeg", + ".mpe", + ".mpv", + ".mpg", + ".mpeg", + ".m2v", + ".m4v", + ".svi", + ".3gp", + ".3g2", + ".mxf", + ".roq", + ".nsv", + ".flv", + ".f4v", + ".f4p", + ".f4a", + ".f4b", + ] + ) + ) + def bytes_to_size(bytes: int): sizes = ["Bytes", "KB", "MB", "GB", "TB"] if bytes == 0: return "0 Byte" - + i = int(math.floor(math.log(bytes, 1024))) return f"{round(bytes / math.pow(1024, i), 2)} {sizes[i]}" + def config_check(b64config: str): try: config = json.loads(base64.b64decode(b64config).decode()) - if not isinstance(config["debridService"], str) or config["debridService"] not in ["realdebrid"]: + if not isinstance(config["debridService"], str) or config[ + "debridService" + ] not in ["realdebrid"]: return False if not isinstance(config["debridApiKey"], str): return False @@ -58,7 +192,10 @@ def config_check(b64config: str): return False if not isinstance(config["maxResults"], int) or config["maxResults"] < 0: return False - if not isinstance(config["resolutions"], list) or len(config["resolutions"]) == 0: + if ( + not isinstance(config["resolutions"], list) + or len(config["resolutions"]) == 0 + ): return False if not isinstance(config["languages"], list) or len(config["languages"]) == 0: return False @@ -67,7 +204,13 @@ def config_check(b64config: str): except: return False -async def get_indexer_manager(session: aiohttp.ClientSession, indexer_manager_type: str, indexers: list, query: str): + +async def get_indexer_manager( + session: aiohttp.ClientSession, + indexer_manager_type: str, + indexers: list, + query: str, +): try: indexers = [indexer.replace("_", " ") for indexer in indexers] @@ -75,26 +218,36 @@ async def get_indexer_manager(session: aiohttp.ClientSession, indexer_manager_ty results = [] if indexer_manager_type == "jackett": - response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v2.0/indexers/all/results?apikey={settings.INDEXER_MANAGER_API_KEY}&Query={query}&Tracker[]={'&Tracker[]='.join(indexer for indexer in indexers)}", timeout=timeout) # &Category[]=2000&Category[]=5000 + response = await session.get( + 
f"{settings.INDEXER_MANAGER_URL}/api/v2.0/indexers/all/results?apikey={settings.INDEXER_MANAGER_API_KEY}&Query={query}&Tracker[]={'&Tracker[]='.join(indexer for indexer in indexers)}", + timeout=timeout, + ) # &Category[]=2000&Category[]=5000 response = await response.json() for result in response["Results"]: results.append(result) - + if indexer_manager_type == "prowlarr": - get_indexers = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", headers={ - "X-Api-Key": settings.INDEXER_MANAGER_API_KEY - }) + get_indexers = await session.get( + f"{settings.INDEXER_MANAGER_URL}/api/v1/indexer", + headers={"X-Api-Key": settings.INDEXER_MANAGER_API_KEY}, + ) get_indexers = await get_indexers.json() indexers_id = [] for indexer in get_indexers: - if indexer["name"].lower() in indexers or indexer["definitionName"].lower() in indexers: + if ( + indexer["name"].lower() in indexers + or indexer["definitionName"].lower() in indexers + ): indexers_id.append(indexer["id"]) - response = await session.get(f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexer_id) for indexer_id in indexers_id)}&type=search", headers={ # &categories=2000&categories=5000 - "X-Api-Key": settings.INDEXER_MANAGER_API_KEY - }) + response = await session.get( + f"{settings.INDEXER_MANAGER_URL}/api/v1/search?query={query}&indexerIds={'&indexerIds='.join(str(indexer_id) for indexer_id in indexers_id)}&type=search", + headers={ # &categories=2000&categories=5000 + "X-Api-Key": settings.INDEXER_MANAGER_API_KEY + }, + ) response = await response.json() for result in response: @@ -102,16 +255,23 @@ async def get_indexer_manager(session: aiohttp.ClientSession, indexer_manager_ty return results except Exception as e: - logger.warning(f"Exception while getting {indexer_manager_type} results for {query} with {indexers}: {e}") + logger.warning( + f"Exception while getting {indexer_manager_type} results for {query} with {indexers}: {e}" + ) + -async def get_torrent_hash(session: aiohttp.ClientSession, indexer_manager_type: str, torrent: dict): +async def get_torrent_hash( + session: aiohttp.ClientSession, indexer_manager_type: str, torrent: dict +): if "InfoHash" in torrent and torrent["InfoHash"] != None: return torrent["InfoHash"] - + if "infoHash" in torrent: return torrent["infoHash"] - url = torrent["Link"] if indexer_manager_type == "jackett" else torrent["downloadUrl"] + url = ( + torrent["Link"] if indexer_manager_type == "jackett" else torrent["downloadUrl"] + ) try: timeout = aiohttp.ClientTimeout(total=settings.GET_TORRENT_TIMEOUT) @@ -129,12 +289,15 @@ async def get_torrent_hash(session: aiohttp.ClientSession, indexer_manager_type: match = info_hash_pattern.search(location) if not match: return - + hash = match.group(1).upper() return hash except Exception as e: - logger.warning(f"Exception while getting torrent info hash for {torrent['indexer'] if 'indexer' in torrent else (torrent['Tracker'] if 'Tracker' in torrent else '')}|{url}: {e}") + logger.warning( + f"Exception while getting torrent info hash for {torrent['indexer'] if 'indexer' in torrent else (torrent['Tracker'] if 'Tracker' in torrent else '')}|{url}: {e}" + ) + async def get_balanced_hashes(hashes: dict, config: dict): max_results = config["maxResults"] @@ -143,25 +306,33 @@ async def get_balanced_hashes(hashes: dict, config: dict): hashes_by_resolution = {} for hash in hashes: - if not "All" in config_languages and not hashes[hash]["data"]["is_multi_audio"] and not any(language.replace("_", " 
").capitalize() in hashes[hash]["data"]["language"] for language in config_languages): + if ( + "All" not in config_languages + and not hashes[hash]["data"]["is_multi_audio"] + and not any( + language.replace("_", " ").capitalize() + in hashes[hash]["data"]["language"] + for language in config_languages + ) + ): continue resolution = hashes[hash]["data"]["resolution"] if len(resolution) == 0: - if not "All" in config_resolutions and not "Unknown" in config_resolutions: + if "All" not in config_resolutions and "Unknown" not in config_resolutions: continue - if not "Unknown" in hashes_by_resolution: + if "Unknown" not in hashes_by_resolution: hashes_by_resolution["Unknown"] = [hash] continue hashes_by_resolution["Unknown"].append(hash) continue - if not "All" in config_resolutions and not resolution[0] in config_resolutions: + if "All" not in config_resolutions and resolution[0] not in config_resolutions: continue - if not resolution[0] in hashes_by_resolution: + if resolution[0] not in hashes_by_resolution: hashes_by_resolution[resolution[0]] = [hash] continue @@ -187,69 +358,14 @@ async def get_balanced_hashes(hashes: dict, config: dict): selected_total = sum(len(hashes) for hashes in balanced_hashes.values()) if selected_total < max_results: missing_hashes = max_results - selected_total - + for resolution, hashes in hashes_by_resolution.items(): if missing_hashes <= 0: break - + current_count = len(balanced_hashes[resolution]) - available_hashes = hashes[current_count:current_count + missing_hashes] + available_hashes = hashes[current_count : current_count + missing_hashes] balanced_hashes[resolution].extend(available_hashes) missing_hashes -= len(available_hashes) return balanced_hashes - -async def check_info_hash(session: aiohttp.ClientSession, debrid_api_key: str, hash: str): - try: - response = await session.get(f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{hash}", headers={ - "Authorization": f"Bearer {debrid_api_key}" - }) - - return response - except Exception as e: - logger.warning(f"Exception while checking info hash with Real Debrid for {hash}: {e}") - - return - -async def generate_download_link(debrid_api_key: str, hash: str, index: str): - try: - async with aiohttp.ClientSession(headers={ - "Authorization": f"Bearer {debrid_api_key}" - }) as session: - check_blacklisted = await session.get("https://real-debrid.com/vpn") - check_blacklisted = await check_blacklisted.text() - - proxy = None - if "Your ISP or VPN provider IP address is currently blocked on our website" in check_blacklisted: - proxy = settings.DEBRID_PROXY_URL - if not proxy: - logger.warning(f"Real-Debrid blacklisted server's IP. No proxy found.") - return "https://comet.fast" - else: - logger.warning(f"Real-Debrid blacklisted server's IP. 
Switching to proxy {proxy} for {hash}|{index}") - - add_magnet = await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet", data={ - "magnet": f"magnet:?xt=urn:btih:{hash}" - }, proxy=proxy) - add_magnet = await add_magnet.json() - - get_magnet_info = await session.get(add_magnet["uri"], proxy=proxy) - get_magnet_info = await get_magnet_info.json() - - await session.post(f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{add_magnet['id']}", data={ - "files": index - }, proxy=proxy) - - get_magnet_info = await session.get(add_magnet["uri"], proxy=proxy) - get_magnet_info = await get_magnet_info.json() - - unrestrict_link = await session.post(f"https://api.real-debrid.com/rest/1.0/unrestrict/link", data={ - "link": get_magnet_info["links"][0] - }, proxy=proxy) - unrestrict_link = await unrestrict_link.json() - - return unrestrict_link["download"] - except Exception as e: - logger.warning(f"Exception while getting download link from Real Debrid for {hash}|{index}: {e}") - - return "https://comet.fast" \ No newline at end of file diff --git a/comet/utils/logger.py b/comet/utils/logger.py index 37c1c2b..d342e77 100644 --- a/comet/utils/logger.py +++ b/comet/utils/logger.py @@ -2,13 +2,14 @@ from loguru import logger + def setupLogger(level: str): logger.level("COMET", no=50, icon="🌠", color="") logger.level("API", no=40, icon="👾", color="") logger.level("INFO", icon="📰", color="") logger.level("DEBUG", icon="🕸️", color="") - logger.level("WARNING", icon="⚠️", color="") + logger.level("WARNING", icon="⚠️", color="") log_format = ( "{time:YYYY-MM-DD} {time:HH:mm:ss} | " @@ -16,15 +17,18 @@ def setupLogger(level: str): "{module}.{function} - {message}" ) - logger.configure(handlers=[ - { - "sink": sys.stderr, - "level": level, - "format": log_format, - "backtrace": False, - "diagnose": False, - "enqueue": True, - } - ]) - -setupLogger("DEBUG") \ No newline at end of file + logger.configure( + handlers=[ + { + "sink": sys.stderr, + "level": level, + "format": log_format, + "backtrace": False, + "diagnose": False, + "enqueue": True, + } + ] + ) + + +setupLogger("DEBUG") diff --git a/comet/utils/models.py b/comet/utils/models.py index aba1e5b..f3aee5a 100644 --- a/comet/utils/models.py +++ b/comet/utils/models.py @@ -5,11 +5,9 @@ from pydantic_settings import BaseSettings, SettingsConfigDict from RTN import RTN, BaseRankingModel, SettingsModel + class AppSettings(BaseSettings): - model_config = SettingsConfigDict( - env_file=".env", - env_file_encoding="utf-8" - ) + model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8") ADDON_ID: str = "stremio.comet.fast" ADDON_NAME: str = "Comet" @@ -28,6 +26,7 @@ class AppSettings(BaseSettings): ZILEAN_URL: Optional[str] = None CUSTOM_HEADER_HTML: Optional[str] = None + class BestOverallRanking(BaseRankingModel): uhd: int = 100 fhd: int = 90 @@ -48,10 +47,11 @@ class BestOverallRanking(BaseRankingModel): bluray: int = 120 webdl: int = 90 + rtn_settings: SettingsModel = SettingsModel() rtn_ranking: BestOverallRanking = BestOverallRanking() # For use anywhere rtn: RTN = RTN(settings=rtn_settings, ranking_model=rtn_ranking) settings: AppSettings = AppSettings() -database = Database(f"sqlite:///{settings.DATABASE_PATH}") \ No newline at end of file +database = Database(f"sqlite:///{settings.DATABASE_PATH}")
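
The core move of this refactor is the new `comet/debrid` package: `stream()` and both `playback()` routes no longer call Real-Debrid endpoints directly but go through `getDebrid()`, which returns an object exposing `check_premium()`, `check_hash_cache()`, and `generate_download_link()`. A minimal usage sketch of that surface, mirroring what the reworked `/playback` route now does (the API key and info hash below are placeholders, not values from this changeset):

```python
import asyncio

import aiohttp

from comet.debrid.manager import getDebrid


async def main():
    # Placeholder config; a real one is the base64-decoded b64config payload
    # validated by config_check().
    config = {"debridService": "realdebrid", "debridApiKey": "<api-key>"}

    async with aiohttp.ClientSession() as session:
        debrid = getDebrid(session, config)

        # check_premium() also returns False on any exception, so a falsy
        # result covers both "not premium" and "request failed".
        if not await debrid.check_premium():
            return

        # index is the file index inside the torrent, passed through as a
        # string exactly like the /playback/{hash}/{index} route does.
        link = await debrid.generate_download_link("<info-hash>", "1")
        print(link)


asyncio.run(main())
```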
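
Because the routes depend only on those three methods, a second provider would mostly be a new module under `comet/debrid/` plus one branch in the factory. A hypothetical sketch of that extension: the `AllDebrid` class and the `"alldebrid"` service value are assumptions, not part of this changeset, and `config_check()` in `comet/utils/general.py` still whitelists `"realdebrid"` alone, so its list would need the new value as well.

```python
import aiohttp

from .realdebrid import RealDebrid


class AllDebrid:
    """Hypothetical provider implementing the same three-method interface
    as RealDebrid; method bodies are intentionally left unimplemented."""

    def __init__(self, session: aiohttp.ClientSession, debrid_api_key: str):
        self.session = session
        self.api_key = debrid_api_key

    async def check_premium(self) -> bool:
        raise NotImplementedError

    async def check_hash_cache(self, hash: str):
        raise NotImplementedError

    async def generate_download_link(self, hash: str, index: str) -> str:
        raise NotImplementedError


def getDebrid(session: aiohttp.ClientSession, config: dict):
    # Dispatch on the service name from the validated user config.
    if config["debridService"] == "realdebrid":
        return RealDebrid(session, config["debridApiKey"])
    if config["debridService"] == "alldebrid":
        return AllDebrid(session, config["debridApiKey"])
```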