From c3e7646c197c196c8d2020916aaf6f5c26aec423 Mon Sep 17 00:00:00 2001
From: F33RNI
Date: Sun, 21 Jul 2024 00:22:38 +0300
Subject: [PATCH] fix: proper libraries inheriting + fix for old format of
 artifacts

---
 src/mml/artifact.py         | 133 +++++++++++++++++++-----------------
 src/mml/deps_builder.py     |  20 ++++--
 src/mml/launcher.py         |   4 ++
 src/mml/profile_parser.py   |   6 +-
 src/mml/resolve_artifact.py |  15 ++--
 5 files changed, 102 insertions(+), 76 deletions(-)

diff --git a/src/mml/artifact.py b/src/mml/artifact.py
index a1008b4..3d66352 100644
--- a/src/mml/artifact.py
+++ b/src/mml/artifact.py
@@ -23,6 +23,9 @@
 # For calculating checksum
 CHUNK_SIZE = 8192
 
+# Default artifact URL if none is specified
+URL_DEFAULT = "https://libraries.minecraft.net/"
+
 
 class Artifact:
     def __init__(
@@ -65,20 +68,30 @@ def __init__(
             name = package_name_version[1]
             version = package_name_version[2]
             uri_from_name = f"{package}/{name}/{version}/{name}-{version}"
-            if (
-                not uri_from_name.endswith(".jar")
-                and not uri_from_name.endswith(".zip")
-                and not uri_from_name.endswith(".dll")
-                and not uri_from_name.endswith(".so")
-            ):
-                uri_from_name += ".jar"
-            self._artifact["path"] = uri_from_name
+            # Split off a known extension if the name already ends with one (rarely needed, but harmless)
+            ext = ".jar"
+            for ext_ in [".jar", ".zip", ".dll", ".so"]:
+                if uri_from_name.endswith(ext_):
+                    ext = ext_
+                    uri_from_name = uri_from_name[: -len(ext_)]
+                    break
+
+            # Set this as path
+            self._artifact["path"] = uri_from_name + ext
+
+            # VERY old format: no URL specified
+            if "url" not in self._artifact:
+                self._artifact["url"] = URL_DEFAULT
 
-            if "url" in self._artifact:
-                if not self._artifact["url"].endswith("/"):
-                    self._artifact["url"] += "/"
-                self._artifact["url"] += uri_from_name
+            # Fix for old Forge versions
+            if package == "net/minecraftforge":
+                uri_from_name += "-universal"
+
+            # Append to the URL
+            if not self._artifact["url"].endswith("/"):
+                self._artifact["url"] += "/"
+            self._artifact["url"] += uri_from_name + ext
 
     @property
     def parent_dir(self) -> str:
@@ -136,30 +149,6 @@ def size(self) -> int:
         """
         return self._artifact.get("size", 0)
 
-    @property
-    def checksum_alg(self) -> str or None:
-        """Searches for checksum algorithm in artifact
-
-        Returns:
-            str or None: "sha1", "md5", "sha256", "sha512" or None if not found
-        """
-        for alg in ["sha1", "md5", "sha256", "sha512"]:
-            if alg in self._artifact:
-                return alg
-        return None
-
-    @property
-    def target_checksum(self) -> str or None:
-        """Value of checksum (value of checksum_alg)
-
-        Returns:
-            str or None: checksum or None if not found
-        """
-        alg = self.checksum_alg
-        if not alg:
-            return None
-        return self._artifact[alg]
-
     @property
     def artifact_exists(self) -> bool:
         """Checks if target file exists
@@ -174,39 +163,61 @@ def artifact_exists(self) -> bool:
             return False
         return True
 
-    def calculate_actual_checksum(self) -> str or None:
-        """Calculate artifact's checksum
+    def verify_checksum(self) -> bool:
+        """Verifies the artifact's checksum
 
         Returns:
-            str or None: artifact's checksum (checksum_alg) or None if not exists
+            bool: True if the artifact has no checksum or its checksum is valid, False otherwise
         """
         if not self.artifact_exists:
             logging.debug("Unable to calculate checksum. No artifact or it doesn't exist")
-            return None
+            return False
 
-        alg = self.checksum_alg
-        if not alg:
-            logging.debug("Unable to calculate checksum. Unknown algorithm")
-            return None
+        # Collect known checksums as [(alg, checksum), ...] pairs
+        allowed_checksums = []
+        for alg in ["sha1", "md5", "sha256", "sha512"]:
+            if alg in self._artifact:
+                allowed_checksums.append((alg, self._artifact[alg]))
+
+        # Unlikely to be present, but handle it just in case
+        if "checksum" in self._artifact and isinstance(self._artifact["checksum"], str):
+            allowed_checksums.append(("sha1", self._artifact["checksum"]))
+
+        # Very old format: plain list of SHA-1 checksums
+        if "checksums" in self._artifact:
+            if isinstance(self._artifact["checksums"], List):
+                for checksum in self._artifact["checksums"]:
+                    allowed_checksums.append(("sha1", checksum))
+
+        # Return True if no checksums available
+        if len(allowed_checksums) == 0:
+            logging.warning(f"No checksums for {self._artifact.get('name', str(self._artifact))} artifact")
+            return True
+
+        # Verify
+        for alg, checksum in allowed_checksums:
+            if alg == "sha1":
+                file_hash = hashlib.sha1(usedforsecurity=False)
+            elif alg == "md5":
+                file_hash = hashlib.md5(usedforsecurity=False)
+            elif alg == "sha256":
+                file_hash = hashlib.sha256(usedforsecurity=False)
+            elif alg == "sha512":
+                file_hash = hashlib.sha512(usedforsecurity=False)
+
+            artifact_path = os.path.join(self._parent_dir, self._artifact["path"])
+            with open(artifact_path, "rb") as artifact_io:
+                chunk = artifact_io.read(CHUNK_SIZE)
+                while chunk:
+                    file_hash.update(chunk)
+                    chunk = artifact_io.read(CHUNK_SIZE)
 
-        if alg == "sha1":
-            file_hash = hashlib.sha1(usedforsecurity=False)
-        elif alg == "md5":
-            file_hash = hashlib.md5(usedforsecurity=False)
-        elif alg == "sha256":
-            file_hash = hashlib.sha256(usedforsecurity=False)
-        elif alg == "sha512":
-            file_hash = hashlib.sha512(usedforsecurity=False)
-        else:
-            raise Exception("Unknown algorithm")
+            checksum_ = file_hash.hexdigest()
+            logging.debug(f"Calculated {alg} checksum: {checksum_}")
 
-        artifact_path = os.path.join(self._parent_dir, self._artifact["path"])
-        with open(artifact_path, "rb") as artifact_io:
-            chunk = artifact_io.read(CHUNK_SIZE)
-            while chunk:
-                file_hash.update(chunk)
-                chunk = artifact_io.read(CHUNK_SIZE)
+            if checksum_.lower() == checksum.lower():
+                logging.debug("Checksum is valid")
+                return True
 
-        checksum = file_hash.hexdigest()
-        logging.debug(f"Calculated {alg} checksum: {checksum}")
-        return checksum
+        logging.warning(f"Wrong checksum of {self._artifact.get('name', str(self._artifact))} artifact")
+        return False
diff --git a/src/mml/deps_builder.py b/src/mml/deps_builder.py
index 8455ffb..4e3416f 100644
--- a/src/mml/deps_builder.py
+++ b/src/mml/deps_builder.py
@@ -218,20 +218,30 @@ def get_libraries(self) -> List[str] or None:
             if "name" not in library:
                 continue
 
-            # Check rules
-            if "rules" in library and not rules_check(library["rules"]):
+            # Check rules (both new and old format)
+            if ("rules" in library and not rules_check(library["rules"])) or (
+                "clientreq" in library and library["clientreq"] == False
+            ):
                 logging.debug(f"Skipping library {library['name']}. Disallowed by rules")
                 continue
 
+            # Determine available classifiers
+            classifiers_dict = library.get("downloads", {}).get("classifiers", library.get("classifiers"))
+
+            # Determine main artifact
+            artifact_dict = library.get("downloads", {}).get("artifact", library.get("artifact"))
+            if artifact_dict is None and not classifiers_dict:
+                artifact_dict = library
+
             # Add main artifact to the final list and download queue
-            artifact_dict = library.get("downloads", {}).get("artifact", library.get("artifact", library))
             if artifact_dict:
                 artifact_ = Artifact(artifact_dict, parent_dir=libs_dir)
                 self._add_artifact(artifact_)
                 libs.append(artifact_.path)
+            else:
+                logging.debug("Skipping main artifact. Only natives required?")
 
             # Add natives to the final list and download and unpack them
-            classifiers_dict = library.get("downloads", {}).get("classifiers", library.get("classifiers"))
             if classifiers_dict and "natives" in library and os_name_ in library["natives"]:
                 classifier_name = library["natives"][os_name_]
                 if classifier_name in classifiers_dict:
@@ -262,7 +272,7 @@ def get_log_config(self) -> Tuple[str or None, str or None]:
                 parent_dir=os.path.join(self._game_dir, LOG_CONFIGS_DIR),
                 target_file=logging_client["file"]["id"],
            )
-            log_config_path = resolve_artifact(logging_artifact)
+            log_config_path = resolve_artifact(logging_artifact, verify_checksums=False)
 
             if not log_config_path:
                 return None, None
diff --git a/src/mml/launcher.py b/src/mml/launcher.py
index a612b0f..0e934e2 100644
--- a/src/mml/launcher.py
+++ b/src/mml/launcher.py
@@ -263,6 +263,10 @@ def run(self) -> None:
         if self._env_variables:
             env_variables_.update(self._env_variables)
 
+        # Required by old versions
+        if "user_properties" not in env_variables_:
+            env_variables_["user_properties"] = "{}"
+
         # Add java args
         final_cmd = [self._java_path]
         final_cmd.extend(deps_builder_.get_arguments(False, self._features))
diff --git a/src/mml/profile_parser.py b/src/mml/profile_parser.py
index d163b1c..3ab8cb0 100644
--- a/src/mml/profile_parser.py
+++ b/src/mml/profile_parser.py
@@ -55,7 +55,12 @@ def update_deep(destination: Dict, update: Dict) -> Dict:
             if key not in destination:
                 destination[key] = value
             else:
-                destination[key].extend(value)
+                if key == "libraries":
+                    # Keep libraries from update ahead of the existing ones
+                    value.extend(destination[key])
+                    destination[key] = value
+                else:
+                    destination[key].extend(value)
         else:
             destination[key] = value
     return destination
diff --git a/src/mml/resolve_artifact.py b/src/mml/resolve_artifact.py
index 164ab26..c080c5e 100644
--- a/src/mml/resolve_artifact.py
+++ b/src/mml/resolve_artifact.py
@@ -38,19 +38,18 @@
 ATTEMPT_DELAY = 1.0
 
 
-def resolve_artifact(artifact_: Artifact, _attempt: int = 0) -> str or None:
+def resolve_artifact(artifact_: Artifact, _attempt: int = 0, verify_checksums: bool = True) -> str or None:
     """Checks if artifact exists (and verifies it's checksum) and downloads it if not
     Also, copies and unpacks it if needed
 
     Args:
         artifact_ (Artifact): artifact instance to download, copy and unpack
+        verify_checksums (bool, optional): False to ignore checksum mismatch. Defaults to True
 
     Returns:
         str or None: path to artifact if exists or downloaded successfully or None in case of error
     """
-    if artifact_.artifact_exists and (
-        not artifact_.checksum_alg or artifact_.target_checksum == artifact_.calculate_actual_checksum()
-    ):
+    if artifact_.artifact_exists and (not verify_checksums or artifact_.verify_checksum()):
         artifact_path = os.path.join(artifact_.parent_dir, artifact_.path)
         logging.debug(f"Artifact {artifact_path} exists")
         unpack_copy(artifact_, artifact_path)
@@ -84,15 +83,13 @@
                     artifact_io.flush()
                     os.fsync(artifact_io.fileno())
             else:
-                logging.error(f"Unable to download artifact: {response.status_code} - {response.text}")
+                logging.error(f"Unable to download artifact from {artifact_.url}: {response.status_code}-{response.text}")
     except Exception as e:
-        logging.error(f"Unable to download artifact: {e}")
+        logging.error(f"Unable to download artifact from {artifact_.url}: {e}")
         logging.debug("Error details", exc_info=e)
 
     # Check
-    if not artifact_.artifact_exists or (
-        artifact_.checksum_alg and artifact_.target_checksum != artifact_.calculate_actual_checksum()
-    ):
+    if not artifact_.artifact_exists or (verify_checksums and not artifact_.verify_checksum()):
         # Wait a bit and try again
         if _attempt < DOWNLOAD_ATTEMPTS:
             time.sleep(ATTEMPT_DELAY)
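
Note (not part of the patch): a minimal usage sketch of the old-format handling
introduced above. The library coordinates, checksum, and the "libraries"
directory below are made up purely for illustration, and the import paths
assume the package is importable as "mml" (matching the src/mml layout).

    from mml.artifact import Artifact
    from mml.resolve_artifact import resolve_artifact

    # Hypothetical legacy library entry: only "name" and the old "checksums"
    # list (each entry is treated as a SHA-1 value by Artifact.verify_checksum()).
    # There is no "url" either, so URL_DEFAULT (libraries.minecraft.net) is used.
    legacy_library = {
        "name": "net.minecraftforge:forge:1.7.10-10.13.4.1614",
        "checksums": ["0123456789abcdef0123456789abcdef01234567"],
    }

    # Path and download URL are derived from "name"; for net.minecraftforge
    # artifacts the "-universal" suffix is appended to the URL (not the path)
    artifact_ = Artifact(legacy_library, parent_dir="libraries")

    # Downloads the file if missing and verifies the SHA-1 above; pass
    # verify_checksums=False to ignore checksum mismatches, as the log config
    # artifact now does in deps_builder.get_log_config()
    path = resolve_artifact(artifact_)
    print(path)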