diff --git a/Dockerfile b/Dockerfile index 3f0c190..239de81 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,5 @@ FROM python:3.12-slim -RUN python3 -m pip install --no-cache-dir requests python-frontmatter pathlib - COPY . / +RUN python3 -m pip install --no-cache-dir -r /requirements.txt ENTRYPOINT ["python", "/entrypoint.py"] diff --git a/README.md b/README.md index 30dd475..d2ab0f9 100644 --- a/README.md +++ b/README.md @@ -1,175 +1,189 @@ # Publish GitHub to Hashnode GitHub Action -This GitHub Action publishes blog posts from a GitHub repository to a specific publication on Hashnode. It reads markdown files, processes the frontmatter and content, and uses the Hashnode API to create, update, or delete posts. +A powerful GitHub Action that seamlessly publishes blog posts from your GitHub repository to Hashnode publications. This action handles markdown files with frontmatter, processes content, and manages posts through Hashnode's GraphQL API. -## Features +## 🌟 Key Features -- Create new posts on Hashnode if they do not exist. -- Update existing posts on Hashnode if they exist. -- Delists posts on Hashnode if the corresponding markdown file is not present in the current commit. -- Handles correct linking of cover images and inline images in the markdown content. +- **Automated Publishing**: Create and update posts on Hashnode directly from your GitHub repository +- **Smart Updates**: Only processes changed files, minimizing API calls +- **Image Handling**: Automatically processes both cover images and inline images +- **Post Management**: Handles creation, updates, and delisting of posts +- **Frontmatter Support**: Rich metadata support through YAML frontmatter +- **Debug Output**: Comprehensive debugging information for troubleshooting -## Inputs +## 📋 Prerequisites -- `access-token` (required): Your Hashnode API Personal Access Token. 
See: [Hashnode Developer Settings](https://hashnode.com/settings/developer) -- `added-files` (required): The list of added files in the repository, automatically provided by the `tj-actions/changed-files` action in the examples below. -- `changed-files` (required): The list of changed files in the repository, automatically provided by the `tj-actions/changed-files` action in the examples below. -- `publication-host` (required): The publication host (e.g., `blog.mydomain.com`). -- `posts-directory` (optional): The local directory in this repo containing the blog posts, if different from the root directory. Default: `.`. Example: `content/posts`. +- A Hashnode account and publication +- A GitHub repository containing your markdown posts +- A Hashnode Personal Access Token -## Outputs +## 🔧 Installation -- `result_json`: Publishes result as a JSON string. -- `result_summary`: Publishes result summary formatted as text. +### 1. Set Up GitHub Secrets -## Usage +1. Go to your repository's Settings → Secrets and Variables → Actions +2. Create a new secret `HASHNODE_ACCESS_TOKEN` with your Hashnode API token + ``` + Name: HASHNODE_ACCESS_TOKEN + Value: your_hashnode_api_token + ``` -### 1. Create a `.github/workflows/publish.yml` file +### 2. Create Workflow File -Create a new workflow file in your repository to define the steps required to publish the posts. 
+Create `.github/workflows/publish.yml`: ```yaml -name: Publish My Hashnode Blog Posts +name: Publish to Hashnode on: push: + branches: + - main + paths: + - 'content/posts/**' jobs: publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v44 - - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - - name: Publish to Hashnode - uses: actions/publish-github-to-hashnode@v1 - with: - added-files: ${{ steps.changed-files.outputs.added_files }} # Uses output from changed-files action - changed-files: ${{ steps.changed-files.outputs.all_changed_files }} # Uses output from changed-files action - access-token: ${{ secrets.HASHNODE_ACCESS_TOKEN }} - publication-host: 'blog.mydomain.com' # Your publication host - posts-directory: 'content/posts' # Dir within your repo containing the markdown files, if different from root dir + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@v44 + + - uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Publish to Hashnode + uses: actions/publish-github-to-hashnode@v1 + with: + added-files: ${{ steps.changed-files.outputs.added_files }} + changed-files: ${{ steps.changed-files.outputs.all_changed_files }} + access-token: ${{ secrets.HASHNODE_ACCESS_TOKEN }} + publication-host: 'blog.mydomain.com' + posts-directory: 'content/posts' ``` -### 2. Store your Hashnode API Access Token as a GitHub Secret +## 📝 Post Format -1. Obtain your Hashnode API Personal Access Token. See: [Hashnode Developer Settings](https://hashnode.com/settings/developer) -2. Go to your repository on GitHub. -3. Click on `Settings`. -4. Scroll down to `Secrets and variables` and click on `Actions`. -5. Click `New repository secret`. -6. Add a new secret with the name `HASHNODE_ACCESS_TOKEN` and your Hashnode API token as the value. 
- -### 3. Prepare your repository structure - -Ensure your repository contains the markdown files you wish to publish in the specified directory (default is the root of the repository). - -### 4. Markdown Post Frontmatter - -#### Frontmatter Fields - -Full list of frontmatter fields that can be used in the markdown files: - -- `title` (required): Title of the post. -- `subtitle` (optional): Subtitle of the post. -- `slug` (required): Slug of the post. -- `tags` (optional): Tags of the post (comma-separated). -- `enableTableOfContents` (optional, default: false): Enable table of contents. -- `publish` (optional, default: true): Should the post be published at this time. -- `coverImage` (optional): Cover image relative path within the repository starting from `posts-directory` (as specified in pubish.yml) if provided. -- `coverImageAttribution`: Information about the cover image attribution (optional) -- `publishedAt`: Date and time when the post was published (optional) -- `disableComments` (optional, default: false): Disable comments on the post. - -#### Example Frontmatter +### Directory Structure +``` +your-repo/ +├── content/ +│ └── posts/ +│ ├── my-first-post.md +│ └── images/ +│ └── cover.jpg +└── .github/ + └── workflows/ + └── publish.yml +``` +### Markdown Format ```markdown --- -title: Creating Spaghetti in Docker Compose -slug: creating-spaghetti-in-docker-compose -tags: docker,docker-compose +title: My Awesome Post +subtitle: A detailed guide to awesomeness +slug: my-awesome-post +tags: javascript,webdev,tutorial enableTableOfContents: true coverImage: images/cover.jpg +coverImageAttribution: Photo by Author +publishedAt: 2024-03-20T10:00:00Z +disableComments: false --- -## Introduction - -This is an introduction to creating spaghetti in Docker Compose. - -## Ingredients +Your post content here... 
+``` -- Docker Engine -- Spaghetti -- Sauce -- Cheese -- Love +### Frontmatter Fields Reference + +| Field | Required | Type | Description | +|-------|----------|------|-------------| +| title | Yes | string | Post title | +| slug | Yes | string | URL slug for the post | +| subtitle | No | string | Post subtitle | +| tags | No | string | Comma-separated list of tags | +| enableTableOfContents | No | boolean | Enable/disable TOC | +| coverImage | No | string | Path to cover image | +| coverImageAttribution | No | string | Attribution for cover image | +| publishedAt | No | string | ISO 8601 datetime | +| disableComments | No | boolean | Disable comments | + +## 🔄 Workflow + +1. Create/edit markdown files in your repository +2. Commit and push changes +3. GitHub Action automatically: + - Detects changed files + - Processes markdown content + - Updates images to use absolute URLs + - Creates/updates posts on Hashnode + - Provides detailed output of operations + +## 📊 Outputs + +### JSON Output +```json +{ + "added": [{ + "id": "post123", + "title": "New Post", + "slug": "new-post" + }], + "modified": [{ + "id": "post456", + "title": "Updated Post", + "slug": "updated-post" + }], + "deleted": [], + "errors": [] +} +``` -## Steps +### Using Outputs in Workflow -1. ... -2. ... -3. ... +```yaml +- name: Publish to Hashnode + id: publish + uses: actions/publish-github-to-hashnode@v1 + # ... inputs ... + +- name: Process Results + run: | + echo "Published posts: ${{ fromJSON(steps.publish.outputs.result_json).added }}" + echo "Summary: ${{ steps.publish.outputs.result_summary }}" ``` -### 5. Handling Image URLs +## 🔍 Debugging -The action will automatically convert relative image URLs to absolute URLs that point to the raw content on GitHub. Ensure your image paths in the markdown are correct relative paths. +Enable debug logs by setting repository secret: +``` +ACTIONS_STEP_DEBUG=true +``` -## Example Workflow Using `result_json` +View detailed logs in GitHub Actions run. 
-You can utilize the `result_json` output in subsequent steps to get the result of the publish operation in json format. The approach below can also be used with `result_summary` for a text summary. +## 🤝 Contributing -```yaml -name: Publish My Hashnode Blog Posts -on: - push: +1. Fork the repository +2. Create a feature branch +3. Submit a Pull Request -jobs: - publish: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v44 - - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - - name: Publish to Hashnode - id: publish - uses: actions/publish-github-to-hashnode@v1 - with: - added-files: ${{ steps.changed-files.outputs.added_files }} - changed-files: ${{ steps.changed-files.outputs.all_changed_files }} - access-token: ${{ secrets.HASHNODE_ACCESS_TOKEN }} - publication-host: 'blog.mydomain.com' - posts-directory: 'content/posts' - - - name: Get the output JSON - run: echo "${{ steps.publish.outputs.result_json }}" -``` +Please check existing issues and create a new one before submitting PRs. -## Development +## 📜 License -To contribute to the development of this action, clone the repository, make your changes, and submit a pull request. Please ensure you submit a detailed Issue describing the work you are planning to do prior to submitting a PR. +MIT License - see [LICENSE](LICENSE) for details -## License +## 🆘 Support -This project is licensed under the MIT License. +- Create an issue for bugs/features -## Questions and Support +## 🔗 Resources -If you have any questions or need support, please open an issue in the GitHub repository. I will do my best to help. 
+- [Hashnode API Documentation](https://api.hashnode.com) +- [GitHub Actions Documentation](https://docs.github.com/en/actions) \ No newline at end of file diff --git a/constants.py b/constants.py deleted file mode 100644 index 9de3a44..0000000 --- a/constants.py +++ /dev/null @@ -1,39 +0,0 @@ -"""This file contains the constants used in the application. - -ACCESS_TOKEN, POSTS_DIRECTORY, PUBLICATION_HOST, GITHUB_REPOSITORY, and BRANCH are read from the environment variables. - -- POSTS_DIRECTORY is the directory where the markdown posts are stored. -- PUBLICATION_HOST is the host of the Hashnode publication where the posts will be published. -- GITHUB_REPOSITORY is the name of the GitHub repository where the action is triggered. -- BRANCH is the branch or tag ref that triggered the action. -- ACCESS_TOKEN is used to authenticate the requests to the Hashnode API. -- HEADERS constant is a dictionary that contains the Authorization header with the ACCESS_TOKEN. -- GITHUB_RAW_URL is the base URL for the raw content of the GitHub repository. -- HASHNODE_API_URL is the URL of the Hashnode GraphQL API. -- ADDED_FILES and CHANGED_FILES are the paths of the added and changed files in the Git commit. -- ALL_CHANGED_FILES is the list of all changed files (added and changed). 
-""" - -import os -from pathlib import Path - -ACCESS_TOKEN = os.environ["ACCESS_TOKEN"] -POSTS_DIRECTORY = Path(os.environ.get("POSTS_DIRECTORY", "")) -PUBLICATION_HOST = os.environ["PUBLICATION_HOST"] -GITHUB_OUTPUT = os.getenv("GITHUB_OUTPUT") - -GITHUB_REPOSITORY = os.environ["GITHUB_REPOSITORY"] -BRANCH = os.environ["GITHUB_REF"].split("/")[-1] - -HEADERS = {"Authorization": f"Bearer {ACCESS_TOKEN}"} - -GITHUB_RAW_URL = "https://raw.githubusercontent.com" -HASHNODE_API_URL = "https://gql.hashnode.com" - -added_files = os.environ.get("ADDED_FILES", "").split() -ADDED_FILES = [Path(f) for f in added_files if f] - -changed_files = os.environ.get("CHANGED_FILES", "").split() -CHANGED_FILES = [Path(f) for f in changed_files if f and f not in added_files] - -ALL_CHANGED_FILES = list(set(ADDED_FILES + CHANGED_FILES)) diff --git a/entrypoint.py b/entrypoint.py index 4c84833..b0feaf8 100644 --- a/entrypoint.py +++ b/entrypoint.py @@ -1,319 +1,140 @@ -"""Publish markdown post files to Hashnode publications. - -The script is designed to be used as part of a GitHub Action. It reads the following -environment variables: - -- ACCESS_TOKEN: A Hashnode API access token. -- POSTS_DIRECTORY: The directory containing the markdown files to publish. (default: "") -- PUBLICATION_HOST: The host of the Hashnode publication to publish to. (e.g., "my.site.com") -- GITHUB_REPOSITORY: The GitHub repository in the format "owner/repo". -- GITHUB_REF: The branch or tag ref that triggered the action. -- CHANGED_FILES: A JSON object containing the files that were added, modified, or deleted. - -Markdown files in the POSTS_DIRECTORY are read and published to the specified Hashnode -publication. Frontmatter fields and post content are extracted from the markdown files. - -The script writes the results of the operation to GITHUB_OUTPUT in the format "result_json" -and "result_summary". - -Hashnode GraphQL API is used to interact with the Hashnode platform. 
-""" - +"""Main application module for publishing posts to Hashnode.""" import json -import re +import logging import uuid -from datetime import datetime from pathlib import Path -from typing import Any, Dict, List, Tuple, Union -from zoneinfo import ZoneInfo - -import frontmatter -from constants import ( - ADDED_FILES, - ALL_CHANGED_FILES, - BRANCH, - CHANGED_FILES, - GITHUB_OUTPUT, - GITHUB_RAW_URL, - GITHUB_REPOSITORY, - POSTS_DIRECTORY, -) -from graphql import HashnodeAPI +from sgqlc.operation import Operation -debug_data: List[List[Union[datetime, str]]] = [] -results: Dict[str, Any] = { - "input_added_files": [str(f) for f in ADDED_FILES], - "input_files": [str(f) for f in CHANGED_FILES], - "added": [], - "modified": [], - "deleted": [], - "errors": [], -} +from schema import Query +from src.exceptions import HashnodePublisherError +from src.graphql_client import GraphQLClient +from src.markdown_processor import MarkdownProcessor +from src.post_service import PostService +from src.settings import settings +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) -class MarkdownFileHandler: # pylint: disable=R0903 - """Handle markdown files and prepare data for Hashnode publication.""" - REQUIRED_FIELDS = ["title"] +class HashnodePublisher: + """Main application class for publishing posts to Hashnode.""" - def __init__(self, file_path: Path, publication_id: str) -> None: - self.file_path = file_path - self.publication_id = publication_id - self.metadata, self.content = self._process_markdown() - self._validate() - - def _process_markdown(self) -> Tuple[Dict[str, Any], str]: - """Extract metadata and content from a markdown file.""" - with self.file_path.open("r") as f: - post = frontmatter.load(f) - debug_data.append( - [datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), f"Processing Markdown: {self.file_path}"] + def __init__(self): + self.graphql_client = GraphQLClient( + url=settings.HASHNODE_API_URL, + 
headers=settings.headers, ) - return post.metadata, post.content - - def _validate(self) -> None: - """Validate that the content and frontmatter are correct.""" - self._validate_content() - self._validate_frontmatter() - - def _validate_content(self) -> None: - """Ensure content is not empty.""" - if not self.content.strip(): - raise ValueError("Content cannot be empty") - def _validate_frontmatter(self) -> None: - """Ensure the frontmatter contains the required fields and correct formats.""" - for field in self.REQUIRED_FIELDS: - if field not in self.metadata: - raise ValueError(f"Missing required frontmatter field: {field}") - - self.metadata["slug"] = self._generate_slug(self.metadata.get("slug", self.metadata["title"])) - self.metadata["tags"] = self._process_tags(self.metadata.get("tags", "")) - self.metadata["publishedAt"] = self._get_publish_date(self.metadata.get("publishedAt")) - - debug_data.append( - [datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), f"Processed Metadata: {self.metadata}"] + self.markdown_processor = MarkdownProcessor( + publication_id=self._get_publication_id(), + github_raw_url=settings.GITHUB_RAW_URL, + repository=settings.GITHUB_REPOSITORY, + branch=settings.branch, ) - def _generate_slug(self, title: str) -> str: - """Generate a slug from the title.""" - return re.sub(r"\s+", "-", title.strip().lower()) - - def _process_tags(self, tags: str) -> List[Dict[str, str]]: - """Process tags into a list of dictionaries.""" - if not isinstance(tags, str): - raise ValueError("Tags must be a comma-separated string") - return [{"slug": tag.strip().lower(), "name": tag.strip()} for tag in tags.split(",")] + self.settings = settings - def _get_publish_date(self, published_at: str = None) -> str: - """Return the publish date, defaulting to now if not provided.""" - if published_at: - return published_at - return datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%dT%H:%M:%SZ") - - def build_post_data(self, post_id: str = None) -> 
Dict[str, Any]: - """Build the post data for the Hashnode API.""" - self._update_image_urls() - - # If updating - if post_id: - post_data = { - "id": post_id, - "title": self.metadata["title"], - "subtitle": self.metadata.get("subtitle"), - "publicationId": self.publication_id, - "contentMarkdown": self.content, - "publishedAt": self.metadata["publishedAt"], - "coverImageOptions": { - "coverImageURL": self._get_cover_image_url(), - "coverImageAttribution": self.metadata.get("coverImageAttribution"), - }, - "slug": self.metadata["slug"], - "tags": self.metadata["tags"], - "settings": { # UpdatePostSettingsInput - "isTableOfContentEnabled": self.metadata.get("enableTableOfContents", False), - "delisted": self.metadata.get("delisted", False), - "disableComments": self.metadata.get("disableComments", False), - }, - } - - # If creating - else: - post_data = { - "title": self.metadata["title"], - "subtitle": self.metadata.get("subtitle"), - "publicationId": self.publication_id, - "contentMarkdown": self.content, - "publishedAt": self.metadata["publishedAt"], - "coverImageOptions": { - "coverImageURL": self._get_cover_image_url(), - "coverImageAttribution": self.metadata.get("coverImageAttribution"), - }, - "slug": self.metadata["slug"], - "tags": self.metadata["tags"], - "settings": { # PublishPostSettingsInput - "enableTableOfContent": self.metadata.get("enableTableOfContents", False), - "delisted": self.metadata.get("delisted", False), - "slugOverridden": True, - }, - "disableComments": self.metadata.get("disableComments", False), - } - - return post_data - - def _update_image_urls(self) -> None: - """Update relative image URLs in the content to absolute URLs.""" - relative_image_regex = re.compile(r"!\[(.*?)\]\((?!http)(.*?)\)") - self.content = relative_image_regex.sub( - lambda m: f"![{m.group(1)}]({self._get_resource_url(self.file_path.parent / m.group(2))})", - self.content, + self.post_service = PostService( + graphql_client=self.graphql_client, + 
markdown_processor=self.markdown_processor, + settings=self.settings, ) - def _get_cover_image_url(self) -> Union[str, None]: - """Get the full URL for the cover image if it exists and is a relative path.""" - cover_image = self.metadata.get("coverImage") - if cover_image and not cover_image.startswith("http"): - return self._get_resource_url(self.file_path.parent / cover_image) - return cover_image - - def _get_resource_url(self, path: Path) -> str: - """Get the URL for a resource in the GitHub repository.""" - return f"{GITHUB_RAW_URL}/{GITHUB_REPOSITORY}/{BRANCH}/{path.as_posix()}" - - -def get_markdown_files(directory: Path) -> List[Path]: - """Get a list of all markdown files in the specified directory.""" - if not directory.is_dir(): - raise ValueError(f"Directory not found: {directory}") - return list(directory.rglob("*.md")) - - -def handle_post(file_path: Path, api: HashnodeAPI) -> None: - """Handle a markdown post file.""" - debug_data.append([datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), f"Handling file: {file_path}"]) - - markdown_file_handler = MarkdownFileHandler(file_path, api.publication_id) - - post_id = api.get_post_id(markdown_file_handler.metadata["slug"]) - debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Got Post ID: {post_id} for slug: {markdown_file_handler.metadata['slug']}", - ] - ) - - post_data = markdown_file_handler.build_post_data(post_id) - - post_action = "update_post" if post_id else "create_post" - - post = getattr(api, post_action)(post_data) - if post: - results["modified" if post_id else "added"].append(post) - else: - results["errors"].append({"file": str(file_path), "error": f"Failed to {post_action} post."}) - - debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Post Action: {post_action}, Post: {post}, for file: {file_path}", - ] - ) - return api - - -def handle_deleted_posts(api: HashnodeAPI) -> None: - """Handle deleted 
markdown posts by delisting them from the publication.""" - markdown_files = get_markdown_files(Path(POSTS_DIRECTORY)) - slugs = {MarkdownFileHandler(file_path, api.publication_id).metadata["slug"] for file_path in markdown_files} - posts = api.get_all_publication_posts() - - for post in posts: - if post["slug"] not in slugs: - if api.delist_post(post["id"]): - results["deleted"].append(post) - - return api - - -def create_result_summary() -> str: - """Create a text summary of the results.""" - summary = "" - - # Show added, modified, and deleted posts - for action, posts in results.items(): - if action in ["added", "modified", "deleted"]: - if posts: - summary += f"{action.capitalize()} posts:\n" - for post in posts: - summary += f" - {post['title']} ({post['slug']})\n" + self.results = {"added": [], "modified": [], "deleted": [], "errors": [], "debug_data": []} + + def _get_publication_id(self) -> str: + """Get the publication ID for the given host.""" + op = Operation(Query) + publication = op.publication(host=settings.PUBLICATION_HOST) + publication.id() + + response = self.graphql_client.execute(op) + return response["publication"]["id"] + + def process_files(self): + """Process all changed files.""" + for file_path in settings.ADDED_FILES + settings.CHANGED_FILES: + if not self._is_valid_post_file(file_path): + self.results["errors"].append( + {"file": str(file_path), "error": "Not a markdown file in the posts directory"} + ) + continue + + try: + is_new = file_path in settings.ADDED_FILES + result = self.post_service.publish_post(file_path) + self.results["added" if is_new else "modified"].append(result) + logger.info("Successfully %s post: %s", "added" if is_new else "modified", result["title"]) + + except HashnodePublisherError as e: + self.results["errors"].append({"file": str(file_path), "error": str(e)}) + logger.error("Error processing %s: %s", file_path, e) + + def _is_valid_post_file(self, file_path: Path) -> bool: + """Check if the file is a 
markdown file in the posts directory.""" + return file_path.suffix.lower() == ".md" and file_path.is_relative_to(settings.POSTS_DIRECTORY) + + def write_results(self): + """Write results to GitHub Actions output.""" + if not settings.GITHUB_OUTPUT: + logger.warning("GITHUB_OUTPUT not set, skipping results output") + return + + # Add debug data from clients + self.results["debug_data"] = self.graphql_client.debug_data + + with open(settings.GITHUB_OUTPUT, "a", encoding="utf-8") as f: + # Write JSON results + print(f"result_json={json.dumps(self.results)}", file=f) + + # Write text summary + delimiter = str(uuid.uuid4()) + print(f"result_summary<<{delimiter}", file=f) + print(self._create_summary(), file=f) + print(delimiter, file=f) + + def _create_summary(self) -> str: + """Create a human-readable summary of the results.""" + summary = [] + + # Add results for each category + for category in ["added", "modified", "deleted"]: + if self.results[category]: + summary.append(f"{category.capitalize()} posts:") + for post in self.results[category]: + summary.append(f" - {post['title']} ({post['slug']})") else: - summary += f"No {action} posts.\n" - - # Show errors - if results["errors"]: - summary += "Errors:\n" - for error in results["errors"]: - summary += f" - {str(error['file'])}: {error['error']}\n" - else: - summary += "No errors.\n" - - # Show debug data - if results["debug_data"]: - summary += "Debug Data:\n" - for data in debug_data: - summary += f" - {str(data)}\n" - else: - summary += "No debug data.\n" - - return summary - - -def write_results_to_github_output() -> None: - """Write the results to the GitHub output.""" - with open(GITHUB_OUTPUT, "a", encoding="utf-8") as output_file: - print(f"result_json={json.dumps(results)}", file=output_file) - delimiter = uuid.uuid1() - print(f"result_summary<<{delimiter}", file=output_file) - print(create_result_summary(), file=output_file) - print(delimiter, file=output_file) - - -def build_full_debug_data(api: 
HashnodeAPI) -> None: - """Combine debug_data with api.debug_data, and sort by timestamp.""" - debug_data.extend(api.debug_data) - debug_data.sort(key=lambda x: x[0]) - - # debug_list is a list of lists. Convert each internal list to a string. - simplified_debug_data = [[str(item) for item in debug_list] for debug_list in debug_data] - - results["debug_data"] = simplified_debug_data - - -def main() -> None: - """Main entrypoint for the action.""" - api = HashnodeAPI() - posts_directory = Path(POSTS_DIRECTORY) - - for file_path in ALL_CHANGED_FILES: - if file_path.is_relative_to(posts_directory) and file_path.suffix == ".md": - api = handle_post(file_path=file_path, api=api) - else: - results["errors"].append( - { - "file": str(file_path), - "error": ( - "Note: File is not a markdown file or not in the posts directory. " - "If you want to publish this file, move it to the posts directory." - ), - } - ) - - api = handle_deleted_posts(api) - - build_full_debug_data(api) - - write_results_to_github_output() + summary.append(f"No {category} posts.") + + # Add errors if any + if self.results["errors"]: + summary.append("\nErrors:") + for error in self.results["errors"]: + summary.append(f" - {error['file']}: {error['error']}") + + # Add debug data if any + if self.results["debug_data"]: + summary.append("\nDebug Data:") + for timestamp, message in self.results["debug_data"]: + summary.append(f" - [{timestamp}] {message}") + + return "\n".join(summary) + + +def main(): + """Main entry point for the application.""" + try: + publisher = HashnodePublisher() + publisher.process_files() + publisher.write_results() + + except HashnodePublisherError as e: + logger.error("Fatal error: %s", e) + exit(1) + except Exception as e: + logger.exception("Unexpected error occurred: %s", e) + exit(1) if __name__ == "__main__": diff --git a/graphql.py b/graphql.py deleted file mode 100644 index 71fc945..0000000 --- a/graphql.py +++ /dev/null @@ -1,233 +0,0 @@ -"""Provides a class to manage 
the publication of markdown posts to a Hashnode publication.""" - -from datetime import datetime -from typing import Any, Dict, List, Optional, Union -from zoneinfo import ZoneInfo - -import requests - -from constants import HASHNODE_API_URL, HEADERS, PUBLICATION_HOST - - -class HashnodeAPI: - """Manage the publication of markdown posts to a Hashnode publication.""" - - def __init__(self, timeout: int = 30) -> None: - """Initialize the HashnodeAPI class with a timeout and obtain the publication ID.""" - self.timeout = timeout - self.debug_data: List[List[Union[datetime, str]]] = [] - self.publication_id = self._fetch_publication_id() - - def _fetch_publication_id(self) -> str: - """Fetch the publication ID for the given host.""" - query = """ - query Publication($host: String!) { - publication(host: $host) { - id - } - } - """ - response = self._execute_request(query, variables={"host": PUBLICATION_HOST}) - publication_id = response["data"]["publication"]["id"] - self.debug_data.append( - [datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), f"Publication ID: {publication_id}"] - ) - return publication_id - - def get_post_id(self, slug: str) -> Optional[str]: - """Get the post ID for the given publication and slug.""" - query = """ - query GetPost($host: String!, $slug: String!) 
{ - publication(host: $host) { - post(slug: $slug) { - id - } - } - } - """ - response = self._execute_request(query, variables={"host": PUBLICATION_HOST, "slug": slug}) - - if "errors" in response: - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"GraphQL errors: {response['errors']}, Slug: {slug}", - ] - ) - return None - - post = response["data"]["publication"].get("post") - post_id = post["id"] if post else None - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Got post id {post_id} for {slug}, Post: {post if post else None}", - ] - ) - return post_id - - def get_all_publication_posts(self) -> List[Dict[str, str]]: - """Get a list of all post IDs and slugs for a publication.""" - query = """ - query GetPosts($host: String!, $first: Int!, $after: String) { - publication(host: $host) { - posts(first: $first, after: $after) { - edges { - node { - id - slug - } - } - pageInfo { - endCursor - hasNextPage - } - } - } - } - """ - all_posts = [] - variables = {"host": PUBLICATION_HOST, "first": 50, "after": None} - while True: - response = self._execute_request(query, variables=variables) - posts_data = response["data"]["publication"]["posts"] - all_posts.extend({"id": edge["node"]["id"], "slug": edge["node"]["slug"]} for edge in posts_data["edges"]) - if not posts_data["pageInfo"]["hasNextPage"]: - break - variables["after"] = posts_data["pageInfo"]["endCursor"] - - return all_posts - - def create_post(self, post_data: Dict[str, Any]) -> Dict[str, str]: - """Create a post with the given data.""" - mutation = """ - mutation PublishPost($input: PublishPostInput!) 
{ - publishPost(input: $input) { - post { - id - title - slug - } - } - } - """ - response = self._execute_request(mutation, variables={"input": post_data}) - return self._extract_post_data(response, "Create Post", post_data) - - def update_post(self, post_data: Dict[str, Any]) -> Dict[str, str]: - """Update a post with the given data.""" - mutation = """ - mutation UpdatePost($input: UpdatePostInput!) { - updatePost(input: $input) { - post { - id - title - slug - } - } - } - """ - response = self._execute_request(mutation, variables={"input": post_data}) - return self._extract_post_data(response, "Update Post", post_data) - - def delist_post(self, post_id: str) -> bool: - """Delist (soft-delete) the post with the given ID.""" - mutation = """ - mutation UpdatePost($input: UpdatePostInput!) { - updatePost(input: $input) { - post { - id - preferences { - isDelisted - } - } - } - } - """ - post_data = {"id": post_id, "preferences": {"isDelisted": True}} - response = self._execute_request(mutation, variables={"input": post_data}) - - try: - delisted = response["data"]["updatePost"]["post"]["preferences"]["isDelisted"] - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Delisted post: {post_id}, Delisted: {delisted}", - ] - ) - return delisted - except KeyError: - self._log_failure("Failed to delist post", post_id, response) - return False - - def _execute_request(self, query: str, variables: Dict[str, Any]) -> Dict[str, Any]: - """Execute a GraphQL request and return the JSON response.""" - response = requests.post( - url=HASHNODE_API_URL, - json={"query": query, "variables": variables}, - headers=HEADERS, - timeout=self.timeout, - ) - try: - response.raise_for_status() - except requests.exceptions.HTTPError as e: - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Request failed with status code {response.status_code}: {response.text}. " - f"{query=}, {variables=}. 
Original exception: {e}.", - ] - ) - return {} - except requests.exceptions.RequestException as e: - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Request failed with {response.text=}: {e}. {query=}, {variables=}.", - ] - ) - return {} - except Exception as e: # pylint: disable=W0718 - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Unexpected error {response.text=}: {e}. {query=}, {variables=}.", - ] - ) - return {} - return response.json() - - def _extract_post_data(self, response: Dict[str, Any], action: str, post_data: Dict[str, Any]) -> Dict[str, str]: - """Extract post data from the response and handle errors.""" - try: - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"Trying to get 'response['data']{action.split()[0].lower()}Post']['post']' for the post.", - ] - ) - post = response["data"][f"{action.split()[0].lower()}Post"]["post"] - - if post: - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"{action}: {post['id']=}, {post['title']=}, {post['slug']=}", - ] - ) - else: - self._log_failure(f"Failed to {action.lower()} (No Post was returned in response)", post_data, response) - - return post - except KeyError: - self._log_failure(f"Failed to {action.lower()}", post_data, response) - return {} - - def _log_failure(self, message: str, identifier: str, response: Dict[str, Any]) -> None: - """Log a failure with a given message, identifier, and response.""" - self.debug_data.append( - [ - datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), - f"{message}. {response=}. 
Tried using identifier: {identifier}.", - ] - ) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..334891d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "publish-github-to-hashnode" +version = "1.48" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "pydantic-settings>=2.6.1", + "python-frontmatter>=1.1.0", + "requests>=2.32.3", + "sgqlc>=16.4", +] diff --git a/requirements.txt b/requirements.txt index abf1416..e511478 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,20 @@ -requests -python-frontmatter -pathlib +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml -o requirements.txt +certifi==2024.8.30 + # via requests +charset-normalizer==3.4.0 + # via requests +graphql-core==3.2.5 + # via sgqlc +idna==3.10 + # via requests +python-frontmatter==1.1.0 + # via publish-github-to-hashnode (pyproject.toml) +pyyaml==6.0.2 + # via python-frontmatter +requests==2.32.3 + # via publish-github-to-hashnode (pyproject.toml) +sgqlc==16.4 + # via publish-github-to-hashnode (pyproject.toml) +urllib3==2.2.3 + # via requests diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/exceptions.py b/src/exceptions.py new file mode 100644 index 0000000..59bb482 --- /dev/null +++ b/src/exceptions.py @@ -0,0 +1,31 @@ +"""Custom exceptions for the Hashnode Publisher.""" + + +class HashnodePublisherError(Exception): + """Base exception for the application.""" + + pass + + +class InvalidPostError(HashnodePublisherError): + """Raised when a post is invalid.""" + + pass + + +class APIError(HashnodePublisherError): + """Raised when an API request fails.""" + + pass + + +class ConfigurationError(HashnodePublisherError): + """Raised when there's a configuration error.""" + + pass + + +class PublicationError(HashnodePublisherError): + """Raised when there's an error with 
class GraphQLClient:
    """Thin GraphQL client that POSTs sgqlc operations to a single endpoint."""

    def __init__(self, url: str, headers: dict[str, str], timeout: int = 30):
        self.url = url
        self.headers = headers
        self.timeout = timeout
        # (label, message) pairs recording failures for later debug output.
        self.debug_data: list[tuple[str, str]] = []

    def execute(self, operation: Operation) -> dict[str, Any]:
        """Execute a GraphQL operation and return its ``data`` payload.

        Args:
            operation: sgqlc Operation to serialize and send.

        Returns:
            The response's ``data`` mapping, or the full JSON body when no
            ``data`` key is present.

        Raises:
            APIError: on transport failures, non-2xx responses, non-JSON
                bodies, or GraphQL-level errors in the response.
        """
        try:
            response = requests.post(
                url=self.url,
                json={"query": str(operation), "variables": operation.variables},
                headers=self.headers,
                timeout=self.timeout,
            )
            response.raise_for_status()
            result = response.json()
        # JSONDecodeError subclasses RequestException, so it must be caught
        # first to produce an accurate message for non-JSON bodies.
        except requests.exceptions.JSONDecodeError as e:
            self.debug_data.append(("error", f"GraphQL response was not JSON: {str(e)}"))
            raise APIError(f"GraphQL response was not JSON: {str(e)}") from e
        except requests.exceptions.RequestException as e:
            self.debug_data.append(("error", f"GraphQL request failed: {str(e)}"))
            raise APIError(f"GraphQL request failed: {str(e)}") from e

        if "errors" in result:
            # Record GraphQL-level errors in debug_data too, consistent with
            # the transport-failure path above (previously they were only
            # raised, never logged).
            self.debug_data.append(("error", f"GraphQL errors: {result['errors']}"))
            raise APIError(f"GraphQL errors: {result['errors']}")

        if "data" in result:
            return result["data"]
        return result
+""" + +# import logging +# from datetime import datetime +# from typing import Any, Optional +# from zoneinfo import ZoneInfo + +# import requests +# from constants import ( # Was removed for new implementation +# HASHNODE_API_URL, +# HEADERS, +# PUBLICATION_HOST, +# ) +# from sgqlc.operation import Operation + +# from .schema import ( +# Mutation, +# Post, +# Publication, +# PublishPostInput, +# Query, +# RemovePostInput, +# UpdatePostInput, +# schema, +# ) + +# logger = logging.getLogger(__name__) + + +# class HashnodeAPI: +# """Manage the publication of markdown posts to a Hashnode publication.""" + +# def __init__(self, timeout: int = 30) -> None: +# """Initialize the HashnodeAPI class with a timeout and obtain the publication ID.""" +# self.timeout = timeout +# self.debug_data: list[list[datetime | str]] = [] +# self.publication_id = self._fetch_publication_id() + +# def _fetch_publication_id(self) -> str: +# """Fetch the publication ID for the given host.""" +# op = Operation(Query) +# publication = op.publication(host=PUBLICATION_HOST) +# publication.id() + +# response = self._execute_request(op) +# publication_id = response["publication"]["id"] +# self.debug_data.append( +# [datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), f"Publication ID: {publication_id}"] +# ) +# return publication_id + +# def get_post_id(self, slug: str) -> Optional[str]: +# """Get the post ID for the given publication and slug.""" +# op = Operation(Query) +# publication = op.publication(host=PUBLICATION_HOST) +# post = publication.post(slug=slug) +# post.id() + +# response = self._execute_request(op) + +# if "errors" in response: +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"GraphQL errors: {response['errors']}, Slug: {slug}", +# ] +# ) +# return None + +# post_data = response["publication"].get("post") +# post_id = post_data["id"] if post_data else None +# self.debug_data.append( +# [ +# 
datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"Got post id {post_id} for {slug}, Post: {post_data if post_data else None}", +# ] +# ) +# return post_id + +# def get_all_publication_posts(self) -> list[dict[str, str]]: +# """Get a list of all post IDs and slugs for a publication.""" +# all_posts = [] +# has_next_page = True +# after_cursor = None + +# while has_next_page: +# op = Operation(Query) +# publication = op.publication(host=PUBLICATION_HOST) +# posts = publication.posts(first=50, after=after_cursor) + +# # Select fields from edges +# post_edges = posts.edges() +# node = post_edges.node() +# node.id() +# node.slug() + +# # Get pagination info +# page_info = posts.page_info() +# page_info.has_next_page() +# page_info.end_cursor() + +# response = self._execute_request(op) +# posts_data = response["publication"]["posts"] + +# all_posts.extend({"id": edge["node"]["id"], "slug": edge["node"]["slug"]} for edge in posts_data["edges"]) + +# has_next_page = posts_data["pageInfo"]["hasNextPage"] +# after_cursor = posts_data["pageInfo"]["endCursor"] if has_next_page else None + +# return all_posts + +# def create_post(self, post_data: dict[str, Any]) -> dict[str, str]: +# """Create a post with the given data.""" +# op = Operation(Mutation) +# publish_post = op.publish_post(input=post_data) +# post = publish_post.post() +# post.id() +# post.title() +# post.slug() + +# response = self._execute_request(op) +# return self._extract_post_data(response, "Create Post", post_data) + +# def update_post(self, post_data: dict[str, Any]) -> dict[str, str]: +# """Update a post with the given data.""" +# op = Operation(Mutation) +# update_post = op.update_post(input=post_data) +# post = update_post.post() +# post.id() +# post.title() +# post.slug() + +# response = self._execute_request(op) +# return self._extract_post_data(response, "Update Post", post_data) + +# def delist_post(self, post_id: str) -> bool: +# """Delist (soft-delete) the post with the given 
ID.""" +# op = Operation(Mutation) +# update_post = op.update_post(input={"id": post_id, "preferences": {"isDelisted": True}}) +# post = update_post.post() +# post.id() +# preferences = post.preferences() +# preferences.is_delisted() + +# response = self._execute_request(op) + +# try: +# delisted = response["updatePost"]["post"]["preferences"]["isDelisted"] +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"Delisted post: {post_id}, Delisted: {delisted}", +# ] +# ) +# return delisted +# except KeyError: +# self._log_failure("Failed to delist post", post_id, response) +# return False + +# def _execute_request(self, operation: Operation) -> dict[str, Any]: +# """Execute a GraphQL request and return the JSON response.""" +# try: +# response = requests.post( +# url=HASHNODE_API_URL, +# json={"query": str(operation), "variables": operation.variables}, +# headers=HEADERS, +# timeout=self.timeout, +# ) +# response.raise_for_status() +# result = response.json() + +# if "data" in result: +# return result["data"] +# return result + +# except requests.exceptions.HTTPError as e: +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"Request failed with status code {response.status_code}: {response.text}. " +# f"Query: {str(operation)}, Variables: {operation.variables}. Original exception: {e}.", +# ] +# ) +# return {} +# except requests.exceptions.RequestException as e: +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"Request failed with {response.text=}: {e}. Query: {str(operation)}, " +# f"Variables: {operation.variables}.", +# ] +# ) +# return {} +# except Exception as e: # pylint: disable=W0718 +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"Unexpected error {getattr(response, 'text', '')}: {e}. 
" +# f"Query: {str(operation)}, Variables: {operation.variables}.", +# ] +# ) +# return {} + +# def _extract_post_data(self, response: dict[str, Any], action: str, post_data: dict[str, Any]) -> dict[str, str]: +# """Extract post data from the response and handle errors.""" +# try: +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"{action.split()[0].lower()=}", +# ] +# ) +# post = response[f"{action.split()[0].lower()}Post"]["post"] + +# if post: +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"{action}: {post['id']=}, {post['title']=}, {post['slug']=}", +# ] +# ) +# else: +# self._log_failure(f"Failed to {action.lower()} (No Post was returned in response)", post_data, response) + +# return post +# except KeyError: +# self._log_failure(f"Failed to {action.lower()}", post_data, response) +# return {} + +# def _log_failure(self, message: str, identifier: str, response: dict[str, Any]) -> None: +# """Log a failure with a given message, identifier, and response.""" +# self.debug_data.append( +# [ +# datetime.now(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S:%f"), +# f"{message}. {response=}. 
"""Logging utilities for consistent logging across the project."""
import logging
from functools import wraps
from typing import Any, Callable


def log_operation(logger: logging.Logger):
    """Decorator factory: log entry, completion, and failure of the wrapped call."""

    def _decorate(func: Callable) -> Callable:
        @wraps(func)
        def _wrapped(*args: Any, **kwargs: Any) -> Any:
            # Turn e.g. "publish_post" into the display name "Publish Post".
            operation = func.__name__.replace("_", " ").title()
            logger.info(f"Starting {operation}")
            try:
                outcome = func(*args, **kwargs)
                logger.info(f"Completed {operation}")
                return outcome
            except Exception as e:
                logger.error(f"Error in {operation}: {str(e)}")
                raise

        return _wrapped

    return _decorate


def setup_logging(name: str, level: int = logging.INFO) -> logging.Logger:
    """Return a named logger with one stream handler and a standard format."""
    logger = logging.getLogger(name)

    # Only attach a handler the first time; repeat calls reuse the logger
    # without duplicating output.
    if not logger.handlers:
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(
            logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        )
        logger.addHandler(stream_handler)

    logger.setLevel(level)
    return logger
def process_file(self, file_path: Path) -> Post: + """Process a markdown file into a Post domain object.""" + metadata, content = self._read_file(file_path) + self._validate_content(content) + + processed_content = self._process_content(content, file_path) + post_metadata = self._process_metadata(metadata) + + return Post( + file_path=file_path, metadata=post_metadata, content=processed_content, publication_id=self.publication_id + ) + + def _read_file(self, file_path: Path) -> tuple[dict[str, Any], str]: + """Read and parse a markdown file.""" + with file_path.open("r") as f: + post = frontmatter.load(f) + return post.metadata, post.content + + def _validate_content(self, content: str) -> None: + """Ensure content is not empty.""" + if not content.strip(): + raise InvalidPostError("Post content cannot be empty") + + def _process_metadata(self, metadata: dict[str, Any]) -> PostMetadata: + """Process and validate metadata.""" + if "title" not in metadata: + raise InvalidPostError("Post must have a title") + + # Process tags if they exist + if "tags" in metadata: + metadata["tags"] = [ + {"slug": tag.strip().lower(), "name": tag.strip()} for tag in metadata["tags"].split(",") + ] + + return PostMetadata(**metadata) + + def _process_content(self, content: str, file_path: Path) -> str: + """Process content, updating image URLs to absolute paths.""" + relative_image_regex = re.compile(r"!\[(.*?)\]\((?!http)(.*?)\)") + return relative_image_regex.sub( + lambda m: f"![{m.group(1)}]({self._get_resource_url(file_path.parent / m.group(2))})", + content, + ) + + def _get_resource_url(self, path: Path) -> str: + """Get the absolute URL for a resource in the GitHub repository.""" + return f"{self.github_raw_url}/{self.repository}/{self.branch}/{path.as_posix()}" diff --git a/src/models.py b/src/models.py new file mode 100644 index 0000000..4e8b80f --- /dev/null +++ b/src/models.py @@ -0,0 +1,40 @@ +"""Domain models for blog posts.""" +from dataclasses import dataclass +from 
@dataclass
class PostMetadata:
    """Post metadata parsed from markdown frontmatter.

    Field names mirror the frontmatter keys, hence the camelCase on some
    attributes.
    """

    title: str
    subtitle: Optional[str] = None
    slug: Optional[str] = None
    # List of {"slug": ..., "name": ...} mappings; None when no tags given.
    # FIX: annotation was `list[dict[str, str]] = None`, which pairs a None
    # default with a non-Optional type (invalid per PEP 484).
    tags: Optional[list[dict[str, str]]] = None
    publishedAt: Optional[str] = None
    coverImage: Optional[str] = None
    coverImageAttribution: Optional[str] = None
    enableTableOfContents: bool = False
    delisted: bool = False
    disableComments: bool = False


@dataclass
class Post:
    """Domain model for a blog post tied to a markdown source file."""

    file_path: Path
    metadata: PostMetadata
    content: str
    publication_id: str

    @property
    def slug(self) -> str:
        """Post slug; generated from the title when not set explicitly."""
        return self.metadata.slug or self._generate_slug(self.metadata.title)

    @staticmethod
    def _generate_slug(title: str) -> str:
        """Generate a URL-friendly slug by hyphenating the lowercased title."""
        return "-".join(title.strip().lower().split())
return self._publish_or_update(post_data, post_id) + + def get_post_id(self, slug: str) -> Optional[str]: + """Get the ID of an existing post by slug.""" + op = Operation(Query) + publication = op.publication(host=self.settings.PUBLICATION_HOST) + post = publication.post(slug=slug) + post.id() + + response = self.graphql_client.execute(op) + post_data = response.get("publication", {}).get("post") + return post_data["id"] if post_data else None + + def _build_post_data(self, post: Post, post_id: Optional[str] = None) -> dict[str, Any]: + """Build the post data for the API.""" + data = { + "title": post.metadata.title, + "subtitle": post.metadata.subtitle, + "publicationId": post.publication_id, + "contentMarkdown": post.content, + "tags": post.metadata.tags, + "publishedAt": post.metadata.publishedAt, + "slug": post.slug, + "coverImageOptions": { + "coverImageURL": post.metadata.coverImage, + "coverImageAttribution": post.metadata.coverImageAttribution, + }, + } + + if post_id: + data["id"] = post_id + data["settings"] = { + "isTableOfContentEnabled": post.metadata.enableTableOfContents, + "delisted": post.metadata.delisted, + "disableComments": post.metadata.disableComments, + } + else: + data["settings"] = { + "enableTableOfContent": post.metadata.enableTableOfContents, + "delisted": post.metadata.delisted, + "slugOverridden": True, + } + data["disableComments"] = post.metadata.disableComments + + return data + + def _publish_or_update(self, post_data: dict[str, Any], post_id: Optional[str]) -> dict[str, Any]: + """Publish a new post or update an existing one.""" + op = Operation(Mutation) + + if post_id: + mutation = op.update_post(input=post_data) + else: + mutation = op.publish_post(input=post_data) + + post = mutation.post() + post.id() + post.title() + post.slug() + + response = self.graphql_client.execute(op) + operation_name = "updatePost" if post_id else "publishPost" + + if operation_name not in response: + raise PublicationError(f"Failed to 
{operation_name.split('_', maxsplit=1)[0]} post") + + return response[operation_name]["post"] diff --git a/src/schema.py b/src/schema.py new file mode 100644 index 0000000..3ca6b56 --- /dev/null +++ b/src/schema.py @@ -0,0 +1,934 @@ +"""GraphQL schema for Hashnode API.""" +import sgqlc.types +import sgqlc.types.datetime +import sgqlc.types.relay + +schema = sgqlc.types.Schema() + + +# Unexport Node/PageInfo, let schema re-declare them +schema -= sgqlc.types.relay.Node +schema -= sgqlc.types.relay.PageInfo + + +######################################################################## +# Scalars and Enumerations +######################################################################## +Boolean = sgqlc.types.Boolean +DateTime = sgqlc.types.datetime.DateTime +ID = sgqlc.types.ID +String = sgqlc.types.String +Int = sgqlc.types.Int + + +class ObjectId(sgqlc.types.Scalar): + __schema__ = schema + + +# class SortOrder(sgqlc.types.Enum): +# __schema__ = schema +# __choices__ = ("asc", "dsc") + + +# class TagPostsSort(sgqlc.types.Enum): +# __schema__ = schema +# __choices__ = ("popular", "recent", "trending") + + +# class TimePeriod(sgqlc.types.Enum): +# __schema__ = schema +# __choices__ = ("LAST_N_DAYS", "LAST_N_HOURS", "LAST_N_MONTHS", "LAST_N_WEEKS", "LAST_N_YEARS") + + +# class URL(sgqlc.types.Scalar): +# __schema__ = schema + + +class UrlPattern(sgqlc.types.Enum): + __schema__ = schema + __choices__ = ("DEFAULT", "SIMPLE") + + +######################################################################## +# Input Objects +######################################################################## +# class AbsoluteTimeRange(sgqlc.types.Input): +# __schema__ = schema +# __field_names__ = ("from_", "to") +# from_ = sgqlc.types.Field(DateTime, graphql_name="from") +# to = sgqlc.types.Field(DateTime, graphql_name="to") + + +class CoverImageOptionsInput(sgqlc.types.Input): + __schema__ = schema + __field_names__ = ( + "cover_image_url", + "is_cover_attribution_hidden", + 
class PublishPostInput(sgqlc.types.Input):
    """Input for the publishPost mutation (trimmed to the fields this app uses)."""

    __schema__ = schema
    # FIX: "meta_tags" was listed here without a matching Field declaration,
    # which breaks sgqlc's field resolution for this input type. Removed
    # until a MetaTagsInput type and corresponding field are defined.
    __field_names__ = (
        "title",
        "subtitle",
        "publication_id",
        "content_markdown",
        "published_at",
        "cover_image_options",
        "slug",
        "original_article_url",
        "tags",
        "disable_comments",
        "publish_as",
        "series_id",
        "settings",
        "co_authors",
    )
    title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title")
    subtitle = sgqlc.types.Field(String, graphql_name="subtitle")
    publication_id = sgqlc.types.Field(sgqlc.types.non_null(ObjectId), graphql_name="publicationId")
    content_markdown = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="contentMarkdown")
    published_at = sgqlc.types.Field(DateTime, graphql_name="publishedAt")
    cover_image_options = sgqlc.types.Field(CoverImageOptionsInput, graphql_name="coverImageOptions")
    slug = sgqlc.types.Field(String, graphql_name="slug")
    original_article_url = sgqlc.types.Field(String, graphql_name="originalArticleURL")
    tags = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("PublishPostTagInput")), graphql_name="tags")
    disable_comments = sgqlc.types.Field(Boolean, graphql_name="disableComments")
    publish_as = sgqlc.types.Field(ObjectId, graphql_name="publishAs")
    series_id = sgqlc.types.Field(ObjectId, graphql_name="seriesId")
    settings = sgqlc.types.Field("PublishPostSettingsInput", graphql_name="settings")
    co_authors = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ObjectId)), graphql_name="coAuthors")
class UpdatePostInput(sgqlc.types.Input):
    """Input for the updatePost mutation (trimmed to the fields this app uses)."""

    __schema__ = schema
    # FIX: "meta_tags" was listed here without a matching Field declaration,
    # which breaks sgqlc's field resolution for this input type. Removed
    # until a MetaTagsInput type and corresponding field are defined.
    __field_names__ = (
        "id",
        "title",
        "subtitle",
        "content_markdown",
        "published_at",
        "cover_image_options",
        "slug",
        "original_article_url",
        "tags",
        "publish_as",
        "co_authors",
        "series_id",
        "settings",
        "publication_id",
    )
    id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id")
    title = sgqlc.types.Field(String, graphql_name="title")
    subtitle = sgqlc.types.Field(String, graphql_name="subtitle")
    content_markdown = sgqlc.types.Field(String, graphql_name="contentMarkdown")
    published_at = sgqlc.types.Field(DateTime, graphql_name="publishedAt")
    cover_image_options = sgqlc.types.Field(CoverImageOptionsInput, graphql_name="coverImageOptions")
    slug = sgqlc.types.Field(String, graphql_name="slug")
    original_article_url = sgqlc.types.Field(String, graphql_name="originalArticleURL")
    tags = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(PublishPostTagInput)), graphql_name="tags")
    publish_as = sgqlc.types.Field(ObjectId, graphql_name="publishAs")
    co_authors = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null(ObjectId)), graphql_name="coAuthors")
    series_id = sgqlc.types.Field(ObjectId, graphql_name="seriesId")
    settings = sgqlc.types.Field("UpdatePostSettingsInput", graphql_name="settings")
    publication_id = sgqlc.types.Field(ObjectId, graphql_name="publicationId")
graphql_name="pageInfo") + + +# class Edge(sgqlc.types.Interface): +# __schema__ = schema +# __field_names__ = ("node", "cursor") +# node = sgqlc.types.Field(sgqlc.types.non_null("Node"), graphql_name="node") +# cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") + + +# class Feature(sgqlc.types.Interface): +# __schema__ = schema +# __field_names__ = ("is_enabled",) +# is_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isEnabled") + + +# class ITag(sgqlc.types.Interface): +# __schema__ = schema +# __field_names__ = ("id", "name", "slug", "logo", "tagline", "info", "posts_count") +# id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") +# name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") +# slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") +# logo = sgqlc.types.Field(String, graphql_name="logo") +# tagline = sgqlc.types.Field(String, graphql_name="tagline") +# info = sgqlc.types.Field("Content", graphql_name="info") +# posts_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="postsCount") + + +class Node(sgqlc.types.Interface): + __schema__ = schema + __field_names__ = ("id",) + id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") + + +# class PageConnection(sgqlc.types.Interface): +# __schema__ = schema +# __field_names__ = ("nodes", "page_info") +# nodes = sgqlc.types.Field( +# sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Node))), graphql_name="nodes" +# ) +# page_info = sgqlc.types.Field(sgqlc.types.non_null("OffsetPageInfo"), graphql_name="pageInfo") + + +# class Views(sgqlc.types.Interface): +# __schema__ = schema +# __field_names__ = ("id", "total") +# id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="id") +# total = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="total") + + +class AudioUrls(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("male", "female") + 
male = sgqlc.types.Field(String, graphql_name="male") + female = sgqlc.types.Field(String, graphql_name="female") + + +class Content(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("markdown", "html", "text") + markdown = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="markdown") + html = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="html") + text = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="text") + + +# class CustomCSS(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("home", "post", "static", "home_minified", "post_minified", "static_minified") +# home = sgqlc.types.Field(String, graphql_name="home") +# post = sgqlc.types.Field(String, graphql_name="post") +# static = sgqlc.types.Field(String, graphql_name="static") +# home_minified = sgqlc.types.Field(String, graphql_name="homeMinified") +# post_minified = sgqlc.types.Field(String, graphql_name="postMinified") +# static_minified = sgqlc.types.Field(String, graphql_name="staticMinified") + + +class DomainInfo(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("hashnode_subdomain", "domain", "www_prefixed_domain") + hashnode_subdomain = sgqlc.types.Field(String, graphql_name="hashnodeSubdomain") + domain = sgqlc.types.Field("DomainStatus", graphql_name="domain") + www_prefixed_domain = sgqlc.types.Field("DomainStatus", graphql_name="wwwPrefixedDomain") + + +class Mutation(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ( + "publish_post", + "add_post_to_series", + "update_post", + "remove_post", + "restore_post", + ) + publish_post = sgqlc.types.Field( + sgqlc.types.non_null("PublishPostPayload"), + graphql_name="publishPost", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(PublishPostInput), graphql_name="input", default=None)),) + ), + ) + update_post = sgqlc.types.Field( + sgqlc.types.non_null("UpdatePostPayload"), + graphql_name="updatePost", + args=sgqlc.types.ArgDict( + 
(("input", sgqlc.types.Arg(sgqlc.types.non_null(UpdatePostInput), graphql_name="input", default=None)),) + ), + ) + remove_post = sgqlc.types.Field( + sgqlc.types.non_null("RemovePostPayload"), + graphql_name="removePost", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RemovePostInput), graphql_name="input", default=None)),) + ), + ) + restore_post = sgqlc.types.Field( + sgqlc.types.non_null("RestorePostPayload"), + graphql_name="restorePost", + args=sgqlc.types.ArgDict( + (("input", sgqlc.types.Arg(sgqlc.types.non_null(RestorePostInput), graphql_name="input", default=None)),) + ), + ) + + +# class OffsetPageInfo(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("has_next_page", "has_previous_page", "previous_page", "next_page") +# has_next_page = sgqlc.types.Field(Boolean, graphql_name="hasNextPage") +# has_previous_page = sgqlc.types.Field(Boolean, graphql_name="hasPreviousPage") +# previous_page = sgqlc.types.Field(Int, graphql_name="previousPage") +# next_page = sgqlc.types.Field(Int, graphql_name="nextPage") + + +class OpenGraphMetaData(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("image",) + image = sgqlc.types.Field(String, graphql_name="image") + + +class PageInfo(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("has_next_page", "end_cursor") + has_next_page = sgqlc.types.Field(Boolean, graphql_name="hasNextPage") + end_cursor = sgqlc.types.Field(String, graphql_name="endCursor") + + +class PostCoverImage(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("url", "is_portrait", "attribution", "photographer", "is_attribution_hidden") + url = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="url") + is_portrait = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isPortrait") + attribution = sgqlc.types.Field(String, graphql_name="attribution") + photographer = sgqlc.types.Field(String, graphql_name="photographer") + is_attribution_hidden = 
sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAttributionHidden") + + +class PostFeatures(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("table_of_contents", "badges") + table_of_contents = sgqlc.types.Field( + sgqlc.types.non_null("TableOfContentsFeature"), graphql_name="tableOfContents" + ) + + +class PostPreferences(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("pinned_to_blog", "disable_comments", "stick_cover_to_bottom", "is_delisted") + pinned_to_blog = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="pinnedToBlog") + disable_comments = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="disableComments") + stick_cover_to_bottom = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="stickCoverToBottom") + is_delisted = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isDelisted") + + +# class PublicationFeatures(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ( +# "view_count", +# "read_time", +# "audio_blog", +# "text_selection_sharer", +# "custom_css", +# "headless_cms", +# "pro_team", +# "gpt_bot_crawling", +# ) +# view_count = sgqlc.types.Field(sgqlc.types.non_null("ViewCountFeature"), graphql_name="viewCount") +# read_time = sgqlc.types.Field(sgqlc.types.non_null("ReadTimeFeature"), graphql_name="readTime") +# text_selection_sharer = sgqlc.types.Field( +# sgqlc.types.non_null("TextSelectionSharerFeature"), graphql_name="textSelectionSharer" +# ) +# custom_css = sgqlc.types.Field(sgqlc.types.non_null("CustomCSSFeature"), graphql_name="customCSS") +# headless_cms = sgqlc.types.Field(sgqlc.types.non_null("HeadlessCMSFeature"), graphql_name="headlessCMS") +# pro_team = sgqlc.types.Field(sgqlc.types.non_null("ProTeamFeature"), graphql_name="proTeam") +# gpt_bot_crawling = sgqlc.types.Field(sgqlc.types.non_null("GPTBotCrawlingFeature"), graphql_name="gptBotCrawling") + + +class PublicationIntegrations(sgqlc.types.Type): + __schema__ = schema + 
__field_names__ = ( + "fb_pixel_id", + "fathom_site_id", + "fathom_custom_domain_enabled", + "fathom_custom_domain", + "hotjar_site_id", + "matomo_site_id", + "matomo_url", + "ga_tracking_id", + "plausible_analytics_enabled", + "wm_payment_pointer", + "umami_website_uuid", + "umami_share_id", + "g_tag_manager_id", + "koala_public_key", + "ms_clarity_id", + ) + fb_pixel_id = sgqlc.types.Field(String, graphql_name="fbPixelID") + fathom_site_id = sgqlc.types.Field(String, graphql_name="fathomSiteID") + fathom_custom_domain_enabled = sgqlc.types.Field(Boolean, graphql_name="fathomCustomDomainEnabled") + fathom_custom_domain = sgqlc.types.Field(String, graphql_name="fathomCustomDomain") + hotjar_site_id = sgqlc.types.Field(String, graphql_name="hotjarSiteID") + matomo_site_id = sgqlc.types.Field(String, graphql_name="matomoSiteID") + matomo_url = sgqlc.types.Field(String, graphql_name="matomoURL") + ga_tracking_id = sgqlc.types.Field(String, graphql_name="gaTrackingID") + plausible_analytics_enabled = sgqlc.types.Field(Boolean, graphql_name="plausibleAnalyticsEnabled") + wm_payment_pointer = sgqlc.types.Field(String, graphql_name="wmPaymentPointer") + umami_website_uuid = sgqlc.types.Field(String, graphql_name="umamiWebsiteUUID") + umami_share_id = sgqlc.types.Field(String, graphql_name="umamiShareId") + g_tag_manager_id = sgqlc.types.Field(String, graphql_name="gTagManagerID") + koala_public_key = sgqlc.types.Field(String, graphql_name="koalaPublicKey") + ms_clarity_id = sgqlc.types.Field(String, graphql_name="msClarityID") + + +class PublicationLinks(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ( + "twitter", + "instagram", + "github", + "website", + "hashnode", + "youtube", + "dailydev", + "linkedin", + "mastodon", + "facebook", + "bluesky", + ) + twitter = sgqlc.types.Field(String, graphql_name="twitter") + instagram = sgqlc.types.Field(String, graphql_name="instagram") + github = sgqlc.types.Field(String, graphql_name="github") + website = 
sgqlc.types.Field(String, graphql_name="website") + hashnode = sgqlc.types.Field(String, graphql_name="hashnode") + youtube = sgqlc.types.Field(String, graphql_name="youtube") + dailydev = sgqlc.types.Field(String, graphql_name="dailydev") + linkedin = sgqlc.types.Field(String, graphql_name="linkedin") + mastodon = sgqlc.types.Field(String, graphql_name="mastodon") + facebook = sgqlc.types.Field(String, graphql_name="facebook") + bluesky = sgqlc.types.Field(String, graphql_name="bluesky") + + +# class PublishPostPayload(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("post",) +# post = sgqlc.types.Field("Post", graphql_name="post") + + +class Query(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ( + "top_commenters", + "me", + "tag", + "publication", + "feed", + "post", + "search_posts_of_publication", + ) + tag = sgqlc.types.Field( + "Tag", + graphql_name="tag", + args=sgqlc.types.ArgDict( + (("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),) + ), + ) + publication = sgqlc.types.Field( + "Publication", + graphql_name="publication", + args=sgqlc.types.ArgDict( + ( + ("id", sgqlc.types.Arg(ObjectId, graphql_name="id", default=None)), + ("host", sgqlc.types.Arg(String, graphql_name="host", default=None)), + ) + ), + ) + post = sgqlc.types.Field( + "Post", + graphql_name="post", + args=sgqlc.types.ArgDict((("id", sgqlc.types.Arg(sgqlc.types.non_null(ID), graphql_name="id", default=None)),)), + ) + search_posts_of_publication = sgqlc.types.Field( + sgqlc.types.non_null("SearchPostConnection"), + graphql_name="searchPostsOfPublication", + args=sgqlc.types.ArgDict( + ( + ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ( + "filter", + sgqlc.types.Arg( + sgqlc.types.non_null(SearchPostsOfPublicationFilter), graphql_name="filter", default=None + ), + ), + ) + ), + ) + + +# class 
RemovePostPayload(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("post",) +# post = sgqlc.types.Field("Post", graphql_name="post") + + +# class RestorePostPayload(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("post",) +# post = sgqlc.types.Field("Post", graphql_name="post") + + +class SEO(sgqlc.types.Type): + __schema__ = schema + __field_names__ = ("title", "description") + title = sgqlc.types.Field(String, graphql_name="title") + description = sgqlc.types.Field(String, graphql_name="description") + + +# class UpdatePostPayload(sgqlc.types.Type): +# __schema__ = schema +# __field_names__ = ("post",) +# post = sgqlc.types.Field("Post", graphql_name="post") + + +# class Badge(sgqlc.types.Type, Node): +# __schema__ = schema +# __field_names__ = ("name", "description", "image", "date_assigned", "info_url", "suppressed") +# name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") +# description = sgqlc.types.Field(String, graphql_name="description") +# image = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="image") +# date_assigned = sgqlc.types.Field(DateTime, graphql_name="dateAssigned") +# info_url = sgqlc.types.Field(String, graphql_name="infoURL") +# suppressed = sgqlc.types.Field(Boolean, graphql_name="suppressed") + + +# class CustomCSSFeature(sgqlc.types.Type, Feature): +# __schema__ = schema +# __field_names__ = "published" +# published = sgqlc.types.Field(CustomCSS, graphql_name="published") + + +# class FeedPostConnection(sgqlc.types.relay.Connection, Connection): +# __schema__ = schema +# __field_names__ = () + + +# class PopularTag(sgqlc.types.Type, ITag, Node): +# __schema__ = schema +# __field_names__ = ("posts_count_in_period",) +# posts_count_in_period = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="postsCountInPeriod") + + +# class PopularTagEdge(sgqlc.types.Type, Edge): +# __schema__ = schema +# __field_names__ = () + + +class Post(sgqlc.types.Type, Node): + __schema__ = 
schema + __field_names__ = ( + "slug", + "previous_slugs", + "title", + "subtitle", + "author", + "co_authors", + "tags", + "url", + "canonical_url", + "publication", + "cuid", + "cover_image", + "brief", + "read_time_in_minutes", + "views", + "series", + "reaction_count", + "response_count", + "featured", + "contributors", + "bookmarked", + "content", + "featured_at", + "published_at", + "updated_at", + "preferences", + "audio_urls", + "seo", + "og_meta_data", + "has_latex_in_post", + "is_followed", + "is_auto_published_from_rss", + "features", + "sourced_from_github", + ) + slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") + previous_slugs = sgqlc.types.Field( + sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name="previousSlugs" + ) + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") + subtitle = sgqlc.types.Field(String, graphql_name="subtitle") + tags = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null("Tag")), graphql_name="tags") + url = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="url") + canonical_url = sgqlc.types.Field(String, graphql_name="canonicalUrl") + publication = sgqlc.types.Field("Publication", graphql_name="publication") + cuid = sgqlc.types.Field(String, graphql_name="cuid") + cover_image = sgqlc.types.Field(PostCoverImage, graphql_name="coverImage") + brief = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="brief") + read_time_in_minutes = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="readTimeInMinutes") + views = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="views") + series = sgqlc.types.Field("Series", graphql_name="series") + reaction_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="reactionCount") + response_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="responseCount") + featured = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="featured") + bookmarked = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="bookmarked") + content = sgqlc.types.Field(sgqlc.types.non_null(Content), graphql_name="content") + featured_at = sgqlc.types.Field(DateTime, graphql_name="featuredAt") + published_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="publishedAt") + updated_at = sgqlc.types.Field(DateTime, graphql_name="updatedAt") + preferences = sgqlc.types.Field(sgqlc.types.non_null(PostPreferences), graphql_name="preferences") + audio_urls = sgqlc.types.Field(AudioUrls, graphql_name="audioUrls") + seo = sgqlc.types.Field(SEO, graphql_name="seo") + og_meta_data = sgqlc.types.Field(OpenGraphMetaData, graphql_name="ogMetaData") + has_latex_in_post = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hasLatexInPost") + is_followed = sgqlc.types.Field(Boolean, graphql_name="isFollowed") + is_auto_published_from_rss = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isAutoPublishedFromRSS") + features = sgqlc.types.Field(sgqlc.types.non_null(PostFeatures), graphql_name="features") + sourced_from_github = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="sourcedFromGithub") + + +class Publication(sgqlc.types.Type, Node): + __schema__ = schema + __field_names__ = ( + "title", + "display_title", + "description_seo", + "about", + "url", + "canonical_url", + "author", + "favicon", + "header_color", + "meta_tags", + "integrations", + "followers_count", + "imprint", + "imprint_v2", + "is_team", + "links", + "domain_info", + "is_headless", + "series", + "series_list", + "posts", + "posts_via_page", + "pinned_post", + "post", + "redirected_post", + "og_meta_data", + "features", + "static_page", + "static_pages", + "is_git_hub_backup_enabled", + "is_github_as_source_connected", + "url_pattern", + "has_badges", + "sponsorship", + "allow_contributor_edits", + ) + title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") 
+ display_title = sgqlc.types.Field(String, graphql_name="displayTitle") + description_seo = sgqlc.types.Field(String, graphql_name="descriptionSEO") + about = sgqlc.types.Field(Content, graphql_name="about") + url = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="url") + canonical_url = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="canonicalURL") + favicon = sgqlc.types.Field(String, graphql_name="favicon") + header_color = sgqlc.types.Field(String, graphql_name="headerColor") + integrations = sgqlc.types.Field(PublicationIntegrations, graphql_name="integrations") + followers_count = sgqlc.types.Field(Int, graphql_name="followersCount") + imprint = sgqlc.types.Field(String, graphql_name="imprint") + imprint_v2 = sgqlc.types.Field(Content, graphql_name="imprintV2") + is_team = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isTeam") + links = sgqlc.types.Field(PublicationLinks, graphql_name="links") + domain_info = sgqlc.types.Field(sgqlc.types.non_null(DomainInfo), graphql_name="domainInfo") + is_headless = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isHeadless") + series = sgqlc.types.Field( + "Series", + graphql_name="series", + args=sgqlc.types.ArgDict( + (("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),) + ), + ) + series_list = sgqlc.types.Field( + sgqlc.types.non_null("SeriesConnection"), + graphql_name="seriesList", + args=sgqlc.types.ArgDict( + ( + ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ) + ), + ) + posts = sgqlc.types.Field( + sgqlc.types.non_null("PublicationPostConnection"), + graphql_name="posts", + args=sgqlc.types.ArgDict( + ( + ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ("filter", 
sgqlc.types.Arg(PublicationPostConnectionFilter, graphql_name="filter", default=None)), + ) + ), + ) + posts_via_page = sgqlc.types.Field( + sgqlc.types.non_null("PublicationPostPageConnection"), + graphql_name="postsViaPage", + args=sgqlc.types.ArgDict( + ( + ("page_size", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="pageSize", default=None)), + ("page", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="page", default=None)), + ("filter", sgqlc.types.Arg(PublicationPostsViaPageFilter, graphql_name="filter", default=None)), + ) + ), + ) + pinned_post = sgqlc.types.Field(Post, graphql_name="pinnedPost") + post = sgqlc.types.Field( + Post, + graphql_name="post", + args=sgqlc.types.ArgDict( + (("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),) + ), + ) + redirected_post = sgqlc.types.Field( + Post, + graphql_name="redirectedPost", + args=sgqlc.types.ArgDict( + (("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),) + ), + ) + static_page = sgqlc.types.Field( + "StaticPage", + graphql_name="staticPage", + args=sgqlc.types.ArgDict( + (("slug", sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name="slug", default=None)),) + ), + ) + static_pages = sgqlc.types.Field( + sgqlc.types.non_null("StaticPageConnection"), + graphql_name="staticPages", + args=sgqlc.types.ArgDict( + ( + ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), + ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), + ) + ), + ) + is_git_hub_backup_enabled = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="isGitHubBackupEnabled") + is_github_as_source_connected = sgqlc.types.Field( + sgqlc.types.non_null(Boolean), graphql_name="isGithubAsSourceConnected" + ) + url_pattern = sgqlc.types.Field(sgqlc.types.non_null(UrlPattern), graphql_name="urlPattern") + has_badges = sgqlc.types.Field(sgqlc.types.non_null(Boolean), 
graphql_name="hasBadges") + allow_contributor_edits = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="allowContributorEdits") + + +# class ReadTimeFeature(sgqlc.types.Type, Feature): +# __schema__ = schema +# __field_names__ = () + + +# class SearchPostConnection(sgqlc.types.relay.Connection, Connection): +# __schema__ = schema +# __field_names__ = () + + +# class Series(sgqlc.types.Type, Node): +# __schema__ = schema +# __field_names__ = ( +# "name", +# "created_at", +# "description", +# "cover_image", +# "author", +# "cuid", +# "slug", +# "sort_order", +# "posts", +# ) +# name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="name") +# created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name="createdAt") +# description = sgqlc.types.Field(Content, graphql_name="description") +# cover_image = sgqlc.types.Field(String, graphql_name="coverImage") +# cuid = sgqlc.types.Field(ID, graphql_name="cuid") +# slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") +# sort_order = sgqlc.types.Field(sgqlc.types.non_null(SortOrder), graphql_name="sortOrder") +# posts = sgqlc.types.Field( +# sgqlc.types.non_null("SeriesPostConnection"), +# graphql_name="posts", +# args=sgqlc.types.ArgDict( +# ( +# ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), +# ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), +# ) +# ), +# ) + + +# class StaticPage(sgqlc.types.Type, Node): +# __schema__ = schema +# __field_names__ = ("title", "slug", "content", "hidden", "og_meta_data", "seo") +# title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") +# slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") +# content = sgqlc.types.Field(sgqlc.types.non_null(Content), graphql_name="content") +# hidden = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name="hidden") +# og_meta_data = sgqlc.types.Field(OpenGraphMetaData, 
graphql_name="ogMetaData") +# seo = sgqlc.types.Field(SEO, graphql_name="seo") + + +# class TableOfContentsFeature(sgqlc.types.Type, Feature): +# __schema__ = schema +# __field_names__ = ("items",) +# items = sgqlc.types.Field( +# sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null("TableOfContentsItem"))), graphql_name="items" +# ) + + +# class TableOfContentsItem(sgqlc.types.Type, Node): +# __schema__ = schema +# __field_names__ = ("level", "slug", "title", "parent_id") +# level = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="level") +# slug = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="slug") +# title = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="title") +# parent_id = sgqlc.types.Field(ID, graphql_name="parentId") + + +# class Tag(sgqlc.types.Type, ITag, Node): +# __schema__ = schema +# __field_names__ = ("posts",) +# posts = sgqlc.types.Field( +# sgqlc.types.non_null(FeedPostConnection), +# graphql_name="posts", +# args=sgqlc.types.ArgDict( +# ( +# ("first", sgqlc.types.Arg(sgqlc.types.non_null(Int), graphql_name="first", default=None)), +# ("after", sgqlc.types.Arg(String, graphql_name="after", default=None)), +# ( +# "filter", +# sgqlc.types.Arg(sgqlc.types.non_null(TagPostConnectionFilter), graphql_name="filter", default=None), +# ), +# ) +# ), +# ) + + +# class TagEdge(sgqlc.types.Type, Edge): +# __schema__ = schema +# __field_names__ = () + + +# class TextSelectionSharerFeature(sgqlc.types.Type, Feature): +# __schema__ = schema +# __field_names__ = () + + +# class ViewCountFeature(sgqlc.types.Type, Feature): +# __schema__ = schema +# __field_names__ = () + + +######################################################################## +# Schema Entry Points +######################################################################## +schema.query_type = Query +schema.mutation_type = Mutation +schema.subscription_type = None diff --git a/src/settings.py b/src/settings.py new file mode 100644 index 
0000000..7107fd5 --- /dev/null +++ b/src/settings.py @@ -0,0 +1,39 @@ +"""Application settings using Pydantic for validation.""" +from pathlib import Path + +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + """Application settings using Pydantic for validation.""" + + ACCESS_TOKEN: str + POSTS_DIRECTORY: Path = Path("") + PUBLICATION_HOST: str + GITHUB_REPOSITORY: str + GITHUB_REF: str + GITHUB_OUTPUT: str | None = None + + ADDED_FILES: list[Path] = [] + CHANGED_FILES: list[Path] = [] + + HASHNODE_API_URL: str = "https://gql.hashnode.com" + GITHUB_RAW_URL: str = "https://raw.githubusercontent.com" + + class Config: + """Pydantic configuration.""" + + env_file = ".env" + + @property + def headers(self) -> dict[str, str]: + """Get API headers with authorization.""" + return {"Authorization": f"Bearer {self.ACCESS_TOKEN}"} + + @property + def branch(self) -> str: + """Get Git branch name.""" + return self.GITHUB_REF.split("/")[-1] + + +settings = Settings() diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..9c4ecbb --- /dev/null +++ b/uv.lock @@ -0,0 +1,216 @@ +version = 1 +requires-python = ">=3.13" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = 
"https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "graphql-core" +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/b5/ebc6fe3852e2d2fdaf682dddfc366934f3d2c9ef9b6d1b0e6ca348d936ba/graphql_core-3.2.5.tar.gz", hash = "sha256:e671b90ed653c808715645e3998b7ab67d382d55467b7e2978549111bbabf8d5", size = 504664 } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/e3/dc/078bd6b304de790618ebb95e2aedaadb78f4527ac43a9ad8815f006636b6/graphql_core-3.2.5-py3-none-any.whl", hash = "sha256:2f150d5096448aa4f8ab26268567bbfeef823769893b39c1a2e1409590939c8a", size = 203189 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "publish-github-to-hashnode" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "pydantic-settings" }, + { name = "python-frontmatter" }, + { name = "requests" }, + { name = "sgqlc" }, +] + +[package.metadata] +requires-dist = [ + { name = "pydantic-settings", specifier = ">=2.6.1" }, + { name = "python-frontmatter", specifier = ">=1.1.0" }, + { name = "requests", specifier = ">=2.32.3" }, + { name = "sgqlc", specifier = ">=16.4" }, +] + +[[package]] +name = "pydantic" +version = "2.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/78/58c36d0cf331b659d0ccd99175e3523c457b4f8e67cb92a8fdc22ec1667c/pydantic-2.10.0.tar.gz", hash = "sha256:0aca0f045ff6e2f097f1fe89521115335f15049eeb8a7bef3dafe4b19a74e289", size = 781980 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/ee/255cbfdbf5c47650de70ac8a5425107511f505ed0366c29d537f7f1842e1/pydantic-2.10.0-py3-none-any.whl", hash = 
"sha256:5e7807ba9201bdf61b1b58aa6eb690916c40a47acfb114b1b4fef3e7fd5b30fc", size = 454346 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/cd/8331ae216bcc5a3f2d4c6b941c9f63de647e2700d38133f4f7e0132a00c4/pydantic_core-2.27.0.tar.gz", hash = "sha256:f57783fbaf648205ac50ae7d646f27582fc706be3977e87c3c124e7a92407b10", size = 412675 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/b2/740159bdfe532d856e340510246aa1fd723b97cadf1a38153bdfb52efa28/pydantic_core-2.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5f2b19b8d6fca432cb3acf48cf5243a7bf512988029b6e6fd27e9e8c0a204d85", size = 1886935 }, + { url = "https://files.pythonhosted.org/packages/ca/2a/2f435d9fd591c912ca227f29c652a93775d35d54677b57c3157bbad823b5/pydantic_core-2.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c86679f443e7085ea55a7376462553996c688395d18ef3f0d3dbad7838f857a2", size = 1805318 }, + { url = "https://files.pythonhosted.org/packages/ba/f2/755b628009530b19464bb95c60f829b47a6ef7930f8ca1d87dac90fd2848/pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510b11e9c3b1a852876d1ccd8d5903684336d635214148637ceb27366c75a467", size = 1822284 }, + { url = "https://files.pythonhosted.org/packages/3d/c2/a12744628b1b55c5384bd77657afa0780868484a92c37a189fb460d1cfe7/pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb704155e73b833801c247f39d562229c0303f54770ca14fb1c053acb376cf10", size = 1848522 }, + { url = "https://files.pythonhosted.org/packages/60/1d/dfcb8ab94a4637d4cf682550a2bf94695863988e7bcbd6f4d83c04178e17/pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce048deb1e033e7a865ca384770bccc11d44179cf09e5193a535c4c2f497bdc", size = 2031678 }, + { url = 
"https://files.pythonhosted.org/packages/ee/c8/f9cbcab0275e031c4312223c75d999b61fba60995003cd89dc4866300059/pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58560828ee0951bb125c6f2862fbc37f039996d19ceb6d8ff1905abf7da0bf3d", size = 2672948 }, + { url = "https://files.pythonhosted.org/packages/41/f9/c613546237cf58ed7a7fa9158410c14d0e7e0cbbf95f83a905c9424bb074/pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb4785894936d7682635726613c44578c420a096729f1978cd061a7e72d5275", size = 2152419 }, + { url = "https://files.pythonhosted.org/packages/49/71/b951b03a271678b1d1b79481dac38cf8bce8a4e178f36ada0e9aff65a679/pydantic_core-2.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2883b260f7a93235488699d39cbbd94fa7b175d3a8063fbfddd3e81ad9988cb2", size = 1986408 }, + { url = "https://files.pythonhosted.org/packages/9a/2c/07b0d5b5e1cdaa07b7c23e758354377d294ff0395116d39c9fa734e5d89e/pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6fcb3fa3855d583aa57b94cf146f7781d5d5bc06cb95cb3afece33d31aac39b", size = 1995895 }, + { url = "https://files.pythonhosted.org/packages/63/09/c21e0d7438c7e742209cc8603607c8d389df96018396c8a2577f6e24c5c5/pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:e851a051f7260e6d688267eb039c81f05f23a19431bd7dfa4bf5e3cb34c108cd", size = 2085914 }, + { url = "https://files.pythonhosted.org/packages/68/e4/5ed8f09d92655dcd0a86ee547e509adb3e396cef0a48f5c31e3b060bb9d0/pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edb1bfd45227dec8d50bc7c7d86463cd8728bcc574f9b07de7369880de4626a3", size = 2150217 }, + { url = "https://files.pythonhosted.org/packages/cd/e6/a202f0e1b81c729130404e82d9de90dc4418ec01df35000d48d027c38501/pydantic_core-2.27.0-cp313-none-win32.whl", hash = "sha256:678f66462058dd978702db17eb6a3633d634f7aa0deaea61e0a674152766d3fc", size = 1830973 }, + { url = 
"https://files.pythonhosted.org/packages/06/3d/21ed0f308e6618ce6c5c6bfb9e71734a9a3256d5474a53c8e5aaaba498ca/pydantic_core-2.27.0-cp313-none-win_amd64.whl", hash = "sha256:d28ca7066d6cdd347a50d8b725dc10d9a1d6a1cce09836cf071ea6a2d4908be0", size = 1974853 }, + { url = "https://files.pythonhosted.org/packages/d7/18/e5744a132b81f98b9f92e15f33f03229a1d254ce7af942b1422ec2ac656f/pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d", size = 1877469 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "python-frontmatter" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/de/910fa208120314a12f9a88ea63e03707261692af782c99283f1a2c8a5e6f/python-frontmatter-1.1.0.tar.gz", hash = "sha256:7118d2bd56af9149625745c58c9b51fb67e8d1294a0c76796dafdc72c36e5f6d", size = 16256 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/87/3c8da047b3ec5f99511d1b4d7a5bc72d4b98751c7e78492d14dc736319c5/python_frontmatter-1.1.0-py3-none-any.whl", hash = "sha256:335465556358d9d0e6c98bbeb69b1c969f2a4a21360587b9873bfc3b213407c1", size = 9834 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "sgqlc" +version = "16.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "graphql-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/14/ab/a37f739b4cadd77ac9603cb473db545462062784cbbb2f2d2c45309eb986/sgqlc-16.4.tar.gz", hash = "sha256:a1a32db1c573edae229dbb61f6ae0a546aecceaaed2bac0652992a73e8c95017", size = 242759 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/b7/82120310147682c2980f245d8d691cc5f3b87772013ff09bd0a917ce0df9/sgqlc-16.4-py3-none-any.whl", hash = "sha256:91f9e7e624c76613288b917f583c6b19c8d908a213199f1cb6e918e8f3901246", size = 82204 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +]