Skip to content

Commit

Permalink
Add ruff and pre-commit linting (#15)
Browse files Browse the repository at this point in the history
  • Loading branch information
jaimergp authored Nov 4, 2024
1 parent 429015b commit 09962eb
Show file tree
Hide file tree
Showing 13 changed files with 312 additions and 146 deletions.
2 changes: 1 addition & 1 deletion .devcontainer/pixi/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ RUN curl -L -o /usr/local/bin/pixi -fsSL --compressed "https://github.com/prefix
USER vscode
WORKDIR /home/vscode

RUN echo 'eval "$(pixi completion -s bash)"' >> /home/vscode/.bashrc
RUN echo 'eval "$(pixi completion -s bash)"' >> /home/vscode/.bashrc
2 changes: 1 addition & 1 deletion .devcontainer/pixi/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,4 +30,4 @@
"forwardPorts": [
8501
]
}
}
2 changes: 1 addition & 1 deletion .devcontainer/streamlit-cloud/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,4 +31,4 @@
"forwardPorts": [
8501
]
}
}
30 changes: 30 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# disable autofixing PRs; commenting "pre-commit.ci autofix" on a pull request triggers an autofix
ci:
autofix_prs: false
repos:
# generic verification and formatting
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
# standard end of line/end of file cleanup
- id: mixed-line-ending
- id: end-of-file-fixer
- id: trailing-whitespace
# ensure syntaxes are valid
- id: check-toml
- id: check-yaml
# catch git merge/rebase problems
- id: check-merge-conflict
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.1
hooks:
# lint & attempt to correct failures (e.g. pyupgrade)
- id: ruff
args: [--fix]
# compatible replacement for black
- id: ruff-format
- repo: meta
# see https://pre-commit.com/#meta-hooks
hooks:
- id: check-hooks-apply
- id: check-useless-excludes
1 change: 0 additions & 1 deletion .streamlit/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,3 @@ gatherUsageStats = false

[logger]
level = "debug"

2 changes: 1 addition & 1 deletion app_config.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -362,4 +362,4 @@
],
"title": "AppConfig",
"type": "object"
}
}
12 changes: 6 additions & 6 deletions conda_metadata_app/app.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
from pathlib import Path

import streamlit as st

from conda_metadata_app.app_config import AppConfig

"""
If deploying a streamlit app as a Python module, we cannot use
the automatic pages/ subpages. Instead, we need to define the
pages manually.
"""

from pathlib import Path

import streamlit as st

from conda_metadata_app.app_config import AppConfig

pages_dir = Path(__file__).parent / "pages"

pages = [
Expand Down
62 changes: 46 additions & 16 deletions conda_metadata_app/app_config.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,29 @@
"""
Pydantic models to generate the schemas of the app_config.toml configuration files
"""

import functools
import json
import os
from collections.abc import Iterable
from enum import StrEnum
from typing import Self, Literal

from pydantic import AnyHttpUrl, BaseModel, field_validator, TypeAdapter, ValidationError, model_validator, ConfigDict
from pydantic_settings import BaseSettings, SettingsConfigDict, PydanticBaseSettingsSource, TomlConfigSettingsSource
from typing import Literal, Self

from pydantic import (
AnyHttpUrl,
BaseModel,
ConfigDict,
TypeAdapter,
ValidationError,
field_validator,
model_validator,
)
from pydantic_settings import (
BaseSettings,
PydanticBaseSettingsSource,
SettingsConfigDict,
TomlConfigSettingsSource,
)


class HashableBaseModel(BaseModel):
Expand Down Expand Up @@ -63,6 +80,7 @@ class PackageFilter(BaseModel):
By default, no packages are filtered. By setting allowed_names or allowed_prefixes, only packages that match
the criteria will be considered.
"""

allowed_names: list[str] = []
"""
Whitelist of package names. Only packages with names in this list will be considered.
Expand Down Expand Up @@ -163,7 +181,9 @@ class Channel(BaseModel):
"""
How to discover artifacts in the channel, given a package name.
"""
arch_subdir_discovery: ArchSubdirDiscoveryChoice | ArchSubdirList = ArchSubdirDiscoveryChoice.CHANNELDATA
arch_subdir_discovery: ArchSubdirDiscoveryChoice | ArchSubdirList = (
ArchSubdirDiscoveryChoice.CHANNELDATA
)
"""
How to discover architecture subdirectories in the channel.
Use an ArchSubdirList to specify a list of subdirectories.
Expand All @@ -178,15 +198,15 @@ class Channel(BaseModel):
"""
Enable this for conda-forge to map package names to feedstock names.
This is used for provenance URLs (see below).
If this is False, the package name is used as the feedstock name.
"""
provenance_url_pattern: str | None = None
"""
A URL pattern to link to the provenance of a package. The URL pattern should contain a `{feedstock}` placeholder
for the feedstock (!) name (see map_conda_forge_package_to_feedstock).
Each placeholder will be replaced with the feedstock name.
For conda-forge, this should be https://github.com/conda-forge/{feedstock}-feedstock.
A remote URL present in the metadata always takes precedence over this URL pattern.
"""
Expand All @@ -204,7 +224,9 @@ def _validate_provenance_url_pattern(cls, provenance_url_pattern: str | None) ->
try:
TypeAdapter(AnyHttpUrl).validate_python(replaced_url)
except ValidationError:
raise ValueError("provenance_url_pattern must be a valid URL pattern with a {} placeholder.")
raise ValueError(
"provenance_url_pattern must be a valid URL pattern with a {} placeholder."
)

return provenance_url_pattern

Expand All @@ -220,8 +242,13 @@ def _validate_provenance_url_pattern(cls, provenance_url_pattern: str | None) ->

@model_validator(mode="after")
def check_supports_broken_label_artifact_discovery(self) -> Self:
if self.supports_broken_label and self.artifact_discovery != ArtifactDiscoveryChoice.ANACONDA_API:
raise ValueError("supports_broken_label is only supported for Anaconda API artifact discovery.")
if (
self.supports_broken_label
and self.artifact_discovery != ArtifactDiscoveryChoice.ANACONDA_API
):
raise ValueError(
"supports_broken_label is only supported for Anaconda API artifact discovery."
)
return self

dashboards: list[str] = []
Expand Down Expand Up @@ -281,7 +308,6 @@ def _check_single_or_no_auth(self) -> Self:

return self


@property
def rss_url(self) -> str:
return f"{self.url}/rss.xml"
Expand All @@ -296,7 +322,9 @@ def get_repodata_url(self, arch_subdir: str) -> str:
def get_zstd_repodata_url(self, arch_subdir: str) -> str:
return self.get_repodata_url(arch_subdir) + ".zst"

def get_artifact_download_url(self, arch_subdir: str, package_name: str, version: str, build_string: str, extension: str) -> str:
def get_artifact_download_url(
self, arch_subdir: str, package_name: str, version: str, build_string: str, extension: str
) -> str:
return f"{self.url}/{arch_subdir}/{package_name}-{version}-{build_string}.{extension}"

model_config = ConfigDict(extra="forbid", use_attribute_docstrings=True)
Expand All @@ -306,7 +334,7 @@ class Dashboard(BaseModel):
url_pattern: str
"""
The URL pattern of the dashboard. The URL pattern can contain the following placeholders within curly {} braces:
- `channel`: The channel name. If the channel name contains a slash, only the second part is used.
- `name`: The name of the package.
- `version`: The version of the package.
Expand Down Expand Up @@ -335,7 +363,6 @@ def _validate_url_pattern(cls, url_pattern: str) -> str:
model_config = ConfigDict(extra="forbid", use_attribute_docstrings=True)



class AppConfig(BaseSettings):
channels: dict[str, Channel]
"""
Expand Down Expand Up @@ -372,7 +399,9 @@ def _validate_dashboards(self) -> Self:
raise ValueError(f"Dashboard {dashboard_name} is not defined.")
return self

model_config = SettingsConfigDict(toml_file="app_config.toml", extra="forbid", use_attribute_docstrings=True)
model_config = SettingsConfigDict(
toml_file="app_config.toml", extra="forbid", use_attribute_docstrings=True
)

@classmethod
def settings_customise_sources(
Expand All @@ -389,7 +418,8 @@ def settings_customise_sources(
def export_json_schema() -> None:
with open("app_config.schema.json", "w") as f:
json.dump(AppConfig.model_json_schema(), f, indent=2)
f.write("\n")


if __name__ == '__main__':
if __name__ == "__main__":
export_json_schema()
Loading

0 comments on commit 09962eb

Please sign in to comment.