overall ruff error fix
mustafasoylu committed Nov 29, 2023
1 parent 5c3d364 commit c737800
Showing 12 changed files with 54 additions and 143 deletions.
24 changes: 6 additions & 18 deletions src/somesy/cli/init.py
@@ -26,39 +26,29 @@ def config():
input_file = Path(input_file)
options = {
"input_file": input_file,
"no_sync_cff": not typer.confirm(
"Do you want to sync to a CFF file?", default=True
),
"no_sync_cff": not typer.confirm("Do you want to sync to a CFF file?", default=True),
}
cff_file = typer.prompt("CFF file path", default="CITATION.cff")
if cff_file is not None or cff_file != "":
options["cff_file"] = cff_file

options["no_sync_pyproject"] = not typer.confirm(
"Do you want to sync to a pyproject.toml file?", default=True
)
options["no_sync_pyproject"] = not typer.confirm("Do you want to sync to a pyproject.toml file?", default=True)

pyproject_file = typer.prompt("pyproject.toml file path", default="pyproject.toml")
if pyproject_file is not None or pyproject_file != "":
options["pyproject_file"] = pyproject_file

options["sync_package_json"] = typer.confirm(
"Do you want to sync to a package.json file?", default=False
)
options["sync_package_json"] = typer.confirm("Do you want to sync to a package.json file?", default=False)
package_json_file = typer.prompt("package.json file path", default="package.json")
if package_json_file is not None or package_json_file != "":
options["package_json_file"] = package_json_file

options["no_sync_codemeta"] = not typer.confirm(
"Do you want to sync to a codemeta.json file?", default=True
)
options["no_sync_codemeta"] = not typer.confirm("Do you want to sync to a codemeta.json file?", default=True)
codemeta_file = typer.prompt("codemeta.json file path", default="codemeta.json")
if codemeta_file is not None or codemeta_file != "":
options["codemeta_file"] = codemeta_file

options["show_info"] = typer.confirm(
"Do you want to show info about the sync process?"
)
options["show_info"] = typer.confirm("Do you want to show info about the sync process?")
options["verbose"] = typer.confirm("Do you want to show verbose logs?")
options["debug"] = typer.confirm("Do you want to show debug logs?")

@@ -73,6 +63,4 @@ def config():
logger.debug(f"CLI options entered: {options}")

init_config(input_file, options)
logger.info(
f"[bold green]Input file is updated/created at {input_file}[/bold green]"
)
logger.info(f"[bold green]Input file is updated/created at {input_file}[/bold green]")
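The init.py hunks above collect the sync targets interactively with typer's prompting helpers. For readers unfamiliar with that API, here is a minimal, self-contained sketch of the same pattern (a hypothetical script, not somesy's actual code); note that answering "yes" to a confirm prompt stores False in the corresponding no_sync_* flag:

```python
import typer


def ask() -> None:
    """Minimal sketch of the somesy-init prompt pattern (illustration only)."""
    options = {
        # A "yes" answer means "do sync", so the stored no_sync_* flag is inverted.
        "no_sync_cff": not typer.confirm("Do you want to sync to a CFF file?", default=True),
        # typer.prompt returns the entered text, or the default if the user just hits Enter.
        "cff_file": typer.prompt("CFF file path", default="CITATION.cff"),
    }
    typer.echo(options)


if __name__ == "__main__":
    typer.run(ask)
```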
12 changes: 3 additions & 9 deletions src/somesy/cli/sync.py
@@ -123,19 +123,13 @@ def run_sync(somesy_input: SomesyInput):
logger.info("[bold green]Synchronizing project metadata...[/bold green]")
logger.info("Files to sync:")
if not conf.no_sync_pyproject:
logger.info(
f" - [italic]pyproject.toml[/italic]:\t[grey]{conf.pyproject_file}[/grey]"
)
logger.info(f" - [italic]pyproject.toml[/italic]:\t[grey]{conf.pyproject_file}[/grey]")
if not conf.no_sync_package_json:
logger.info(
f" - [italic]package.json[/italic]:\t[grey]{conf.package_json_file}[/grey]"
)
logger.info(f" - [italic]package.json[/italic]:\t[grey]{conf.package_json_file}[/grey]")
if not conf.no_sync_cff:
logger.info(f" - [italic]CITATION.cff[/italic]:\t[grey]{conf.cff_file}[/grey]")
if not conf.no_sync_codemeta:
logger.info(
f" - [italic]codemeta.json[/italic]:\t[grey]{conf.codemeta_file}[/grey]\n"
)
logger.info(f" - [italic]codemeta.json[/italic]:\t[grey]{conf.codemeta_file}[/grey]\n")
# ----
sync_command(somesy_input)
# ----
6 changes: 5 additions & 1 deletion src/somesy/core/types.py
@@ -519,7 +519,11 @@ class ContributionTypeEnum(MyEnum):


class Country(MyEnum):
"""Country codes from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 . It is used for the country of a person in project metadata."""
"""
Country codes from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2.
It is used for the country of a person in project metadata.
"""

AD = "AD"
AE = "AE"
12 changes: 3 additions & 9 deletions src/somesy/main.py
@@ -24,9 +24,7 @@ def version(value: bool):
@app.callback()
def common(
ctx: typer.Context,
version: bool = typer.Option(
None, "--version", help=version.__doc__, callback=version
),
version: bool = typer.Option(None, "--version", help=version.__doc__, callback=version),
show_info: bool = typer.Option(
None,
"--info",
@@ -50,18 +48,14 @@ def common(
init_log()

if sum(map(int, map(bool, [show_info, verbose, debug]))) > 1:
typer.echo(
"Only one of --info, --verbose or --debug may be set!", file=sys.stderr
)
typer.echo("Only one of --info, --verbose or --debug may be set!", file=sys.stderr)
raise typer.Exit(1)

if show_info or verbose or debug:
# NOTE: only explicitly setting log level if a flag is passed,
# in order to distinguish from using the "default log level"
# (needed to check if the user did override the log level as a CLI flag)
set_log_level(
SomesyLogLevel.from_flags(info=show_info, verbose=verbose, debug=debug)
)
set_log_level(SomesyLogLevel.from_flags(info=show_info, verbose=verbose, debug=debug))


# add subcommands
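The `sum(map(int, map(bool, [show_info, verbose, debug]))) > 1` guard in main.py above simply counts how many of the three log-level flags are truthy and rejects more than one. A tiny equivalent illustration (not part of this commit):

```python
def more_than_one_set(*flags: object) -> bool:
    """Return True if more than one of the given flags is truthy."""
    return sum(bool(flag) for flag in flags) > 1


assert more_than_one_set(True, False, True)       # e.g. --info and --debug together -> rejected
assert not more_than_one_set(False, True, False)  # only --verbose -> allowed
```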
26 changes: 7 additions & 19 deletions src/somesy/package_json/models.py
@@ -13,9 +13,7 @@ class PackageAuthor(BaseModel):

name: Annotated[Optional[str], Field(description="Author name")]
email: Annotated[Optional[EmailStr], Field(description="Author email")]
url: Annotated[
Optional[HttpUrlStr], Field(description="Author website or orcid page")
]
url: Annotated[Optional[HttpUrlStr], Field(description="Author website or orcid page")]


class PackageRepository(BaseModel):
@@ -34,7 +32,7 @@ class PackageLicense(BaseModel):

NPM_PKG_AUTHOR = r"^(.*?)\s*(?:<([^>]+)>)?\s*(?:\(([^)]+)\))?$"
NPM_PKG_NAME = r"^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$"
NPM_PKG_VERSION = r"^(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)(?:-(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
NPM_PKG_VERSION = r"^(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)(?:-(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" # noqa: E501


class PackageJsonConfig(BaseModel):
@@ -45,9 +43,7 @@ class PackageJsonConfig(BaseModel):
name: Annotated[str, Field(description="Package name")]
version: Annotated[str, Field(description="Package version")]
description: Annotated[Optional[str], Field(description="Package description")]
author: Annotated[
Optional[Union[str, PackageAuthor]], Field(description="Package author")
]
author: Annotated[Optional[Union[str, PackageAuthor]], Field(description="Package author")]
maintainers: Annotated[
Optional[List[Union[str, PackageAuthor]]],
Field(description="Package maintainers"),
@@ -56,18 +52,10 @@ class PackageJsonConfig(BaseModel):
Optional[List[Union[str, PackageAuthor]]],
Field(description="Package contributors"),
] = None
license: Annotated[
Optional[Union[str, PackageLicense]], Field(description="Package license")
]
repository: Annotated[
Optional[Union[PackageRepository, str]], Field(description="Package repository")
]
homepage: Annotated[
Optional[HttpUrlStr], Field(description="Package homepage")
] = None
keywords: Annotated[
Optional[List[str]], Field(description="Keywords that describe the package")
] = None
license: Annotated[Optional[Union[str, PackageLicense]], Field(description="Package license")]
repository: Annotated[Optional[Union[PackageRepository, str]], Field(description="Package repository")]
homepage: Annotated[Optional[HttpUrlStr], Field(description="Package homepage")] = None
keywords: Annotated[Optional[List[str]], Field(description="Keywords that describe the package")] = None

# convert package author to dict if it is a string
@classmethod
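The NPM_PKG_AUTHOR pattern shown above accepts npm's `Name <email> (url)` author shorthand, with the email and URL parts optional. A small usage sketch (the sample author string is made up):

```python
import re

# Same pattern as NPM_PKG_AUTHOR above: "Name <email> (url)", email and url optional.
NPM_PKG_AUTHOR = r"^(.*?)\s*(?:<([^>]+)>)?\s*(?:\(([^)]+)\))?$"

match = re.match(NPM_PKG_AUTHOR, "Jane Doe <jane@example.com> (https://example.com)")
assert match is not None
name, email, url = match.groups()
print(name, email, url)  # Jane Doe jane@example.com https://example.com

# Email and URL may be missing entirely:
assert re.match(NPM_PKG_AUTHOR, "Jane Doe").group(1) == "Jane Doe"
```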
48 changes: 14 additions & 34 deletions src/somesy/pyproject/models.py
@@ -42,39 +42,23 @@ class PoetryConfig(BaseModel):
Field(description="An SPDX license identifier."),
]
authors: Annotated[Set[str], Field(description="Package authors")]
maintainers: Annotated[
Optional[Set[str]], Field(description="Package maintainers")
] = None
readme: Annotated[
Optional[Union[Path, List[Path]]], Field(description="Package readme file(s)")
] = None
homepage: Annotated[
Optional[HttpUrlStr], Field(description="Package homepage")
] = None
repository: Annotated[
Optional[HttpUrlStr], Field(description="Package repository")
] = None
documentation: Annotated[
Optional[HttpUrlStr], Field(description="Package documentation page")
] = None
keywords: Annotated[
Optional[Set[str]], Field(description="Keywords that describe the package")
] = None
classifiers: Annotated[
Optional[List[str]], Field(description="pypi classifiers")
] = None
urls: Annotated[
Optional[Dict[str, HttpUrlStr]], Field(description="Package URLs")
] = None
maintainers: Annotated[Optional[Set[str]], Field(description="Package maintainers")] = None
readme: Annotated[Optional[Union[Path, List[Path]]], Field(description="Package readme file(s)")] = None
homepage: Annotated[Optional[HttpUrlStr], Field(description="Package homepage")] = None
repository: Annotated[Optional[HttpUrlStr], Field(description="Package repository")] = None
documentation: Annotated[Optional[HttpUrlStr], Field(description="Package documentation page")] = None
keywords: Annotated[Optional[Set[str]], Field(description="Keywords that describe the package")] = None
classifiers: Annotated[Optional[List[str]], Field(description="pypi classifiers")] = None
urls: Annotated[Optional[Dict[str, HttpUrlStr]], Field(description="Package URLs")] = None

@field_validator("version")
@classmethod
def validate_version(cls, v):
"""Validate version using PEP 440."""
try:
_ = parse_version(v)
except ValueError:
raise ValueError("Invalid version")
except ValueError as err:
raise ValueError("Invalid version") from err
return v

@field_validator("authors", "maintainers")
@@ -156,14 +140,10 @@ class SetuptoolsConfig(BaseModel):
model_config = dict(use_enum_values=True)

name: Annotated[str, Field(pattern=r"^[A-Za-z0-9]+([_-][A-Za-z0-9]+)*$")]
version: Annotated[
str, Field(pattern=r"^\d+(\.\d+)*((a|b|rc)\d+)?(post\d+)?(dev\d+)?$")
]
version: Annotated[str, Field(pattern=r"^\d+(\.\d+)*((a|b|rc)\d+)?(post\d+)?(dev\d+)?$")]
description: str
readme: Optional[Union[Path, List[Path], File]] = None
license: Optional[Union[LicenseEnum, List[LicenseEnum]]] = Field(
None, description="An SPDX license identifier."
)
license: Optional[Union[LicenseEnum, List[LicenseEnum]]] = Field(None, description="An SPDX license identifier.")
authors: Optional[List[STPerson]] = None
maintainers: Optional[List[STPerson]] = None
keywords: Optional[Set[str]] = None
@@ -176,8 +156,8 @@ def validate_version(cls, v):
"""Validate version using PEP 440."""
try:
_ = parse_version(v)
except ValueError:
raise ValueError("Invalid version")
except ValueError as err:
raise ValueError("Invalid version") from err
return v

@field_validator("readme")
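Besides collapsing wrapped lines, the two validate_version hunks above switch to explicit exception chaining (`raise ... from err`), the pattern that ruff's flake8-bugbear rule B904 asks for inside except blocks. A standalone illustration of the difference (toy function, not somesy code):

```python
def parse_port(text: str) -> int:
    """Parse a TCP port number, re-raising with the original error attached."""
    try:
        port = int(text)
    except ValueError as err:
        # "from err" keeps the original ValueError as __cause__ in the traceback,
        # instead of reporting it as "another exception occurred during handling".
        raise ValueError(f"Invalid port: {text!r}") from err
    if not 0 < port < 65536:
        raise ValueError(f"Port out of range: {port}")
    return port


assert parse_port("8080") == 8080
```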
14 changes: 3 additions & 11 deletions tests/input/test_core_core.py
@@ -68,15 +68,10 @@ def test_somesy_input(somesy_input):
assert isinstance(somesy_input.project, ProjectMetadata)
assert somesy_input.project.name == "testproject"
assert somesy_input.project.version == "1.0.0"
assert (
somesy_input.project.description
== "This is a test project for demonstration purposes."
)
assert somesy_input.project.description == "This is a test project for demonstration purposes."
assert somesy_input.project.keywords == ["test", "demo", "example"]
assert somesy_input.project.license == LicenseEnum.MIT
assert (
str(somesy_input.project.repository) == "https://github.com/example/testproject"
)
assert str(somesy_input.project.repository) == "https://github.com/example/testproject"
assert str(somesy_input.project.homepage) == "https://example.com/testproject"
assert len(somesy_input.project.people) == 3
authors = somesy_input.project.authors()
@@ -85,10 +80,7 @@ def test_somesy_input(somesy_input):
assert authors[0].email == "john.doe@example.com"
assert str(authors[0].orcid) == "https://orcid.org/0000-0000-0000-0000"
assert authors[0].contribution == "The main developer, maintainer, and tester."
assert (
authors[0].contribution_begin
== datetime.strptime("2023-01-15", "%Y-%m-%d").date()
)
assert authors[0].contribution_begin == datetime.strptime("2023-01-15", "%Y-%m-%d").date()
assert authors[0].contribution_types == [
ContributionTypeEnum.maintenance,
ContributionTypeEnum.code,
10 changes: 2 additions & 8 deletions tests/input/test_pyproject_validate.py
@@ -14,11 +14,7 @@ def test_poetry_validate(tmp_path):
"""Test validating a pyproject file in both poetry and setuptools formats."""

# create a pyproject file in poetry format but with a invalid values
reject_poetry_object = {
"tool": {
"poetry": {"name": "somesy", "version": "abc", "authors": ["John Doe <"]}
}
}
reject_poetry_object = {"tool": {"poetry": {"name": "somesy", "version": "abc", "authors": ["John Doe <"]}}}
invalid_poetry_path = tmp_path / "pyproject.toml"
with open(invalid_poetry_path, "w+") as f:
dump(reject_poetry_object, f)
@@ -27,9 +23,7 @@ def test_poetry_validate(tmp_path):
Pyproject(invalid_poetry_path)

# create a pyproject file in setuptools format but with a invalid values
reject_setuptools_object = {
"project": {"name": "somesy", "version": "abc", "authors": ["John Doe <"]}
}
reject_setuptools_object = {"project": {"name": "somesy", "version": "abc", "authors": ["John Doe <"]}}
with open(invalid_poetry_path, "w+") as f:
dump(reject_setuptools_object, f)
with pytest.raises(ValueError):
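Both rejected fixtures above use `version = "abc"`, which is not a valid PEP 440 version string. Assuming the validators' `parse_version` call is backed by the `packaging` library (this diff does not show the import), the rejection works like this:

```python
from packaging.version import InvalidVersion, Version

try:
    Version("abc")  # not a valid PEP 440 version string
except InvalidVersion as err:  # InvalidVersion is a ValueError subclass
    print(f"rejected: {err}")

print(Version("1.0.0"))  # parses fine
```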
20 changes: 5 additions & 15 deletions tests/output/test_cff_writer.py
@@ -68,9 +68,7 @@ def to_cff_keys(lst):
assert list(dct["authors"][0].keys()) == to_cff_keys(person._key_order)

# jane becomes john -> modified person
person1b = person.model_copy(
update={"given_names": "John", "author": True, "publication_author": True}
)
person1b = person.model_copy(update={"given_names": "John", "author": True, "publication_author": True})

# different Jane Doe with different orcid -> new person
person2 = person.model_copy(
@@ -91,12 +89,8 @@ def to_cff_keys(lst):
cff.save()

# existing author order preserved
assert cff.authors[0] == person1b.model_dump(
by_alias=True, exclude={"author", "publication_author"}
)
assert cff.authors[1] == person2.model_dump(
by_alias=True, exclude={"author", "publication_author"}
)
assert cff.authors[0] == person1b.model_dump(by_alias=True, exclude={"author", "publication_author"})
assert cff.authors[1] == person2.model_dump(by_alias=True, exclude={"author", "publication_author"})
# existing author field order preserved
dct = cff._yaml.load(open(cff_path, "r"))
assert list(dct["authors"][0].keys()) == to_cff_keys(person1b._key_order)
@@ -126,11 +120,7 @@ def to_cff_keys(lst):

assert len(cff.authors) == 2
assert len(cff.maintainers) == 1
assert cff.authors[0] == person1c.model_dump(
by_alias=True, exclude={"author", "publication_author"}
)
assert cff.authors[1] == person3.model_dump(
by_alias=True, exclude={"author", "publication_author"}
)
assert cff.authors[0] == person1c.model_dump(by_alias=True, exclude={"author", "publication_author"})
assert cff.authors[1] == person3.model_dump(by_alias=True, exclude={"author", "publication_author"})
dct = cff._yaml.load(open(cff_path, "r"))
assert list(dct["authors"][0].keys()) == to_cff_keys(person1c._key_order)
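The assertions above lean on pydantic v2's `model_copy(update=...)` and `model_dump(by_alias=True, exclude=...)`. A toy model (a stand-in, not somesy's actual Person) showing what those calls return:

```python
from pydantic import BaseModel, Field


class Person(BaseModel):
    """Toy stand-in for somesy's Person model, with CFF-style hyphenated aliases."""

    model_config = {"populate_by_name": True}

    given_names: str = Field(alias="given-names")
    family_names: str = Field(alias="family-names")
    email: str = ""


jane = Person(given_names="Jane", family_names="Doe", email="jane@example.com")
john = jane.model_copy(update={"given_names": "John"})  # modified copy, other fields kept

# by_alias=True emits the hyphenated keys; exclude drops fields by *field* name.
print(john.model_dump(by_alias=True, exclude={"email"}))
# {'given-names': 'John', 'family-names': 'Doe'}
```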
8 changes: 2 additions & 6 deletions tests/output/test_package_json_writer.py
@@ -17,9 +17,7 @@ def package_json_file(create_files, file_types):

def test_content_match(package_json: PackageJSON):
assert package_json.name == "test-package"
assert (
package_json.description == "This is a test package for demonstration purposes."
)
assert package_json.description == "This is a test package for demonstration purposes."
assert package_json.license == "MIT"
assert len(package_json.authors) == 1

@@ -63,9 +61,7 @@ def test_person_merge(package_json_file, person: Person):
pj.save()

# jane becomes john -> modified person
person1b = person.model_copy(
update={"given_names": "John", "author": True, "publication_author": True}
)
person1b = person.model_copy(update={"given_names": "John", "author": True, "publication_author": True})

# different Jane Doe with different orcid -> new person
person2 = person.model_copy(
9 changes: 2 additions & 7 deletions tests/output/test_pyproject_writer.py
@@ -34,10 +34,7 @@ def test_content_match(pyproject_poetry, pyproject_setuptools):
# create a function to check both file formats
def assert_content_match(pyproject_file):
assert pyproject_file.name == "test-package"
assert (
pyproject_file.description
== "This is a test package for demonstration purposes."
)
assert pyproject_file.description == "This is a test package for demonstration purposes."
assert pyproject_file.license == "MIT"
assert len(pyproject_file.authors) == 1

@@ -107,9 +104,7 @@ def test_person_merge_pyproject(request, writer_class, writer_file_fixture, pers
# ----

# jane becomes john -> modified person
person1b = person.model_copy(
update={"given_names": "John", "author": True, "publication_author": True}
)
person1b = person.model_copy(update={"given_names": "John", "author": True, "publication_author": True})

# different Jane Doe with different orcid -> new person
person2 = person.model_copy(
(The remaining changed file of the 12 listed above did not load and is not shown here.)