Skip to content

Commit

Permalink
chore: Fix ruff and add stricter linting (#16)
Browse files Browse the repository at this point in the history
Co-authored-by: Ege Kocabas <kocabas.ege@tum.de>
  • Loading branch information
robinholzi and egekocabas committed Dec 14, 2023
1 parent d51a4ae commit f88c3cb
Show file tree
Hide file tree
Showing 9 changed files with 68 additions and 45 deletions.
4 changes: 4 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.1.4"
hooks:
- id: ruff
types_or: [python, pyi, jupyter]
args: [--fix]
- id: ruff-format
types_or: [python, pyi, jupyter]
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.10.1
hooks:
Expand Down
11 changes: 7 additions & 4 deletions parma_mining/github/analytics_client.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import json
import os
import urllib.parse

import httpx
from dotenv import load_dotenv

from parma_mining.github.model import ResponseModel
import os
import json
import urllib.parse


class AnalyticsClient:
Expand All @@ -24,7 +26,8 @@ def send_post_request(self, api_endpoint, data):
return response.json()
else:
raise Exception(
f"API request failed with status code {response.status_code}, response: {response.text}"
f"API request failed with status code {response.status_code},"
f"response: {response.text}"
)

def register_measurements(self, mapping, parent_id=None, source_module_id=None):
Expand Down
15 changes: 7 additions & 8 deletions parma_mining/github/api/main.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,18 @@
"""Main entrypoint for the API routes in of parma-analytics."""
import json
from typing import List
import os

from dotenv import load_dotenv
from fastapi import FastAPI
from starlette import status

from parma_mining.github.analytics_client import AnalyticsClient
from parma_mining.github.client import GitHubClient
from parma_mining.github.model import (
OrganizationModel,
DiscoveryModel,
CompaniesRequest,
DiscoveryModel,
ResponseModel,
)
from dotenv import load_dotenv
import os

from parma_mining.github.normalization_map import GithubNormalizationMap

load_dotenv()
Expand Down Expand Up @@ -72,7 +71,7 @@ def get_organization_details(companies: CompaniesRequest):
# Write data to db via endpoint in analytics backend
try:
analytics_client.feed_raw_data(data)
except:
except Exception:
print("Error writing to db")
else:
# To be included in logging
Expand All @@ -82,7 +81,7 @@ def get_organization_details(companies: CompaniesRequest):

@app.get(
"/search/companies",
response_model=List[DiscoveryModel],
response_model=list[DiscoveryModel],
status_code=status.HTTP_200_OK,
)
def search_organizations(query: str):
Expand Down
16 changes: 8 additions & 8 deletions parma_mining/github/client.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
from typing import List
from fastapi import HTTPException
from github import Github, GithubException, Auth
from github import Auth, Github, GithubException
from starlette import status
from parma_mining.github.model import OrganizationModel, RepositoryModel, DiscoveryModel
from parma_mining.github.normalization_map import GithubNormalizationMap

from parma_mining.github.model import DiscoveryModel, OrganizationModel, RepositoryModel


class GitHubClient:
def __init__(self, token: str):
self.client = Github(auth=Auth.Token(token))

# Retrieve organization details and statistics on all repositories of the organization
# Get organization details and statistics
# on all repositories of the organization
def get_organization_details(self, org_name: str) -> OrganizationModel:
try:
organization = self.client.get_organization(org_name)
Expand Down Expand Up @@ -52,17 +52,17 @@ def get_organization_details(self, org_name: str) -> OrganizationModel:
org_info["repos"].append(parsed_repo)

return OrganizationModel.model_validate(org_info)
except GithubException as e:
except GithubException:
raise GithubException

def search_organizations(self, query: str) -> List[DiscoveryModel]:
def search_organizations(self, query: str) -> list[DiscoveryModel]:
try:
organizations = self.client.search_users(query + " type:org")
return [
DiscoveryModel.model_validate({"name": org.login, "url": org.html_url})
for org in organizations
]
except GithubException as e:
except GithubException:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error searching organizations",
Expand Down
44 changes: 22 additions & 22 deletions parma_mining/github/model.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,36 @@
import json
from datetime import datetime

from pydantic import BaseModel
from typing import Optional


class RepositoryModel(BaseModel):
"""Model to structure the JSON Data."""

name: str
description: Optional[str]
stars: Optional[int]
forks: Optional[int]
language: Optional[str]
created_at: Optional[datetime]
updated_at: Optional[datetime]
pushed_at: Optional[datetime]
html_url: Optional[str]
clone_url: Optional[str]
svn_url: Optional[str]
homepage: Optional[str]
size: Optional[int]
stargazers_count: Optional[int]
watchers_count: Optional[int]
forks_count: Optional[int]
open_issues_count: Optional[int]
description: str | None
stars: int | None
forks: int | None
language: str | None
created_at: datetime | None
updated_at: datetime | None
pushed_at: datetime | None
html_url: str | None
clone_url: str | None
svn_url: str | None
homepage: str | None
size: int | None
stargazers_count: int | None
watchers_count: int | None
forks_count: int | None
open_issues_count: int | None


class OrganizationModel(BaseModel):
name: Optional[str]
description: Optional[str]
name: str | None
description: str | None
url: str
repos: Optional[list[RepositoryModel]]
repos: list[RepositoryModel] | None

def updated_model_dump(self) -> str:
"""Dump the CompanyModel instance to a JSON string."""
Expand All @@ -47,8 +47,8 @@ def updated_model_dump(self) -> str:


class DiscoveryModel(BaseModel):
name: Optional[str]
url: Optional[str]
name: str | None
url: str | None


class CompaniesRequest(BaseModel):
Expand Down
3 changes: 3 additions & 0 deletions parma_mining/mining_common/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Mining Common

Generic mining logic that we might abstract into a shared subrepository in the future.
6 changes: 6 additions & 0 deletions parma_mining/mining_common/const.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Named HTTP status-code constants for the mining modules.
# Used instead of magic numbers in assertions/comparisons (satisfies ruff PLR2004);
# values match the standard HTTP status codes (see also http.HTTPStatus in stdlib).

# Success responses.
HTTP_200 = 200  # OK
HTTP_201 = 201  # Created

# Client-error responses.
HTTP_400 = 400  # Bad Request
HTTP_401 = 401  # Unauthorized
HTTP_403 = 403  # Forbidden
HTTP_404 = 404  # Not Found
5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,11 @@ exclude = '''
line-length = 88
select = ["F", "E", "W", "I", "N", "UP", "D", "PL"]
target-version = "py311"
ignore = ["D100", "D101", "D102", "D103", "D104", "D107"] # TODO: remove after midterm

[tool.ruff.lint.isort]
case-sensitive = true
detect-same-package = true

[tool.mypy]
python_version = '3.11'
Expand Down
9 changes: 6 additions & 3 deletions tests/test_organization_details.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
from unittest.mock import MagicMock

import pytest
from fastapi import HTTPException
from fastapi.testclient import TestClient
from starlette import status
from unittest.mock import MagicMock

from parma_mining.github.api.main import app
from parma_mining.mining_common.const import HTTP_200, HTTP_404

client = TestClient(app)

Expand Down Expand Up @@ -65,7 +68,7 @@ def test_get_organization_details(

mock_analytics_client.assert_called()

assert response.status_code == 200
assert response.status_code == HTTP_200


def test_get_organization_details_bad_request(mocker):
Expand All @@ -84,4 +87,4 @@ def test_get_organization_details_bad_request(mocker):
}

response = client.post("/companies", json=payload)
assert response.status_code == 404
assert response.status_code == HTTP_404

0 comments on commit f88c3cb

Please sign in to comment.