diff --git a/.env-example b/.env-example
index 8253b5e..580ff83 100644
--- a/.env-example
+++ b/.env-example
@@ -1,6 +1,11 @@
-GH_TOKEN = " "
-REPOSITORY = "organization/repository"
+GH_ENTERPRISE_URL=""
+GH_TOKEN = ""
+END_DATE = ""
 ORGANIZATION = "organization"
+REPOSITORY = "organization/repository"
 START_DATE = ""
-END_DATE = ""
-GH_ENTERPRISE_URL=" "
+
+# GITHUB APP
+GH_APP_ID = ""
+GH_APP_INSTALLATION_ID = ""
+GH_APP_PRIVATE_KEY = ""
diff --git a/.github/linters/.flake8 b/.github/linters/.flake8
index 73c76f2..c6347e7 100644
--- a/.github/linters/.flake8
+++ b/.github/linters/.flake8
@@ -1,2 +1,4 @@
 [flake8]
+exclude = venv,.venv,.git,__pycache__
 max-line-length = 150
+statistics = True
diff --git a/.github/linters/.isort.cfg b/.github/linters/.isort.cfg
new file mode 100644
index 0000000..f238bf7
--- /dev/null
+++ b/.github/linters/.isort.cfg
@@ -0,0 +1,2 @@
+[settings]
+profile = black
diff --git a/.github/linters/.mypy.ini b/.github/linters/.mypy.ini
index 18c5ef9..f0d4703 100644
--- a/.github/linters/.mypy.ini
+++ b/.github/linters/.mypy.ini
@@ -1,4 +1,5 @@
-# Global options:
-
 [mypy]
 disable_error_code = attr-defined, import-not-found
+
+[mypy-github3.*]
+ignore_missing_imports = True
diff --git a/.github/linters/.python-lint b/.github/linters/.python-lint
new file mode 100644
index 0000000..6de8dd2
--- /dev/null
+++ b/.github/linters/.python-lint
@@ -0,0 +1,646 @@
+[MAIN]
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Clear in-memory caches upon conclusion of linting. Useful if running pylint
+# in a server-like mode.
+clear-cache-post-run=no
+
+# Load and enable all available extensions. Use --list-extensions to see a list
+# all available extensions.
+#enable-all-extensions=
+
+# In error mode, messages with a category besides ERROR or FATAL are
+# suppressed, and no reports are done by default. Error mode is compatible with
+# disabling specific errors.
+#errors-only=
+
+# Always return a 0 (non-error) status code, even if lint errors are found.
+# This is primarily useful in continuous integration scripts.
+#exit-zero=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold under which the program will exit with error.
+fail-under=10
+
+# Interpret the stdin as a python script, whose filename needs to be passed as
+# the module_or_package argument.
+#from-stdin=
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS,
+    .git,
+    __pycache__,
+    venv,
+    .venv,
+
+# Add files or directories matching the regular expressions patterns to the
+# ignore-list.
The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.11 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. 
+class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. 
+#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). 
+import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=bad-inline-option, + deprecated-pragma, + duplicate-code, + locally-disabled, + file-ignored, + import-error, + line-too-long, + raw-checker-failed, + suppressed-message, + too-many-arguments, + too-many-branches, + too-many-locals, + too-many-statements, + useless-suppression, + use-symbolic-message-instead, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, + wrong-import-order + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. 
You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. 
In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. 
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/.github/workflows/contributors_report.yaml b/.github/workflows/contributors_report.yaml index 749e1c5..097f123 100644 --- a/.github/workflows/contributors_report.yaml +++ b/.github/workflows/contributors_report.yaml @@ -6,12 +6,14 @@ on: - cron: '3 2 1 * *' permissions: - issues: write + contents: read jobs: contributor_report: name: contributor report runs-on: ubuntu-latest + permissions: + issues: write steps: - name: Get dates for last month @@ -19,14 +21,14 @@ jobs: run: | # Calculate the first day of the previous month start_date=$(date -d "last month" +%Y-%m-01) - + # Calculate the last day of the previous month end_date=$(date -d "$start_date +1 month -1 day" +%Y-%m-%d) - + #Set an environment variable with the date range echo "START_DATE=$start_date" >> "$GITHUB_ENV" echo "END_DATE=$end_date" >> "$GITHUB_ENV" - + - name: Run contributor action uses: github/contributors@v1 env: @@ -35,7 +37,7 @@ jobs: END_DATE: ${{ env.END_DATE }} REPOSITORY: github/contributors SPONSOR_INFO: "true" - + - name: Create issue uses: peter-evans/create-issue-from-file@v5 with: diff --git a/.github/workflows/docker-ci.yml b/.github/workflows/docker-ci.yml index 14323bc..1d44650 100644 --- a/.github/workflows/docker-ci.yml +++ b/.github/workflows/docker-ci.yml @@ -7,12 +7,12 @@ on: pull_request: branches: main -jobs: +permissions: + contents: read +jobs: build: - runs-on: ubuntu-latest - steps: - uses: actions/checkout@v4 - name: Build the Docker image diff --git a/.github/workflows/major-version-updater.yml b/.github/workflows/major-version-updater.yml index f8c36a4..05dad0b 100644 --- a/.github/workflows/major-version-updater.yml +++ b/.github/workflows/major-version-updater.yml @@ -5,9 +5,14 @@ on: release: types: published +permissions: + contents: read + jobs: update-major-version-tag: runs-on: ubuntu-latest + permissions: + contents: write steps: - name: Checkout Repo uses: actions/checkout@v4 diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml index 336198d..9c6dc43 100644 --- a/.github/workflows/python-ci.yml +++ b/.github/workflows/python-ci.yml @@ -10,14 +10,15 @@ on: pull_request: branches: main +permissions: + contents: read + jobs: build: - runs-on: ubuntu-latest strategy: matrix: python-version: [3.11, 3.12] - steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -27,8 +28,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install flake8 pylint pytest pytest-cov - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install -r requirements.txt -r requirements-test.txt - name: Lint with flake8 and pylint run: | make lint diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 14e0b9b..c987beb 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -7,9 +7,15 @@ on: branches: - main +permissions: + contents: read + jobs: update_release_draft: runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write steps: - uses: release-drafter/release-drafter@v6 env: diff --git a/.github/workflows/super-linter.yaml b/.github/workflows/super-linter.yaml index e49ed86..6cc811b 100644 --- a/.github/workflows/super-linter.yaml +++ b/.github/workflows/super-linter.yaml @@ -1,55 +1,32 @@ --- -################################# -################################# -## Super Linter GitHub 
Actions ## -################################# -################################# name: Lint Code Base -############################# -# Start the job on all push # -############################# on: pull_request: branches: main -############### -# Set the Job # -############### +permissions: + contents: read + jobs: build: - # Name the Job name: Lint Code Base - # Set the agent to run on runs-on: ubuntu-latest - - ############################################ - # Grant status permission for MULTI_STATUS # - ############################################ permissions: contents: read packages: read statuses: write - - ################## - # Load all steps # - ################## steps: - ########################## - # Checkout the code base # - ########################## - name: Checkout Code uses: actions/checkout@v4 with: - # Full git history is needed to get a proper - # list of changed files within `super-linter` fetch-depth: 0 - - ################################ - # Run Linter against code base # - ################################ + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt -r requirements-test.txt - name: Lint Code Base - uses: super-linter/super-linter@v5 + uses: super-linter/super-linter@v6 env: DEFAULT_BRANCH: main GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 1730183..0000000 --- a/.pylintrc +++ /dev/null @@ -1,12 +0,0 @@ -[MAIN] -ignore=.git, - __pycache__, - venv, - .venv, - - -[MESSAGES CONTROL] -disable= - line-too-long, - too-many-arguments, - duplicate-code, diff --git a/Dockerfile b/Dockerfile index 9f64d6d..36b9bdd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,5 @@ +#checkov:skip=CKV_DOCKER_2 +#checkov:skip=CKV_DOCKER_3 FROM python:3.12-slim LABEL com.github.actions.name="contributors" \ com.github.actions.description="GitHub Action that given an organization or repository, produces information about the contributors over the specified time period." \ diff --git a/Makefile b/Makefile index 454bdfa..69a9f60 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,10 @@ clean: .PHONY: lint lint: # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude=venv,.venv,.git,__pycache__ + flake8 . --config=.github/linters/.flake8 --count --select=E9,F63,F7,F82 --show-source # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=15 --max-line-length=150 --statistics --exclude=venv,.venv,.git,__pycache__ - pylint --rcfile=.pylintrc --fail-under=9.0 *.py + flake8 . --config=.github/linters/.flake8 --count --exit-zero --max-complexity=15 --max-line-length=150 + isort --settings-file=.github/linters/.isort.cfg . + pylint --rcfile=.github/linters/.python-lint --fail-under=9.0 *.py + mypy --config-file=.github/linters/.mypy.ini *.py + black . 
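
For anyone reproducing the updated `make lint` chain without `make`, here is a minimal local-runner sketch. It assumes the tools pinned in `requirements.txt` and `requirements-test.txt` are installed and that it runs from the repository root; the file name `lint_local.py` is illustrative and not part of this change.

```python
# lint_local.py - illustrative helper (not part of this diff) that mirrors the
# Makefile `lint` target above. Assumes the tools pinned in requirements.txt
# and requirements-test.txt are installed and that it runs from the repo root.
import subprocess
import sys

COMMANDS = [
    "flake8 . --config=.github/linters/.flake8 --count --select=E9,F63,F7,F82 --show-source",
    "flake8 . --config=.github/linters/.flake8 --count --exit-zero --max-complexity=15 --max-line-length=150",
    "isort --settings-file=.github/linters/.isort.cfg .",
    "pylint --rcfile=.github/linters/.python-lint --fail-under=9.0 *.py",
    "mypy --config-file=.github/linters/.mypy.ini *.py",
    "black .",
]


def main() -> int:
    """Run each lint command and stop at the first failure, as make would."""
    for command in COMMANDS:
        print(f"$ {command}")
        # shell=True so the *.py glob expands the same way it does under make
        result = subprocess.run(command, shell=True, check=False)
        if result.returncode != 0:
            return result.returncode
    return 0


if __name__ == "__main__":
    sys.exit(main())
```

Running `python lint_local.py` returns the exit code of the first failing tool, mirroring how `make` stops on the first failing command.
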
diff --git a/README.md b/README.md index 9278429..024e190 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # Contributors action -[![Python package](https://github.com/github/contributors/actions/workflows/python-ci.yml/badge.svg)](https://github.com/github/contributors/actions/workflows/python-ci.yml) [![Docker Image CI](https://github.com/github/contributors/actions/workflows/docker-ci.yml/badge.svg)](https://github.com/github/contributors/actions/workflows/docker-ci.yml) [![CodeQL](https://github.com/github/contributors/actions/workflows/github-code-scanning/codeql/badge.svg)](https://github.com/github/contributors/actions/workflows/github-code-scanning/codeql) +[![Python package](https://github.com/github/contributors/actions/workflows/python-ci.yml/badge.svg)](https://github.com/github/contributors/actions/workflows/python-ci.yml) +[![Docker Image CI](https://github.com/github/contributors/actions/workflows/docker-ci.yml/badge.svg)](https://github.com/github/contributors/actions/workflows/docker-ci.yml) +[![CodeQL](https://github.com/github/contributors/actions/workflows/github-code-scanning/codeql/badge.svg)](https://github.com/github/contributors/actions/workflows/github-code-scanning/codeql) This is a GitHub Action that given an organization or specified repositories, produces information about the [contributors](https://chaoss.community/kb/metric-contributors/) over the specified time period. @@ -32,10 +34,17 @@ Find out more in the [GitHub API documentation](https://docs.github.com/en/rest/ 1. Create a repository to host this GitHub Action or select an existing repository. 1. Select a best fit workflow file from the [examples below](#example-workflows). 1. Copy that example into your repository (from step 1) and into the proper directory for GitHub Actions: `.github/workflows/` directory with the file extension `.yml` (ie. `.github/workflows/contributors.yml`) -1. Edit the values (`ORGANIZATION`, `REPOSITORY`, `START_DATE`, `END_DATE`) from the sample workflow with your information. If no start and end date are supplied, the action will consider the entire repository history and be unable to determine if contributors are new or returning. If running on a whole organization then no repository is needed. If running the action on just one repository or a list of repositories, then no organization is needed. +1. Edit the values (`ORGANIZATION`, `REPOSITORY`, `START_DATE`, `END_DATE`) from the sample workflow with your information. + - If no start and end date are supplied, the action will consider the entire repository history and be unable to determine if contributors are new or returning. + - If running on a whole organization then no repository is needed. + - If running the action on just one repository or a list of repositories, then no organization is needed. 1. Also edit the value for `GH_ENTERPRISE_URL` if you are using a GitHub Server and not using github.com. For github.com users, don't put anything in here. -1. If you are running this action on an organization or repository other than the one where the workflow file is going to be, then update the value of `GH_TOKEN`. Do this by creating a [GitHub API token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with permissions to read the repository/organization and write issues. 
Then take the value of the API token you just created, and [create a repository secret](https://docs.github.com/en/actions/security-guides/encrypted-secrets) where the name of the secret is `GH_TOKEN` and the value of the secret the API token. Then finally update the workflow file to use that repository secret by changing `GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}` to `GH_TOKEN: ${{ secrets.GH_TOKEN }}`. The name of the secret can really be anything. It just needs to match between when you create the secret name and when you refer to it in the workflow file. -1. If you want the resulting issue with the output to appear in a different repository other than the one the workflow file runs in, update the line `token: ${{ secrets.GITHUB_TOKEN }}` with your own GitHub API token stored as a repository secret. This process is the same as described in the step above. More info on creating secrets can be found [here](https://docs.github.com/en/actions/security-guides/encrypted-secrets). +1. If you are running this action on an organization or repository other than the one where the workflow file is going to be, then update the value of `GH_TOKEN`. + - Do this by creating a [GitHub API token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with permissions to read the repository/organization and write issues. + - Then take the value of the API token you just created, and [create a repository secret](https://docs.github.com/en/actions/security-guides/encrypted-secrets) where the name of the secret is `GH_TOKEN` and the value of the secret the API token. + - Then finally update the workflow file to use that repository secret by changing `GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}` to `GH_TOKEN: ${{ secrets.GH_TOKEN }}`. The name of the secret can really be anything. It just needs to match between when you create the secret name and when you refer to it in the workflow file. +1. If you want the resulting issue with the output to appear in a different repository other than the one the workflow file runs in, update the line `token: ${{ secrets.GITHUB_TOKEN }}` with your own GitHub API token stored as a repository secret. + - This process is the same as described in the step above. More info on creating secrets can be found [here](https://docs.github.com/en/actions/security-guides/encrypted-secrets). 1. Commit the workflow file to the default branch (often `master` or `main`) 1. Wait for the action to trigger based on the `schedule` entry or manually trigger the workflow as shown in the [documentation](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow). @@ -43,16 +52,39 @@ Find out more in the [GitHub API documentation](https://docs.github.com/en/rest/ Below are the allowed configuration options: -| field | required | default | description | -|-----------------------|----------|---------|-------------| -| `GH_TOKEN` | True | "" | The GitHub Token used to scan the repository or organization. Must have read access to all repository you are interested in scanning. | -| `GH_ENTERPRISE_URL` | False | "" | The `GH_ENTERPRISE_URL` is used to connect to an enterprise server instance of GitHub. github.com users should not enter anything here. | -| `ORGANIZATION` | Required to have `ORGANIZATION` or `REPOSITORY` | | The name of the GitHub organization which you want the contributor information of all repos from. ie. 
github.com/github would be `github` | -| `REPOSITORY` | Required to have `ORGANIZATION` or `REPOSITORY` | | The name of the repository and organization which you want the contributor information from. ie. `github/contributors` or a comma separated list of multiple repositories `github/contributor,super-linter/super-linter` | -| `START_DATE` | False | Beginning of time | The date from which you want to start gathering contributor information. ie. Aug 1st, 2023 would be `2023-08-01` If `start_date` and `end_date` are specified then the action will determine if the contributor is new. A new contributor is one that has contributed in the date range specified but not before the start date. **Performance Note:** Using start and end dates will reduce speed of the action by approximately 63X. ie without dates if the action takes 1.7 seconds, it will take 1 minute and 47 seconds.| -| `END_DATE` | False | Current Date | The date at which you want to stop gathering contributor information. Must be later than the `START_DATE`. ie. Aug 2nd, 2023 would be `2023-08-02` If `start_date` and `end_date` are specified then the action will determine if the contributor is new. A new contributor is one that has contributed in the date range specified but not before the start date. | -| `SPONSOR_INFO` | False | False | If you want to include sponsor information in the output. This will include the sponsor count and the sponsor URL. This will impact action performance. ie. SPONSOR_INFO = "False" or SPONSOR_INFO = "True" | -| `LINK_TO_PROFILE` | False | True | If you want to link usernames to their GitHub profiles in the output. ie. LINK_TO_PROFILE = "True" or LINK_TO_PROFILE = "False" | +#### Authentication + +This action can be configured to authenticate with GitHub App Installation or Personal Access Token (PAT). If all configuration options are provided, the GitHub App Installation configuration has precedence. You can choose one of the following methods to authenticate: + +##### GitHub App Installation + +| field | required | default | description | +|-------------------------------|----------|---------|-------------| +| `GH_APP_ID` | True | `""` | GitHub Application ID. See [documentation](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/about-authentication-with-a-github-app) for more details. | +| `GH_APP_INSTALLATION_ID` | True | `""` | GitHub Application Installation ID. See [documentation](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/about-authentication-with-a-github-app) for more details. | +| `GH_APP_PRIVATE_KEY` | True | `""` | GitHub Application Private Key. See [documentation](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/about-authentication-with-a-github-app) for more details. | + +##### Personal Access Token (PAT) + +| field | required | default | description | +|-------------------------------|----------|---------|-------------| +| `GH_TOKEN` | True | `""` | The GitHub Token used to scan the repository. Must have read access to all repository you are interested in scanning. | + +#### Other Configuration Options + +| field | required | default | description | +|---------------------------|----------|----------|-------------| +| `GH_ENTERPRISE_URL` | False | "" | The `GH_ENTERPRISE_URL` is used to connect to an enterprise server instance of GitHub. github.com users should not enter anything here. 
| +| `ORGANIZATION` | Required to have `ORGANIZATION` or `REPOSITORY` | | The name of the GitHub organization which you want the contributor information of all repos from. ie. github.com/github would be `github` | +| `REPOSITORY` | Required to have `ORGANIZATION` or `REPOSITORY` | | The name of the repository and organization which you want the contributor information from. ie. `github/contributors` or a comma separated list of multiple repositories `github/contributor,super-linter/super-linter` | +| `START_DATE` | False | Beginning of time | The date from which you want to start gathering contributor information. ie. Aug 1st, 2023 would be `2023-08-01`. | +| `END_DATE` | False | Current Date | The date at which you want to stop gathering contributor information. Must be later than the `START_DATE`. ie. Aug 2nd, 2023 would be `2023-08-02` | +| `SPONSOR_INFO` | False | False | If you want to include sponsor information in the output. This will include the sponsor count and the sponsor URL. This will impact action performance. ie. SPONSOR_INFO = "False" or SPONSOR_INFO = "True" | +| `LINK_TO_PROFILE` | False | True | If you want to link usernames to their GitHub profiles in the output. ie. LINK_TO_PROFILE = "True" or LINK_TO_PROFILE = "False" | + +**Note**: If `start_date` and `end_date` are specified then the action will determine if the contributor is new. A new contributor is one that has contributed in the date range specified but not before the start date. + +**Performance Note:** Using start and end dates will reduce speed of the action by approximately 63X. ie without dates if the action takes 1.7 seconds, it will take 1 minute and 47 seconds. ### Example workflows @@ -66,12 +98,14 @@ on: - cron: '3 2 1 * *' permissions: - issues: write + contents: read jobs: contributor_report: name: contributor report runs-on: ubuntu-latest + permissions: + issues: write steps: - name: Get dates for last month @@ -107,6 +141,7 @@ jobs: ## Example Markdown output with `start_date` and `end_date` supplied +```markdown # Contributors - Date range for contributor list: 2021-01-01 to 2023-10-10 @@ -119,10 +154,12 @@ jobs: | Username | Contribution Count | New Contributor | Commits | | --- | --- | --- | --- | | @zkoppert | 143 | False | [super-linter/super-linter](https://github.com/super-linter/super-linter/commits?author=zkoppert&since=2021-01-01&until=2023-10-10) | +``` ## Example Markdown output with no dates supplied -# Contributors +```markdown +#### Contributors - Organization: super-linter @@ -133,6 +170,7 @@ jobs: | Username | Contribution Count | New Contributor | Sponsor URL | Commits | | --- | --- | --- | --- | --- | | @zkoppert | 1913 | False | [Sponsor Link](https://github.com/sponsors/zkoppert) | [super-linter/super-linter](https://github.com/super-linter/super-linter/commits?author=zkoppert&since=2021-09-01&until=2023-09-30) | +``` ## Local usage without Docker diff --git a/auth.py b/auth.py index 7d39446..825ff14 100644 --- a/auth.py +++ b/auth.py @@ -3,24 +3,40 @@ import github3 -def auth_to_github(token: str, ghe: str) -> github3.GitHub: +def auth_to_github( + gh_app_id: str, + gh_app_installation_id: int, + gh_app_private_key_bytes: bytes, + token: str, + ghe: str, +) -> github3.GitHub: """ Connect to GitHub.com or GitHub Enterprise, depending on env variables. 
Args: + gh_app_id (str): the GitHub App ID + gh_installation_id (int): the GitHub App Installation ID + gh_app_private_key (bytes): the GitHub App Private Key token (str): the GitHub personal access token ghe (str): the GitHub Enterprise URL Returns: github3.GitHub: the GitHub connection object """ - if not token: - raise ValueError("GH_TOKEN environment variable not set") - - if ghe: + if gh_app_id and gh_app_private_key_bytes and gh_app_installation_id: + gh = github3.github.GitHub() + gh.login_as_app_installation( + gh_app_private_key_bytes, gh_app_id, gh_app_installation_id + ) + github_connection = gh + elif ghe and token: github_connection = github3.github.GitHubEnterprise(ghe, token=token) - else: + elif token: github_connection = github3.login(token=token) + else: + raise ValueError( + "GH_TOKEN or the set of [GH_APP_ID, GH_APP_INSTALLATION_ID, GH_APP_PRIVATE_KEY] environment variables are not set" + ) if not github_connection: raise ValueError("Unable to authenticate to GitHub") diff --git a/contributors.py b/contributors.py index 7c95769..a074689 100644 --- a/contributors.py +++ b/contributors.py @@ -16,6 +16,9 @@ def main(): ( organization, repository_list, + gh_app_id, + gh_app_installation_id, + gh_app_private_key_bytes, token, ghe, start_date, @@ -25,7 +28,9 @@ def main(): ) = env.get_env_vars() # Auth to GitHub.com - github_connection = auth.auth_to_github(token, ghe) + github_connection = auth.auth_to_github( + gh_app_id, gh_app_installation_id, gh_app_private_key_bytes, token, ghe + ) # Get the contributors contributors = get_all_contributors( diff --git a/env.py b/env.py index f7b45ab..3cc796b 100644 --- a/env.py +++ b/env.py @@ -3,16 +3,68 @@ produces information about the contributors over the specified time period. """ +import datetime import os from os.path import dirname, join -from typing import Any from dotenv import load_dotenv -def get_env_vars() -> ( - tuple[str | None, list[str], str, str, str | None, str | None, str | Any, str | Any] -): +def get_bool_env_var(env_var_name: str) -> bool: + """Get a boolean environment variable. + + Args: + env_var_name: The name of the environment variable to retrieve. + + Returns: + The value of the environment variable as a boolean. + """ + return os.environ.get(env_var_name, "").strip().lower() == "true" + + +def get_int_env_var(env_var_name: str) -> int | None: + """Get an integer environment variable. + + Args: + env_var_name: The name of the environment variable to retrieve. + + Returns: + The value of the environment variable as an integer or None. + """ + env_var = os.environ.get(env_var_name) + if env_var is None or not env_var.strip(): + return None + try: + return int(env_var) + except ValueError: + return None + + +def validate_date_format(env_var_name: str) -> str: + """Validate the date format of the environment variable. + + Args: + env_var_name: The name of the environment variable to retrieve. + + Returns: + The value of the environment variable as a string. + """ + date_to_validate = os.getenv(env_var_name, "") + pattern = "%Y-%m-%d" + try: + datetime.datetime.strptime(date_to_validate, pattern) + except ValueError as exc: + raise ValueError( + f"{env_var_name} environment variable not in the format YYYY-MM-DD" + ) from exc + return date_to_validate + + +def get_env_vars( + test: bool = False, +) -> tuple[ + str | None, list[str], int | None, int | None, bytes, str, str, str, str, bool, bool +]: """ Get the environment variables for use in the action. 
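
As an aside, a small usage sketch of the new `env.py` helpers introduced above (`get_bool_env_var`, `get_int_env_var`, `validate_date_format`). The environment variable values (and the `MISSING_FLAG` name) are made up for illustration, and the snippet assumes it is run from the repository root so the local `env` module is importable.

```python
# Illustrative only (not part of this diff): exercises the helpers added to
# env.py above. Values are made up; run from the repository root so that the
# local env module is importable.
import os

import env

os.environ["SPONSOR_INFO"] = "True"
os.environ["GH_APP_ID"] = "12345"
os.environ["GH_APP_INSTALLATION_ID"] = ""  # empty counts as "not set"
os.environ["START_DATE"] = "2024-03-01"

print(env.get_bool_env_var("SPONSOR_INFO"))           # True
print(env.get_bool_env_var("MISSING_FLAG"))           # False when unset
print(env.get_int_env_var("GH_APP_ID"))               # 12345
print(env.get_int_env_var("GH_APP_INSTALLATION_ID"))  # None
print(env.validate_date_format("START_DATE"))         # 2024-03-01

os.environ["START_DATE"] = "03/01/2024"
try:
    env.validate_date_format("START_DATE")
except ValueError as err:
    print(err)  # START_DATE environment variable not in the format YYYY-MM-DD
```
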
@@ -22,6 +74,9 @@ def get_env_vars() -> ( Returns: str: the organization to get contributor information for List[str]: A list of the repositories to get contributor information for + int|None: the GitHub App ID to use for authentication + int|None: the GitHub App Installation ID to use for authentication + bytes: the GitHub App Private Key as bytes to use for authentication str: the GitHub token to use for authentication str: the GitHub Enterprise URL to use for authentication str: the start date to get contributor information from @@ -30,9 +85,10 @@ def get_env_vars() -> ( str: whether to link username to Github profile in markdown output """ - # Load from .env file if it exists - dotenv_path = join(dirname(__file__), ".env") - load_dotenv(dotenv_path) + + if not test: + dotenv_path = join(dirname(__file__), ".env") + load_dotenv(dotenv_path) organization = os.getenv("ORGANIZATION") repositories_str = os.getenv("REPOSITORY") @@ -42,40 +98,31 @@ def get_env_vars() -> ( "ORGANIZATION and REPOSITORY environment variables were not set. Please set one" ) - token = os.getenv("GH_TOKEN") - # required env variable - if not token: + gh_app_id = get_int_env_var("GH_APP_ID") + gh_app_private_key_bytes = os.environ.get("GH_APP_PRIVATE_KEY", "").encode("utf8") + gh_app_installation_id = get_int_env_var("GH_APP_INSTALLATION_ID") + + if gh_app_id and (not gh_app_private_key_bytes or not gh_app_installation_id): + raise ValueError( + "GH_APP_ID set and GH_APP_INSTALLATION_ID or GH_APP_PRIVATE_KEY variable not set" + ) + + token = os.getenv("GH_TOKEN", "") + if ( + not gh_app_id + and not gh_app_private_key_bytes + and not gh_app_installation_id + and not token + ): raise ValueError("GH_TOKEN environment variable not set") ghe = os.getenv("GH_ENTERPRISE_URL", default="").strip() - start_date = os.getenv("START_DATE") - # make sure that start date is in the format YYYY-MM-DD - if start_date and len(start_date) != 10: - raise ValueError("START_DATE environment variable not in the format YYYY-MM-DD") - - end_date = os.getenv("END_DATE") - # make sure that end date is in the format YYYY-MM-DD - if end_date and len(end_date) != 10: - raise ValueError("END_DATE environment variable not in the format YYYY-MM-DD") - - sponsor_info = os.getenv("SPONSOR_INFO") - # make sure that sponsor_string is a boolean - if sponsor_info: - sponsor_info = sponsor_info.lower().strip() - if sponsor_info not in ["true", "false", ""]: - raise ValueError( - "SPONSOR_INFO environment variable not a boolean. ie. True or False or blank" - ) - - link_to_profile = os.getenv("LINK_TO_PROFILE") - # make sure that link_to_profile is a boolean - if link_to_profile: - link_to_profile = link_to_profile.lower().strip() - if link_to_profile not in ["true", "false", ""]: - raise ValueError( - "LINK_TO_PROFILE environment variable not a boolean. ie. 
True or False or blank" - ) + start_date = validate_date_format("START_DATE") + end_date = validate_date_format("END_DATE") + + sponsor_info = get_bool_env_var("SPONSOR_INFO") + link_to_profile = get_bool_env_var("LINK_TO_PROFILE") # Separate repositories_str into a list based on the comma separator repositories_list = [] @@ -87,6 +134,9 @@ def get_env_vars() -> ( return ( organization, repositories_list, + gh_app_id, + gh_app_installation_id, + gh_app_private_key_bytes, token, ghe, start_date, diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..46a3030 --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1,8 @@ +black==24.3.0 +flake8==7.0.0 +mypy==1.8.0 +mypy-extensions==1.0.0 +pylint==3.1.0 +pytest==8.1.1 +pytest-cov==5.0.0 +types-requests==2.31.0.20240311 diff --git a/requirements.txt b/requirements.txt index 33f6c9c..09bf660 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,3 @@ github3.py==4.0.1 python-dotenv==1.0.1 -pytest==8.1.1 -pytest-cov==5.0.0 -Requests==2.31.0 \ No newline at end of file +requests==2.31.0 diff --git a/test_auth.py b/test_auth.py index 9c8786c..69fc9f5 100644 --- a/test_auth.py +++ b/test_auth.py @@ -1,8 +1,10 @@ """Test cases for the auth module.""" + import unittest -from unittest.mock import patch +from unittest.mock import MagicMock, patch import auth +import github3.github class TestAuth(unittest.TestCase): @@ -10,6 +12,17 @@ class TestAuth(unittest.TestCase): Test case for the auth module. """ + @patch("github3.github.GitHub.login_as_app_installation") + def test_auth_to_github_with_github_app(self, mock_login): + """ + Test the auth_to_github function when GitHub app + parameters provided. + """ + mock_login.return_value = MagicMock() + result = auth.auth_to_github(12345, 678910, b"hello", "", "") + + self.assertIsInstance(result, github3.github.GitHub) + @patch("github3.login") def test_auth_to_github_with_token(self, mock_login): """ @@ -17,7 +30,7 @@ def test_auth_to_github_with_token(self, mock_login): """ mock_login.return_value = "Authenticated to GitHub.com" - result = auth.auth_to_github("token", "") + result = auth.auth_to_github("", "", b"", "token", "") self.assertEqual(result, "Authenticated to GitHub.com") @@ -27,7 +40,7 @@ def test_auth_to_github_without_token(self): Expect a ValueError to be raised. """ with self.assertRaises(ValueError): - auth.auth_to_github("", "") + auth.auth_to_github("", "", b"", "", "") @patch("github3.github.GitHubEnterprise") def test_auth_to_github_with_ghe(self, mock_ghe): @@ -35,7 +48,7 @@ def test_auth_to_github_with_ghe(self, mock_ghe): Test the auth_to_github function when the GitHub Enterprise URL is provided. """ mock_ghe.return_value = "Authenticated to GitHub Enterprise" - result = auth.auth_to_github("token", "https://github.example.com") + result = auth.auth_to_github("", "", b"", "token", "https://github.example.com") self.assertEqual(result, "Authenticated to GitHub Enterprise") diff --git a/test_env.py b/test_env.py index de6c0c3..7ca8f99 100644 --- a/test_env.py +++ b/test_env.py @@ -1,5 +1,6 @@ """This is the test module for the env module.""" +import os import unittest from unittest.mock import patch @@ -11,25 +12,51 @@ class TestEnv(unittest.TestCase): Test case for the env module. 
""" - @patch("os.getenv") - def test_get_env_vars(self, mock_getenv): + def setUp(self): + env_keys = [ + "DRY_RUN", + "END_DATE", + "GH_APP_ID", + "GH_ENTERPRISE_URL", + "GH_APP_INSTALLATION_ID", + "GH_APP_PRIVATE_KEY", + "GH_TOKEN", + "ORGANIZATION", + "REPOSITORY", + "START_DATE", + ] + for key in env_keys: + if key in os.environ: + del os.environ[key] + + @patch.dict( + os.environ, + { + "ORGANIZATION": "org", + "REPOSITORY": "repo,repo2", + "GH_APP_ID": "", + "GH_APP_INSTALLATION_ID": "", + "GH_APP_PRIVATE_KEY": "", + "GH_TOKEN": "token", + "GH_ENTERPRISE_URL": "", + "START_DATE": "2022-01-01", + "END_DATE": "2022-12-31", + "SPONSOR_INFO": "False", + "LINK_TO_PROFILE": "True", + }, + clear=True, + ) + def test_get_env_vars(self): """ Test the get_env_vars function when all environment variables are set correctly. """ - mock_getenv.side_effect = [ - "org", - "repo,repo2", - "token", - "", - "2022-01-01", - "2022-12-31", - "False", - "True", - ] ( organization, repository_list, + gh_app_id, + gh_app_installation_id, + gh_app_private_key_bytes, token, ghe, start_date, @@ -40,23 +67,74 @@ def test_get_env_vars(self, mock_getenv): self.assertEqual(organization, "org") self.assertEqual(repository_list, ["repo", "repo2"]) + self.assertIsNone(gh_app_id) + self.assertIsNone(gh_app_installation_id) + self.assertEqual(gh_app_private_key_bytes, b"") self.assertEqual(token, "token") self.assertEqual(ghe, "") self.assertEqual(start_date, "2022-01-01") self.assertEqual(end_date, "2022-12-31") - self.assertEqual(sponsor_info, "false") - self.assertEqual(link_to_profile, "true") + self.assertFalse(sponsor_info) + self.assertTrue(link_to_profile) - @patch("os.getenv") - def test_get_env_vars_missing_values(self, mock_getenv): + @patch.dict( + os.environ, + { + "ORGANIZATION": "org", + "REPOSITORY": "repo,repo2", + "GH_APP_ID": "", + "GH_APP_INSTALLATION_ID": "", + "GH_APP_PRIVATE_KEY": "", + "GH_TOKEN": "", + "GH_ENTERPRISE_URL": "", + "START_DATE": "2022-01-01", + "END_DATE": "2022-12-31", + "SPONSOR_INFO": "False", + "LINK_TO_PROFILE": "True", + }, + clear=True, + ) + def test_get_env_vars_missing_values(self): """ Test the get_env_vars function when none of the environment variables are set. Expect a ValueError to be raised. """ - mock_getenv.side_effect = [None, None, None, None, None, None, None, None] - with self.assertRaises(ValueError): + with self.assertRaises(ValueError) as cm: + env.get_env_vars() + the_exception = cm.exception + self.assertEqual(str(the_exception), "GH_TOKEN environment variable not set") + + @patch.dict( + os.environ, + { + "ORGANIZATION": "org", + "REPOSITORY": "repo,repo2", + "GH_APP_ID": "", + "GH_APP_INSTALLATION_ID": "", + "GH_APP_PRIVATE_KEY": "", + "GH_TOKEN": "token", + "GH_ENTERPRISE_URL": "", + "START_DATE": "2022/01/01", + "END_DATE": "2022-12-31", + "SPONSOR_INFO": "False", + "LINK_TO_PROFILE": "True", + }, + clear=True, + ) + def test_get_env_vars_invalid_start_date(self): + """ + Test the get_env_vars function when invalid start date given. + Expect a ValueError to be raised. 
+ """ + + with self.assertRaises(ValueError) as cm: env.get_env_vars() + the_exception = cm.exception + self.assertEqual( + str(the_exception), + "START_DATE environment variable not in the format YYYY-MM-DD", + ) if __name__ == "__main__": diff --git a/test_markdown.py b/test_markdown.py index c389c27..ebee539 100644 --- a/test_markdown.py +++ b/test_markdown.py @@ -1,4 +1,5 @@ """This is the test module for the markdown module""" + import unittest from unittest.mock import mock_open, patch