diff --git a/.github/workflows/pr-reviewer.yaml b/.github/workflows/pr-reviewer.yaml new file mode 100644 index 000000000..81502d7e2 --- /dev/null +++ b/.github/workflows/pr-reviewer.yaml @@ -0,0 +1,97 @@ +name: CI PR Reviewer Pipeline +on: + pull_request: + branches: + - main + workflow_dispatch: + +jobs: + review: + runs-on: ubuntu-latest + env: + X_API_KEY: ${{ secrets.SYSTEM_API_KEY }} + X_API_CONSUMER: ${{ secrets.SYSTEM_CONSUMER_UUID }} + API_HOST: "https://app-gippi-api-s-latest-uksouth.azurewebsites.net/" + WORKING_DIRECTORY: ${{ github.workspace }}/ + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - name: Create a diff file + run: | + git diff origin/main...remotes/origin/${{ github.head_ref }} > ${{ env.working_directory }}diff.txt && cat ${{ env.working_directory }}diff.txt + + - name: Generate a response + run: | + API_HOST=$(printenv API_HOST) + WORKING_DIRECTORY=$(printenv WORKING_DIRECTORY) + X_API_CONSUMER=$(printenv X_API_CONSUMER) + X_API_KEY=$(printenv X_API_KEY) + DIFF_FILE="diff.txt" + RESPONSE_MD_FILE="response.md" + + if [ ! -f "${WORKING_DIRECTORY}${DIFF_FILE}" ]; then + echo "File ${WORKING_DIRECTORY}${DIFF_FILE} not found." + exit 1 + fi + + file_contents=$(cat "${WORKING_DIRECTORY}${DIFF_FILE}") + json_body=$(jq -n --arg pt "pullrequest-review" --arg p "$file_contents" '{prompt_type: $pt, prompt: $p}') + + response=$(curl -s -i -X POST "${API_HOST}/predefined" \ + -H "Content-Type: application/json" \ + -H "X-API-CONSUMER: ${X_API_CONSUMER}" \ + -H "X-API-KEY: ${X_API_KEY}" \ + -d "$json_body") + + echo "Response: $response" + + response_code=$(echo "$response" | awk -F' ' '/HTTP\/1.1/{print $2}' | head -n 1) + + if [ "$response_code" -eq 200 ]; then + echo "File contents sent successfully." + # Remove headers + response_body=$(echo "$response" | tail -n +2) + # Remove more headers + response_body=$(echo "$response_body" | sed '/^date: /Id' | sed '/^server: /Id' | sed '/^content-length: /Id' | sed '/^content-type: /Id') + # remove trailing and leading quotes + response_body=$(echo "$response_body" | sed 's/^"\(.*\)"$/\1/') + # remove the initial markdown code block ident if it exists + response_body=$(echo "$response_body" | sed 's/```markdown//') + # remove the last code block ident + response_body=$(echo "$response_body" | sed 's/```//') + + # Write to file + echo -e "$response_body" > "${WORKING_DIRECTORY}${RESPONSE_MD_FILE}" + else + echo "Error sending file contents: $response_code" + echo -e "Request to AEP failed to process" > "${WORKING_DIRECTORY}${RESPONSE_MD_FILE}" + fi + + if [ $? -eq 0 ]; then + echo "Response saved as response.md" + else + echo "Error writing to file in ${WORKING_DIRECTORY}." 
+            exit 1
+          fi
+
+      - name: Get the response as a variable
+        id: get_response
+        run: |
+          {
+            echo 'response<<EOF'
+            cat "${WORKING_DIRECTORY}response.md"
+            echo 'EOF'
+          } >> "$GITHUB_ENV"
+
+      - uses: actions/github-script@v6
+        with:
+          script: |
+            github.rest.issues.createComment({
+              issue_number: context.issue.number,
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              body: process.env.response
+            })
\ No newline at end of file
diff --git a/.github/workflows/pr-summary.yaml b/.github/workflows/pr-summary.yaml
new file mode 100644
index 000000000..c6898b36e
--- /dev/null
+++ b/.github/workflows/pr-summary.yaml
@@ -0,0 +1,101 @@
+name: CI PR Summary Pipeline
+on:
+  pull_request:
+    branches:
+      - main
+  workflow_dispatch:
+
+jobs:
+  review:
+    runs-on: ubuntu-latest
+    env:
+      X_API_KEY: ${{ secrets.SYSTEM_API_KEY }}
+      X_API_CONSUMER: ${{ secrets.SYSTEM_CONSUMER_UUID }}
+      API_HOST: "https://app-gippi-api-s-latest-uksouth.azurewebsites.net/"
+      WORKING_DIRECTORY: ${{ github.workspace }}/
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Create a diff file
+        run: |
+          git diff origin/main...remotes/origin/${{ github.head_ref }} > ${{ env.working_directory }}diff.txt && cat ${{ env.working_directory }}diff.txt
+
+      - name: Generate a response
+        run: |
+          API_HOST=$(printenv API_HOST)
+          WORKING_DIRECTORY=$(printenv WORKING_DIRECTORY)
+          X_API_CONSUMER=$(printenv X_API_CONSUMER)
+          X_API_KEY=$(printenv X_API_KEY)
+          DIFF_FILE="diff.txt"
+          RESPONSE_MD_FILE="response.md"
+
+          if [ ! -f "${WORKING_DIRECTORY}${DIFF_FILE}" ]; then
+            echo "File ${WORKING_DIRECTORY}${DIFF_FILE} not found."
+            exit 1
+          fi
+
+          file_contents=$(cat "${WORKING_DIRECTORY}${DIFF_FILE}")
+          json_body=$(jq -n --arg pt "pullrequest-summary-perfile" --arg p "$file_contents" '{prompt_type: $pt, prompt: $p}')
+
+          response=$(curl -s -i -X POST "${API_HOST}/predefined" \
+            -H "Content-Type: application/json" \
+            -H "X-API-CONSUMER: ${X_API_CONSUMER}" \
+            -H "X-API-KEY: ${X_API_KEY}" \
+            -d "$json_body")
+
+          echo "Response: $response"
+
+          response_code=$(echo "$response" | awk -F' ' '/HTTP\/1.1/{print $2}' | head -n 1)
+
+          if [ "$response_code" -eq 200 ]; then
+            echo "File contents sent successfully."
+            # Remove headers
+            response_body=$(echo "$response" | tail -n +2)
+            # Remove more headers
+            response_body=$(echo "$response_body" | sed '/^date: /Id' | sed '/^server: /Id' | sed '/^content-length: /Id' | sed '/^content-type: /Id')
+            # remove trailing and leading quotes
+            response_body=$(echo "$response_body" | sed 's/^"\(.*\)"$/\1/')
+            # remove the initial markdown code block ident if it exists
+            response_body=$(echo "$response_body" | sed 's/```markdown//')
+            # remove the last code block ident
+            response_body=$(echo "$response_body" | sed 's/```//')
+
+            # Write to file
+            echo -e "$response_body" > "${WORKING_DIRECTORY}${RESPONSE_MD_FILE}"
+          else
+            echo "Error sending file contents: $response_code"
+            echo -e "Request to AEP failed to process" > "${WORKING_DIRECTORY}${RESPONSE_MD_FILE}"
+          fi
+
+          if [ $? -eq 0 ]; then
+            echo "Response saved as response.md"
+          else
+            echo "Error writing to file in ${WORKING_DIRECTORY}."
+            exit 1
+          fi
+
+      - name: Get the response as a variable
+        id: get_response
+        run: |
+          {
+            echo 'response<<EOF'
+            cat "${WORKING_DIRECTORY}response.md"
+            echo 'EOF'
+          } >> "$GITHUB_ENV"
+
+      - uses: actions/github-script@v6
+        with:
+          script: |
+            const prBody = context.payload.pull_request.body || '';
+            const updatedBody = prBody.includes('## 🤖AEP PR SUMMARY🤖')
+              ?
prBody.replace(/## 🤖AEP PR SUMMARY🤖[\s\S]*/, '') + '\n\n## 🤖AEP PR SUMMARY🤖\n\n' + process.env.response + : prBody + '\n\n## 🤖AEP PR SUMMARY🤖\n\n' + process.env.response; + github.rest.pulls.update({ + pull_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: updatedBody + }) \ No newline at end of file diff --git a/.github/workflows/terraform-precheck.yaml b/.github/workflows/terraform-precheck.yaml new file mode 100644 index 000000000..d8870ba01 --- /dev/null +++ b/.github/workflows/terraform-precheck.yaml @@ -0,0 +1,46 @@ +name: Terraform Pre-Check + +on: + pull_request: + branches: + - main + push: + branches: + - DTSPO-18475-Fix-Up + workflow_dispatch: + +permissions: + contents: write + +jobs: + pre-check: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Checkout cnp-azuredevops-libraries + uses: actions/checkout@v4 + with: + repository: hmcts/cnp-azuredevops-libraries + path: cnp-azuredevops-libraries + + - name: Make Terraform Setup Script Executable + run: chmod +x cnp-azuredevops-libraries/scripts/tfenv-install-terraform.sh + + - name: Setup Terraform + run: ./cnp-azuredevops-libraries/scripts/tfenv-install-terraform.sh + shell: bash + + - name: Terraform Format + run: terraform fmt -recursive + working-directory: components + + - name: Commit and Push Changes + working-directory: components + run: | + git config --global user.email "hmcts-platform-operations@HMCTS.NET" + git config --global user.name "hmcts-platform-operations" + git add -u + git diff-index --quiet HEAD || git commit -m "Format with terraform fmt" + git push origin HEAD:${{ github.head_ref }} || echo "No changes to push" \ No newline at end of file diff --git a/.github/workflows/terraform.yaml b/.github/workflows/terraform.yaml new file mode 100644 index 000000000..22c2ae1ae --- /dev/null +++ b/.github/workflows/terraform.yaml @@ -0,0 +1,70 @@ +name: Terraform Deploy + +on: + pull_request: + branches: + - main + workflow_dispatch: + push: + branches: + - DTSPO-18475-Fix-Up + +permissions: + contents: write + +jobs: + terraform: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Az CLI login + uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Terraform Init + working-directory: components + env: + STORAGE_ACCOUNT: ${{ secrets.STORAGE_ACCOUNT }} + CONTAINER_NAME: ${{ secrets.CONTAINER_NAME }} + RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }} + ARM_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.DCD_CFT_SANDBOX_SUBSCRIPTION }} + ARM_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + run: terraform init -backend-config="storage_account_name=$STORAGE_ACCOUNT" -backend-config="container_name=$CONTAINER_NAME" -backend-config="resource_group_name=$RESOURCE_GROUP_NAME" -reconfigure + + - name: Terraform Validate + working-directory: components + id: validate + env: + ARM_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.DCD_CFT_SANDBOX_SUBSCRIPTION }} + ARM_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + run: terraform validate + + - name: Terraform Plan + working-directory: components + id: plan + env: + ARM_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.DCD_CFT_SANDBOX_SUBSCRIPTION }} + ARM_TENANT_ID: ${{ 
secrets.AZURE_TENANT_ID }} + OAUTH_TOKEN: ${{ secrets.OAUTH_TOKEN }} + run: terraform plan -var="oauth_token=${{ secrets.OAUTH_TOKEN }}" + + - name: Terraform Apply + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + working-directory: components + env: + ARM_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + ARM_SUBSCRIPTION_ID: ${{ secrets.DCD_CFT_SANDBOX_SUBSCRIPTION }} + ARM_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + OAUTH_TOKEN: ${{ secrets.OAUTH_TOKEN }} + run: terraform apply -var="oauth_token=${{ secrets.OAUTH_TOKEN }}" -auto-approve + diff --git a/.github/workflows/update-repos.yaml b/.github/workflows/update-repos.yaml index d625ffa71..12fc74b4d 100644 --- a/.github/workflows/update-repos.yaml +++ b/.github/workflows/update-repos.yaml @@ -6,7 +6,7 @@ on: workflow_dispatch: jobs: - update-file: + update-files: runs-on: ubuntu-latest steps: @@ -23,15 +23,38 @@ jobs: python -m pip install --upgrade pip pip install pyyaml requests - - name: Run update script + - name: Run set_org_custom_properties script + env: + OAUTH_TOKEN: ${{ secrets.OAUTH_TOKEN }} + run: python custom-properties/set_org_custom_properties.py + + - name: Run update-repo-list script run: python scripts/update-repo-list.py + - name: Run update-readme script + run: python scripts/update-readme.py + + - name: Install jq + run: sudo apt-get install jq -y + + - name: List Repositories + run: | + echo "Listing Repositories" + for repo in $(jq -r '.[]' < ./production-repos.json); do + echo "Listing repository: $repo" + curl -H "Authorization: token ${{ secrets.OAUTH_TOKEN }}" \ + -H "Accept: application/vnd.github.v3+json" \ + https://api.github.com/repos/hmcts/$repo + done + shell: bash + continue-on-error: true + - name: Commit and push changes run: | git config --global user.name 'hmcts-platform-operations' git config --global user.email 'github-platform-operations@HMCTS.NET' - git add production-repos.json - git commit -m 'Update repository list' + git add production-repos.json readme.md + git commit -m 'Update repository list and readme' git push env: - GITHUB_TOKEN: ${{ secrets.OAUTH_TOKEN }} + GITHUB_TOKEN: ${{ secrets.OAUTH_TOKEN }} \ No newline at end of file diff --git a/ReadMe.md b/ReadMe.md index 16f296261..4644ebd76 100644 --- a/ReadMe.md +++ b/ReadMe.md @@ -2,63 +2,100 @@ This repository contains code to manage GitHub repository branch protection rules for HMCTS. -## Overview +# Overview -This Terraform configuration automates the process of setting up branch protection rules across multiple GitHub repositories. It implements a batching system to handle a large number of repositories efficiently while respecting GitHub API rate limits. +This Terraform configuration automates the process of setting up rule sets at the organisation level. - [Rate Limits Page](https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28) + + +| **Repository Type** | **Count** | +|---------------------------|-----------| +| Production Repositories | [309](../production-repos.json) | +| Development Repositories | 0 | + + ## Getting Started ### Prerequisites - [Terraform](https://www.terraform.io/downloads.html) (version 1.5.7 or later) -- GitHub Personal Access Token with appropriate permissions. - -### Configuration +- Oauth or PAT Token with appropriate permissions. -1. Clone this repository: -git clone https://github.com/hmcts/github-repository-rules.git -2. Create a `terraform.tfvars` file with your GitHub token: -3. 
The python file runs as a cron job via GitHub Actions pipeline at midnight and updates the JSON file with new repositories. ## What This Does -- Reads a list of repositories from `prod-repos.json` -- Checks for the existence of 'main' and 'master' branches in each repository. -- Applies branch protection rules to existing branches. -- Processes repositories in batches to manage API rate limits. +- Reads a list of repositories from `production-repos.json` +- Creates a ruleset at the organisation level, this applies standardisation across all repositories. +- Creates custom properties for repositories, such as marking repositories as "is_production." + ## Maintenance To add or remove repositories follow the below: 1. Open a fresh PR from the master branch ensuring you have pulled down recent changes to the master branch. -2. Update the `prod-repos.json` file with any repository you want. Ensure that its in the format of just the repo name eg: "github-repository-rules" +2. Applies standardised rule sets to repositories listed in the `production-repos.json` file, ensuring consistent management and configuration across all repositories. 3. Create a PR and allow the GH Actions pipeline to run a Terraform Plan to confirm changes are accepted. -4. Once this first pipeline checks out, the second pipeline will apply your changes and update the branch protection rules. +4. Once the plan is good, you can merge your PR into main branch and the pipeline will trigger an apply. 5. Once applied delete your branch. -## Recent Changes +## Troubleshooting -We recently addressed issues with scaling to a larger number of repositories. Here's a summary of the changes: +- Check your Terraform version and ensure there are no underlying bugs with the provider versions. +- Ensure you have formatted your repository name correctly as it may not pick it up properly. -1. Implemented a batching system that splits repositories into smaller groups of 20. -2. Processes each batch sequentially with built-in delays between batches. -3. Only applies branch protection rules after all batches have been processed. +## Terraform documentation -These changes allow us to handle a significantly larger number of repositories without overwhelming the GitHub API or causing Terraform to crash. The system is now more scalable for future growth. +## Requirements -## Project Structure +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 1.5.7 | +| [github](#requirement\_github) | ~> 6.0 | -- `main.tf`: Contains the main Terraform configuration for branch protection rules. -- `data.tf`: Defines data sources for GitHub repositories and branches. -- `locals.tf`: Contains local variables for processing repository data. -- `outputs.tf`: Defines outputs for branch summaries and counts. -- `prod-repos.json`: List of repositories to manage. +## Providers -## Troubleshooting +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | +| [github](#provider\_github) | ~> 6.0 | +| [local](#provider\_local) | n/a | -- Check your Terraform version and ensure there are no underlying bugs with the provider versions. -- Ensure you have formatted your repository name correctly as it may not pick it up properly. 
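The organisation-level ruleset itself is defined in the Terraform configuration rather than in this README. As a rough illustration of the approach described above — a ruleset driven by `production-repos.json` with the admin team exempted — a minimal sketch using the `integrations/github` ~> 6.0 provider might look like the following. The specific rules, values and bypass actors here are assumptions for illustration, not the repository's actual configuration:

```hcl
# Sketch only: applies a branch ruleset to every repository listed in
# production-repos.json (surfaced via local.included_repositories).
# Rule choices and bypass settings below are illustrative assumptions.
resource "github_organization_ruleset" "default_ruleset" {
  name        = "default-ruleset"
  target      = "branch"
  enforcement = "active"

  conditions {
    ref_name {
      include = ["~DEFAULT_BRANCH"]
      exclude = []
    }
    repository_name {
      include = local.included_repositories
      exclude = []
    }
  }

  rules {
    deletion         = true
    non_fast_forward = true

    pull_request {
      required_approving_review_count = 1
      dismiss_stale_reviews_on_push   = true
    }
  }

  bypass_actors {
    actor_id    = data.github_team.admin.id
    actor_type  = "Team"
    bypass_mode = "always"
  }
}
```

The sketch only shows how the JSON-driven repository list and the `github_team` data source plug into a ruleset; the rules actually enforced are whatever the `main.tf` in this repository defines.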
+## Modules + +| Name | Source | Version | +|------|--------|---------| +| [tags](#module\_tags) | git::https://github.com/hmcts/terraform-module-common-tags.git | master | + +## Resources + +| Name | Type | +|------|------| +| [azurerm_resource_group.rg](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/resource_group) | resource | +| [azurerm_storage_account.sa](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_account) | resource | +| [azurerm_storage_container.tfstate](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_container) | resource | +| [github_organization_ruleset.default_ruleset](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/organization_ruleset) | resource | +| [github_team.admin](https://registry.terraform.io/providers/integrations/github/latest/docs/data-sources/team) | data source | +| [local_file.repos_json](https://registry.terraform.io/providers/hashicorp/local/latest/docs/data-sources/file) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [builtFrom](#input\_builtFrom) | Information about the build source or version | `string` | `"https://github.com/hmcts/github-repository-rules"` | no | +| [env](#input\_env) | The environment for the deployment (e.g., dev, staging, prod) | `string` | `"dev"` | no | +| [location](#input\_location) | The location for the resources | `string` | `"UK South"` | no | +| [oauth\_token](#input\_oauth\_token) | OAUTH token to use for authentication. | `string` | n/a | yes | +| [override\_action](#input\_override\_action) | The action to override | `string` | `"plan"` | no | +| [product](#input\_product) | The product name or identifier | `string` | `"sds-platform"` | no | +| [resource\_group\_name](#input\_resource\_group\_name) | The name of the resource group | `string` | `"rule-set-rg"` | no | +| [storage\_account\_name](#input\_storage\_account\_name) | The name of the storage account | `string` | `"rulesetsa"` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [common\_tags](#output\_common\_tags) | n/a | \ No newline at end of file diff --git a/components/data.tf b/components/data.tf index 68ce78b82..3182b5712 100644 --- a/components/data.tf +++ b/components/data.tf @@ -1,15 +1,7 @@ data "github_team" "admin" { - slug = "test" + slug = "platform-operations" # Add more teams here if you want to exlcude them from the rulesets } data "local_file" "repos_json" { filename = "${path.module}./production-repos.json" } - -data "github_branch" "existing_branches" { - for_each = { - for combo in local.repo_branch_combinations : "${combo.repo}:${combo.branch}" => combo - } - repository = each.value.repo - branch = each.value.branch -} diff --git a/components/locals.tf b/components/locals.tf index 6a19ab37a..757ba06c7 100644 --- a/components/locals.tf +++ b/components/locals.tf @@ -1,8 +1,6 @@ locals { # List of repositories to exclude from the production-repos.json file - excluded_repositories = [ - "test-repo-uteppyig", - ] + excluded_repositories = [] # Add any repositories here you would like to exclude # Read repositories from JSON file all_repositories = jsondecode(data.local_file.repos_json.content) @@ -12,41 +10,8 @@ locals { for repo in local.all_repositories : repo if !contains(local.excluded_repositories, repo) ] - - branches_to_check = ["main", "master"] - batch_size = 10 - - # Split 
repositories into batches of 10 to help handle the API Rate limits - repo_batches = chunklist(local.included_repositories, local.batch_size) - - repo_branch_combinations = flatten([ - for batch in local.repo_batches : [ - for repo in batch : [ - for branch in local.branches_to_check : { - repo = repo - branch = branch - } - ] - ] - ]) - - # Create a map of existing branches - existing_branches = { - for key, branch in data.github_branch.existing_branches : - key => branch - } - - # Checks if a main/master branch exists on the repositories - branch_summary = { - for repo in local.included_repositories : - repo => { - main = contains(keys(local.existing_branches), "${repo}:main") - master = contains(keys(local.existing_branches), "${repo}:master") - } - } } - locals { env_display_names = { sbox = "Sandbox" @@ -64,8 +29,4 @@ locals { "costCentre" = "" } enforced_tags = module.tags.common_tags -} - - - - +} \ No newline at end of file diff --git a/components/outputs.tf b/components/outputs.tf index 5e34a937a..62c76cb79 100644 --- a/components/outputs.tf +++ b/components/outputs.tf @@ -4,24 +4,4 @@ output "common_tags" { Product = var.product BuiltFrom = var.builtFrom } -} - - -# This outout below will summarise how many repos have a master, main or both branches on the repos -output "branch_count" { - value = { - total_repos = length(local.included_repositories) - repos_with_main = sum([for repo, branches in local.branch_summary : branches.main ? 1 : 0]) - repos_with_master = sum([for repo, branches in local.branch_summary : branches.master ? 1 : 0]) - repos_with_both = sum([for repo, branches in local.branch_summary : (branches.main && branches.master) ? 1 : 0]) - } - description = "Summary of branch counts" -} - -# output "existing_branches" { -# value = keys(local.existing_branches) -# } - -# output "branch_summary" { -# value = local.branch_summary -# } +} \ No newline at end of file diff --git a/components/provider.tf b/components/provider.tf index 31bdaae7a..a0e4c48be 100644 --- a/components/provider.tf +++ b/components/provider.tf @@ -3,7 +3,7 @@ provider "azurerm" { } provider "github" { - owner = "hmcts-test" + owner = "hmcts" token = var.oauth_token } @@ -24,20 +24,7 @@ terraform { required_providers { github = { source = "integrations/github" - version = "~> 5.0" + version = "~> 6.0" } } } - -# required_providers { -# github = { -# source = "integrations/github" -# version = "6.2.1" -# } -# azurerm = { -# source = "hashicorp/azurerm" -# version = "3.109.0" -# } -# } -# } - diff --git a/components/variables.tf b/components/variables.tf index 6d4a4f24a..be25fcd05 100644 --- a/components/variables.tf +++ b/components/variables.tf @@ -44,4 +44,4 @@ variable "builtFrom" { description = "Information about the build source or version" type = string default = "https://github.com/hmcts/github-repository-rules" -} +} \ No newline at end of file diff --git a/custom-properties/set_org_custom_properties.py b/custom-properties/set_org_custom_properties.py new file mode 100644 index 000000000..4c1b9c028 --- /dev/null +++ b/custom-properties/set_org_custom_properties.py @@ -0,0 +1,197 @@ +import os +import requests +import json +import logging + +# Set up logging +logging.basicConfig(level=logging.INFO) + +# GitHub API base URL +API_BASE = "https://api.github.com" + +# Get OAuth token from environment variable +TOKEN = os.environ.get('OAUTH_TOKEN') +if not TOKEN: + raise ValueError("OAUTH_TOKEN environment variable is not set") + +# Your organisation name +ORG_NAME = "hmcts" + +# Headers for API 
requests +headers = { + "Authorization": f"Bearer {TOKEN}", + "Accept": "application/vnd.github+json" +} + +def define_custom_property(org_name): + """ + Define a custom property for the organisation. + + 1. Creates a custom property called "is_production" at the organisation level, which is then passed down to the individual repository level. + 2. Sends a PUT request to GitHub's API to create the property. + 3. Defines the property as a boolean (true/false) value. + 4. The JSON file is where all the production repositories are stored, these will then be used to assign custom properties to. + + Error Handling: + + 1. Checks if the API response status code is not 200. + 2. Logs an error message with the specific reason from the API, or a generic HTTP status code error if no specific message is provided. + 3. Raises an HTTP error if the request was unsuccessful. + + Args: + org_name (str): The name of the GitHub organisation. + + Returns: + int: The status code of the API response (200 if successful). + + Raises: + requests.RequestException: If the API request to GitHub fails. + + """ + + url = f"{API_BASE}/orgs/{org_name}/properties/schema/is_production" + data = { + "value_type": "true_false", + "required": False, + "default_value": "", + "description": "Indicates if the repository is in production", + "allowed_values": None, # Set to None as required by API + "values_editable_by": "org_and_repo_actors" + } + response = requests.put(url, headers=headers, json=data) + if response.status_code != 200: + error_message = response.json().get('message', f"HTTP {response.status_code} error") + logging.error(f"Failed to define custom property for {org_name}: {error_message}") + response.raise_for_status() + return response.status_code + + +def set_custom_properties(repo_full_name, properties): + """ + 1. Sets custom properties for the repositories listed from the JSON file. + 2. Sends a PATCH request to GitHub's API to update the repository's properties. + + Sets the custom properties for a repository. + + Error Handling: + 1. Checks if the API response status code is not 204. + 2. Logs an error message with the specific reason from the API, or a generic HTTP status code error if no specific message can be provided. + 3. Raises an HTTP error if the request was unsuccessful. + + Sets the custom properties for a repository. + + Args: + repo_full_name (str): The full name of the repository (org/repo). + properties (dict): The custom properties to set. + + Returns: + int: The status code of the API response. + + Raises: + requests.RequestException: If the API request fails. + + """ + + owner, repo = repo_full_name.split('/') + url = f"{API_BASE}/repos/{owner}/{repo}/properties/values" + data = { + "properties": [ + {"property_name": key, "value": value} + for key, value in properties.items() + ] + } + response = requests.patch(url, headers=headers, json=data) + if response.status_code != 204: + error_message = response.json().get('message', f"HTTP {response.status_code} error") + logging.error(f"Failed to set properties for {repo_full_name}: {error_message}") + response.raise_for_status() + return response.status_code + +def get_custom_properties(repo_full_name): + """ + Get custom properties for a repository. + + 1. Retrieves the current custom properties of the repositories. + 2. Sends a GET request to GitHub's API for the specific repository. + 3. Returns the custom properties as a JSON object. + + Args: + repo_full_name (str): The full name of the repository (org/repo). 
+ + Returns: + dict: The custom properties of the repository. + + Raises: + requests.RequestException: If the API request fails. + + """ + + owner, repo = repo_full_name.split('/') + url = f"{API_BASE}/repos/{owner}/{repo}/properties/values" + response = requests.get(url, headers=headers) + response.raise_for_status() + return response.json() + +def load_production_repos(): + """ + 1. Loads a list of production repositories from a JSON file. + 2. Reads from the production-repos.json. + 3. Parses the JSON content and returns it as a list. + + + Error Handling: + 1. Handles FileNotFoundError by logging an error if the JSON file is not found, including the expected file path and current directory contents. + 2. Handles JSONDecodeError by logging an error if the JSON file cannot be parsed correctly, including the specific error encountered. + + """ + + script_dir = os.path.dirname(__file__) + json_file_path = os.path.join(script_dir, '../production-repos.json') + + try: + with open(json_file_path, 'r') as f: + repos = json.load(f) + return repos + except FileNotFoundError: + logging.error(f"Error: 'production-repos.json' not found at {os.path.abspath(json_file_path)}") + logging.error("Contents of the current directory: %s", os.listdir('.')) + raise + except json.JSONDecodeError as e: + logging.error(f"Error decoding JSON from {json_file_path}: {e}") + raise + + +# Define the custom property at the organisation level +try: + status = define_custom_property(ORG_NAME) + logging.info(f"Defined custom property for {ORG_NAME}: Status {status}") +except requests.RequestException as e: + logging.error(f"Failed to define custom property for {ORG_NAME}: {str(e)}") + +# Load production repositories +production_repos = load_production_repos() + +logging.info(f"Repositories found in production-repos.json:") +for repo in production_repos: + logging.info(f"- {repo}") + +# Apply custom properties to each repository and verify +for repo_name in production_repos: + repo_full_name = f"{ORG_NAME}/{repo_name}" + custom_properties = { + "is_production": "true" + } + + logging.info(f"\nSetting custom property for: {repo_name}") + try: + status = set_custom_properties(repo_full_name, custom_properties) + logging.info(f"Set properties for {repo_full_name}: Status {status}") + + # Verify the properties were set correctly + retrieved_properties = get_custom_properties(repo_full_name) + logging.info(f"Custom properties for {repo_full_name}: {retrieved_properties}") + + except requests.RequestException as e: + logging.error(f"Failed to set properties for {repo_full_name}: {str(e)}") + +logging.info("\nScript execution completed.") \ No newline at end of file diff --git a/production-repos.json b/production-repos.json index bd90a8639..fde48c8a1 100644 --- a/production-repos.json +++ b/production-repos.json @@ -1,311 +1,311 @@ [ - "cnp-module-shutterpage", - "et-sya-api", + "bulk-scan-processor", + "wa-standalone-task-bpmn", + "probate-caveats-frontend", + "cnp-idam-master", + "civil-citizen-ui", + "ia-bail-case-api", + "lau-case-backend", + "dm-shared-infrastructure", + "bulk-scan-payment-processor", "ia-hearings-api", - "rpe-pdf-service", - "cnp-module-key-vault", - "juror-public", - "hmc-hmi-inbound-adapter", - "juror-bureau", - "blob-router-service", - "ccd-case-document-am-api", - "pip-account-management", - "rpx-xui-manage-organisations", - "tax-tribunals-datacapture", - "hmi-apim-infrastructures", - "pre-vault", - "finrem-case-orchestration-service", - "cnp-module-vnet", - "ccpayfr-shared-infrastructure", - 
"am-shared-infrastructure", - "cnp-module-metric-alert", - "terraform-module-dynatrace-oneagent", - "ctsc-work-allocation", - "idam-api", - "probate-persistence-service", - "pip-shared-infrastructure-bootstap", - "probate-submit-service", - "service-auth-provider-app", - "hmc-cft-hearing-service", - "cnp-module-api-mgmt-subscription", + "ccfr-fees-register-admin-web", + "rd-user-profile-api", + "nfdiv-case-api", + "pip-publication-services", + "cnp-pipelinemetrics-database", + "adoption-shared-infrastructure", + "ia-timed-event-service", + "adoption-web", + "pcq-loader", "probate-frontend", - "div-evidence-management-client-api", - "ia-bail-case-api", - "ccpay-paymentoutcome-web", - "terraform-module-application-insights", - "prl-citizen-frontend", - "idam-user-dashboard", - "wa-workflow-api", - "prl-wa-task-configuration", - "prl-shared-infrastructure", - "ccd-case-management-web", - "ecm-consumer", - "pre-functions", - "draft-store", - "cui-ra", - "sptribs-frontend", - "help-with-fees-shared-infrastructure", - "cmc-claim-store", - "sptribs-shared-infrastructure", - "ethos-repl-docmosis-service", + "rpa-professional-api", "civil-sdt-gateway", - "pre-shared-infrastructure", + "terraform-module-servicebus-queue", + "cnp-module-automation-runbook-start-stop-vm", + "cnp-module-webapp", + "probate-submit-service", + "cnp-module-api-mgmt-api-policy", + "hmc-cft-hearing-service", + "sscs-track-your-appeal-notifications", + "div-document-generator-client", "ctsc-shared-infrastructure", - "civil-camunda-bpmn-definition", - "bar-api", - "darts-automation", - "am-role-assignment-batch-service", - "nfdiv-shared-infrastructure", - "ccpay-bubble", - "probate-shared-infrastructure", - "cmc-legal-rep-frontend", - "ccd-definition-store-api", - "ia-home-office-integration-api", - "ccd-admin-web-api", - "pre-network", - "cnp-module-storage", - "terraform-module-sdp-db-user", - "terraform-module-servicebus-namespace", - "fis-hmc-api", - "cnp-module-waf", - "cmc-ccd-domain", - "ccd-admin-web", - "rd-shared-infrastructure", - "ccpay-bulkscanning-app", - "sscs-case-loader", - "cnp-module-action-group", - "div-respondent-frontend", + "civil-orchestrator-service", + "probate-health-monitor", + "em-hrs-api", + "rd-commondata-dataload", + "am-role-assignment-refresh-batch", + "ccd-shared-infrastructure", "ccd-case-activity-api", - "tax-tribunals-shared-infrastructure", - "am-judicial-booking-service", - "pip-shared-infrastructures", - "bar-shared-infrastructure", - "cnp-owaspdependencycheck-database", + "juror-public", + "finrem-shared-infrastructure", + "rd-judicial-data-load", + "help-with-fees-shared-infrastructure", + "cnp-idam-compute", + "am-role-assignment-batch-service", + "ccpay-paymentoutcome-web", "sds-toffee-shared-infrastructure", - "bulk-scan-orchestrator", - "pip-data-management", - "darts-gateway", + "ccpay-refunds-app", + "wa-shared-infrastructure", + "disposer-idam-user", + "service-auth-provider-app", "ccd-logstash", - "lau-shared-infrastructure", + "cnp-module-storage-account", + "family-api-gateway", + "send-letter-service", + "darts-shared-infrastructure", + "cnp-module-api-mgmt-subscription", + "am-org-role-mapping-service", + "rd-professional-api", + "libragob-shared-infrastructure", + "nfdiv-frontend", + "pdm-shared-infrastructure", + "cnp-idam-storage", + "pip-account-management", + "rd-judicial-api", + "civil-camunda-bpmn-definition", + "probate-ccd-data-migration-tool", + "finrem-case-orchestration-service", + "ccpay-payment-app", + "et-ccd-callbacks", + "rd-location-ref-api", + 
"bar-shared-infrastructure", "em-shared-infrastructure", + "fpl-ccd-configuration", + "hmi-shared-infrastructures", + "rd-commondata-api", + "terraform-module-postgresql-flexible", + "cnp-keda-shared-infrastucture", + "juror-bureau", + "ia-case-payments-api", + "rd-caseworker-ref-api", + "ccpay-payment-api-gateway", + "ccfr-fees-register-app", + "ia-case-access-api", + "civil-sdt", + "div-case-orchestration-service", + "pre-functions", + "probate-persistence-service", + "fpl-wa-task-configuration", + "pip-subscription-management", + "ccd-admin-web", + "rpe-pdf-service", + "ccpay-bulkscanning-app", + "cnp-owaspdependencycheck-database", + "rd-shared-infrastructure", + "ccpayfr-shared-infrastructure", + "idam-shared-infrastructure", + "sds-toffee-recipes-service", + "docmosis-infrastructure", + "rpx-xui-webapp", + "cnp-module-shutterpage", + "sscs-cor-backend", + "pcq-frontend", "ccd-user-profile-api", - "cnp-module-webapp", - "sscs-submit-your-appeal", - "bulk-scan-payment-processor", - "ia-case-notifications-api", - "rd-location-ref-api", - "cmc-citizen-frontend", + "camunda-bpm", + "cui-ra", + "cnp-core-compute", + "pip-shared-infrastructures", + "juror-scheduler-api", + "bulk-scan-shared-infrastructure", + "sscs-task-configuration", + "pdda-shared-infrastructure", + "ccd-definition-designer-api", + "cmc-shared-infrastructure", + "cnp-module-api-mgmt", + "et-sya-api", + "ccpay-service-request-cpo-update-service", + "probate-shared-infrastructure", "c100-application", - "ccpay-functions-node", - "sptribs-case-api", - "sscs-track-your-appeal-notifications", - "send-letter-service", + "terraform-module-servicebus-topic", + "pip-shared-infrastructure-bootstap", + "cnp-module-metric-alert", + "terraform-module-sdp-db-user", + "pre-shared-infrastructure", + "sscs-bulk-scan", + "dg-docassembly-api", + "cnp-module-api-mgmt-api-operation", + "cmc-claim-submit-api", "div-shared-infrastructure", - "camunda-shared-infrastructure", + "div-decree-nisi-frontend", + "ctsc-work-allocation", + "hmi-rota-dtu", + "lau-idam-backend", "pcq-consolidation-service", + "cnp-plum-recipes-service", + "prl-citizen-frontend", + "sds-toffee-frontend", + "div-decree-absolute-frontend", + "ethos-repl-docmosis-service", + "tax-tribunals-shared-infrastructure", + "cnp-module-api-mgmt-api", + "probate-back-office", + "ia-aip-frontend", + "cnp-module-action-group", + "cnp-module-api-mgmt-product", + "ccd-case-print-service", + "hmc-shared-infrastructure", + "juror-scheduler-execution", + "div-evidence-management-client-api", + "reform-scan-shared-infra", + "cnp-rhubarb-frontend", + "tax-tribunals-datacapture", + "ccpay-bubble", + "ecm-consumer", + "ccd-definition-store-api", + "cnp-module-application-insights", "ccd-elastic-search", - "adoption-shared-infrastructure", - "wa-task-configuration-api", - "em-native-pdf-annotator-app", - "fpl-ccd-data-migration-tool", - "juror-scheduler-api", - "rd-judicial-api", + "ia-case-api", + "probate-business-service", + "cmc-ccd-domain", + "pip-data-management", "snl-shared-infrastructure", - "fact-admin", - "em-annotation-api", + "bulk-scan-orchestrator", + "rd-profile-sync", + "sscs-evidence-share", "cnp-module-trafficmanager", - "civil-citizen-ui", - "cnp-idam-master", - "sscs-cor-frontend", - "cnp-module-redis", - "disposer-idam-user", - "fact-shared-infrastructure", - "fact-frontend", - "libragob-shared-infrastructure", - "cnp-rhubarb-frontend", - "pip-subscription-management", - "juror-pnc", - "rpa-coh-continuous-online-resolution", - "rpx-shared-infrastructure", - 
"disposer-shared-infrastructure", - "civil-orchestrator-service", - "sscs-ccd-callback-orchestrator", - "darts-proxy", - "ia-case-access-api", - "ccpay-notifications-service", - "div-case-orchestration-service", - "cnp-core-compute", - "c100-shared-infrastructure", - "sscs-cor-backend", - "fpl-ccd-configuration", - "ccd-case-print-service", - "div-petitioner-frontend", - "cnp-module-trafficmanager-endpoint", - "pre-api", - "et-message-handler", - "aac-manage-case-assignment", - "am-role-assignment-service", + "camunda-shared-infrastructure", + "ecm-shared-infrastructure", + "et-sya-frontend", + "terraform-module-common-tags", + "rd-location-ref-data-load", + "cmc-claim-store", + "cpo-case-payment-orders-api", "pcq-shared-infrastructure", - "div-decree-nisi-frontend", - "div-validation-service", - "cnp-module-api-mgmt", - "terraform-module-log-analytics-workspace-id", - "rd-caseworker-ref-api", - "cnp-module-api-mgmt-api-operation", + "makeaplea-shared-infrastructure", + "cet-shared-infrastructure", + "div-health-monitor", + "cnp-module-vnet", + "cnp-rhubarb-recipes-service", + "ccd-admin-web-api", + "fact-shared-infrastructure", + "ccd-case-disposer", + "bar-api", + "terraform-module-application-insights", "fis-shared-infrastructure", - "et-ccd-callbacks", - "ts-translation-service", - "em-hrs-api", - "lau-case-backend", - "sscs-track-your-appeal-frontend", - "darts-shared-infrastructure", - "div-fees-and-payments-service", - "rd-judicial-data-load", + "hmi-apim-infrastructures", + "cnp-idam-vault", + "em-ccd-orchestrator", + "fis-hmc-api", + "wa-workflow-api", + "wa-task-monitor", + "am-role-assignment-service", + "ccd-case-management-web", + "cnp-rhubarb-shared-infrastructure", "civil-ccd-definition", + "darts-api", + "feature-toggle-api", + "ts-translation-service", + "am-judicial-booking-service", + "document-management-store-app", "probate-orchestrator-service", - "et-sya-frontend", - "cnp-module-application-insights", - "dg-docassembly-api", - "fact-api", - "civil-sdt", - "rpa-em-ccd-orchestrator", - "docmosis-infrastructure", - "sscs-shared-infrastructure", - "ia-case-api", - "ccpay-service-request-cpo-update-service", - "juror-shared-infrastructure", - "div-document-generator-client", - "cnp-module-automation-runbook-start-stop-vm", - "am-org-role-mapping-service", - "div-health-monitor", - "sscs-task-configuration", - "cmc-claim-submit-api", - "rpa-professional-api", - "juror-scheduler-execution", - "makeaplea-shared-infrastructure", - "adoption-web", - "pdm-shared-infrastructure", - "rd-commondata-api", - "wa-case-event-handler", - "terraform-module-servicebus-topic", - "ccd-definition-designer-api", - "cnp-rhubarb-recipes-service", - "sds-toffee-frontend", - "ccpay-payment-api-gateway", - "cnp-keda-shared-infrastucture", - "hmc-shared-infrastructure", - "sscs-evidence-share", - "ia-task-configuration", + "terraform-module-servicebus-subscription", + "sptribs-case-api", + "sscs-case-loader", + "ia-shared-infrastructure", + "c100-shared-infrastructure", + "nfdiv-shared-infrastructure", + "pre-portal", + "juror-pnc", + "rpa-coh-continuous-online-resolution", + "wa-task-configuration-api", + "civil-general-applications", + "sptribs-frontend", + "sds-keda-infrastructure", + "div-case-maintenance-service", + "sscs-ccd-callback-orchestrator", "prl-cos-api", - "ccpay-payment-app", - "sptribs-dss-update-case-web", - "ia-case-payments-api", - "wa-task-management-api", - "feature-toggle-api", - "pip-publication-services", - "darts-portal", + "idam-api", + "disposer-shared-infrastructure", 
+ "cnp-module-postgres", + "rpx-xui-manage-organisations", + "cnp-module-redis", "reform-scan-notification-service", + "dtsse-shared-infrastructure", + "ccd-case-document-am-api", + "civil-general-apps-ccd-definition", + "idam-user-dashboard", + "rpe-shared-infrastructure", + "terraform-module-dynatrace-oneagent", + "fact-frontend", + "rpa-em-ccd-orchestrator", + "cnp-module-trafficmanager-endpoint", + "sscs-hearings-api", + "darts-automation", + "div-fees-and-payments-service", + "cmc-legal-rep-frontend", + "cnp-module-storage", + "pcq-backend", + "cnp-core-infrastructure", + "ccd-data-store-api", + "sscs-tribunals-case-api", + "rpa-jui-webapp", + "terraform-module-log-analytics-workspace-id", + "div-respondent-frontend", + "fpl-ccd-data-migration-tool", "prd-pui-registration", + "div-petitioner-frontend", + "hmi-shared-infrastructures-bootstrap", + "et-pet-shared-infrastructure", + "prl-shared-infrastructure", + "pre-api", + "prl-wa-task-configuration", + "cnp-plum-shared-infrastructure", + "darts-portal", + "sptribs-dss-update-case-web", + "cnp-module-key-vault", + "sptribs-shared-infrastructure", + "ia-task-configuration", + "terraform-module-servicebus-namespace", + "ia-home-office-integration-api", + "rpx-xui-terms-and-conditions", + "em-native-pdf-annotator-app", + "prl-dgs-api", + "rpx-shared-infrastructure", + "juror-shared-infrastructure", + "ccpay-functions-node", + "hmc-hmi-inbound-adapter", + "prl-ccd-definitions", + "wa-task-management-api", + "juror-api", + "ccpay-notifications-service", "probatemandb", - "cnp-pipelinemetrics-database", - "pdda-shared-infrastructure", - "bulk-scan-processor", - "cpo-case-payment-orders-api", + "adoption-cos-api", "em-stitching-api", - "terraform-module-servicebus-subscription", + "cnp-module-waf", + "sscs-track-your-appeal-frontend", + "fact-admin", + "pre-network", + "em-hrs-ingestor", + "aac-manage-case-assignment", + "div-validation-service", "rpx-xui-approve-org", - "bar-web", - "cnp-module-api-mgmt-api", - "juror-api", - "ccd-api-gateway", - "prl-dgs-api", - "wa-shared-infrastructure", - "cnp-module-api-mgmt-product", - "ia-aip-frontend", - "pcq-frontend", - "civil-service", - "pcq-loader", - "probate-caveats-frontend", - "rpe-shared-infrastructure", - "em-icp-api", - "cet-shared-infrastructure", - "div-case-maintenance-service", - "ia-shared-infrastructure", - "idam-shared-infrastructure", - "nfdiv-frontend", - "cnp-module-api-mgmt-api-policy", - "sscs-bulk-scan", - "dm-shared-infrastructure", - "wa-task-monitor", - "adoption-cos-api", - "probate-business-service", - "nfdiv-case-api", - "sscs-hearings-api", - "sds-toffee-recipes-service", - "prl-ccd-definitions", - "ecm-shared-infrastructure", - "rd-professional-api", - "rpa-jui-webapp", - "terraform-module-postgresql-flexible", - "lau-frontend", + "draft-store", + "blob-router-service", + "cnp-module-app-service-plan", + "sscs-shared-infrastructure", "div-case-data-formatter", + "pre-vault", + "darts-gateway", "ia-case-documents-api", - "rd-user-profile-api", - "finrem-shared-infrastructure", - "darts-api", - "cnp-module-storage-account", - "ccpay-refunds-app", - "div-decree-absolute-frontend", - "em-hrs-ingestor", - "ccfr-fees-register-app", - "cnp-plum-shared-infrastructure", - "rd-commondata-dataload", - "cnp-module-palo-alto", - "family-api-gateway", - "terraform-module-common-tags", - "am-role-assignment-refresh-batch", - "cnp-rhubarb-shared-infrastructure", - "probate-health-monitor", - "sscs-tribunals-case-api", - "cmc-shared-infrastructure", - "pcq-backend", - "hmi-rota-dtu", 
- "sds-keda-infrastructure", - "dtsse-shared-infrastructure", - "rpx-xui-webapp", - "ccd-shared-infrastructure", - "document-management-store-app", - "cnp-idam-vault", - "cnp-module-app-service-plan", - "pre-portal", - "et-pet-shared-infrastructure", - "reform-scan-shared-infra", - "hmi-shared-infrastructures", - "fpl-wa-task-configuration", - "wa-standalone-task-bpmn", - "rpx-xui-terms-and-conditions", - "camunda-bpm", - "ia-timed-event-service", - "lau-idam-backend", - "probate-ccd-data-migration-tool", - "rd-profile-sync", - "hmi-shared-infrastructures-bootstrap", - "rd-location-ref-data-load", - "civil-general-applications", - "et-shared-infrastructure", - "cnp-plum-recipes-service", - "cnp-module-postgres", - "cnp-idam-storage", - "terraform-module-servicebus-queue", - "ccd-data-store-api", - "cnp-core-infrastructure", - "cnp-idam-compute", - "em-ccd-orchestrator", - "ccfr-fees-register-admin-web", - "civil-general-apps-ccd-definition", - "bulk-scan-shared-infrastructure", - "ccd-case-disposer", + "fact-api", + "civil-service", + "sscs-submit-your-appeal", "employment-tribunals-shared-infrastructure", + "et-message-handler", + "cmc-citizen-frontend", + "ccd-api-gateway", + "et-shared-infrastructure", + "wa-case-event-handler", + "cnp-module-palo-alto", + "lau-shared-infrastructure", + "sscs-cor-frontend", + "darts-proxy", + "bar-web", + "ia-case-notifications-api", + "lau-frontend", + "em-annotation-api", + "em-icp-api", "idam-web-public", - "probate-back-office" + "am-shared-infrastructure" ] \ No newline at end of file diff --git a/scripts/update-readme.py b/scripts/update-readme.py new file mode 100644 index 000000000..1604280cb --- /dev/null +++ b/scripts/update-readme.py @@ -0,0 +1,115 @@ +import os +import json +import logging + +# Setup logging +logging.basicConfig(level=logging.INFO) + +# File paths +script_dir = os.path.dirname(__file__) +JSON_FILE_PATH = os.path.join(script_dir, '../production-repos.json') +README_FILE_PATH = os.path.join(script_dir, '../ReadMe.md') + +def load_repos(file_path): + """ + Load repositories from the given JSON file. + + 1. Opens and reads the JSON file from the path above. + 2. Parses the JSON content and ensures it is a list. + 3. Returns the list of repositories. + + Error Handling: + + 1. Logs an error if the file is not found at the path specified above. + + Args: + file_path: The path to the JSON file containing the repositories. + + Returns: + list: A list of repositories parsed from the JSON file. + + Raises: + FileNotFoundError: If the JSON file path is not found. + ValueError: If the JSON content is not a list. + json.JSONDecodeError: If the JSON file contains invalid JSON. + + """ + try: + with open(file_path, 'r') as f: + repos = json.load(f) + if not isinstance(repos, list): + raise ValueError("JSON content is not a list") + return repos + except FileNotFoundError: + logging.error(f"Error: '{file_path}' not found.") + raise + +def update_readme(prod_count, dev_count, prod_link): + """ + Update the README file with a count displayed of the number of production repositories as custom properties can't be searched by in GitHub. + + 1. Reads the existing README file content. + 2. Updates the section between markers with new repository counts. + 3. Writes the updated content back to the README file. + + Error Handling: + + 1. Prints "Failed to update README file" if the README file cannot be found at the path we defined above. + + Args: + 1. prod_count: This integer is the number of production repositories. + 2. 
dev_count: The number of development repositories. + 3. prod_link: The file path to the production repositories JSON file. + + """ + try: + with open(README_FILE_PATH, 'r') as file: + readme_content = file.readlines() + + table_content = f""" +| **Repository Type** | **Count** | +|---------------------------|-----------| +| Production Repositories | [{prod_count}]({prod_link}) | +| Development Repositories | {dev_count} | +""" + start_marker = "" + end_marker = "" + start_index = None + end_index = None + + for i, line in enumerate(readme_content): + if start_marker in line: + start_index = i + if end_marker in line: + end_index = i + + if start_index is not None and end_index is not None: + readme_content = ( + readme_content[:start_index + 1] + + [table_content] + + readme_content[end_index:] + ) + else: + readme_content.append(f"\n{start_marker}\n{table_content}\n{end_marker}\n") + + with open(README_FILE_PATH, 'w') as file: + file.writelines(readme_content) + except Exception as e: + logging.error(f"Failed to update README file: {str(e)}") + raise + +# Load production repositories +try: + production_repos = load_repos(JSON_FILE_PATH) + production_count = len(production_repos) + logging.info(f"Number of production repositories: {production_count}") + + # Placeholder value for dev repo count, can be updated similarly + development_count = 0 # Update this to load actual data if available + + # Local link to the production-repos.json file + prod_link = "../production-repos.json" + + update_readme(production_count, development_count, prod_link) +except Exception as e: + logging.error(f"Failed to load or update repositories: {str(e)}") \ No newline at end of file diff --git a/scripts/update-repo-list.py b/scripts/update-repo-list.py index 00bbb4fb1..cc1802daf 100644 --- a/scripts/update-repo-list.py +++ b/scripts/update-repo-list.py @@ -62,4 +62,4 @@ def clean_repo_name(repo_url): # Update the local file with open(repo_file, 'w') as f: - json.dump(all_repos, f, indent=2) + json.dump(all_repos, f, indent=2) \ No newline at end of file
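`update-readme.py` relies on a pair of marker comments in `ReadMe.md` to delimit the generated repository table, replacing whatever sits between them on each run (or appending a fresh marked block when no markers are found). A minimal sketch of that pattern, using hypothetical marker names — the actual marker text in `ReadMe.md` may differ:

```python
# Hypothetical marker names for illustration only; the real markers used by
# update-readme.py may be different HTML comments.
START_MARKER = "<!-- repo-table-start -->"
END_MARKER = "<!-- repo-table-end -->"

def replace_between_markers(readme_text: str, table: str) -> str:
    """Swap the block between the two markers for a freshly generated table."""
    before, _, rest = readme_text.partition(START_MARKER)
    _, _, after = rest.partition(END_MARKER)
    # If the markers are missing, before holds the whole file and after is
    # empty, so the marked table is simply appended at the end.
    return f"{before}{START_MARKER}\n{table}\n{END_MARKER}{after}"
```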