From 4ffaca5d9c6715549671cc3a4d74d90aedf5eeb9 Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Fri, 29 Aug 2025 15:19:33 +0100 Subject: [PATCH 01/20] Add config.json --- config/config.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 config/config.json diff --git a/config/config.json b/config/config.json new file mode 100644 index 0000000..5455ebe --- /dev/null +++ b/config/config.json @@ -0,0 +1,6 @@ +{ + "features": { + "show_log_locally": false, + "use_local_config": false + } +} \ No newline at end of file From fd32d6140f146a977bfe6a14159cd5668c7850b2 Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Tue, 2 Sep 2025 13:01:46 +0100 Subject: [PATCH 02/20] Get config file in handler --- src/main.py | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 56 insertions(+), 1 deletion(-) diff --git a/src/main.py b/src/main.py index d1f4ea1..14d6603 100644 --- a/src/main.py +++ b/src/main.py @@ -8,7 +8,7 @@ import json import logging import os -from typing import Optional +from typing import Optional, Any import boto3 import github_api_toolkit @@ -298,6 +298,53 @@ def get_team_history( return response.json() +def get_dict_value(dictionary: dict, key: str) -> Any: + """Gets a value from a dictionary and raises an exception if it is not found. + + Args: + dictionary (dict): The dictionary to get the value from. + key (str): The key to get the value for. + + Raises: + Exception: If the key is not found in the dictionary. + + Returns: + Any: The value of the key in the dictionary. + """ + value = dictionary.get(key) + + if value is None: + raise Exception(f"Key {key} not found in the dictionary.") + + return value + + +def get_config_file(path: str) -> Any: + """Loads a configuration file as a dictionary. + + Args: + path (str): The path to the configuration file. + + Raises: + Exception: If the configuration file is not found. + + Returns: + Any: The configuration file as a dictionary. + """ + try: + with open(path) as f: + config = json.load(f) + except FileNotFoundError: + error_message = f"{path} configuration file not found. Please check the path." + raise Exception(error_message) from None + + if type(config) is not dict: + error_message = f"{path} configuration file is not a dictionary. Please check the file contents." + raise Exception(error_message) + + return config + + def handler(event: dict, context) -> str: # pylint: disable=unused-argument """AWS Lambda handler function for GitHub Copilot usage data aggregation. @@ -315,12 +362,20 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument Returns: str: Completion message. 
""" + + # Load config file + config = get_config_file("./config/config.json") + + features = get_dict_value(config, "features") + # Create an S3 client session = boto3.Session() s3 = session.client("s3") logger.info("S3 client created") + # TODO: Check whether to use local config or cloud config + # Get the .pem file from AWS Secrets Manager secret_manager = session.client("secretsmanager", region_name=secret_region) From 5ea8af39473c25cb9a4bf1158aaef0a110b9cbc6 Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Wed, 10 Sep 2025 15:50:06 +0100 Subject: [PATCH 03/20] Add local logging support --- config/config.json | 2 +- src/main.py | 34 ++++++++++++++++++++++++++++------ 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/config/config.json b/config/config.json index 5455ebe..064e4b7 100644 --- a/config/config.json +++ b/config/config.json @@ -1,6 +1,6 @@ { "features": { "show_log_locally": false, - "use_local_config": false + "write_data_locally": false } } \ No newline at end of file diff --git a/src/main.py b/src/main.py index 14d6603..adc1aa6 100644 --- a/src/main.py +++ b/src/main.py @@ -8,7 +8,7 @@ import json import logging import os -from typing import Optional, Any +from typing import Any, Optional import boto3 import github_api_toolkit @@ -33,6 +33,8 @@ logger = logging.getLogger() +logger.setLevel(logging.INFO) + # Example Log Output: # # Standard output: @@ -248,7 +250,11 @@ def create_dictionary( def update_s3_object( - s3_client: boto3.client, bucket_name: str, object_name: str, data: dict + s3_client: boto3.client, + bucket_name: str, + object_name: str, + data: dict, + write_data_locally: bool = False, # TODO write_data_locally ) -> bool: """Update an S3 object with new data. @@ -339,7 +345,9 @@ def get_config_file(path: str) -> Any: raise Exception(error_message) from None if type(config) is not dict: - error_message = f"{path} configuration file is not a dictionary. Please check the file contents." + error_message = ( + f"{path} configuration file is not a dictionary. Please check the file contents." + ) raise Exception(error_message) return config @@ -362,20 +370,34 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument Returns: str: Completion message. 
""" - # Load config file config = get_config_file("./config/config.json") features = get_dict_value(config, "features") + show_log_locally = get_dict_value(features, "show_log_locally") + + write_data_locally = get_dict_value(features, "write_data_locally") + + # Remove any existing handlers to avoid duplicate logs + if logger.hasHandlers(): + logger.handlers.clear() + + # Toggle local logging + if show_log_locally: + # Add a StreamHandler to log to the console + console_handler = logging.StreamHandler() + console_handler.setLevel(logging.INFO) + formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + # Create an S3 client session = boto3.Session() s3 = session.client("s3") logger.info("S3 client created") - # TODO: Check whether to use local config or cloud config - # Get the .pem file from AWS Secrets Manager secret_manager = session.client("secretsmanager", region_name=secret_region) From 4b4ee5f7c377134cc3bfd2dce19657ef1168181e Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Wed, 17 Sep 2025 17:14:57 +0100 Subject: [PATCH 04/20] Add local writes --- src/main.py | 57 +++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 42 insertions(+), 15 deletions(-) diff --git a/src/main.py b/src/main.py index adc1aa6..a1a554d 100644 --- a/src/main.py +++ b/src/main.py @@ -115,13 +115,14 @@ def get_copilot_team_date(gh: github_api_toolkit.github_interface, page: int) -> def get_and_update_historic_usage( - s3: boto3.client, gh: github_api_toolkit.github_interface + s3: boto3.client, gh: github_api_toolkit.github_interface, write_data_locally: bool ) -> tuple: """Get and update historic usage data from GitHub Copilot. Args: s3 (boto3.client): An S3 client. gh (github_api_toolkit.github_interface): An instance of the github_interface class. + write_data_locally (bool): Whether to write data locally instead of to S3. Returns: tuple: A tuple containing the updated historic usage data and a list of dates added. @@ -155,18 +156,28 @@ def get_and_update_historic_usage( extra={"no_days_added": len(dates_added), "dates_added": dates_added}, ) - # Write the updated historic_usage to historic_usage_data.json - update_s3_object(s3, BUCKET_NAME, OBJECT_NAME, historic_usage) + if not write_data_locally: + # Write the updated historic_usage to historic_usage_data.json + update_s3_object(s3, BUCKET_NAME, OBJECT_NAME, historic_usage) + else: + local_path = f"local_data/{OBJECT_NAME}" + os.makedirs("local_data", exist_ok=True) + with open(local_path, "w", encoding="utf-8") as f: + json.dump(historic_usage, f, indent=4) + logger.info( + "Historic usage data written locally to %s (S3 skipped)", local_path + ) return historic_usage, dates_added -def get_and_update_copilot_teams(s3: boto3.client, gh: github_api_toolkit.github_interface) -> list: +def get_and_update_copilot_teams(s3: boto3.client, gh: github_api_toolkit.github_interface, write_data_locally: bool) -> list: """Get and update GitHub Teams with Copilot Data. Args: s3 (boto3.client): An S3 client. gh (github_api_toolkit.github_interface): An instance of the github_interface class. + write_data_locally (bool): Whether to write data locally instead of to S3. Returns: list: A list of GitHub Teams with Copilot Data. 
@@ -193,7 +204,14 @@ def get_and_update_copilot_teams(s3: boto3.client, gh: github_api_toolkit.github extra={"no_teams": len(copilot_teams)}, ) - update_s3_object(s3, BUCKET_NAME, "copilot_teams.json", copilot_teams) + if not write_data_locally: + update_s3_object(s3, BUCKET_NAME, "copilot_teams.json", copilot_teams) + else: + local_path = "local_data/copilot_teams.json" + os.makedirs("local_data", exist_ok=True) + with open(local_path, "w", encoding="utf-8") as f: + json.dump(copilot_teams, f, indent=4) + logger.info("Copilot teams data written locally to %s (S3 skipped)", local_path) return copilot_teams @@ -249,14 +267,15 @@ def create_dictionary( return list(existing_team_data_map.values()) +# TODO: refactor update_s3_object to accept write_data_locally to handle local writes logic def update_s3_object( s3_client: boto3.client, bucket_name: str, object_name: str, data: dict, - write_data_locally: bool = False, # TODO write_data_locally + write_data_locally: bool = False, ) -> bool: - """Update an S3 object with new data. + """Update an S3 object with new data or write locally based on the flag. Args: s3_client (boto3.client): The S3 client. @@ -419,10 +438,10 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument logger.info("API Controller created") # Copilot Usage Data (Historic) - historic_usage, dates_added = get_and_update_historic_usage(s3, gh) + historic_usage, dates_added = get_and_update_historic_usage(s3, gh, write_data_locally) # GitHub Teams with Copilot Data - copilot_teams = get_and_update_copilot_teams(s3, gh) + copilot_teams = get_and_update_copilot_teams(s3, gh, write_data_locally) logger.info("Getting history of each team identified previously") @@ -436,11 +455,19 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument logger.info("Existing team history has %d entries", len(existing_team_history)) - # Convert to dictionary for quick lookup - updated_team_history = create_dictionary(gh, copilot_teams, existing_team_history) + if not write_data_locally: + # Convert to dictionary for quick lookup + updated_team_history = create_dictionary(gh, copilot_teams, existing_team_history) - # Write updated team history to S3 - update_s3_object(s3, BUCKET_NAME, "teams_history.json", updated_team_history) + # Write updated team history to S3 + update_s3_object(s3, BUCKET_NAME, "teams_history.json", updated_team_history) + else: + local_path = "local_data/teams_history.json" + os.makedirs("local_data", exist_ok=True) + updated_team_history = create_dictionary(gh, copilot_teams, existing_team_history) + with open(local_path, "w", encoding="utf-8") as f: + json.dump(updated_team_history, f, indent=4) + logger.info("Team history written locally to %s (S3 skipped)", local_path) logger.info( "Process complete", @@ -459,5 +486,5 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument # # Dev Only # # Uncomment the following line to run the script locally -# if __name__ == "__main__": -# handler(None, None) +if __name__ == "__main__": + handler(None, None) From 3c3fb481f3505234fdb203f76e7ffb16b770f1d1 Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Wed, 17 Sep 2025 17:16:09 +0100 Subject: [PATCH 05/20] Comment out local script execution --- src/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main.py b/src/main.py index a1a554d..decc1f4 100644 --- a/src/main.py +++ b/src/main.py @@ -486,5 +486,5 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument # # Dev Only 
# # Uncomment the following line to run the script locally -if __name__ == "__main__": - handler(None, None) +# if __name__ == "__main__": +# handler(None, None) From c94fcf394515f6e488a3c055d81d281b3f7dd09d Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Fri, 26 Sep 2025 15:02:49 +0100 Subject: [PATCH 06/20] Add configuration.md --- docs/configuration.md | 69 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 docs/configuration.md diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 0000000..575b13a --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,69 @@ +# Configuration + +The copilot lambda uses a local configuration file to manage its settings, located within `./config/config.json`. + +## `config.json` + +The `config.json` file contains the following: + +```json +{ + "features": { + "show_log_locally": false, + "write_data_locally": false + }, +} +``` + +### `features` Section + +This section contains feature flags that control which of the tool's features are enabled or disabled. + +#### `show_log_locally` + +TODO: Confirm this section +If set to `true`, the tool will output logs to a `debug.log` file at the root of the project directory. This is useful for debugging purposes. If set to `false`, logs will not be saved locally. + +When deploying to AWS, this should be set to `false` to avoid files being written to the local filesystem. + +#### `write_data_locally` + +TODO: Update this section +If set to `true`, the tool will use the local configuration file (`config.json`) for its settings (overriding any cloud configuration). If set to `false`, the tool will fetch the configuration from the cloud (S3 bucket). + +**When deploying to AWS, this must be set to `false` to ensure the tool writes to AWS.** + +When debugging locally, you can set this to `true` to use the local configuration file. This is useful if you need to see the logs locally, without affecting the cloud deployment. + +### Example During Local Testing + +When testing locally, you might set the `config.json` file as follows: + +```json +{ + "features": { + "show_log_locally": true, + "write_data_locally": true + }, +} +``` + +TODO: Confirm +This will ensure that the local configuration is used, logs are saved to `debug.log`, and no notifications are created during testing. + +### Example On AWS + +When deploying to AWS, the `config.json` file should be set as follows: + +```json +{ + "features": { + "show_log_locally": false, + "write_data_locally": false + }, +} +``` + +This configuration ensures that the tool does not log or write data locally + +**It is essential that `write_data_locally` is set to `false` when deploying to AWS.** From d9fefcebafd6bcdccee3adf99103c42b410a0c21 Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Fri, 26 Sep 2025 15:03:46 +0100 Subject: [PATCH 07/20] Capitalise Copilot --- docs/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configuration.md b/docs/configuration.md index 575b13a..b9a3622 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -1,6 +1,6 @@ # Configuration -The copilot lambda uses a local configuration file to manage its settings, located within `./config/config.json`. +The Copilot lambda uses a local configuration file to manage its settings, located within `./config/config.json`. 
## `config.json` From 29676feb0bc252441c82f2810783f756dac9ea7a Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Wed, 1 Oct 2025 18:12:13 +0100 Subject: [PATCH 08/20] Add local_data to .gitignore --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 3fff196..ebc51a1 100644 --- a/.gitignore +++ b/.gitignore @@ -65,4 +65,7 @@ terraform.rc .vscode/ # Ignore MegaLinter reports -megalinter-reports/ \ No newline at end of file +megalinter-reports/ + +# Ignore locally written data +local_data \ No newline at end of file From 41da37a347b8758be6abf9aaa943c15084906b1e Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Mon, 13 Oct 2025 12:52:07 +0100 Subject: [PATCH 09/20] Resolve todos and add local_data to clean command --- Makefile | 1 + docs/configuration.md | 11 +++-------- src/main.py | 4 +--- 3 files changed, 5 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 6b980a7..54058a9 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,7 @@ clean: ## Clean the temporary files. rm -rf .pytest_cache rm -rf tests/__pycache__ rm -rf .coverage + rm -rf local_data .PHONY: black-check black-check: ## Run black for code formatting, without fixing. diff --git a/docs/configuration.md b/docs/configuration.md index b9a3622..19c9eaa 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -21,15 +21,13 @@ This section contains feature flags that control which of the tool's features ar #### `show_log_locally` -TODO: Confirm this section -If set to `true`, the tool will output logs to a `debug.log` file at the root of the project directory. This is useful for debugging purposes. If set to `false`, logs will not be saved locally. +If set to `true`, the tool will output logs to the terminal. When deploying to AWS, this should be set to `false` to avoid files being written to the local filesystem. #### `write_data_locally` -TODO: Update this section -If set to `true`, the tool will use the local configuration file (`config.json`) for its settings (overriding any cloud configuration). If set to `false`, the tool will fetch the configuration from the cloud (S3 bucket). +If set to `true`, the tool will skip writing to S3 and instead write data for copilot teams, historic usage, and teams history to `local_data`. **When deploying to AWS, this must be set to `false` to ensure the tool writes to AWS.** @@ -48,9 +46,6 @@ When testing locally, you might set the `config.json` file as follows: } ``` -TODO: Confirm -This will ensure that the local configuration is used, logs are saved to `debug.log`, and no notifications are created during testing. - ### Example On AWS When deploying to AWS, the `config.json` file should be set as follows: @@ -64,6 +59,6 @@ When deploying to AWS, the `config.json` file should be set as follows: } ``` -This configuration ensures that the tool does not log or write data locally +This configuration ensures that the tool does not log or write data locally. 
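+
+For reference, the handler consumes these flags via the helpers added to `src/main.py`. A simplified sketch of those calls (illustrative, not the full handler):
+
+```python
+config = get_config_file("./config/config.json")
+features = get_dict_value(config, "features")
+
+show_log_locally = get_dict_value(features, "show_log_locally")
+write_data_locally = get_dict_value(features, "write_data_locally")
+```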
**It is essential that `write_data_locally` is set to `false` when deploying to AWS.** diff --git a/src/main.py b/src/main.py index decc1f4..a6867d7 100644 --- a/src/main.py +++ b/src/main.py @@ -267,15 +267,13 @@ def create_dictionary( return list(existing_team_data_map.values()) -# TODO: refactor update_s3_object to accept write_data_locally to handle local writes logic def update_s3_object( s3_client: boto3.client, bucket_name: str, object_name: str, data: dict, - write_data_locally: bool = False, ) -> bool: - """Update an S3 object with new data or write locally based on the flag. + """Update an S3 object with new data. Args: s3_client (boto3.client): The S3 client. From 6b1f425e4eeace3c9c77db977508fd3d6bab2c8b Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Mon, 13 Oct 2025 14:30:46 +0100 Subject: [PATCH 10/20] Lint config.json --- config/config.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/config/config.json b/config/config.json index 064e4b7..b01f8a1 100644 --- a/config/config.json +++ b/config/config.json @@ -1,6 +1,6 @@ { - "features": { - "show_log_locally": false, - "write_data_locally": false - } -} \ No newline at end of file + "features": { + "show_log_locally": false, + "write_data_locally": false + } +} From 0a33bdfc04d0d25c18e2b6940104fcdba5bb98bf Mon Sep 17 00:00:00 2001 From: Sophie Stone Date: Mon, 13 Oct 2025 14:31:40 +0100 Subject: [PATCH 11/20] Lint main.py --- src/main.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main.py b/src/main.py index a6867d7..494c5aa 100644 --- a/src/main.py +++ b/src/main.py @@ -164,14 +164,14 @@ def get_and_update_historic_usage( os.makedirs("local_data", exist_ok=True) with open(local_path, "w", encoding="utf-8") as f: json.dump(historic_usage, f, indent=4) - logger.info( - "Historic usage data written locally to %s (S3 skipped)", local_path - ) + logger.info("Historic usage data written locally to %s (S3 skipped)", local_path) return historic_usage, dates_added -def get_and_update_copilot_teams(s3: boto3.client, gh: github_api_toolkit.github_interface, write_data_locally: bool) -> list: +def get_and_update_copilot_teams( + s3: boto3.client, gh: github_api_toolkit.github_interface, write_data_locally: bool +) -> list: """Get and update GitHub Teams with Copilot Data. Args: From 4da30a76204175339f4b9b7020cede977f83fa24 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 14:59:15 +0100 Subject: [PATCH 12/20] fix(logging): Simplify local log writing. - Outputs logs to debug.log depending on config. 
- Standardise output directory to output/ - Cleanup misleading logging messages --- .gitignore | 3 ++- src/main.py | 31 +++++++++++++------------------ 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/.gitignore b/.gitignore index ebc51a1..43e62dd 100644 --- a/.gitignore +++ b/.gitignore @@ -68,4 +68,5 @@ terraform.rc megalinter-reports/ # Ignore locally written data -local_data \ No newline at end of file +output/ +debug.log \ No newline at end of file diff --git a/src/main.py b/src/main.py index 494c5aa..e0f7f8f 100644 --- a/src/main.py +++ b/src/main.py @@ -93,7 +93,7 @@ def get_copilot_team_date(gh: github_api_toolkit.github_interface, page: int) -> team_html_url = team.get("html_url", "") logger.info( - "Team %s has Copilot data", + f"Team {team_name} has Copilot data", extra={ "team_name": team_name, "team_slug": team_slug, @@ -160,8 +160,8 @@ def get_and_update_historic_usage( # Write the updated historic_usage to historic_usage_data.json update_s3_object(s3, BUCKET_NAME, OBJECT_NAME, historic_usage) else: - local_path = f"local_data/{OBJECT_NAME}" - os.makedirs("local_data", exist_ok=True) + local_path = f"output/{OBJECT_NAME}" + os.makedirs("output", exist_ok=True) with open(local_path, "w", encoding="utf-8") as f: json.dump(historic_usage, f, indent=4) logger.info("Historic usage data written locally to %s (S3 skipped)", local_path) @@ -207,8 +207,8 @@ def get_and_update_copilot_teams( if not write_data_locally: update_s3_object(s3, BUCKET_NAME, "copilot_teams.json", copilot_teams) else: - local_path = "local_data/copilot_teams.json" - os.makedirs("local_data", exist_ok=True) + local_path = "output/copilot_teams.json" + os.makedirs("output", exist_ok=True) with open(local_path, "w", encoding="utf-8") as f: json.dump(copilot_teams, f, indent=4) logger.info("Copilot teams data written locally to %s (S3 skipped)", local_path) @@ -316,7 +316,8 @@ def get_team_history( response = gh.get(f"/orgs/{org}/team/{team}/copilot/metrics", params=query_params) if not isinstance(response, Response): - logger.error("Unexpected response type: %s", type(response)) + # If the response is not a Response object, no copilot data is available for this team + # We can return None which is then handled by the calling function return None return response.json() @@ -396,18 +397,12 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument write_data_locally = get_dict_value(features, "write_data_locally") - # Remove any existing handlers to avoid duplicate logs - if logger.hasHandlers(): - logger.handlers.clear() - # Toggle local logging if show_log_locally: - # Add a StreamHandler to log to the console - console_handler = logging.StreamHandler() - console_handler.setLevel(logging.INFO) - formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") - console_handler.setFormatter(formatter) - logger.addHandler(console_handler) + logging.basicConfig( + filename="debug.log", + filemode="w", + ) # Create an S3 client session = boto3.Session() @@ -460,8 +455,8 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument # Write updated team history to S3 update_s3_object(s3, BUCKET_NAME, "teams_history.json", updated_team_history) else: - local_path = "local_data/teams_history.json" - os.makedirs("local_data", exist_ok=True) + local_path = "output/teams_history.json" + os.makedirs("output", exist_ok=True) updated_team_history = create_dictionary(gh, copilot_teams, existing_team_history) with open(local_path, "w", encoding="utf-8") as f: 
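+            # Pretty-print the JSON so the locally written file is easy to inspect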
json.dump(updated_team_history, f, indent=4) From 698a28a932f4ae6da1a9c65b71c8152f63c2a8b5 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 14:59:32 +0100 Subject: [PATCH 13/20] docs(readme): reorganise for clarity --- README.md | 232 +++++++++++++++++++++++++++--------------------------- 1 file changed, 116 insertions(+), 116 deletions(-) diff --git a/README.md b/README.md index fbfd889..3c2d409 100644 --- a/README.md +++ b/README.md @@ -14,16 +14,6 @@ The Copilot dashboard can be found on the Copilot tab within the Digital Landsca - [Table of Contents](#table-of-contents) - [Prerequisites](#prerequisites) - [Makefile](#makefile) - - [Documentation](#documentation) - - [Testing](#testing) - - [Linting](#linting) - - [Python](#python) - - [Markdown](#markdown) - - [Markdown Configuration](#markdown-configuration) - - [Markdown GitHub Action](#markdown-github-action) - - [Megalinter](#megalinter) - - [Megalinter Configuration](#megalinter-configuration) - - [Megalinter GitHub Action](#megalinter-github-action) - [AWS Lambda Scripts](#aws-lambda-scripts) - [Setup - Running in a container](#setup---running-in-a-container) - [Setup - running outside of a Container (Development only)](#setup---running-outside-of-a-container-development-only) @@ -40,6 +30,16 @@ The Copilot dashboard can be found on the Copilot tab within the Digital Landsca - [Allowlisting your IP](#allowlisting-your-ip) - [Setting up a pipeline](#setting-up-a-pipeline) - [Triggering a pipeline](#triggering-a-pipeline) + - [Documentation](#documentation) + - [Testing](#testing) + - [Linting](#linting) + - [Python](#python) + - [Markdown](#markdown) + - [Markdown Configuration](#markdown-configuration) + - [Markdown GitHub Action](#markdown-github-action) + - [Megalinter](#megalinter) + - [Megalinter Configuration](#megalinter-configuration) + - [Megalinter GitHub Action](#megalinter-github-action) ## Prerequisites @@ -62,110 +62,6 @@ This repository has a Makefile for executing common commands. To view all comman make all ``` -## Documentation - -This project uses MkDocs for documentation which gets deployed to GitHub Pages at a repository level. - -For more information about MkDocs, see the below documentation. - -[Getting Started with MkDocs](https://www.mkdocs.org/getting-started/) - -There is a guide to getting started on this repository's GitHub Pages site. - -## Testing - -This project uses Pytest for testing. The tests can be found in the `tests` folder. - -To run all tests, use `make test`. - -On pull request or push to the `main` branch, the tests will automatically run. The workflow will fail if any tests fail, or if test coverage is below 95%. - -The related workflow can be found in `.github/workflows/ci.yml`. - -## Linting - -### Python - -This project uses Black, Ruff, and Pylint for linting and code formatting on Python files in `src`. Configurations for each are located in `pyproject.toml`. - -The following Makefile commands can be used to run linting and optionally apply fixes or run a specific linter: - -```bash -black-check ## Run black for code formatting, without fixing. - -black-apply ## Run black and fix code formatting. - -ruff-check ## Run ruff for linting and code formatting, without fixing. - -ruff-apply ## Run ruff and fix linting and code formatting. - -pylint ## Run pylint for code analysis. - -lint ## Run Python linters without fixing. - -lint-apply ## Run black and ruff with auto-fix, and Pylint. 
-``` - -On pull request or push to the `main` branch, `make lint-check` will automatically run to check code quality, failing if there are any issues. It is up to the developer to apply fixes. - -The related workflow can be found in `.github/workflows/ci.yml`. - -### Markdown - -Markdown linting runs in a docker image, so docker must be running before attempting to lint. - -To lint all markdown files, run the following command: - -```bash -make md-check -``` - -To fix all markdown files, run the following command: - -```bash -make md-apply -``` - -#### Markdown Configuration - -The `.markdownlint.json` file in the root of the repository contains the configuration for markdownlint. This file is used to set the rules and settings for linting markdown files. - -Currently, MD013 (line length) is disabled. This is because the default line length of 80 characters is too restrictive. - -For a full list of rules, see [Markdownlint Rules / Aliases](https://github.com/DavidAnson/markdownlint?tab=readme-ov-file#rules--aliases) - -The `.markdownlintignore` file in the root of the repository is also used to prevent markdownlint running on unnecessary files such as `venv`. - -#### Markdown GitHub Action - -On pull request or push to the `main` branch, `make md-check` will automatically run to check for linting errors, failing if there are any issues. It is up to the developer to apply fixes. - -The related workflow can be found in `.github/workflows/ci.yml`. - -### Megalinter - -In addition to Python and Markdown-specific linting, this project uses Megalinter to catch all other types of linting errors across the project. - -Megalinter runs in a docker image, so docker must be running before attempting to lint. - -To lint with Megalinter, run: - -```bash -make megalint -``` - -After running, Megalinter will create a folder named `megalinter-reports` containing detailed logs on linting. - -#### Megalinter Configuration - -The configuration file for Megalinter can be found in the root of the repository, named `.mega-linter.yml`. - -#### Megalinter GitHub Action - -On pull request or push to the `main` branch, Megalinter will automatically run to check project-wide linting, failing if there are any issues. - -The related workflow can be found in `.github/workflows/megalinter.yml`. - ## AWS Lambda Scripts This script: @@ -193,7 +89,7 @@ Further information can be found in [this project's documentation](/docs/index.m **Example Output:** | REPOSITORY | TAG | IMAGE ID | CREATED | SIZE | - |-----------------------------|--------|--------------|----------------|-------| + | --------------------------- | ------ | ------------ | -------------- | ----- | | copilot-usage-lambda-script | latest | 0bbe73d9256f | 11 seconds ago | 224MB | 3. Run the image locally mapping local host port (9000) to container port (8080) and passing in AWS credentials to download a .pem file from the AWS Secrets Manager to the running container. These credentials will also be used to upload and download `historic_usage_data.json` to and from S3. 
@@ -233,7 +129,7 @@ Further information can be found in [this project's documentation](/docs/index.m **Example output:** | CONTAINER ID | IMAGE | COMMAND | CREATED | STATUS | PORTS | NAMES | - |--------------|-----------------------------|------------------------|----------------|---------------|-------------------------------------------|--------------| + | ------------ | --------------------------- | ---------------------- | -------------- | ------------- | ----------------------------------------- | ------------ | | 3f7d64676b1a | copilot-usage-lambda-script | "/lambda-entrypoint.…" | 44 seconds ago | Up 44 seconds | 0.0.0.0:9000->8080/tcp, :::9000->8080/tcp | nice_ritchie | Stop the container @@ -485,3 +381,107 @@ Once the pipeline has been set, you can manually trigger a build on the Concours ```bash fly -t aws-sdp trigger-job -j github-copilot-usage-lambda-/build-and-push ``` + +## Documentation + +This project uses MkDocs for documentation which gets deployed to GitHub Pages at a repository level. + +For more information about MkDocs, see the below documentation. + +[Getting Started with MkDocs](https://www.mkdocs.org/getting-started/) + +There is a guide to getting started on this repository's GitHub Pages site. + +## Testing + +This project uses Pytest for testing. The tests can be found in the `tests` folder. + +To run all tests, use `make test`. + +On pull request or push to the `main` branch, the tests will automatically run. The workflow will fail if any tests fail, or if test coverage is below 95%. + +The related workflow can be found in `.github/workflows/ci.yml`. + +## Linting + +### Python + +This project uses Black, Ruff, and Pylint for linting and code formatting on Python files in `src`. Configurations for each are located in `pyproject.toml`. + +The following Makefile commands can be used to run linting and optionally apply fixes or run a specific linter: + +```bash +black-check ## Run black for code formatting, without fixing. + +black-apply ## Run black and fix code formatting. + +ruff-check ## Run ruff for linting and code formatting, without fixing. + +ruff-apply ## Run ruff and fix linting and code formatting. + +pylint ## Run pylint for code analysis. + +lint ## Run Python linters without fixing. + +lint-apply ## Run black and ruff with auto-fix, and Pylint. +``` + +On pull request or push to the `main` branch, `make lint-check` will automatically run to check code quality, failing if there are any issues. It is up to the developer to apply fixes. + +The related workflow can be found in `.github/workflows/ci.yml`. + +### Markdown + +Markdown linting runs in a docker image, so docker must be running before attempting to lint. + +To lint all markdown files, run the following command: + +```bash +make md-check +``` + +To fix all markdown files, run the following command: + +```bash +make md-apply +``` + +#### Markdown Configuration + +The `.markdownlint.json` file in the root of the repository contains the configuration for markdownlint. This file is used to set the rules and settings for linting markdown files. + +Currently, MD013 (line length) is disabled. This is because the default line length of 80 characters is too restrictive. + +For a full list of rules, see [Markdownlint Rules / Aliases](https://github.com/DavidAnson/markdownlint?tab=readme-ov-file#rules--aliases) + +The `.markdownlintignore` file in the root of the repository is also used to prevent markdownlint running on unnecessary files such as `venv`. 
+ +#### Markdown GitHub Action + +On pull request or push to the `main` branch, `make md-check` will automatically run to check for linting errors, failing if there are any issues. It is up to the developer to apply fixes. + +The related workflow can be found in `.github/workflows/ci.yml`. + +### Megalinter + +In addition to Python and Markdown-specific linting, this project uses Megalinter to catch all other types of linting errors across the project. + +Megalinter runs in a docker image, so docker must be running before attempting to lint. + +To lint with Megalinter, run: + +```bash +make megalint +``` + +After running, Megalinter will create a folder named `megalinter-reports` containing detailed logs on linting. + +#### Megalinter Configuration + +The configuration file for Megalinter can be found in the root of the repository, named `.mega-linter.yml`. + +#### Megalinter GitHub Action + +On pull request or push to the `main` branch, Megalinter will automatically run to check project-wide linting, failing if there are any issues. + +The related workflow can be found in `.github/workflows/megalinter.yml`. From f75ee4ab649b073d7dde6b3e604ac2b5d47ae5b5 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 15:30:09 +0100 Subject: [PATCH 14/20] lint(python): Fix pylint issues --- src/main.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/main.py b/src/main.py index e0f7f8f..d54991a 100644 --- a/src/main.py +++ b/src/main.py @@ -93,7 +93,8 @@ def get_copilot_team_date(gh: github_api_toolkit.github_interface, page: int) -> team_html_url = team.get("html_url", "") logger.info( - f"Team {team_name} has Copilot data", + "Team % has Copilot data", + team_name, extra={ "team_name": team_name, "team_slug": team_slug, @@ -338,7 +339,7 @@ def get_dict_value(dictionary: dict, key: str) -> Any: value = dictionary.get(key) if value is None: - raise Exception(f"Key {key} not found in the dictionary.") + raise ValueError(f"Key {key} not found in the dictionary.") return value @@ -356,22 +357,22 @@ def get_config_file(path: str) -> Any: Any: The configuration file as a dictionary. """ try: - with open(path) as f: + with open(path, encoding="utf-8") as f: config = json.load(f) except FileNotFoundError: error_message = f"{path} configuration file not found. Please check the path." - raise Exception(error_message) from None + raise FileNotFoundError(error_message) from None - if type(config) is not dict: + if not isinstance(config, dict): error_message = ( f"{path} configuration file is not a dictionary. Please check the file contents." ) - raise Exception(error_message) + raise TypeError(error_message) return config -def handler(event: dict, context) -> str: # pylint: disable=unused-argument +def handler(event: dict, context) -> str: # pylint: disable=unused-argument, too-many-locals """AWS Lambda handler function for GitHub Copilot usage data aggregation. 
This function: From c177f42774874f9b5adcc02c2b2b208265e05f51 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 15:38:22 +0100 Subject: [PATCH 15/20] test(main): Fix failing tests --- tests/test_main.py | 23 ++++++----------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/tests/test_main.py b/tests/test_main.py index f45beab..b270f24 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -70,7 +70,7 @@ def test_get_and_update_copilot_teams_single_page(self, mock_update_s3_object): with patch( "src.main.get_copilot_team_date", return_value=[{"name": "team1"}] ) as mock_get_team_date: - result = get_and_update_copilot_teams(s3, gh) + result = get_and_update_copilot_teams(s3, gh, False) assert result == [{"name": "team1"}] mock_get_team_date.assert_called_once_with(gh, 1) mock_update_s3_object.assert_called_once() @@ -93,7 +93,7 @@ def test_get_and_update_copilot_teams_multiple_pages(self, mock_update_s3_object "src.main.get_copilot_team_date", side_effect=[[{"name": "team1"}], [{"name": "team2"}], [{"name": "team3"}]], ) as mock_get_team_date: - result = get_and_update_copilot_teams(s3, gh) + result = get_and_update_copilot_teams(s3, gh, False) assert result == [{"name": "team1"}, {"name": "team2"}, {"name": "team3"}] assert mock_get_team_date.call_count == 3 mock_update_s3_object.assert_called_once() @@ -107,7 +107,7 @@ def test_get_and_update_copilot_teams_no_teams(self, mock_update_s3_object): gh.get.return_value = mock_response with patch("src.main.get_copilot_team_date", return_value=[]) as mock_get_team_date: - result = get_and_update_copilot_teams(s3, gh) + result = get_and_update_copilot_teams(s3, gh, False) assert result == [] mock_get_team_date.assert_called_once_with(gh, 1) mock_update_s3_object.assert_called_once() @@ -138,17 +138,6 @@ def test_get_team_history_success(self): ) assert result == [{"date": "2024-01-01", "usage": 5}] - def test_get_team_history_unexpected_response_type(self, caplog): - gh = MagicMock() - gh.get.return_value = "not_a_response" - - with caplog.at_level("ERROR"): - result = get_team_history(gh, "dev-team") - assert result is None - assert any( - "Unexpected response type" in record.getMessage() for record in caplog.records - ) - def test_get_team_history_with_no_query_params(self): gh = MagicMock() mock_response = MagicMock(spec=Response) @@ -370,7 +359,7 @@ def test_get_and_update_historic_usage_success(self): ) } - result, dates_added = get_and_update_historic_usage(s3, gh) + result, dates_added = get_and_update_historic_usage(s3, gh, False) assert result == [ {"date": "2024-01-01", "usage": 10}, {"date": "2024-01-02", "usage": 20}, @@ -395,7 +384,7 @@ def test_get_and_update_historic_usage_no_existing_data(self, caplog): operation_name="GetObject", ) - result, dates_added = get_and_update_historic_usage(s3, gh) + result, dates_added = get_and_update_historic_usage(s3, gh, False) assert result == [{"date": "2024-01-01", "usage": 10}] assert dates_added == ["2024-01-01"] s3.put_object.assert_called_once() @@ -418,7 +407,7 @@ def test_get_and_update_historic_usage_no_new_dates(self): ) } - result, dates_added = get_and_update_historic_usage(s3, gh) + result, dates_added = get_and_update_historic_usage(s3, gh, False) assert result == [{"date": "2024-01-01", "usage": 10}] assert dates_added == [] s3.put_object.assert_called_once() From 1c02017b869454ac4b6aa6376cc001959ded8916 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 15:46:26 +0100 Subject: [PATCH 16/20] style(md tables): Fix 
formatting to comply with megalinter - Prettier hates this but Megalinter love it :/ --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 3c2d409..6547e3c 100644 --- a/README.md +++ b/README.md @@ -89,7 +89,7 @@ Further information can be found in [this project's documentation](/docs/index.m **Example Output:** | REPOSITORY | TAG | IMAGE ID | CREATED | SIZE | - | --------------------------- | ------ | ------------ | -------------- | ----- | + |-----------------------------|--------|--------------|----------------|-------| | copilot-usage-lambda-script | latest | 0bbe73d9256f | 11 seconds ago | 224MB | 3. Run the image locally mapping local host port (9000) to container port (8080) and passing in AWS credentials to download a .pem file from the AWS Secrets Manager to the running container. These credentials will also be used to upload and download `historic_usage_data.json` to and from S3. @@ -129,7 +129,7 @@ Further information can be found in [this project's documentation](/docs/index.m **Example output:** | CONTAINER ID | IMAGE | COMMAND | CREATED | STATUS | PORTS | NAMES | - | ------------ | --------------------------- | ---------------------- | -------------- | ------------- | ----------------------------------------- | ------------ | + |--------------|-----------------------------|------------------------|----------------|---------------|-------------------------------------------|--------------| | 3f7d64676b1a | copilot-usage-lambda-script | "/lambda-entrypoint.…" | 44 seconds ago | Up 44 seconds | 0.0.0.0:9000->8080/tcp, :::9000->8080/tcp | nice_ritchie | Stop the container From 1ea19aa265e05b2ae55d2e0b7678ec09a85ab054 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 16:21:25 +0100 Subject: [PATCH 17/20] test: add tests - tests for locally writing files - tests for get_team_history - tests for get_dict_value - tests for get_config_file --- tests/test_main.py | 144 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) diff --git a/tests/test_main.py b/tests/test_main.py index b270f24..628f57f 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -18,6 +18,8 @@ get_team_history, handler, update_s3_object, + get_dict_value, + get_config_file, ) @@ -116,6 +118,22 @@ def test_get_and_update_copilot_teams_no_teams(self, mock_update_s3_object): assert args[2] == "copilot_teams.json" assert args[3] == [] + def test_write_data_locally_creates_file(self, tmp_path): + s3 = MagicMock() + gh = MagicMock() + response = MagicMock() + response.links = {} + gh.get.return_value = response + + with patch("src.main.get_copilot_team_date", return_value=[{"name": "teamA"}]): + with patch("src.main.os.makedirs") as mock_makedirs, \ + patch("src.main.open", create=True) as mock_open: + result = get_and_update_copilot_teams(s3, gh, True) + assert result == [{"name": "teamA"}] + mock_makedirs.assert_called_once_with("output", exist_ok=True) + mock_open.assert_called_once() + s3.put_object.assert_not_called() + class TestGetTeamHistory: def setup_method(self): @@ -412,6 +430,28 @@ def test_get_and_update_historic_usage_no_new_dates(self): assert dates_added == [] s3.put_object.assert_called_once() + def test_write_data_locally_creates_file(self, tmp_path): + s3 = MagicMock() + gh = MagicMock() + usage_data = [{"date": "2024-01-01", "usage": 10}] + gh.get.return_value.json.return_value = usage_data + + # S3 get_object raises ClientError + s3.get_object.side_effect = ClientError( + 
error_response={"Error": {"Code": "404", "Message": "Not Found"}}, + operation_name="GetObject", + ) + + # Patch os.makedirs and open to use tmp_path + with patch("src.main.os.makedirs") as mock_makedirs, \ + patch("src.main.open", create=True) as mock_open: + result, dates_added = get_and_update_historic_usage(s3, gh, True) + assert result == [{"date": "2024-01-01", "usage": 10}] + assert dates_added == ["2024-01-01"] + mock_makedirs.assert_called_once_with("output", exist_ok=True) + mock_open.assert_called_once() + s3.put_object.assert_not_called() + class TestCreateDictionary: def setup_method(self): @@ -490,3 +530,107 @@ def test_create_dictionary_no_new_history(self, caplog): result = create_dictionary(gh, copilot_teams, existing_team_history) assert result == [] assert mock_get_team_history.call_count == 1 + + +class TestGetTeamHistory: + def setup_method(self): + self.org_patch = patch("src.main.org", "test-org") + self.org_patch.start() + + def teardown_method(self): + self.org_patch.stop() + + def test_get_team_history_returns_metrics(self): + gh = MagicMock() + mock_response = MagicMock(spec=Response) + mock_response.json.return_value = [{"date": "2024-01-01", "usage": 5}] + gh.get.return_value = mock_response + + result = get_team_history(gh, "dev-team", {"since": "2024-01-01"}) + gh.get.assert_called_once_with( + "/orgs/test-org/team/dev-team/copilot/metrics", params={"since": "2024-01-01"} + ) + assert result == [{"date": "2024-01-01", "usage": 5}] + + def test_get_team_history_returns_empty_list(self): + gh = MagicMock() + mock_response = MagicMock(spec=Response) + mock_response.json.return_value = [] + gh.get.return_value = mock_response + + result = get_team_history(gh, "dev-team") + gh.get.assert_called_once_with("/orgs/test-org/team/dev-team/copilot/metrics", params=None) + assert result == [] + + def test_get_team_history_non_response_returns_none(self): + gh = MagicMock() + gh.get.return_value = "not_a_response" + + result = get_team_history(gh, "dev-team") + gh.get.assert_called_once_with("/orgs/test-org/team/dev-team/copilot/metrics", params=None) + assert result is None + + def test_get_team_history_with_query_params_none(self): + gh = MagicMock() + mock_response = MagicMock(spec=Response) + mock_response.json.return_value = [{"date": "2024-01-01", "usage": 5}] + gh.get.return_value = mock_response + + result = get_team_history(gh, "dev-team", None) + gh.get.assert_called_once_with("/orgs/test-org/team/dev-team/copilot/metrics", params=None) + assert result == [{"date": "2024-01-01", "usage": 5}] + + +class TestGetDictValue: + def test_get_dict_value_returns_value(self): + d = {"foo": "bar", "baz": 42} + assert get_dict_value(d, "foo") == "bar" + assert get_dict_value(d, "baz") == 42 + + def test_get_dict_value_raises_for_missing_key(self): + d = {"foo": "bar"} + try: + get_dict_value(d, "missing") + except ValueError as e: + assert str(e) == "Key missing not found in the dictionary." + else: + assert False, "ValueError not raised for missing key" + + def test_get_dict_value_returns_none_for_key_with_none_value(self): + d = {"foo": None} + try: + get_dict_value(d, "foo") + except ValueError as e: + assert str(e) == "Key foo not found in the dictionary." 
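+            # dict.get returns None for both a missing key and an explicit None
+            # value, so get_dict_value raises the same ValueError in either case.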
+ else: + assert False, "ValueError not raised for None value" + + +class TestGetConfigFile: + def test_get_config_file_success(self, tmp_path): + config_data = {"features": {"show_log_locally": False}} + config_path = tmp_path / "config.json" + config_path.write_text(json.dumps(config_data), encoding="utf-8") + + result = get_config_file(str(config_path)) + assert result == config_data + + def test_get_config_file_file_not_found(self): + missing_path = "nonexistent_config.json" + try: + get_config_file(missing_path) + except FileNotFoundError as e: + assert missing_path in str(e) + else: + assert False, "FileNotFoundError not raised" + + def test_get_config_file_not_dict(self, tmp_path): + config_path = tmp_path / "config.json" + config_path.write_text(json.dumps([1, 2, 3]), encoding="utf-8") + + try: + get_config_file(str(config_path)) + except TypeError as e: + assert "is not a dictionary" in str(e) + else: + assert False, "TypeError not raised for non-dict config" From 67c9fe8cf3e627479a19ae4d95c3472467c23618 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 16:29:04 +0100 Subject: [PATCH 18/20] chore: Add comments to justify missing testing coverage --- src/main.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main.py b/src/main.py index d54991a..b378c1e 100644 --- a/src/main.py +++ b/src/main.py @@ -400,6 +400,9 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument, to # Toggle local logging if show_log_locally: + # This is a nightmare to test as it's really hard to get to. + # At some point we should look to make a wrapper for logging + # so it can be tested more easily. logging.basicConfig( filename="debug.log", filemode="w", @@ -454,6 +457,8 @@ def handler(event: dict, context) -> str: # pylint: disable=unused-argument, to updated_team_history = create_dictionary(gh, copilot_teams, existing_team_history) # Write updated team history to S3 + # This line isn't covered by tests as it's painful to get to. + # The function itself is tested though. update_s3_object(s3, BUCKET_NAME, "teams_history.json", updated_team_history) else: local_path = "output/teams_history.json" From 2f4688b712de1a2754bd06277006b2b0e2765348 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Tue, 14 Oct 2025 16:31:01 +0100 Subject: [PATCH 19/20] fix(Makefile): update make clean to rm renamed outputs --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 54058a9..bed1372 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,8 @@ clean: ## Clean the temporary files. rm -rf .pytest_cache rm -rf tests/__pycache__ rm -rf .coverage - rm -rf local_data + rm -rf output + rm -rf debug.log .PHONY: black-check black-check: ## Run black for code formatting, without fixing. From 6614e5c3ecce4f640786d462b56a345012e96f60 Mon Sep 17 00:00:00 2001 From: Kieran Pritchard Date: Wed, 15 Oct 2025 14:37:57 +0100 Subject: [PATCH 20/20] chore: address PR comments --- Makefile | 1 + docs/configuration.md | 2 +- src/main.py | 6 +++--- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index bed1372..96713d8 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,7 @@ clean: ## Clean the temporary files. 
 	rm -rf .pytest_cache
 	rm -rf tests/__pycache__
 	rm -rf .coverage
+	rm -rf megalinter-reports/
 	rm -rf output
 	rm -rf debug.log
 
diff --git a/docs/configuration.md b/docs/configuration.md
index 19c9eaa..f3ca326 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -27,7 +27,7 @@ When deploying to AWS, this should be set to `false` to avoid files being writte
 
 #### `write_data_locally`
 
-If set to `true`, the tool will skip writing to S3 and instead write data for copilot teams, historic usage, and teams history to `local_data`.
+If set to `true`, the tool will skip writing to the appropriate AWS S3 bucket and instead write data for Copilot teams, historic usage, and teams history to `output/`.
 
 **When deploying to AWS, this must be set to `false` to ensure the tool writes to AWS.**
 
diff --git a/src/main.py b/src/main.py
index b378c1e..d75891a 100644
--- a/src/main.py
+++ b/src/main.py
@@ -93,7 +93,7 @@ def get_copilot_team_date(gh: github_api_toolkit.github_interface, page: int) ->
         team_html_url = team.get("html_url", "")
         logger.info(
-            "Team % has Copilot data",
+            "Team %s has Copilot data",
             team_name,
             extra={
                 "team_name": team_name,
                 "team_slug": team_slug,
@@ -123,7 +123,7 @@ def get_and_update_historic_usage(
     Args:
         s3 (boto3.client): An S3 client.
         gh (github_api_toolkit.github_interface): An instance of the github_interface class.
-        write_data_locally (bool): Whether to write data locally instead of to S3.
+        write_data_locally (bool): Whether to write data locally instead of to an S3 bucket.
 
     Returns:
         tuple: A tuple containing the updated historic usage data and a list of dates added.
@@ -178,7 +178,7 @@ def get_and_update_copilot_teams(
     Args:
         s3 (boto3.client): An S3 client.
         gh (github_api_toolkit.github_interface): An instance of the github_interface class.
-        write_data_locally (bool): Whether to write data locally instead of to S3.
+        write_data_locally (bool): Whether to write data locally instead of to an S3 bucket.
 
     Returns:
         list: A list of GitHub Teams with Copilot Data.
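
A quick local smoke test of this series (a sketch: assumes both feature flags in `config/config.json` are set to `true`, the dev-only block at the bottom of `src/main.py` is uncommented, and valid AWS/GitHub credentials are available in the environment):

```bash
# Run the handler directly; with both flags enabled it should write logs
# to debug.log and JSON data under output/ instead of the S3 bucket.
python src/main.py

# Remove the locally written artefacts afterwards
make clean
```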