diff --git a/.github/workflows/build-guidelines.yml b/.github/workflows/build-guidelines.yml index 54cdf09f..023d2bf4 100644 --- a/.github/workflows/build-guidelines.yml +++ b/.github/workflows/build-guidelines.yml @@ -1,5 +1,4 @@ name: Build - on: push: tags: @@ -17,25 +16,21 @@ on: # required: false # type: string # default: 'default value' - jobs: build: runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v3 - - name: Install uv run: | curl -LsSf https://astral.sh/uv/install.sh | sh export PATH="/root/.cargo/bin:$PATH" uv --version - - name: Build documentation run: | mkdir -p build ./make.py 2>&1 | tee build/build.log - # Check for a wide range of error indicators in the log if grep -q -E "Traceback" build/build.log; then echo "::error::Build errors detected in log" @@ -46,17 +41,31 @@ jobs: # Check for the Sphinx temp error file reference and extract it if present TEMP_ERROR_FILE=$(grep -o '/tmp/sphinx-err-[^ ]*\.log' build/build.log | head -1) if [ ! -z "$TEMP_ERROR_FILE" ] && [ -f "$TEMP_ERROR_FILE" ]; then - echo "==== SPHINX DETAILED TRACEBACK START ====" - cat "$TEMP_ERROR_FILE" - echo "==== SPHINX DETAILED TRACEBACK END ====" - # Save this traceback for artifacts + echo "=== TRACEBACK ===" + echo "Saving traceback to build/sphinx_traceback.log" cp "$TEMP_ERROR_FILE" build/sphinx_traceback.log fi + # Check for FLS differences file reference and extract it if present + FLS_DIFF_FILE=$(grep -o '/tmp/fls_diff_[^ ]*\.txt' build/build.log | head -1) + if [ ! -z "$FLS_DIFF_FILE" ] && [ -f "$FLS_DIFF_FILE" ]; then + # Save this differences file for artifacts + echo "=== SPEC LOCK FILE DIFFERENCES ===" + echo "Saving spec lock file differences to build/spec_lock_file_differences.txt" + cp "$FLS_DIFF_FILE" build/spec_lock_file_differences.txt + fi + exit 1 + else + # Even if there's no error, still check for FLS differences file + FLS_DIFF_FILE=$(grep -o '/tmp/fls_diff_[^ ]*\.txt' build/build.log | head -1) + if [ ! 
-z "$FLS_DIFF_FILE" ] && [ -f "$FLS_DIFF_FILE" ]; then + echo "=== SPEC LOCK FILE DIFFERENCES, NO BUILD ERROR ===" + echo "Saving spec lock file differences to build/spec_lock_file_differences.txt" + cp "$FLS_DIFF_FILE" build/spec_lock_file_differences.txt + fi fi - - name: Archive build artifacts uses: actions/upload-artifact@v4 if: always() diff --git a/README.md b/README.md index 1ff6636d..3a0715b9 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,52 @@ A machine-parseable artifact will be available at `build/html/needs.json`. (ToDo A record with checksums of the contents is available at `build/html/guidelines-ids.json`. Users of the coding guidelines can reference this file to determine if there have been changes to coding guidelines contents they should be aware of. +## Build breaking due to outdated spec lock file + +It's a fairly common occurrence for the build to break due to an out of date spec lock file, located at: + +``` +src/spec.lock +``` + +The `spec.lock` is checked against the current live version of the specification, which means that your local development may go out of date while you are developing a feature. + +### Continuing work while on a feature branch + +If you run into this while developing a feature, you may ignore this error by running the build with: + +```shell + ./make.py --ignore-spec-lock-diff +``` + +### If you need to audit the difference + +When the build breaks due to the difference a file is created here: + +``` +/tmp/fls_diff_.txt +``` + +which can be used to aid in auditing the differences. + +Follow the below steps to ensure that the guidelines remain a representation of the FLS: + +1. Check if there are any guidelines currently affected, if no, go to 6. +2. For each affected guideline, audit the previous text and current text of the appropriate paragraph-id in the FLS +3. If the prior and new text of that paragraph in the FLS does not affect the guideline, proceed back to 2. for the next affected guideline +4. 
If the prior and new text of that paragraph do differ in the FLS, then a rationalization step is required + 1. In the rationalization step, either yourself or another coding guidelines member must modify the guideline to comply with the new text +5. If any affected coding guidelines remain, proceed back to 2. for the next affected guideline +6. You are done + +Once you have completed the above steps, you will now update the local copy of the `spec.lock` file with the live version: + +```shell + ./make.py --update-spec-lock-file +``` + +Open a new PR with only the changes necessary to rationalize the guidelines with the new FLS text. + ## Contributing to the coding guidelines See [CONTRIBUTING.md](CONTRIBUTING.md). diff --git a/builder/build_cli.py b/builder/build_cli.py index 88b477e8..3e17dfb8 100644 --- a/builder/build_cli.py +++ b/builder/build_cli.py @@ -20,7 +20,7 @@ SPEC_CHECKSUM_URL = "https://spec.ferrocene.dev/paragraph-ids.json" SPEC_LOCKFILE = "spec.lock" -def build_docs(root, builder, clear, serve, debug): +def build_docs(root, builder, clear, serve, debug, spec_lock_consistency_check): dest = root / "build" args = ["-b", builder, "-d", dest / "doctrees"] @@ -36,6 +36,18 @@ def build_docs(root, builder, clear, serve, debug): args += ["-j", "auto"] if clear: args.append("-E") + + # Initialize an empty list for configuration options (without --define) + conf_opt_values = [] + # Add configuration options as needed + if not spec_lock_consistency_check: + conf_opt_values.append("enable_spec_lock_consistency=0") + # Only add the --define argument if there are options to define + if conf_opt_values: + args.append("--define") + for opt in conf_opt_values: + args.append(opt) + if serve: for extra_watch_dir in EXTRA_WATCH_DIRS: extra_watch_dir = root / extra_watch_dir @@ -99,6 +111,12 @@ def main(root): "-c", "--clear", help="disable incremental builds", action="store_true" ) group = parser.add_mutually_exclusive_group() + parser.add_argument( + 
"--ignore-spec-lock-diff", + help="ignore fls.lock file differences with live release -- for WIP branches only", + default=False, + action="store_true" + ) parser.add_argument( "--update-spec-lock-file", help="update fls.lock file", @@ -127,6 +145,6 @@ def main(root): update_spec_lockfile(SPEC_CHECKSUM_URL, root / "src" / SPEC_LOCKFILE) rendered = build_docs( - root, "xml" if args.xml else "html", args.clear, args.serve, args.debug + root, "xml" if args.xml else "html", args.clear, args.serve, args.debug, not args.ignore_spec_lock_diff ) diff --git a/exts/coding_guidelines/__init__.py b/exts/coding_guidelines/__init__.py index d43e2975..d9036065 100644 --- a/exts/coding_guidelines/__init__.py +++ b/exts/coding_guidelines/__init__.py @@ -44,6 +44,9 @@ def setup(app): app.add_config_value(name='fls_paragraph_ids_url', default='https://spec.ferrocene.dev/paragraph-ids.json', rebuild='env') + app.add_config_value(name='enable_spec_lock_consistency', + default=True, + rebuild='env') app.connect('env-check-consistency', fls_checks.check_fls) app.connect('build-finished', write_guidelines_ids.build_finished) diff --git a/exts/coding_guidelines/fls_checks.py b/exts/coding_guidelines/fls_checks.py index 63274cf5..37564ae8 100644 --- a/exts/coding_guidelines/fls_checks.py +++ b/exts/coding_guidelines/fls_checks.py @@ -37,6 +37,8 @@ def check_fls(app, env): for diff in differences: error_message += f" - {diff}\n" error_message += "\nPlease manually inspect FLS spec items whose checksums have changed as corresponding guidelines may need to account for these changes." 
+ error_message += "\nOnce resolved, you may run the following to update the local spec lock file:" + error_message += "\n\t./make.py --update-spec-lock-file" logger.error(error_message) raise FLSValidationError(error_message) @@ -264,24 +266,33 @@ def gather_fls_paragraph_ids(json_url): def check_fls_lock_consistency(app, env, fls_raw_data): """ Compare live FLS JSON data with the lock file to detect changes - + Args: app: The Sphinx application env: The Sphinx environment fls_raw_data: Raw JSON data from the live specification - + Returns: Tuple containing: - Boolean indicating whether differences were found - List of difference descriptions with affected guidelines (for error reporting) """ + if not app.config.enable_spec_lock_consistency: + logger.warn("Spec lock file consistency check disabled") + return (False, []) + + import json + import tempfile + import os + from pathlib import Path + logger.info("Checking FLS lock file consistency") lock_path = app.confdir / 'spec.lock' - + # Get the needs data to find affected guidelines data = SphinxNeedsData(env) needs = data.get_needs_view() - + # Map of FLS IDs to guidelines that reference them fls_to_guidelines = {} for need_id, need in needs.items(): @@ -294,25 +305,26 @@ def check_fls_lock_consistency(app, env, fls_raw_data): 'id': need_id, 'title': need.get('title', 'Untitled') }) - - # Differences to report - differences = [] - has_differences = False - + # If no lock file exists, skip checking if not lock_path.exists(): logger.warning(f"No FLS lock file found at {lock_path}, skipping consistency check") return False, [] - + + # Initialize result variables + affected_guidelines = {} + has_differences = False + detailed_differences = [] # This will go to the temp file + try: # Load lock file with open(lock_path, 'r', encoding='utf-8') as f: locked_data = json.load(f) - + # Create maps of paragraph IDs to checksums for both live and locked data live_checksums = {} locked_checksums = {} - + # Extract from live 
data for document in fls_raw_data.get('documents', []): for section in document.get('sections', []): @@ -320,13 +332,13 @@ def check_fls_lock_consistency(app, env, fls_raw_data): para_id = paragraph.get('id', '') para_checksum = paragraph.get('checksum', '') para_number = paragraph.get('number', '') - + if para_id and para_id.startswith('fls_'): live_checksums[para_id] = { 'checksum': para_checksum, 'section_id': para_number } - + # Extract from locked data for document in locked_data.get('documents', []): for section in document.get('sections', []): @@ -334,90 +346,136 @@ def check_fls_lock_consistency(app, env, fls_raw_data): para_id = paragraph.get('id', '') para_checksum = paragraph.get('checksum', '') para_number = paragraph.get('number', '') - + if para_id and para_id.startswith('fls_'): locked_checksums[para_id] = { 'checksum': para_checksum, 'section_id': para_number } - + logger.info(f"Found {len(live_checksums)} paragraphs in live data") logger.info(f"Found {len(locked_checksums)} paragraphs in lock file") - - # Format affected guidelines information + + # Helper function to track affected guidelines + def track_affected_guidelines(fls_id, change_type): + for guideline in fls_to_guidelines.get(fls_id, []): + guideline_id = guideline['id'] + if guideline_id not in affected_guidelines: + affected_guidelines[guideline_id] = { + 'title': guideline['title'], + 'changes': [] + } + section_id = live_checksums.get(fls_id, {}).get('section_id') or locked_checksums.get(fls_id, {}).get('section_id') + affected_guidelines[guideline_id]['changes'].append({ + 'fls_id': fls_id, + 'change_type': change_type, + 'section_id': section_id + }) + + # Format affected guidelines information (for detailed log) def format_affected_guidelines(fls_id): affected = fls_to_guidelines.get(fls_id, []) if not affected: return " No guidelines affected" - + result = [] for guideline in affected: result.append(f" - {guideline['id']}: {guideline['title']}") return "\n".join(result) - + # 
Look for new IDs new_ids = set(live_checksums.keys()) - set(locked_checksums.keys()) if new_ids: for fls_id in sorted(new_ids): diff_msg = f"New FLS ID added: {fls_id} ({live_checksums[fls_id]['section_id']})" affected_msg = format_affected_guidelines(fls_id) - differences.append(f"{diff_msg}\n Affected guidelines:\n{affected_msg}") + detailed_differences.append(f"{diff_msg}\n Affected guidelines:\n{affected_msg}") + track_affected_guidelines(fls_id, "added") has_differences = True - + # Look for removed IDs removed_ids = set(locked_checksums.keys()) - set(live_checksums.keys()) if removed_ids: for fls_id in sorted(removed_ids): diff_msg = f"FLS ID removed: {fls_id} ({locked_checksums[fls_id]['section_id']})" affected_msg = format_affected_guidelines(fls_id) - differences.append(f"{diff_msg}\n Affected guidelines:\n{affected_msg}") + detailed_differences.append(f"{diff_msg}\n Affected guidelines:\n{affected_msg}") + track_affected_guidelines(fls_id, "removed") has_differences = True - + # Check for checksum changes on existing IDs common_ids = set(live_checksums.keys()) & set(locked_checksums.keys()) for fls_id in sorted(common_ids): live_checksum = live_checksums[fls_id]['checksum'] locked_checksum = locked_checksums[fls_id]['checksum'] - + changes = [] - + change_type = None + if live_checksum != locked_checksum: changes.append( f"Content changed for FLS ID {fls_id} ({live_checksums[fls_id]['section_id']}): " + f"checksum was {locked_checksum[:8]}... now {live_checksum[:8]}..." 
) - + change_type = "content_changed" + # Also check if section IDs have changed live_section = live_checksums[fls_id]['section_id'] locked_section = locked_checksums[fls_id]['section_id'] - + if live_section != locked_section: changes.append( f"Section changed for FLS ID {fls_id}: {locked_section} -> {live_section}" ) - + if change_type is None: + change_type = "section_changed" + if changes: affected_msg = format_affected_guidelines(fls_id) - differences.append(f"{changes[0]}\n Affected guidelines:\n{affected_msg}") - + detailed_differences.append(f"{changes[0]}\n Affected guidelines:\n{affected_msg}") + # Add any additional changes separately for i in range(1, len(changes)): - differences.append(changes[i]) - + detailed_differences.append(changes[i]) + + if change_type: + track_affected_guidelines(fls_id, change_type) + has_differences = True - + + # Add detailed guideline-focused summary for the log file + if affected_guidelines: + detailed_differences.append("\n\nDETAILED AFFECTED GUIDELINES:") + for guideline_id, info in sorted(affected_guidelines.items()): + # For each guideline, list the changed FLS IDs with their section IDs + changed_fls = [f"{c['fls_id']} ({c['section_id']})" for c in info['changes']] + detailed_differences.append(f"{guideline_id}: {info['title']}") + detailed_differences.append(f" Changed FLS paragraphs: {', '.join(changed_fls)}") + + temp_file = None + try: + with tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='fls_diff_', suffix='.txt') as temp_file: + temp_file.write("\n".join(detailed_differences)) + temp_path = temp_file.name + logger.warning(f"Detailed FLS differences written to: {temp_path}") + except Exception as e: + logger.error(f"Failed to write detailed differences to temp file: {e}") + + # Create concise summary for return + summary = [] if has_differences: - logger.warning(f"Found {len(differences)} differences between live FLS data and lock file") - else: - logger.info("No differences found between live FLS 
data and lock file") - - return has_differences, differences - + summary.append(f"Found differences between live FLS data and lock file affecting {len(affected_guidelines)} guidelines") + for guideline_id, info in sorted(affected_guidelines.items()): + # Get unique FLS IDs + fls_ids = sorted(set(c['fls_id'] for c in info['changes'])) + summary.append(f"{guideline_id}: {', '.join(fls_ids)}") + + return has_differences, summary + except (json.JSONDecodeError, IOError) as e: logger.error(f"Error reading or parsing lock file {lock_path}: {e}") return False, [f"Failed to read lock file: {e}"] - def insert_fls_coverage(app, env, fls_ids): """ Enrich the fls_ids with whether each FLS ID is covered by coding guidelines